diff --git a/.eslintignore b/.eslintignore
new file mode 100644
index 00000000000000..46a631963a4aa7
--- /dev/null
+++ b/.eslintignore
@@ -0,0 +1,5 @@
+lib/punycode.js
+test/addons/doc-*/
+test/fixtures
+test/**/node_modules
+test/parallel/test-fs-non-number-arguments-throw.js
diff --git a/.eslintrc b/.eslintrc
new file mode 100644
index 00000000000000..e023327fbb7d00
--- /dev/null
+++ b/.eslintrc
@@ -0,0 +1,96 @@
+env:
+ node: true
+
+# enable ECMAScript features
+ecmaFeatures:
+ blockBindings: true
+ templateStrings: true
+ octalLiterals: true
+ binaryLiterals: true
+ generators: true
+ forOf: true
+
+rules:
+ # Possible Errors
+ # list: https://github.com/eslint/eslint/tree/master/docs/rules#possible-errors
+ ## check debugger sentence
+ no-debugger: 2
+ ## check duplicate arguments
+ no-dupe-args: 2
+ ## check duplicate object keys
+ no-dupe-keys: 2
+ ## check duplicate switch-case
+ no-duplicate-case: 2
+ ## disallow assignment of exceptional params
+ no-ex-assign: 2
+ ## disallow use of reserved words as keys like enum, class
+ no-reserved-keys: 2
+ ## disallow unreachable code
+ no-unreachable: 2
+ ## require valid typeof compared string like typeof foo === 'strnig'
+ valid-typeof: 2
+
+ # Best Practices
+ # list: https://github.com/eslint/eslint/tree/master/docs/rules#best-practices
+ ## require falls through comment on switch-case
+ no-fallthrough: 2
+
+ # Stylistic Issues
+ # list: https://github.com/eslint/eslint/tree/master/docs/rules#stylistic-issues
+ ## use single quote, we can use double quote when escape chars
+ quotes:
+ - 2
+ - "single"
+ - "avoid-escape"
+ ## 2 space indentation
+ indent:
+ - 2
+ - 2
+ ## add space after comma
+ ## set to 'warn' because of https://github.com/eslint/eslint/issues/2408
+ comma-spacing: 1
+ ## put semi-colon
+ semi: 2
+ ## require spaces operator like var sum = 1 + 1;
+ space-infix-ops: 2
+ ## require spaces return, throw, case
+ space-return-throw-case: 2
+ ## no space before function, eg. 'function()'
+ space-before-function-paren: [2, "never"]
+ ## require space before blocks, eg 'function() {'
+ space-before-blocks: [2, "always"]
+ ## require parens for Constructor
+ new-parens: 2
+ ## max 80 length
+ max-len:
+ - 2
+ - 80
+ - 2
+
+ # Strict Mode
+ # list: https://github.com/eslint/eslint/tree/master/docs/rules#strict-mode
+ ## 'use strict' on top
+ strict:
+ - 2
+ - "global"
+
+# Global scoped method and vars
+globals:
+ DTRACE_HTTP_CLIENT_REQUEST: true
+ LTTNG_HTTP_CLIENT_REQUEST: true
+ COUNTER_HTTP_CLIENT_REQUEST: true
+ DTRACE_HTTP_CLIENT_RESPONSE: true
+ LTTNG_HTTP_CLIENT_RESPONSE: true
+ COUNTER_HTTP_CLIENT_RESPONSE: true
+ DTRACE_HTTP_SERVER_REQUEST: true
+ LTTNG_HTTP_SERVER_REQUEST: true
+ COUNTER_HTTP_SERVER_REQUEST: true
+ DTRACE_HTTP_SERVER_RESPONSE: true
+ LTTNG_HTTP_SERVER_RESPONSE: true
+ COUNTER_HTTP_SERVER_RESPONSE: true
+ DTRACE_NET_STREAM_END: true
+ LTTNG_NET_STREAM_END: true
+ COUNTER_NET_SERVER_CONNECTION_CLOSE: true
+ DTRACE_NET_SERVER_CONNECTION: true
+ LTTNG_NET_SERVER_CONNECTION: true
+ COUNTER_NET_SERVER_CONNECTION: true
diff --git a/.gitignore b/.gitignore
index 19fb3840039160..ebb9a7bc56375c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -76,3 +76,7 @@ deps/zlib/zlib.target.mk
tools/faketime
icu_config.gypi
test.tap
+
+# Xcode workspaces and project folders
+*.xcodeproj
+*.xcworkspace
diff --git a/.mailmap b/.mailmap
index e4596711ae955b..de9a9bc61ec16f 100644
--- a/.mailmap
+++ b/.mailmap
@@ -92,9 +92,12 @@ Ray Morgan
Ray Solomon
Raymond Feng
Rick Olson
+Roman Klauke
Roman Reiss
Ryan Dahl
Ryan Emery
+Sakthipriyan Vairamani
+Sam Mikes
Sam Shull
Sam Shull
Sambasiva Suda
@@ -129,6 +132,7 @@ Yazhong Liu Yazhong Liu
Yazhong Liu Yorkie
Yazhong Liu Yorkie
Yoshihiro KIKUCHI
+Yosuke Furukawa
Yuichiro MASUI
Zachary Scott
Zoran Tomicic
diff --git a/AUTHORS b/AUTHORS
index a46220775054f6..2995611156925d 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -576,7 +576,7 @@ Rohini Harendra
Chris Barber
Michael Kebe
Nick Muerdter
-Roman Klauke
+Roman Klauke
Xavi Magrinyà
Euan
Ed Morley
@@ -744,5 +744,25 @@ Giovanny Andres Gongora Granada
Jeffrey Jagoda
Kelsey Breseman
Peter Petrov
+Andrew Crites
+Marat Abdullin
+Dan Varga
+Nick Raienko
+Guilherme Souza
+Chris Yip
+Christopher Monsanto
+Alexander Gromnitsky
+Сковорода Никита Андреевич
+Sakthipriyan Vairamani
+AQNOUCH Mohammed
+Ivan Kozik
+Oleg Elifantiev
+Mike MacCana
+Josh Gummersall
+Sam Mikes
+Frederic Hemberger
+Sharat M R
+Rich Trott
+Felipe Batista
# Generated by tools/update-authors.sh
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7eee2e8ba128b0..c75f160942da4d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,15 +1,285 @@
# io.js ChangeLog
+## 2015-05-24, Version 2.1.0, @rvagg
+
+### Notable changes
+
+* **crypto**: Diffie-Hellman key exchange (DHE) parameters (`'dhparams'`) must now be 1024 bits or longer or an error will be thrown. A warning will also be printed to the console if you supply less than 2048 bits. See https://weakdh.org/ for further context on this security concern. (Shigeki Ohtsu) [#1739](https://github.com/nodejs/io.js/pull/1739).
+* **node**: A new `--trace-sync-io` command line flag will print a warning and a stack trace whenever a synchronous API is used. This can be used to track down synchronous calls that may be slowing down an application. (Trevor Norris) [#1707](https://github.com/nodejs/io.js/pull/1707).
+* **node**: To allow for chaining of methods, the `setTimeout()`, `setKeepAlive()`, `setNoDelay()`, `ref()` and `unref()` methods used in `'net'`, `'dgram'`, `'http'`, `'https'` and `'tls'` now return the current instance instead of `undefined` (Roman Reiss & Evan Lucas) [#1699](https://github.com/nodejs/io.js/pull/1699) [#1768](https://github.com/nodejs/io.js/pull/1768) [#1779](https://github.com/nodejs/io.js/pull/1779).
+* **npm**: Upgraded to v2.10.1, release notes can be found in [v2.10.0](https://github.com/npm/npm/releases/tag/v2.10.0) and [v2.10.1](https://github.com/npm/npm/releases/tag/v2.10.1).
+* **util**: A significant speed-up (in the order of 35%) for the common-case of a single string argument to `util.format()`, used by `console.log()` (Сковорода Никита Андреевич) [#1749](https://github.com/nodejs/io.js/pull/1749).
+
+### Known issues
+
+See https://github.com/nodejs/io.js/labels/confirmed-bug for complete and current list of known issues.
+
+* Some problems with unreferenced timers running during `beforeExit` are still to be resolved. See [#1264](https://github.com/nodejs/io.js/issues/1264).
+* Surrogate pair in REPL can freeze terminal [#690](https://github.com/nodejs/io.js/issues/690)
+* `process.send()` is not synchronous as the docs suggest, a regression introduced in 1.0.2, see [#760](https://github.com/nodejs/io.js/issues/760) and fix in [#774](https://github.com/nodejs/io.js/issues/774)
+* Calling `dns.setServers()` while a DNS query is in progress can cause the process to crash on a failed assertion [#894](https://github.com/nodejs/io.js/issues/894)
+* `url.resolve` may transfer the auth portion of the url when resolving between two full hosts, see [#1435](https://github.com/nodejs/io.js/issues/1435).
+
+### Commits
+
+* [[`9da168b71f`](https://github.com/nodejs/io.js/commit/9da168b71f)] - **buffer**: optimize Buffer.byteLength (Brendan Ashworth) [#1713](https://github.com/nodejs/io.js/pull/1713)
+* [[`2b1c01c2cc`](https://github.com/nodejs/io.js/commit/2b1c01c2cc)] - **build**: refactor pkg-config for shared libraries (Johan Bergström) [#1603](https://github.com/nodejs/io.js/pull/1603)
+* [[`3c44100558`](https://github.com/nodejs/io.js/commit/3c44100558)] - **core**: set PROVIDER type as Persistent class id (Trevor Norris) [#1730](https://github.com/nodejs/io.js/pull/1730)
+* [[`c1de6d249e`](https://github.com/nodejs/io.js/commit/c1de6d249e)] - **(SEMVER-MINOR)** **core**: implement runtime flag to trace sync io (Trevor Norris) [#1707](https://github.com/nodejs/io.js/pull/1707)
+* [[`9e7099fa4e`](https://github.com/nodejs/io.js/commit/9e7099fa4e)] - **deps**: make node-gyp work with io.js (cjihrig) [iojs/io.js#990](https://github.com/iojs/io.js/pull/990)
+* [[`c54d057598`](https://github.com/nodejs/io.js/commit/c54d057598)] - **deps**: upgrade to npm 2.10.1 (Rebecca Turner) [#1763](https://github.com/nodejs/io.js/pull/1763)
+* [[`367ffd167d`](https://github.com/nodejs/io.js/commit/367ffd167d)] - **doc**: update AUTHORS list (Rod Vagg) [#1776](https://github.com/nodejs/io.js/pull/1776)
+* [[`2bb2f06b3e`](https://github.com/nodejs/io.js/commit/2bb2f06b3e)] - **doc**: fix typo in CONTRIBUTING.md (Rich Trott) [#1755](https://github.com/nodejs/io.js/pull/1755)
+* [[`515afc6367`](https://github.com/nodejs/io.js/commit/515afc6367)] - **doc**: path is ignored in url.format (Maurice Butler) [#1753](https://github.com/nodejs/io.js/pull/1753)
+* [[`f0a8bc3f84`](https://github.com/nodejs/io.js/commit/f0a8bc3f84)] - **doc**: fix spelling in CHANGELOG (Felipe Batista)
+* [[`86dd244d9b`](https://github.com/nodejs/io.js/commit/86dd244d9b)] - **doc**: add notes to child_process.fork() and .exec() (Rich Trott) [#1718](https://github.com/nodejs/io.js/pull/1718)
+* [[`066274794c`](https://github.com/nodejs/io.js/commit/066274794c)] - **doc**: update links from iojs/io.js to nodejs/io.js (Frederic Hemberger) [#1715](https://github.com/nodejs/io.js/pull/1715)
+* [[`cb381fe3e0`](https://github.com/nodejs/io.js/commit/cb381fe3e0)] - **(SEMVER-MINOR)** **net**: return this from setNoDelay and setKeepAlive (Roman Reiss) [#1779](https://github.com/nodejs/io.js/pull/1779)
+* [[`85d9983009`](https://github.com/nodejs/io.js/commit/85d9983009)] - **net**: persist net.Socket options before connect (Evan Lucas) [#1518](https://github.com/nodejs/io.js/pull/1518)
+* [[`39dde3222e`](https://github.com/nodejs/io.js/commit/39dde3222e)] - **(SEMVER-MINOR)** **net,dgram**: return this from ref and unref methods (Roman Reiss) [#1768](https://github.com/nodejs/io.js/pull/1768)
+* [[`5773438913`](https://github.com/nodejs/io.js/commit/5773438913)] - **test**: fix jslint error (Michaël Zasso) [#1743](https://github.com/nodejs/io.js/pull/1743)
+* [[`867631986f`](https://github.com/nodejs/io.js/commit/867631986f)] - **test**: fix test-sync-io-option (Santiago Gimeno) [#1734](https://github.com/nodejs/io.js/pull/1734)
+* [[`f29762f4dd`](https://github.com/nodejs/io.js/commit/f29762f4dd)] - **test**: enable linting for tests (Roman Reiss) [#1721](https://github.com/nodejs/io.js/pull/1721)
+* [[`2a71f02988`](https://github.com/nodejs/io.js/commit/2a71f02988)] - **tls**: emit errors happening before handshake finish (Malte-Thorben Bruns) [#1769](https://github.com/nodejs/io.js/pull/1769)
+* [[`80342f649d`](https://github.com/nodejs/io.js/commit/80342f649d)] - **tls**: use `.destroy(err)` instead of destroy+emit (Fedor Indutny) [#1711](https://github.com/nodejs/io.js/pull/1711)
+* [[`9b35be5810`](https://github.com/nodejs/io.js/commit/9b35be5810)] - **tls**: make server not use DHE in less than 1024bits (Shigeki Ohtsu) [#1739](https://github.com/nodejs/io.js/pull/1739)
+* [[`214d02040e`](https://github.com/nodejs/io.js/commit/214d02040e)] - **util**: speed up common case of formatting string (Сковорода Никита Андреевич) [#1749](https://github.com/nodejs/io.js/pull/1749)
+* [[`d144e96fbf`](https://github.com/nodejs/io.js/commit/d144e96fbf)] - **win,node-gyp**: enable delay-load hook by default (Bert Belder) [#1763](https://github.com/nodejs/io.js/pull/1763)
+* [[`0d6d3dda95`](https://github.com/nodejs/io.js/commit/0d6d3dda95)] - **win,node-gyp**: make delay-load hook C89 compliant (Sharat M R) [TooTallNate/node-gyp#616](https://github.com/TooTallNate/node-gyp/pull/616)
+
+## 2015-05-15, Version 2.0.2, @Fishrock123
+
+### Notable changes
+
+* **win,node-gyp**: the delay-load hook for windows addons has now been correctly enabled by default, it had wrongly defaulted to off in the release version of 2.0.0 (Bert Belder) [#1433](https://github.com/nodejs/io.js/pull/1433)
+* **os**: `tmpdir()`'s trailing slash stripping has been refined to fix an issue when the temp directory is at '/'. Also considers which slash is used by the operating system. (cjihrig) [#1673](https://github.com/nodejs/io.js/pull/1673)
+* **tls**: default ciphers have been updated to use gcm and aes128 (Mike MacCana) [#1660](https://github.com/nodejs/io.js/pull/1660)
+* **build**: v8 snapshots have been re-enabled by default as suggested by the v8 team, since prior security issues have been resolved. This should give some perf improvements to both startup and vm context creation. (Trevor Norris) [#1663](https://github.com/nodejs/io.js/pull/1663)
+* **src**: fixed preload modules not working when other flags were used before `--require` (Yosuke Furukawa) [#1694](https://github.com/nodejs/io.js/pull/1694)
+* **dgram**: fixed `send()`'s callback not being asynchronous (Yosuke Furukawa) [#1313](https://github.com/nodejs/io.js/pull/1313)
+* **readline**: emitKeys now keeps buffering data until it has enough to parse. This fixes an issue with parsing split escapes. (Alex Kocharin) [#1601](https://github.com/nodejs/io.js/pull/1601)
+* **cluster**: workers now properly emit 'disconnect' to `cluster.worker` (Oleg Elifantiev) [#1386](https://github.com/nodejs/io.js/pull/1386)
+* **events**: uncaught errors now provide some context (Evan Lucas) [#1654](https://github.com/nodejs/io.js/pull/1654)
+
+### Known issues
+
+See https://github.com/nodejs/io.js/labels/confirmed-bug for complete and current list of known issues.
+
+* Some problems with unreferenced timers running during `beforeExit` are still to be resolved. See [#1264](https://github.com/nodejs/io.js/issues/1264).
+* Surrogate pair in REPL can freeze terminal [#690](https://github.com/nodejs/io.js/issues/690)
+* `process.send()` is not synchronous as the docs suggest, a regression introduced in 1.0.2, see [#760](https://github.com/nodejs/io.js/issues/760) and fix in [#774](https://github.com/nodejs/io.js/issues/774)
+* Calling `dns.setServers()` while a DNS query is in progress can cause the process to crash on a failed assertion [#894](https://github.com/nodejs/io.js/issues/894)
+* `url.resolve` may transfer the auth portion of the url when resolving between two full hosts, see [#1435](https://github.com/nodejs/io.js/issues/1435).
+
+### Commits
+
+* [[`8a0e5295b4`](https://github.com/nodejs/io.js/commit/8a0e5295b4)] - **build**: use backslashes for paths on windows (Johan Bergström) [#1698](https://github.com/nodejs/io.js/pull/1698)
+* [[`20c9a52227`](https://github.com/nodejs/io.js/commit/20c9a52227)] - **build**: move --with-intl to intl optgroup (Johan Bergström) [#1680](https://github.com/nodejs/io.js/pull/1680)
+* [[`36cdc7c8ac`](https://github.com/nodejs/io.js/commit/36cdc7c8ac)] - **build**: re-enable V8 snapshots (Trevor Norris) [#1663](https://github.com/nodejs/io.js/pull/1663)
+* [[`5883a59b21`](https://github.com/nodejs/io.js/commit/5883a59b21)] - **cluster**: disconnect event not emitted correctly (Oleg Elifantiev) [#1386](https://github.com/nodejs/io.js/pull/1386)
+* [[`0f850f7ae7`](https://github.com/nodejs/io.js/commit/0f850f7ae7)] - **deps**: provide TXT chunk info in c-ares (Fedor Indutny)
+* [[`7e1c0e75ed`](https://github.com/nodejs/io.js/commit/7e1c0e75ed)] - **deps**: sync with upstream bagder/c-ares@bba4dc5 (Ben Noordhuis) [#1678](https://github.com/nodejs/io.js/pull/1678)
+* [[`18d457bd34`](https://github.com/nodejs/io.js/commit/18d457bd34)] - **dgram**: call send callback asynchronously (Yosuke Furukawa) [#1313](https://github.com/nodejs/io.js/pull/1313)
+* [[`8b9a1537ad`](https://github.com/nodejs/io.js/commit/8b9a1537ad)] - **events**: provide better error message for unhandled error (Evan Lucas) [#1654](https://github.com/nodejs/io.js/pull/1654)
+* [[`19ffb5cf1c`](https://github.com/nodejs/io.js/commit/19ffb5cf1c)] - **lib**: fix eslint styles (Yosuke Furukawa) [#1539](https://github.com/nodejs/io.js/pull/1539)
+* [[`76937051f8`](https://github.com/nodejs/io.js/commit/76937051f8)] - **os**: refine tmpdir() trailing slash stripping (cjihrig) [#1673](https://github.com/nodejs/io.js/pull/1673)
+* [[`aed6bce906`](https://github.com/nodejs/io.js/commit/aed6bce906)] - **readline**: turn emitKeys into a streaming parser (Alex Kocharin) [#1601](https://github.com/nodejs/io.js/pull/1601)
+* [[`0a461e5360`](https://github.com/nodejs/io.js/commit/0a461e5360)] - **src**: fix preload when used with prior flags (Yosuke Furukawa) [#1694](https://github.com/nodejs/io.js/pull/1694)
+* [[`931a0d4634`](https://github.com/nodejs/io.js/commit/931a0d4634)] - **src**: add type check to v8.setFlagsFromString() (Roman Klauke) [#1652](https://github.com/nodejs/io.js/pull/1652)
+* [[`08d08668c9`](https://github.com/nodejs/io.js/commit/08d08668c9)] - **src,deps**: replace LoadLibrary by LoadLibraryW (Cheng Zhao) [#226](https://github.com/nodejs/io.js/pull/226)
+* [[`4e2f999a62`](https://github.com/nodejs/io.js/commit/4e2f999a62)] - **test**: fix infinite loop detection (Yosuke Furukawa) [#1681](https://github.com/nodejs/io.js/pull/1681)
+* [[`5755fc099f`](https://github.com/nodejs/io.js/commit/5755fc099f)] - **tls**: update default ciphers to use gcm and aes128 (Mike MacCana) [#1660](https://github.com/nodejs/io.js/pull/1660)
+* [[`966acb9916`](https://github.com/nodejs/io.js/commit/966acb9916)] - **tools**: remove closure_linter to eslint on windows (Yosuke Furukawa) [#1685](https://github.com/nodejs/io.js/pull/1685)
+* [[`c58264e58b`](https://github.com/nodejs/io.js/commit/c58264e58b)] - **tools**: make eslint work on subdirectories (Roman Reiss) [#1686](https://github.com/nodejs/io.js/pull/1686)
+* [[`0b21ab13b7`](https://github.com/nodejs/io.js/commit/0b21ab13b7)] - **tools**: refactor `make test-npm` into test-npm.sh (Jeremiah Senkpiel) [#1662](https://github.com/nodejs/io.js/pull/1662)
+* [[`f07b3b600b`](https://github.com/nodejs/io.js/commit/f07b3b600b)] - **tools**: set eslint comma-spacing to 'warn' (Roman Reiss) [#1672](https://github.com/nodejs/io.js/pull/1672)
+* [[`f9dd34d301`](https://github.com/nodejs/io.js/commit/f9dd34d301)] - **tools**: replace closure-linter with eslint (Yosuke Furukawa) [#1539](https://github.com/nodejs/io.js/pull/1539)
+* [[`64d3210c98`](https://github.com/nodejs/io.js/commit/64d3210c98)] - **win,node-gyp**: enable delay-load hook by default (Bert Belder) [#1667](https://github.com/nodejs/io.js/issues/1667)
+
+## 2015-05-07, Version 2.0.1, @rvagg
+
+### Notable changes
+
+* **async_wrap**: (Trevor Norris) [#1614](https://github.com/nodejs/io.js/pull/1614)
+ - it is now possible to filter by providers
+ - bit flags have been removed and replaced with method calls on the binding object
+ - _note that this is an unstable API so feature additions and breaking changes won't change io.js semver_
+* **libuv**: resolves numerous io.js issues:
+ - [#862](https://github.com/nodejs/io.js/issues/862) prevent spawning child processes with invalid stdio file descriptors
+ - [#1397](https://github.com/nodejs/io.js/issues/1397) fix EPERM error with fs.access(W_OK) on Windows
+ - [#1621](https://github.com/nodejs/io.js/issues/1621) build errors associated with the bundled libuv
+ - [#1512](https://github.com/nodejs/io.js/issues/1512) should properly fix Windows termination errors
+* **addons**: the `NODE_DEPRECATED` macro was causing problems when compiling addons with older compilers, this should now be resolved (Ben Noordhuis) [#1626](https://github.com/nodejs/io.js/pull/1626)
+* **V8**: upgrade V8 from 4.2.77.18 to 4.2.77.20 with minor fixes, including a bug preventing builds on FreeBSD
+
+### Known issues
+
+See https://github.com/nodejs/io.js/labels/confirmed-bug for complete and current list of known issues.
+
+* Some problems with unreferenced timers running during `beforeExit` are still to be resolved. See [#1264](https://github.com/nodejs/io.js/issues/1264).
+* Surrogate pair in REPL can freeze terminal [#690](https://github.com/nodejs/io.js/issues/690)
+* `process.send()` is not synchronous as the docs suggest, a regression introduced in 1.0.2, see [#760](https://github.com/nodejs/io.js/issues/760) and fix in [#774](https://github.com/nodejs/io.js/issues/774)
+* Calling `dns.setServers()` while a DNS query is in progress can cause the process to crash on a failed assertion [#894](https://github.com/nodejs/io.js/issues/894)
+* `url.resolve` may transfer the auth portion of the url when resolving between two full hosts, see [#1435](https://github.com/nodejs/io.js/issues/1435).
+* readline: split escapes are processed incorrectly, see [#1403](https://github.com/nodejs/io.js/issues/1403)
+
+### Commits
+
+* [[`7dde95a8bd`](https://github.com/nodejs/io.js/commit/7dde95a8bd)] - **async-wrap**: remove before/after calls in init (Trevor Norris) [#1614](https://github.com/nodejs/io.js/pull/1614)
+* [[`bd42ba056a`](https://github.com/nodejs/io.js/commit/bd42ba056a)] - **async-wrap**: set flags using functions (Trevor Norris) [#1614](https://github.com/nodejs/io.js/pull/1614)
+* [[`4b2c786449`](https://github.com/nodejs/io.js/commit/4b2c786449)] - **async-wrap**: pass PROVIDER as first arg to init (Trevor Norris) [#1614](https://github.com/nodejs/io.js/pull/1614)
+* [[`84bf609fd2`](https://github.com/nodejs/io.js/commit/84bf609fd2)] - **async-wrap**: don't call init callback unnecessarily (Trevor Norris) [#1614](https://github.com/nodejs/io.js/pull/1614)
+* [[`04cc03b029`](https://github.com/nodejs/io.js/commit/04cc03b029)] - **deps**: update libuv to 1.5.0 (Saúl Ibarra Corretgé) [#1646](https://github.com/nodejs/io.js/pull/1646)
+* [[`b16d9c28e8`](https://github.com/nodejs/io.js/commit/b16d9c28e8)] - **deps**: upgrade v8 to 4.2.77.20 (Ben Noordhuis) [#1639](https://github.com/nodejs/io.js/pull/1639)
+* [[`9ec3109272`](https://github.com/nodejs/io.js/commit/9ec3109272)] - **doc**: add TC meeting 2015-04-29 minutes (Rod Vagg) [#1585](https://github.com/nodejs/io.js/pull/1585)
+* [[`2c7206254c`](https://github.com/nodejs/io.js/commit/2c7206254c)] - **doc**: fix typo in readme.md (AQNOUCH Mohammed) [#1643](https://github.com/nodejs/io.js/pull/1643)
+* [[`71dc7152ee`](https://github.com/nodejs/io.js/commit/71dc7152ee)] - **doc**: fix PR link in CHANGELOG (Brian White) [#1624](https://github.com/nodejs/io.js/pull/1624)
+* [[`b97b96d05a`](https://github.com/nodejs/io.js/commit/b97b96d05a)] - **install**: fix NameError (thefourtheye) [#1628](https://github.com/nodejs/io.js/pull/1628)
+* [[`6ccbe75384`](https://github.com/nodejs/io.js/commit/6ccbe75384)] - **js_stream**: fix buffer index in DoWrite (Shigeki Ohtsu) [#1635](https://github.com/nodejs/io.js/pull/1635)
+* [[`c43855c49c`](https://github.com/nodejs/io.js/commit/c43855c49c)] - **src**: export the ParseEncoding function on Windows (Ivan Kozik) [#1596](https://github.com/nodejs/io.js/pull/1596)
+* [[`8315b22390`](https://github.com/nodejs/io.js/commit/8315b22390)] - **src**: fix pedantic cpplint whitespace warnings (Ben Noordhuis) [#1640](https://github.com/nodejs/io.js/pull/1640)
+* [[`b712af79a7`](https://github.com/nodejs/io.js/commit/b712af79a7)] - **src**: fix NODE_DEPRECATED macro with old compilers (Ben Noordhuis) [#1626](https://github.com/nodejs/io.js/pull/1626)
+* [[`2ed10f1349`](https://github.com/nodejs/io.js/commit/2ed10f1349)] - **src**: fix minor inefficiency in Buffer::New() call (Ben Noordhuis) [#1577](https://github.com/nodejs/io.js/pull/1577)
+* [[`f696c9efab`](https://github.com/nodejs/io.js/commit/f696c9efab)] - **src**: fix deprecated use of Buffer::New() (Ben Noordhuis) [#1577](https://github.com/nodejs/io.js/pull/1577)
+* [[`0c8f13df8f`](https://github.com/nodejs/io.js/commit/0c8f13df8f)] - **tools**: remove unused GuessWordSize function (thefourtheye) [#1638](https://github.com/nodejs/io.js/pull/1638)
+
+## 2015-05-04, Version 2.0.0, @rvagg
+
+### Breaking changes
+
+Full details at https://github.com/nodejs/io.js/wiki/Breaking-Changes#200-from-1x
+
+* V8 upgrade to 4.2, minor changes to C++ API
+* `os.tmpdir()` is now cross-platform consistent and no longer returns a path with a trailing slash on any platform
+* While not a *breaking change* the 'smalloc' module has been deprecated in anticipation of it becoming unsupportable with a future upgrade to V8 4.4. See [#1451](https://github.com/nodejs/io.js/issues/1451) for further information.
+
+_Note: a new version of the 'url' module was reverted prior to release as it was decided the potential for breakage across the npm ecosystem was too great and that more compatibility work needed to be done before releasing it. See [#1602](https://github.com/nodejs/io.js/pull/1602) for further information._
+
+### Notable changes
+
+* **crypto**: significantly reduced memory usage for TLS (Fedor Indutny & Сковорода Никита Андреевич) [#1529](https://github.com/nodejs/io.js/pull/1529)
+* **net**: `socket.connect()` now accepts a `'lookup'` option for a custom DNS resolution mechanism, defaults to `dns.lookup()` (Evan Lucas) [#1505](https://github.com/nodejs/io.js/pull/1505)
+* **npm**: Upgrade npm to 2.9.0. See the [v2.8.4](https://github.com/npm/npm/releases/tag/v2.8.4) and [v2.9.0](https://github.com/npm/npm/releases/tag/v2.9.0) release notes for details. Notable items:
+ - Add support for default author field to make `npm init -y` work without user-input (@othiym23) [npm/npm/d8eee6cf9d](https://github.com/npm/npm/commit/d8eee6cf9d2ff7aca68dfaed2de76824a3e0d9af)
+ - Include local modules in `npm outdated` and `npm update` (@ArnaudRinquin) [npm/npm#7426](https://github.com/npm/npm/issues/7426)
+ - The prefix used before the version number on `npm version` is now configurable via `tag-version-prefix` (@kkragenbrink) [npm/npm#8014](https://github.com/npm/npm/issues/8014)
+* **os**: `os.tmpdir()` is now cross-platform consistent and will no longer return a path with a trailing slash on any platform (Christian Tellnes) [#747](https://github.com/nodejs/io.js/pull/747)
+* **process**:
+ - `process.nextTick()` performance has been improved by between 2-42% across the benchmark suite, notable because this is heavily used across core (Brian White) [#1571](https://github.com/nodejs/io.js/pull/1571)
+ - New `process.geteuid()`, `process.seteuid(id)`, `process.getegid()` and `process.setegid(id)` methods allow you to get and set effective UID and GID of the process (Evan Lucas) [#1536](https://github.com/nodejs/io.js/pull/1536)
+* **repl**:
+ - REPL history can be persisted across sessions if the `NODE_REPL_HISTORY_FILE` environment variable is set to a user accessible file, `NODE_REPL_HISTORY_SIZE` can set the maximum history size and defaults to `1000` (Chris Dickinson) [#1513](https://github.com/nodejs/io.js/pull/1513)
+ - The REPL can be placed into one of three modes using the `NODE_REPL_MODE` environment variable: `sloppy`, `strict` or `magic` (default); the new `magic` mode will automatically run "strict mode only" statements in strict mode (Chris Dickinson) [#1513](https://github.com/nodejs/io.js/pull/1513)
+* **smalloc**: the 'smalloc' module has been deprecated due to changes coming in V8 4.4 that will render it unusable
+* **util**: add Promise, Map and Set inspection support (Christopher Monsanto) [#1471](https://github.com/nodejs/io.js/pull/1471)
+* **V8**: upgrade to 4.2.77.18, see the [ChangeLog](https://chromium.googlesource.com/v8/v8/+/refs/heads/4.2.77/ChangeLog) for full details. Notable items:
+ - Classes have moved out of staging; the `class` keyword is now usable in strict mode without flags
+ - Object literal enhancements have moved out of staging; shorthand method and property syntax is now usable (`{ method() { }, property }`)
+ - Rest parameters (`function(...args) {}`) are implemented in staging behind the `--harmony-rest-parameters` flag
+ - Computed property names (`{['foo'+'bar']:'bam'}`) are implemented in staging behind the `--harmony-computed-property-names` flag
+ - Unicode escapes (`'\u{xxxx}'`) are implemented in staging behind the `--harmony_unicode` flag and the `--harmony_unicode_regexps` flag for use in regular expressions
+* **Windows**:
+ - Random process termination on Windows fixed (Fedor Indutny) [#1512](https://github.com/nodejs/io.js/issues/1512) / [#1563](https://github.com/nodejs/io.js/pull/1563)
+ - The delay-load hook introduced to fix issues with process naming (iojs.exe / node.exe) has been made opt-out for native add-ons. Native add-ons should include `'win_delay_load_hook': 'false'` in their binding.gyp to disable this feature if they experience problems. (Bert Belder) [#1433](https://github.com/nodejs/io.js/pull/1433)
+* **Governance**:
+ - Rod Vagg (@rvagg) was added to the Technical Committee (TC)
+ - Jeremiah Senkpiel (@Fishrock123) was added to the Technical Committee (TC)
+
+### Known issues
+
+See https://github.com/nodejs/io.js/labels/confirmed-bug for complete and current list of known issues.
+
+* Some problems with unreferenced timers running during `beforeExit` are still to be resolved. See [#1264](https://github.com/nodejs/io.js/issues/1264).
+* Surrogate pair in REPL can freeze terminal [#690](https://github.com/nodejs/io.js/issues/690)
+* `process.send()` is not synchronous as the docs suggest, a regression introduced in 1.0.2, see [#760](https://github.com/nodejs/io.js/issues/760) and fix in [#774](https://github.com/nodejs/io.js/issues/774)
+* Calling `dns.setServers()` while a DNS query is in progress can cause the process to crash on a failed assertion [#894](https://github.com/nodejs/io.js/issues/894)
+* `url.resolve` may transfer the auth portion of the url when resolving between two full hosts, see [#1435](https://github.com/nodejs/io.js/issues/1435).
+* readline: split escapes are processed incorrectly, see [#1403](https://github.com/nodejs/io.js/issues/1403)
+
+### Commits
+
+* [[`5404cbc745`](https://github.com/nodejs/io.js/commit/5404cbc745)] - **buffer**: fix copy() segfault with zero arguments (Trevor Norris) [#1520](https://github.com/nodejs/io.js/pull/1520)
+* [[`3d3083b91f`](https://github.com/nodejs/io.js/commit/3d3083b91f)] - **buffer**: little improve for Buffer.concat method (Jackson Tian) [#1437](https://github.com/nodejs/io.js/pull/1437)
+* [[`e67542ae17`](https://github.com/nodejs/io.js/commit/e67542ae17)] - **build**: disable -Og when building with clang (Ben Noordhuis) [#1609](https://github.com/nodejs/io.js/pull/1609)
+* [[`78f4b038f8`](https://github.com/nodejs/io.js/commit/78f4b038f8)] - **build**: turn on debug-safe optimizations with -Og (Ben Noordhuis) [#1569](https://github.com/nodejs/io.js/pull/1569)
+* [[`a5dcff827a`](https://github.com/nodejs/io.js/commit/a5dcff827a)] - **build**: Use option groups in configure output (Johan Bergström) [#1533](https://github.com/nodejs/io.js/pull/1533)
+* [[`2a3c8c187e`](https://github.com/nodejs/io.js/commit/2a3c8c187e)] - **build**: remove -J from test-ci (Rod Vagg) [#1544](https://github.com/nodejs/io.js/pull/1544)
+* [[`e6874dd0f9`](https://github.com/nodejs/io.js/commit/e6874dd0f9)] - **crypto**: track external memory for SSL structures (Fedor Indutny) [#1529](https://github.com/nodejs/io.js/pull/1529)
+* [[`935c9d3fa7`](https://github.com/nodejs/io.js/commit/935c9d3fa7)] - **deps**: make node-gyp work with io.js (cjihrig) [#990](https://github.com/nodejs/io.js/pull/990)
+* [[`56e4255382`](https://github.com/nodejs/io.js/commit/56e4255382)] - **deps**: upgrade npm to 2.9.0 (Forrest L Norvell) [#1573](https://github.com/nodejs/io.js/pull/1573)
+* [[`509b59ea7c`](https://github.com/nodejs/io.js/commit/509b59ea7c)] - **deps**: enable v8 postmortem debugging again (Ben Noordhuis) [#1232](https://github.com/nodejs/io.js/pull/1232)
+* [[`01652c7709`](https://github.com/nodejs/io.js/commit/01652c7709)] - **deps**: upgrade v8 to 4.2.77.18 (Chris Dickinson) [#1506](https://github.com/nodejs/io.js/pull/1506)
+* [[`01e6632d70`](https://github.com/nodejs/io.js/commit/01e6632d70)] - **deps**: upgrade v8 to 4.2.77.15 (Ben Noordhuis) [#1399](https://github.com/nodejs/io.js/pull/1399)
+* [[`db4ded5903`](https://github.com/nodejs/io.js/commit/db4ded5903)] - **deps**: enable v8 postmortem debugging again (Ben Noordhuis) [#1232](https://github.com/nodejs/io.js/pull/1232)
+* [[`36cd5fb9d2`](https://github.com/nodejs/io.js/commit/36cd5fb9d2)] - **(SEMVER-MAJOR)** **deps**: upgrade v8 to 4.2.77.13 (Ben Noordhuis) [#1232](https://github.com/nodejs/io.js/pull/1232)
+* [[`b3a7da1091`](https://github.com/nodejs/io.js/commit/b3a7da1091)] - **deps**: update http_parser to 2.5.0 (Fedor Indutny) [#1517](https://github.com/nodejs/io.js/pull/1517)
+* [[`ac1fb39ce8`](https://github.com/nodejs/io.js/commit/ac1fb39ce8)] - **doc**: add rvagg to the TC (Rod Vagg) [#1613](https://github.com/nodejs/io.js/pull/1613)
+* [[`dacc1fa35c`](https://github.com/nodejs/io.js/commit/dacc1fa35c)] - **doc**: update AUTHORS list (Rod Vagg) [#1586](https://github.com/nodejs/io.js/pull/1586)
+* [[`2a3a1909ab`](https://github.com/nodejs/io.js/commit/2a3a1909ab)] - **doc**: add require() lines to child.stdio example (Nick Raienko) [#1504](https://github.com/nodejs/io.js/pull/1504)
+* [[`02388dbf40`](https://github.com/nodejs/io.js/commit/02388dbf40)] - **doc**: fix some cross-references (Alexander Gromnitsky) [#1584](https://github.com/nodejs/io.js/pull/1584)
+* [[`57c4cc26e2`](https://github.com/nodejs/io.js/commit/57c4cc26e2)] - **doc**: add TC meeting 2015-04-22 minutes (Rod Vagg) [#1556](https://github.com/nodejs/io.js/pull/1556)
+* [[`b4ad5d7050`](https://github.com/nodejs/io.js/commit/b4ad5d7050)] - **doc**: improve http.request and https.request opts (Roman Reiss) [#1551](https://github.com/nodejs/io.js/pull/1551)
+* [[`7dc8eec0a6`](https://github.com/nodejs/io.js/commit/7dc8eec0a6)] - **doc**: deprecate smalloc module (Ben Noordhuis) [#1566](https://github.com/nodejs/io.js/pull/1566)
+* [[`1bcdf46ca7`](https://github.com/nodejs/io.js/commit/1bcdf46ca7)] - **doc**: add TC meeting 2015-04-15 minutes (Rod Vagg) [#1498](https://github.com/nodejs/io.js/pull/1498)
+* [[`391cae3595`](https://github.com/nodejs/io.js/commit/391cae3595)] - **doc**: Add Known issues to v1.7.0/1.7.1 CHANGELOG (Yosuke Furukawa) [#1473](https://github.com/nodejs/io.js/pull/1473)
+* [[`e55fdc47a7`](https://github.com/nodejs/io.js/commit/e55fdc47a7)] - **doc**: fix util.deprecate example (Nick Raienko) [#1535](https://github.com/nodejs/io.js/pull/1535)
+* [[`5178f93bc0`](https://github.com/nodejs/io.js/commit/5178f93bc0)] - **doc**: Add Addon API (NAN) to working group list (Julian Duque) [#1523](https://github.com/nodejs/io.js/pull/1523)
+* [[`f3cc50f811`](https://github.com/nodejs/io.js/commit/f3cc50f811)] - **doc**: add TC meeting 2015-04-08 minutes (Rod Vagg) [#1497](https://github.com/nodejs/io.js/pull/1497)
+* [[`bb254b533b`](https://github.com/nodejs/io.js/commit/bb254b533b)] - **doc**: update branch to master (Roman Reiss) [#1511](https://github.com/nodejs/io.js/pull/1511)
+* [[`22aafa5597`](https://github.com/nodejs/io.js/commit/22aafa5597)] - **doc**: add Fishrock123 to the TC (Jeremiah Senkpiel) [#1507](https://github.com/nodejs/io.js/pull/1507)
+* [[`b16a328ede`](https://github.com/nodejs/io.js/commit/b16a328ede)] - **doc**: add spaces to child.kill example (Nick Raienko) [#1503](https://github.com/nodejs/io.js/pull/1503)
+* [[`26327757f8`](https://github.com/nodejs/io.js/commit/26327757f8)] - **doc**: update AUTHORS list (Rod Vagg) [#1476](https://github.com/nodejs/io.js/pull/1476)
+* [[`f9c681cf62`](https://github.com/nodejs/io.js/commit/f9c681cf62)] - **fs**: validate fd on fs.write (Julian Duque) [#1553](https://github.com/nodejs/io.js/pull/1553)
+* [[`801b47acc5`](https://github.com/nodejs/io.js/commit/801b47acc5)] - **gitignore**: ignore xcode workspaces and projects (Roman Klauke) [#1562](https://github.com/nodejs/io.js/pull/1562)
+* [[`d5ce47e433`](https://github.com/nodejs/io.js/commit/d5ce47e433)] - **(SEMVER-MINOR)** **lib**: deprecate the smalloc module (Ben Noordhuis) [#1564](https://github.com/nodejs/io.js/pull/1564)
+* [[`7384ca83f9`](https://github.com/nodejs/io.js/commit/7384ca83f9)] - **module**: remove '' from Module.globalPaths (Chris Yip) [#1488](https://github.com/nodejs/io.js/pull/1488)
+* [[`b4f5898395`](https://github.com/nodejs/io.js/commit/b4f5898395)] - **net**: ensure Write/ShutdownWrap references handle (Fedor Indutny) [#1590](https://github.com/nodejs/io.js/pull/1590)
+* [[`4abe2fa1cf`](https://github.com/nodejs/io.js/commit/4abe2fa1cf)] - **(SEMVER-MINOR)** **net**: add lookup option to Socket.prototype.connect (Evan Lucas) [#1505](https://github.com/nodejs/io.js/pull/1505)
+* [[`1bef717476`](https://github.com/nodejs/io.js/commit/1bef717476)] - **(SEMVER-MINOR)** **net**: cleanup connect logic (Evan Lucas) [#1505](https://github.com/nodejs/io.js/pull/1505)
+* [[`c7782c0af8`](https://github.com/nodejs/io.js/commit/c7782c0af8)] - **node**: improve nextTick performance (Brian White) [#1571](https://github.com/nodejs/io.js/pull/1571)
+* [[`b57cc51d8d`](https://github.com/nodejs/io.js/commit/b57cc51d8d)] - **(SEMVER-MAJOR)** **os**: remove trailing slash from os.tmpdir() (Christian Tellnes) [#747](https://github.com/nodejs/io.js/pull/747)
+* [[`ca219b00d1`](https://github.com/nodejs/io.js/commit/ca219b00d1)] - **repl**: fix for a+ fd clearing the file on read (Chris Dickinson) [#1605](https://github.com/nodejs/io.js/pull/1605)
+* [[`051d482b15`](https://github.com/nodejs/io.js/commit/051d482b15)] - **repl**: fix \_debugger by properly proxying repl (Chris Dickinson) [#1605](https://github.com/nodejs/io.js/pull/1605)
+* [[`2e2fce0502`](https://github.com/nodejs/io.js/commit/2e2fce0502)] - **repl**: fix persistent history and env variable name (Roman Reiss) [#1593](https://github.com/nodejs/io.js/pull/1593)
+* [[`ea5195ccaf`](https://github.com/nodejs/io.js/commit/ea5195ccaf)] - **repl**: do not save history for non-terminal repl (Fedor Indutny) [#1575](https://github.com/nodejs/io.js/pull/1575)
+* [[`0450ce7db2`](https://github.com/nodejs/io.js/commit/0450ce7db2)] - **repl**: add mode detection, cli persistent history (Chris Dickinson) [#1513](https://github.com/nodejs/io.js/pull/1513)
+* [[`af9fe3bbc7`](https://github.com/nodejs/io.js/commit/af9fe3bbc7)] - **(SEMVER-MAJOR)** **src**: bump NODE_MODULE_VERSION due to V8 API (Rod Vagg) [#1532](https://github.com/nodejs/io.js/pull/1532)
+* [[`279f6116aa`](https://github.com/nodejs/io.js/commit/279f6116aa)] - **src**: fix -Wmissing-field-initializers warning (Ben Noordhuis) [#1606](https://github.com/nodejs/io.js/pull/1606)
+* [[`73062521a4`](https://github.com/nodejs/io.js/commit/73062521a4)] - **src**: deprecate smalloc public functions (Ben Noordhuis) [#1565](https://github.com/nodejs/io.js/pull/1565)
+* [[`ccb199af17`](https://github.com/nodejs/io.js/commit/ccb199af17)] - **src**: fix deprecation warnings (Ben Noordhuis) [#1565](https://github.com/nodejs/io.js/pull/1565)
+* [[`609fa0de03`](https://github.com/nodejs/io.js/commit/609fa0de03)] - **src**: fix NODE_DEPRECATED macro (Ben Noordhuis) [#1565](https://github.com/nodejs/io.js/pull/1565)
+* [[`3c92ca2b5c`](https://github.com/nodejs/io.js/commit/3c92ca2b5c)] - **(SEMVER-MINOR)** **src**: add ability to get/set effective uid/gid (Evan Lucas) [#1536](https://github.com/nodejs/io.js/pull/1536)
+* [[`30b7349176`](https://github.com/nodejs/io.js/commit/30b7349176)] - **stream_base**: dispatch reqs in the stream impl (Fedor Indutny) [#1563](https://github.com/nodejs/io.js/pull/1563)
+* [[`0fa6c4a6fc`](https://github.com/nodejs/io.js/commit/0fa6c4a6fc)] - **string_decoder**: don't cache Buffer.isEncoding (Brian White) [#1548](https://github.com/nodejs/io.js/pull/1548)
+* [[`f9b226c1c1`](https://github.com/nodejs/io.js/commit/f9b226c1c1)] - **test**: extend timeouts for ARMv6 (Rod Vagg) [#1554](https://github.com/nodejs/io.js/pull/1554)
+* [[`bfae8236b1`](https://github.com/nodejs/io.js/commit/bfae8236b1)] - **test**: fix test-net-dns-custom-lookup test assertion (Evan Lucas) [#1531](https://github.com/nodejs/io.js/pull/1531)
+* [[`547213913b`](https://github.com/nodejs/io.js/commit/547213913b)] - **test**: adjust Makefile/test-ci, add to vcbuild.bat (Rod Vagg) [#1530](https://github.com/nodejs/io.js/pull/1530)
+* [[`550c2638c0`](https://github.com/nodejs/io.js/commit/550c2638c0)] - **tls**: use `SSL_set_cert_cb` for async SNI/OCSP (Fedor Indutny) [#1464](https://github.com/nodejs/io.js/pull/1464)
+* [[`1787416376`](https://github.com/nodejs/io.js/commit/1787416376)] - **tls**: destroy singleUse context immediately (Fedor Indutny) [#1529](https://github.com/nodejs/io.js/pull/1529)
+* [[`2684c902c4`](https://github.com/nodejs/io.js/commit/2684c902c4)] - **tls**: zero SSL_CTX freelist for a singleUse socket (Fedor Indutny) [#1529](https://github.com/nodejs/io.js/pull/1529)
+* [[`2d241b3b82`](https://github.com/nodejs/io.js/commit/2d241b3b82)] - **tls**: destroy SSL once it is out of use (Fedor Indutny) [#1529](https://github.com/nodejs/io.js/pull/1529)
+* [[`f7620fb96d`](https://github.com/nodejs/io.js/commit/f7620fb96d)] - **tls_wrap**: Unlink TLSWrap and SecureContext objects (Сковорода Никита Андреевич) [#1580](https://github.com/nodejs/io.js/pull/1580)
+* [[`a7d74633f2`](https://github.com/nodejs/io.js/commit/a7d74633f2)] - **tls_wrap**: use localhost if options.host is empty (Guilherme Souza) [#1493](https://github.com/nodejs/io.js/pull/1493)
+* [[`702997c1f0`](https://github.com/nodejs/io.js/commit/702997c1f0)] - ***Revert*** "**url**: significantly improve the performance of the url module" (Rod Vagg) [#1602](https://github.com/nodejs/io.js/pull/1602)
+* [[`0daed24883`](https://github.com/nodejs/io.js/commit/0daed24883)] - ***Revert*** "**url**: delete href cache on all setter code paths" (Rod Vagg) [#1602](https://github.com/nodejs/io.js/pull/1602)
+* [[`0f39ef4ca1`](https://github.com/nodejs/io.js/commit/0f39ef4ca1)] - ***Revert*** "**url**: fix treatment of some values as non-empty" (Rod Vagg) [#1602](https://github.com/nodejs/io.js/pull/1602)
+* [[`66877216bd`](https://github.com/nodejs/io.js/commit/66877216bd)] - **url**: fix treatment of some values as non-empty (Petka Antonov) [#1589](https://github.com/nodejs/io.js/pull/1589)
+* [[`dbdd81a91b`](https://github.com/nodejs/io.js/commit/dbdd81a91b)] - **url**: delete href cache on all setter code paths (Petka Antonov) [#1589](https://github.com/nodejs/io.js/pull/1589)
+* [[`3fd7fc429c`](https://github.com/nodejs/io.js/commit/3fd7fc429c)] - **url**: significantly improve the performance of the url module (Petka Antonov) [#1561](https://github.com/nodejs/io.js/pull/1561)
+* [[`bf7ac08dd0`](https://github.com/nodejs/io.js/commit/bf7ac08dd0)] - **util**: add Map and Set inspection support (Christopher Monsanto) [#1471](https://github.com/nodejs/io.js/pull/1471)
+* [[`30e83d2e84`](https://github.com/nodejs/io.js/commit/30e83d2e84)] - **win,node-gyp**: optionally allow node.exe/iojs.exe to be renamed (Bert Belder) [#1266](https://github.com/nodejs/io.js/pull/1266)
+* [[`3bda6cbfa4`](https://github.com/nodejs/io.js/commit/3bda6cbfa4)] - **(SEMVER-MAJOR)** **win,node-gyp**: enable delay-load hook by default (Bert Belder) [#1433](https://github.com/nodejs/io.js/pull/1433)
+
## 2015-04-20, Version 1.8.1, @chrisdickinson
### Notable changes
* **NOTICE**: Skipped v1.8.0 due to problems with release tooling.
- See [#1436](https://github.com/iojs/io.js/issues/1436) for details.
-* **build**: Support for building io.js as a static library (Marat Abdullin) [#1341](https://github.com/iojs/io.js/pull/1341)
-* **deps**: Upgrade openssl to 1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
+ See [#1436](https://github.com/nodejs/io.js/issues/1436) for details.
+* **build**: Support for building io.js as a static library (Marat Abdullin) [#1341](https://github.com/nodejs/io.js/pull/1341)
+* **deps**: Upgrade openssl to 1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
* Users should see performance improvements when using the crypto API.
- See [here](https://github.com/iojs/io.js/wiki/Crypto-Performance-Notes-for-OpenSSL-1.0.2a-on-iojs-v1.8.0)
+ See [here](https://github.com/nodejs/io.js/wiki/Crypto-Performance-Notes-for-OpenSSL-1.0.2a-on-iojs-v1.8.0)
for details.
* **npm**: Upgrade npm to 2.8.3. See the [release notes](https://github.com/npm/npm/releases/tag/v2.8.3) for details. Includes improved git support. Summary:
* [`387f889`](https://github.com/npm/npm/commit/387f889c0e8fb617d9cc9a42ed0a3ec49424ab5d)
@@ -39,118 +309,134 @@
Save shorthand in `package.json`. Try cloning via `git:`, `git+ssh:`, and
`git+https:`, in that order, when supported by the underlying hosting
provider. ([@othiym23](https://github.com/othiym23))
-* **src**: Allow multiple arguments to be passed to process.nextTick (Trevor Norris) [#1077](https://github.com/iojs/io.js/pull/1077)
+* **src**: Allow multiple arguments to be passed to process.nextTick (Trevor Norris) [#1077](https://github.com/nodejs/io.js/pull/1077)
* **module**: The interaction of `require('.')` with `NODE_PATH` has been restored and deprecated. This functionality
-will be removed at a later point. (Roman Reiss) [#1363](https://github.com/iojs/io.js/pull/1363)
+will be removed at a later point. (Roman Reiss) [#1363](https://github.com/nodejs/io.js/pull/1363)
### Known issues
-* Some problems with unreferenced timers running during `beforeExit` are still to be resolved. See [#1264](https://github.com/iojs/io.js/issues/1264).
-* Surrogate pair in REPL can freeze terminal [#690](https://github.com/iojs/io.js/issues/690)
-* `process.send()` is not synchronous as the docs suggest, a regression introduced in 1.0.2, see [#760](https://github.com/iojs/io.js/issues/760) and fix in [#774](https://github.com/iojs/io.js/issues/774)
-* Calling `dns.setServers()` while a DNS query is in progress can cause the process to crash on a failed assertion [#894](https://github.com/iojs/io.js/issues/894)
-* `url.resolve` may transfer the auth portion of the url when resolving between two full hosts, see [#1435](https://github.com/iojs/io.js/issues/1435).
-* readline: split escapes are processed incorrectly, see [#1403](https://github.com/iojs/io.js/issues/1403)
+* Some problems with unreferenced timers running during `beforeExit` are still to be resolved. See [#1264](https://github.com/nodejs/io.js/issues/1264).
+* Surrogate pair in REPL can freeze terminal [#690](https://github.com/nodejs/io.js/issues/690)
+* `process.send()` is not synchronous as the docs suggest, a regression introduced in 1.0.2, see [#760](https://github.com/nodejs/io.js/issues/760) and fix in [#774](https://github.com/nodejs/io.js/issues/774)
+* Calling `dns.setServers()` while a DNS query is in progress can cause the process to crash on a failed assertion [#894](https://github.com/nodejs/io.js/issues/894)
+* `url.resolve` may transfer the auth portion of the url when resolving between two full hosts, see [#1435](https://github.com/nodejs/io.js/issues/1435).
+* readline: split escapes are processed incorrectly, see [#1403](https://github.com/nodejs/io.js/issues/1403)
### Commits
-* [[`53ed89d927`](https://github.com/iojs/io.js/commit/53ed89d927)] - ***Revert*** "**build**: use %PYTHON% instead of python" (Rod Vagg) [#1475](https://github.com/iojs/io.js/pull/1475)
-* [[`2b744b0ab7`](https://github.com/iojs/io.js/commit/2b744b0ab7)] - **src**: revert NODE_MODULE_VERSION to 43 (Chris Dickinson) [#1460](https://github.com/iojs/io.js/pull/1460)
-* [[`431673ebd1`](https://github.com/iojs/io.js/commit/431673ebd1)] - **buffer**: fast-case for empty string in byteLength (Jackson Tian) [#1441](https://github.com/iojs/io.js/pull/1441)
-* [[`1b22bad35f`](https://github.com/iojs/io.js/commit/1b22bad35f)] - **build**: fix logic for shared library flags (Jeremiah Senkpiel) [#1454](https://github.com/iojs/io.js/pull/1454)
-* [[`91943a99d5`](https://github.com/iojs/io.js/commit/91943a99d5)] - **build**: use %PYTHON% instead of python (Rod Vagg) [#1444](https://github.com/iojs/io.js/pull/1444)
-* [[`c7769d417b`](https://github.com/iojs/io.js/commit/c7769d417b)] - **build**: Expose xz compression level (Johan Bergström) [#1428](https://github.com/iojs/io.js/pull/1428)
-* [[`a530b2baf1`](https://github.com/iojs/io.js/commit/a530b2baf1)] - **build**: fix error message in configure (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`92dfb794f9`](https://github.com/iojs/io.js/commit/92dfb794f9)] - **build**: enable ssl support on arm64 (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`7de0dcde83`](https://github.com/iojs/io.js/commit/7de0dcde83)] - **deps**: make node-gyp work with io.js (cjihrig) [#990](https://github.com/iojs/io.js/pull/990)
-* [[`4870213f9e`](https://github.com/iojs/io.js/commit/4870213f9e)] - **deps**: upgrade npm to 2.8.3 (Forrest L Norvell)
-* [[`49bb7ded2c`](https://github.com/iojs/io.js/commit/49bb7ded2c)] - **deps**: fix git case sensitivity issue in npm (Chris Dickinson) [#1456](https://github.com/iojs/io.js/pull/1456)
-* [[`4830b4bce8`](https://github.com/iojs/io.js/commit/4830b4bce8)] - **deps**: add docs to upgrade openssl (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`11bec72c87`](https://github.com/iojs/io.js/commit/11bec72c87)] - **deps**: update asm files for openssl-1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`53924d8ebe`](https://github.com/iojs/io.js/commit/53924d8ebe)] - **deps**: update asm Makefile for openssl-1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`418e839456`](https://github.com/iojs/io.js/commit/418e839456)] - **deps**: update openssl.gyp/gypi for openssl-1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`02f12ab666`](https://github.com/iojs/io.js/commit/02f12ab666)] - **deps**: update opensslconf.h for 1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`eb7a23595f`](https://github.com/iojs/io.js/commit/eb7a23595f)] - **deps**: add x32 and arm64 support for opensslconf.h (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`033a663127`](https://github.com/iojs/io.js/commit/033a663127)] - **deps**: replace all headers in openssl (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`ae8831f240`](https://github.com/iojs/io.js/commit/ae8831f240)] - **deps**: backport openssl patch of alt cert chains 1 (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`71316c46d9`](https://github.com/iojs/io.js/commit/71316c46d9)] - **deps**: fix asm build error of openssl in x86_win32 (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`d293a4f096`](https://github.com/iojs/io.js/commit/d293a4f096)] - **deps**: fix openssl assembly error on ia32 win32 (Fedor Indutny) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`e4872d7405`](https://github.com/iojs/io.js/commit/e4872d7405)] - **deps**: upgrade openssl to 1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`a1c9ef3142`](https://github.com/iojs/io.js/commit/a1c9ef3142)] - **deps, build**: add support older assembler (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`76f219c128`](https://github.com/iojs/io.js/commit/76f219c128)] - **doc**: Document forced pushing with git (Johan Bergström) [#1420](https://github.com/iojs/io.js/pull/1420)
-* [[`12e51d56c1`](https://github.com/iojs/io.js/commit/12e51d56c1)] - **doc**: add Addon API WG (Rod Vagg) [#1226](https://github.com/iojs/io.js/pull/1226)
-* [[`7956a13dad`](https://github.com/iojs/io.js/commit/7956a13dad)] - **http**: logically respect maxSockets (fengmk2) [#1242](https://github.com/iojs/io.js/pull/1242)
-* [[`5b844e140b`](https://github.com/iojs/io.js/commit/5b844e140b)] - **module**: fix style (Roman Reiss) [#1453](https://github.com/iojs/io.js/pull/1453)
-* [[`3ad82c335d`](https://github.com/iojs/io.js/commit/3ad82c335d)] - **(SEMVER-MINOR)** **module**: handle NODE_PATH in require('.') (Roman Reiss) [#1363](https://github.com/iojs/io.js/pull/1363)
-* [[`cd60ff0328`](https://github.com/iojs/io.js/commit/cd60ff0328)] - **net**: add fd into listen2 debug info (Jackson Tian) [#1442](https://github.com/iojs/io.js/pull/1442)
-* [[`10e31ba56c`](https://github.com/iojs/io.js/commit/10e31ba56c)] - **(SEMVER-MINOR)** **node**: allow multiple arguments passed to nextTick (Trevor Norris) [#1077](https://github.com/iojs/io.js/pull/1077)
-* [[`116c54692a`](https://github.com/iojs/io.js/commit/116c54692a)] - **openssl**: fix keypress requirement in apps on win32 (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`62f5f4cec9`](https://github.com/iojs/io.js/commit/62f5f4cec9)] - **src**: remove duplicate byteLength from Buffer (Jackson Tian) [#1438](https://github.com/iojs/io.js/pull/1438)
-* [[`51d0808c90`](https://github.com/iojs/io.js/commit/51d0808c90)] - **stream**: remove duplicated expression (Yazhong Liu) [#1444](https://github.com/iojs/io.js/pull/1444)
-* [[`deb9d23d7b`](https://github.com/iojs/io.js/commit/deb9d23d7b)] - **test**: fix error message check for openssl-1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/iojs/io.js/pull/1389)
-* [[`ca8c9ec2c8`](https://github.com/iojs/io.js/commit/ca8c9ec2c8)] - **win,node-gyp**: optionally allow node.exe/iojs.exe to be renamed (Bert Belder) [#1266](https://github.com/iojs/io.js/pull/1266)
+* [[`53ed89d927`](https://github.com/nodejs/io.js/commit/53ed89d927)] - ***Revert*** "**build**: use %PYTHON% instead of python" (Rod Vagg) [#1475](https://github.com/nodejs/io.js/pull/1475)
+* [[`2b744b0ab7`](https://github.com/nodejs/io.js/commit/2b744b0ab7)] - **src**: revert NODE_MODULE_VERSION to 43 (Chris Dickinson) [#1460](https://github.com/nodejs/io.js/pull/1460)
+* [[`431673ebd1`](https://github.com/nodejs/io.js/commit/431673ebd1)] - **buffer**: fast-case for empty string in byteLength (Jackson Tian) [#1441](https://github.com/nodejs/io.js/pull/1441)
+* [[`1b22bad35f`](https://github.com/nodejs/io.js/commit/1b22bad35f)] - **build**: fix logic for shared library flags (Jeremiah Senkpiel) [#1454](https://github.com/nodejs/io.js/pull/1454)
+* [[`91943a99d5`](https://github.com/nodejs/io.js/commit/91943a99d5)] - **build**: use %PYTHON% instead of python (Rod Vagg) [#1444](https://github.com/nodejs/io.js/pull/1444)
+* [[`c7769d417b`](https://github.com/nodejs/io.js/commit/c7769d417b)] - **build**: Expose xz compression level (Johan Bergström) [#1428](https://github.com/nodejs/io.js/pull/1428)
+* [[`a530b2baf1`](https://github.com/nodejs/io.js/commit/a530b2baf1)] - **build**: fix error message in configure (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`92dfb794f9`](https://github.com/nodejs/io.js/commit/92dfb794f9)] - **build**: enable ssl support on arm64 (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`7de0dcde83`](https://github.com/nodejs/io.js/commit/7de0dcde83)] - **deps**: make node-gyp work with io.js (cjihrig) [#990](https://github.com/nodejs/io.js/pull/990)
+* [[`4870213f9e`](https://github.com/nodejs/io.js/commit/4870213f9e)] - **deps**: upgrade npm to 2.8.3 (Forrest L Norvell)
+* [[`49bb7ded2c`](https://github.com/nodejs/io.js/commit/49bb7ded2c)] - **deps**: fix git case sensitivity issue in npm (Chris Dickinson) [#1456](https://github.com/nodejs/io.js/pull/1456)
+* [[`4830b4bce8`](https://github.com/nodejs/io.js/commit/4830b4bce8)] - **deps**: add docs to upgrade openssl (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`11bec72c87`](https://github.com/nodejs/io.js/commit/11bec72c87)] - **deps**: update asm files for openssl-1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`53924d8ebe`](https://github.com/nodejs/io.js/commit/53924d8ebe)] - **deps**: update asm Makefile for openssl-1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`418e839456`](https://github.com/nodejs/io.js/commit/418e839456)] - **deps**: update openssl.gyp/gypi for openssl-1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`02f12ab666`](https://github.com/nodejs/io.js/commit/02f12ab666)] - **deps**: update opensslconf.h for 1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`eb7a23595f`](https://github.com/nodejs/io.js/commit/eb7a23595f)] - **deps**: add x32 and arm64 support for opensslconf.h (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`033a663127`](https://github.com/nodejs/io.js/commit/033a663127)] - **deps**: replace all headers in openssl (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`ae8831f240`](https://github.com/nodejs/io.js/commit/ae8831f240)] - **deps**: backport openssl patch of alt cert chains 1 (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`71316c46d9`](https://github.com/nodejs/io.js/commit/71316c46d9)] - **deps**: fix asm build error of openssl in x86_win32 (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`d293a4f096`](https://github.com/nodejs/io.js/commit/d293a4f096)] - **deps**: fix openssl assembly error on ia32 win32 (Fedor Indutny) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`e4872d7405`](https://github.com/nodejs/io.js/commit/e4872d7405)] - **deps**: upgrade openssl to 1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`a1c9ef3142`](https://github.com/nodejs/io.js/commit/a1c9ef3142)] - **deps, build**: add support older assembler (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`76f219c128`](https://github.com/nodejs/io.js/commit/76f219c128)] - **doc**: Document forced pushing with git (Johan Bergström) [#1420](https://github.com/nodejs/io.js/pull/1420)
+* [[`12e51d56c1`](https://github.com/nodejs/io.js/commit/12e51d56c1)] - **doc**: add Addon API WG (Rod Vagg) [#1226](https://github.com/nodejs/io.js/pull/1226)
+* [[`7956a13dad`](https://github.com/nodejs/io.js/commit/7956a13dad)] - **http**: logically respect maxSockets (fengmk2) [#1242](https://github.com/nodejs/io.js/pull/1242)
+* [[`5b844e140b`](https://github.com/nodejs/io.js/commit/5b844e140b)] - **module**: fix style (Roman Reiss) [#1453](https://github.com/nodejs/io.js/pull/1453)
+* [[`3ad82c335d`](https://github.com/nodejs/io.js/commit/3ad82c335d)] - **(SEMVER-MINOR)** **module**: handle NODE_PATH in require('.') (Roman Reiss) [#1363](https://github.com/nodejs/io.js/pull/1363)
+* [[`cd60ff0328`](https://github.com/nodejs/io.js/commit/cd60ff0328)] - **net**: add fd into listen2 debug info (Jackson Tian) [#1442](https://github.com/nodejs/io.js/pull/1442)
+* [[`10e31ba56c`](https://github.com/nodejs/io.js/commit/10e31ba56c)] - **(SEMVER-MINOR)** **node**: allow multiple arguments passed to nextTick (Trevor Norris) [#1077](https://github.com/nodejs/io.js/pull/1077)
+* [[`116c54692a`](https://github.com/nodejs/io.js/commit/116c54692a)] - **openssl**: fix keypress requirement in apps on win32 (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`62f5f4cec9`](https://github.com/nodejs/io.js/commit/62f5f4cec9)] - **src**: remove duplicate byteLength from Buffer (Jackson Tian) [#1438](https://github.com/nodejs/io.js/pull/1438)
+* [[`51d0808c90`](https://github.com/nodejs/io.js/commit/51d0808c90)] - **stream**: remove duplicated expression (Yazhong Liu) [#1444](https://github.com/nodejs/io.js/pull/1444)
+* [[`deb9d23d7b`](https://github.com/nodejs/io.js/commit/deb9d23d7b)] - **test**: fix error message check for openssl-1.0.2a (Shigeki Ohtsu) [#1389](https://github.com/nodejs/io.js/pull/1389)
+* [[`ca8c9ec2c8`](https://github.com/nodejs/io.js/commit/ca8c9ec2c8)] - **win,node-gyp**: optionally allow node.exe/iojs.exe to be renamed (Bert Belder) [#1266](https://github.com/nodejs/io.js/pull/1266)
## 2015-04-14, Version 1.7.1, @rvagg
### Notable changes
-* **build**: A syntax error in the Makefile for release builds caused 1.7.0 to be DOA and unreleased. (Rod Vagg) [#1421](https://github.com/iojs/io.js/pull/1421).
+* **build**: A syntax error in the Makefile for release builds caused 1.7.0 to be DOA and unreleased. (Rod Vagg) [#1421](https://github.com/nodejs/io.js/pull/1421).
+
+### Known issues
+
+* Some problems with unreferenced timers running during `beforeExit` are still to be resolved. See [#1264](https://github.com/nodejs/io.js/issues/1264).
+* Surrogate pair in REPL can freeze terminal [#690](https://github.com/nodejs/io.js/issues/690)
+* `process.send()` is not synchronous as the docs suggest, a regression introduced in 1.0.2, see [#760](https://github.com/nodejs/io.js/issues/760) and fix in [#774](https://github.com/nodejs/io.js/issues/774)
+* Calling `dns.setServers()` while a DNS query is in progress can cause the process to crash on a failed assertion [#894](https://github.com/nodejs/io.js/issues/894)
+* readline: split escapes are processed incorrectly, see [#1403](https://github.com/nodejs/io.js/issues/1403)
### Commits
-* [[`aee86a21f2`](https://github.com/iojs/io.js/commit/aee86a21f2)] - **build**: fix RELEASE check (Rod Vagg) [#1421](https://github.com/iojs/io.js/pull/1421)
+* [[`aee86a21f2`](https://github.com/nodejs/io.js/commit/aee86a21f2)] - **build**: fix RELEASE check (Rod Vagg) [#1421](https://github.com/nodejs/io.js/pull/1421)
## 2015-04-14, Version 1.7.0, @rvagg
### Notable changes
-* **C++ API**: Fedor Indutny contributed a feature to V8 which has been backported to the V8 bundled in io.js. `SealHandleScope` allows a C++ add-on author to _seal_ a `HandleScope` to prevent further, unintended allocations within it. Currently only enabled for debug builds of io.js. This feature helped detect the leak in [#1075](https://github.com/iojs/io.js/issues/1075) and is now activated on the root `HandleScope` in io.js. (Fedor Indutny) [#1395](https://github.com/iojs/io.js/pull/1395).
+* **C++ API**: Fedor Indutny contributed a feature to V8 which has been backported to the V8 bundled in io.js. `SealHandleScope` allows a C++ add-on author to _seal_ a `HandleScope` to prevent further, unintended allocations within it. Currently only enabled for debug builds of io.js. This feature helped detect the leak in [#1075](https://github.com/nodejs/io.js/issues/1075) and is now activated on the root `HandleScope` in io.js. (Fedor Indutny) [#1395](https://github.com/nodejs/io.js/pull/1395).
* **ARM**: This release includes significant work to improve the state of ARM support for builds and tests. The io.js CI cluster's ARMv6, ARMv7 and ARMv8 build servers are now all (mostly) reporting passing builds and tests.
- * ARMv8 64-bit (AARCH64) is now properly supported, including a backported fix in libuv that was mistakenly detecting the existence of `epoll_wait()`. (Ben Noordhuis) [#1365](https://github.com/iojs/io.js/pull/1365).
- * ARMv6: [#1376](https://github.com/iojs/io.js/issues/1376) reported a problem with `Math.exp()` on ARMv6 (incl Raspberry Pi). The culprit is erroneous codegen for ARMv6 when using the "fast math" feature of V8. `--nofast_math` has been turned on for all ARMv6 variants by default to avoid this, fast math can be turned back on with `--fast_math`. (Ben Noordhuis) [#1398](https://github.com/iojs/io.js/pull/1398).
- * Tests: timeouts have been tuned specifically for slower platforms, detected as ARMv6 and ARMv7. (Roman Reiss) [#1366](https://github.com/iojs/io.js/pull/1366).
+ * ARMv8 64-bit (AARCH64) is now properly supported, including a backported fix in libuv that was mistakenly detecting the existence of `epoll_wait()`. (Ben Noordhuis) [#1365](https://github.com/nodejs/io.js/pull/1365).
+ * ARMv6: [#1376](https://github.com/nodejs/io.js/issues/1376) reported a problem with `Math.exp()` on ARMv6 (incl Raspberry Pi). The culprit is erroneous codegen for ARMv6 when using the "fast math" feature of V8. `--nofast_math` has been turned on for all ARMv6 variants by default to avoid this, fast math can be turned back on with `--fast_math`. (Ben Noordhuis) [#1398](https://github.com/nodejs/io.js/pull/1398).
+ * Tests: timeouts have been tuned specifically for slower platforms, detected as ARMv6 and ARMv7. (Roman Reiss) [#1366](https://github.com/nodejs/io.js/pull/1366).
* **npm**: Upgrade npm to 2.7.6. See the [release notes](https://github.com/npm/npm/releases/tag/v2.7.6) for details. Summary:
* [`b747593`](https://github.com/npm/npm/commit/b7475936f473f029e6a027ba1b16277523747d0b)[#7630](https://github.com/npm/npm/issues/7630) Don't automatically log all git failures as errors. `maybeGithub` needs to be able to fail without logging to support its fallback logic. ([@othiym23](https://github.com/othiym23))
* [`78005eb`](https://github.com/npm/npm/commit/78005ebb6f4103c20f077669c3929b7ea46a4c0d)[#7743](https://github.com/npm/npm/issues/7743) Always quote arguments passed to `npm run-script`. This allows build systems and the like to safely escape glob patterns passed as arguments to `run-scripts` with `npm run-script
-
-
-
-
-
Add a package as a git submodule
-
SYNOPSIS
-
npm.commands.submodule(packages, callback)
-DESCRIPTION
-
For each package specified, npm will check if it has a git repository url
-in its package.json description then add it as a git submodule at
-node_modules/<pkg name>.
-
This is a convenience only. From then on, it's up to you to manage
-updates by using the appropriate git commands. npm will stubbornly
-refuse to update, modify, or remove anything with a .git subfolder
-in it.
-
This command also does not install missing dependencies, if the package
-does not include them in its git repository. If npm ls reports that
-things are missing, you can either install, link, or submodule them yourself,
-or you can do npm explore <pkgname> -- npm install to install the
-dependencies into the submodule folder.
-
SEE ALSO
-
-npm help json
-git help submodule
-
-
-
-
-
-
-
diff --git a/deps/npm/html/doc/api/npm-tag.html b/deps/npm/html/doc/api/npm-tag.html
index e157bd131187e5..a7a8e47f105429 100644
--- a/deps/npm/html/doc/api/npm-tag.html
+++ b/deps/npm/html/doc/api/npm-tag.html
@@ -36,5 +36,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-test.html b/deps/npm/html/doc/api/npm-test.html
index 1f3bbe23d0c835..9f8a9d1095bff5 100644
--- a/deps/npm/html/doc/api/npm-test.html
+++ b/deps/npm/html/doc/api/npm-test.html
@@ -30,5 +30,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-uninstall.html b/deps/npm/html/doc/api/npm-uninstall.html
index 32d299a9774d6f..30133eb34c28dd 100644
--- a/deps/npm/html/doc/api/npm-uninstall.html
+++ b/deps/npm/html/doc/api/npm-uninstall.html
@@ -30,5 +30,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-unpublish.html b/deps/npm/html/doc/api/npm-unpublish.html
index de7d77a3d0bf43..aa75a23e090703 100644
--- a/deps/npm/html/doc/api/npm-unpublish.html
+++ b/deps/npm/html/doc/api/npm-unpublish.html
@@ -33,5 +33,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-update.html b/deps/npm/html/doc/api/npm-update.html
index 94826eef431200..be507883943fa4 100644
--- a/deps/npm/html/doc/api/npm-update.html
+++ b/deps/npm/html/doc/api/npm-update.html
@@ -33,5 +33,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/api/npm-version.html b/deps/npm/html/doc/api/npm-version.html
index a158e06f218d93..a0fd511901dc7b 100644
--- a/deps/npm/html/doc/api/npm-version.html
+++ b/deps/npm/html/doc/api/npm-version.html
@@ -32,5 +32,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-view.html b/deps/npm/html/doc/api/npm-view.html
index 453ef174d4d0c5..89c3b74180ebff 100644
--- a/deps/npm/html/doc/api/npm-view.html
+++ b/deps/npm/html/doc/api/npm-view.html
@@ -81,5 +81,5 @@ RETURN VALUE
-
+
diff --git a/deps/npm/html/doc/api/npm-whoami.html b/deps/npm/html/doc/api/npm-whoami.html
index 5435332f241634..1d03071231eed1 100644
--- a/deps/npm/html/doc/api/npm-whoami.html
+++ b/deps/npm/html/doc/api/npm-whoami.html
@@ -29,5 +29,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm.html b/deps/npm/html/doc/api/npm.html
index 2299227784cfba..7b57fcd0c1b1fb 100644
--- a/deps/npm/html/doc/api/npm.html
+++ b/deps/npm/html/doc/api/npm.html
@@ -23,7 +23,7 @@ SYNOPSIS
npm.commands.install(["package"], cb)
})
VERSION
-2.8.3
+2.10.1
DESCRIPTION
This is the API documentation for npm.
To find documentation of the command line
@@ -109,5 +109,5 @@
ABBREVS
-
+
diff --git a/deps/npm/html/doc/cli/npm-access.html b/deps/npm/html/doc/cli/npm-access.html
index 43e6b2642f85a9..e7a604bec74022 100644
--- a/deps/npm/html/doc/cli/npm-access.html
+++ b/deps/npm/html/doc/cli/npm-access.html
@@ -75,5 +75,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-adduser.html b/deps/npm/html/doc/cli/npm-adduser.html
index f75c35d3ce8458..1175859955c299 100644
--- a/deps/npm/html/doc/cli/npm-adduser.html
+++ b/deps/npm/html/doc/cli/npm-adduser.html
@@ -68,5 +68,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-bin.html b/deps/npm/html/doc/cli/npm-bin.html
index 2f562026fb9e98..d33a757d55b7fc 100644
--- a/deps/npm/html/doc/cli/npm-bin.html
+++ b/deps/npm/html/doc/cli/npm-bin.html
@@ -35,5 +35,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-bugs.html b/deps/npm/html/doc/cli/npm-bugs.html
index b2d72bcc64e464..217c9724fed4a1 100644
--- a/deps/npm/html/doc/cli/npm-bugs.html
+++ b/deps/npm/html/doc/cli/npm-bugs.html
@@ -54,5 +54,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-build.html b/deps/npm/html/doc/cli/npm-build.html
index aa75b01a4185ec..b7b72121c6aaca 100644
--- a/deps/npm/html/doc/cli/npm-build.html
+++ b/deps/npm/html/doc/cli/npm-build.html
@@ -18,8 +18,10 @@ SYNOPSIS
DESCRIPTION
This is the plumbing command called by npm link and npm install.
-It should generally not be called directly.
-SEE ALSO
+It should generally be called during installation, but if you need to run it
+directly, run:
+npm run-script build
+SEE ALSO
npm-install(1)
npm-link(1)
@@ -38,5 +40,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-bundle.html b/deps/npm/html/doc/cli/npm-bundle.html
index 68275575400400..3343f79992ba61 100644
--- a/deps/npm/html/doc/cli/npm-bundle.html
+++ b/deps/npm/html/doc/cli/npm-bundle.html
@@ -31,5 +31,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-cache.html b/deps/npm/html/doc/cli/npm-cache.html
index 92f09d70e145e4..991be9f641a0f5 100644
--- a/deps/npm/html/doc/cli/npm-cache.html
+++ b/deps/npm/html/doc/cli/npm-cache.html
@@ -81,5 +81,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-completion.html b/deps/npm/html/doc/cli/npm-completion.html
index 4e22ba9057a1a0..7e0fc62afc82da 100644
--- a/deps/npm/html/doc/cli/npm-completion.html
+++ b/deps/npm/html/doc/cli/npm-completion.html
@@ -42,5 +42,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-config.html b/deps/npm/html/doc/cli/npm-config.html
index 413c16a88db046..378472312aad88 100644
--- a/deps/npm/html/doc/cli/npm-config.html
+++ b/deps/npm/html/doc/cli/npm-config.html
@@ -66,5 +66,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-dedupe.html b/deps/npm/html/doc/cli/npm-dedupe.html
index 4174f5e76972a3..ce864af51c2512 100644
--- a/deps/npm/html/doc/cli/npm-dedupe.html
+++ b/deps/npm/html/doc/cli/npm-dedupe.html
@@ -63,5 +63,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-deprecate.html b/deps/npm/html/doc/cli/npm-deprecate.html
index c96186dff40992..8a7ec9a2d64d92 100644
--- a/deps/npm/html/doc/cli/npm-deprecate.html
+++ b/deps/npm/html/doc/cli/npm-deprecate.html
@@ -38,5 +38,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-dist-tag.html b/deps/npm/html/doc/cli/npm-dist-tag.html
index aebcb5070c8ae0..1211dfecf5bff4 100644
--- a/deps/npm/html/doc/cli/npm-dist-tag.html
+++ b/deps/npm/html/doc/cli/npm-dist-tag.html
@@ -77,5 +77,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-docs.html b/deps/npm/html/doc/cli/npm-docs.html
index b6feed86787c53..be146f0cf763b1 100644
--- a/deps/npm/html/doc/cli/npm-docs.html
+++ b/deps/npm/html/doc/cli/npm-docs.html
@@ -56,5 +56,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-edit.html b/deps/npm/html/doc/cli/npm-edit.html
index d0edf53fe6fb97..6a903c4df26668 100644
--- a/deps/npm/html/doc/cli/npm-edit.html
+++ b/deps/npm/html/doc/cli/npm-edit.html
@@ -49,5 +49,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-explore.html b/deps/npm/html/doc/cli/npm-explore.html
index cb5d1dc6985ed4..0a85fc2a08af5e 100644
--- a/deps/npm/html/doc/cli/npm-explore.html
+++ b/deps/npm/html/doc/cli/npm-explore.html
@@ -49,5 +49,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-help-search.html b/deps/npm/html/doc/cli/npm-help-search.html
index ec48228341b669..afe18a73a59af9 100644
--- a/deps/npm/html/doc/cli/npm-help-search.html
+++ b/deps/npm/html/doc/cli/npm-help-search.html
@@ -46,5 +46,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-help.html b/deps/npm/html/doc/cli/npm-help.html
index c3669525015243..9f9c1de6559513 100644
--- a/deps/npm/html/doc/cli/npm-help.html
+++ b/deps/npm/html/doc/cli/npm-help.html
@@ -52,5 +52,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-init.html b/deps/npm/html/doc/cli/npm-init.html
index 65d1c4b0c31063..4b98a7b72b1996 100644
--- a/deps/npm/html/doc/cli/npm-init.html
+++ b/deps/npm/html/doc/cli/npm-init.html
@@ -48,5 +48,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-install.html b/deps/npm/html/doc/cli/npm-install.html
index 371dd591f729e7..c475cc74601ef4 100644
--- a/deps/npm/html/doc/cli/npm-install.html
+++ b/deps/npm/html/doc/cli/npm-install.html
@@ -131,50 +131,50 @@ SYNOPSIS
npm install sax@">=0.1.0 <0.2.0"
npm install @myorg/privatepackage@">=0.1.0 <0.2.0"
-npm install <githubname>/<githubrepo>:
+npm install <git remote url>:
+ Install a package by cloning a git remote url. The format of the git
+ url is:
+ <protocol>://[<user>[:<password>]@]<hostname>[:<port>][:/]<path>[#<commit-ish>]
+ <protocol> is one of git, git+ssh, git+http, or
+ git+https. If no <commit-ish> is specified, then master is
+ used.
+ Examples:
+ git+ssh://git@github.com:npm/npm.git#v1.0.27
+ git+https://isaacs@github.com/npm/npm.git
+ git://github.com/npm/npm.git#v1.0.27
+
+npm install <githubname>/<githubrepo>[#<commit-ish>]:
+
+npm install github:<githubname>/<githubrepo>[#<commit-ish>]:
Install the package at https://github.com/githubname/githubrepo by
attempting to clone it using git.
- Example:
+ If you don't specify a commit-ish then master will be used.
+ Examples:
npm install mygithubuser/myproject
- To reference a package in a generic git repo (not on GitHub), see git remote
- urls below.
-
-npm install github:<githubname>/<githubrepo>:
- The same as the above, but explicitly marked as a GitHub dependency.
- Example:
- npm install github:npm/npm
+ npm install github:mygithubuser/myproject
-npm install gist:[<githubname>/]<gistID>:
+npm install gist:[<githubname>/]<gistID>[#<commit-ish>]:
Install the package at https://gist.github.com/gistID by attempting to
clone it using git. The GitHub username associated with the gist is
optional and will not be saved in package.json if --save is used.
+ If you don't specify a commit-ish then master will be used.
Example:
npm install gist:101a11beef
-npm install bitbucket:<bitbucketname>/<bitbucketrepo>:
+npm install bitbucket:<bitbucketname>/<bitbucketrepo>[#<commit-ish>]:
Install the package at https://bitbucket.org/bitbucketname/bitbucketrepo
by attempting to clone it using git.
+ If you don't specify a commit-ish then master will be used.
Example:
npm install bitbucket:mybitbucketuser/myproject
-npm install gitlab:<gitlabname>/<gitlabrepo>:
+npm install gitlab:<gitlabname>/<gitlabrepo>[#<commit-ish>]:
Install the package at https://gitlab.com/gitlabname/gitlabrepo
by attempting to clone it using git.
+ If you don't specify a commit-ish then master will be used.
Example:
npm install gitlab:mygitlabuser/myproject
-npm install <git remote url>:
- Install a package by cloning a git remote url. The format of the git
- url is:
- <protocol>://[<user>[:<password>]@]<hostname><separator><path>[#<commit-ish>]
- <protocol> is one of git, git+ssh, git+http, or
- git+https. If no <commit-ish> is specified, then master is
- used.
- Examples:
- git+ssh://git@github.com:npm/npm.git#v1.0.27
- git+https://isaacs@github.com/npm/npm.git
- git://github.com/npm/npm.git#v1.0.27
-
You may combine multiple arguments, and even multiple types of arguments.
For example:
@@ -264,5 +264,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-link.html b/deps/npm/html/doc/cli/npm-link.html
index 65f1bd98d405ac..022aaf4108efa8 100644
--- a/deps/npm/html/doc/cli/npm-link.html
+++ b/deps/npm/html/doc/cli/npm-link.html
@@ -72,5 +72,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/cli/npm-logout.html b/deps/npm/html/doc/cli/npm-logout.html
index 2c45cfb70cd200..f9a4c095fbbce5 100644
--- a/deps/npm/html/doc/cli/npm-logout.html
+++ b/deps/npm/html/doc/cli/npm-logout.html
@@ -55,5 +55,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-ls.html b/deps/npm/html/doc/cli/npm-ls.html
index 42c3ae2d20e467..0b9c8e91fa0eaf 100644
--- a/deps/npm/html/doc/cli/npm-ls.html
+++ b/deps/npm/html/doc/cli/npm-ls.html
@@ -22,7 +22,7 @@ SYNOPSIS
limit the results to only the paths to the packages named. Note that
nested packages will also show the paths to the specified packages.
For example, running npm ls promzard in npm's source tree will show:
-npm@2.8.3 /path/to/npm
+npm@2.10.1 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
It will print out extraneous, missing, and invalid packages.
@@ -97,5 +97,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-outdated.html b/deps/npm/html/doc/cli/npm-outdated.html
index f6c2eddfa4e992..8b1a867e0e8119 100644
--- a/deps/npm/html/doc/cli/npm-outdated.html
+++ b/deps/npm/html/doc/cli/npm-outdated.html
@@ -67,5 +67,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-owner.html b/deps/npm/html/doc/cli/npm-owner.html
index 984cd37b97e06a..70e8aeccdc3f3d 100644
--- a/deps/npm/html/doc/cli/npm-owner.html
+++ b/deps/npm/html/doc/cli/npm-owner.html
@@ -49,5 +49,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-pack.html b/deps/npm/html/doc/cli/npm-pack.html
index e88bb4f3a85580..fa118a73883eeb 100644
--- a/deps/npm/html/doc/cli/npm-pack.html
+++ b/deps/npm/html/doc/cli/npm-pack.html
@@ -41,5 +41,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-prefix.html b/deps/npm/html/doc/cli/npm-prefix.html
index b497c36c552733..d01848a1c7b46b 100644
--- a/deps/npm/html/doc/cli/npm-prefix.html
+++ b/deps/npm/html/doc/cli/npm-prefix.html
@@ -38,5 +38,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-prune.html b/deps/npm/html/doc/cli/npm-prune.html
index 24949bf30fa790..1fce2b556d1e3d 100644
--- a/deps/npm/html/doc/cli/npm-prune.html
+++ b/deps/npm/html/doc/cli/npm-prune.html
@@ -39,5 +39,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-publish.html b/deps/npm/html/doc/cli/npm-publish.html
index ffbda126f08f92..c840ff41f7168d 100644
--- a/deps/npm/html/doc/cli/npm-publish.html
+++ b/deps/npm/html/doc/cli/npm-publish.html
@@ -66,5 +66,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-rebuild.html b/deps/npm/html/doc/cli/npm-rebuild.html
index 1eb733de41c291..f4c8435c36e64d 100644
--- a/deps/npm/html/doc/cli/npm-rebuild.html
+++ b/deps/npm/html/doc/cli/npm-rebuild.html
@@ -38,5 +38,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-repo.html b/deps/npm/html/doc/cli/npm-repo.html
index e1bdcb9345fb70..998986f9162541 100644
--- a/deps/npm/html/doc/cli/npm-repo.html
+++ b/deps/npm/html/doc/cli/npm-repo.html
@@ -42,5 +42,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-restart.html b/deps/npm/html/doc/cli/npm-restart.html
index 18b064a0005cf1..f291d5a7e13a77 100644
--- a/deps/npm/html/doc/cli/npm-restart.html
+++ b/deps/npm/html/doc/cli/npm-restart.html
@@ -53,5 +53,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-rm.html b/deps/npm/html/doc/cli/npm-rm.html
index 4bc35ba78e99e6..a67f89a1c24241 100644
--- a/deps/npm/html/doc/cli/npm-rm.html
+++ b/deps/npm/html/doc/cli/npm-rm.html
@@ -39,5 +39,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-root.html b/deps/npm/html/doc/cli/npm-root.html
index 5b94e969755e11..5457c3fd9cdd47 100644
--- a/deps/npm/html/doc/cli/npm-root.html
+++ b/deps/npm/html/doc/cli/npm-root.html
@@ -35,5 +35,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-run-script.html b/deps/npm/html/doc/cli/npm-run-script.html
index 5d8f17ae1c83e9..4d21c023b83cfa 100644
--- a/deps/npm/html/doc/cli/npm-run-script.html
+++ b/deps/npm/html/doc/cli/npm-run-script.html
@@ -56,5 +56,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-search.html b/deps/npm/html/doc/cli/npm-search.html
index e253efba7b1542..d402ba4fe6eb56 100644
--- a/deps/npm/html/doc/cli/npm-search.html
+++ b/deps/npm/html/doc/cli/npm-search.html
@@ -49,5 +49,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-shrinkwrap.html b/deps/npm/html/doc/cli/npm-shrinkwrap.html
index 77973115db364b..78b744222b0092 100644
--- a/deps/npm/html/doc/cli/npm-shrinkwrap.html
+++ b/deps/npm/html/doc/cli/npm-shrinkwrap.html
@@ -15,9 +15,9 @@ SYNOPSIS
DESCRIPTION
This command locks down the versions of a package's dependencies so
that you can control exactly which versions of each dependency will be
-used when your package is installed. The "package.json" file is still
-required if you want to use "npm install".
-By default, "npm install" recursively installs the target's
+used when your package is installed. The package.json file is still
+required if you want to use npm install.
+By default, npm install recursively installs the target's
dependencies (as specified in package.json), choosing the latest
available version that satisfies the dependency's semver pattern. In
some situations, particularly when shipping software where each change
@@ -51,11 +51,11 @@
SYNOPSIS
"version": "0.0.1"
}
If these are the only versions of A, B, and C available in the
-registry, then a normal "npm install A" will install:
+registry, then a normal npm install A will install:
A@0.1.0
`-- B@0.0.1
`-- C@0.0.1
-However, if B@0.0.2 is published, then a fresh "npm install A" will
+
However, if B@0.0.2 is published, then a fresh npm install A will
install:
A@0.1.0
`-- B@0.0.2
@@ -85,7 +85,7 @@ SYNOPSIS
}
}
The shrinkwrap command has locked down the dependencies based on
-what's currently installed in node_modules. When "npm install"
+what's currently installed in node_modules. When npm install
installs a package with a npm-shrinkwrap.json file in the package
root, the shrinkwrap file (rather than package.json files) completely
drives the installation of that package and all of its dependencies
@@ -95,41 +95,41 @@
SYNOPSIS
files.
Using shrinkwrapped packages
Using a shrinkwrapped package is no different than using any other
-package: you can "npm install" it by hand, or add a dependency to your
-package.json file and "npm install" it.
+package: you can npm install it by hand, or add a dependency to your
+package.json file and npm install it.
Building shrinkwrapped packages
To shrinkwrap an existing package:
-Run "npm install" in the package root to install the current
+ Run npm install in the package root to install the current
versions of all dependencies.
Validate that the package works as expected with these versions.
-Run "npm shrinkwrap", add npm-shrinkwrap.json to git, and publish
+ Run npm shrinkwrap, add npm-shrinkwrap.json to git, and publish
your package.
To add or update a dependency in a shrinkwrapped package:
-Run "npm install" in the package root to install the current
+ Run npm install in the package root to install the current
versions of all dependencies.
-Add or update dependencies. "npm install" each new or updated
+ Add or update dependencies. npm install each new or updated
package individually and then update package.json. Note that they
must be explicitly named in order to be installed: running npm
install with no arguments will merely reproduce the existing
shrinkwrap.
Validate that the package works as expected with the new
dependencies.
-Run "npm shrinkwrap", commit the new npm-shrinkwrap.json, and
+ Run npm shrinkwrap, commit the new npm-shrinkwrap.json, and
publish your package.
You can use npm-outdated(1) to view dependencies with newer versions
available.
Other Notes
A shrinkwrap file must be consistent with the package's package.json
-file. "npm shrinkwrap" will fail if required dependencies are not
+file. npm shrinkwrap will fail if required dependencies are not
already installed, since that would result in a shrinkwrap that
wouldn't actually work. Similarly, the command will fail if there are
extraneous packages (not referenced by package.json), since that would
indicate that package.json is not correct.
-Since "npm shrinkwrap" is intended to lock down your dependencies for
+
Since npm shrinkwrap is intended to lock down your dependencies for
production use, devDependencies will not be included unless you
explicitly set the --dev flag when you run npm shrinkwrap. If
installed devDependencies are excluded, then npm will print a
@@ -164,5 +164,5 @@
SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-star.html b/deps/npm/html/doc/cli/npm-star.html
index cf97584072dfa1..9a5099d485e336 100644
--- a/deps/npm/html/doc/cli/npm-star.html
+++ b/deps/npm/html/doc/cli/npm-star.html
@@ -36,5 +36,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-stars.html b/deps/npm/html/doc/cli/npm-stars.html
index 7e1117b0b866ae..9892b2521e1410 100644
--- a/deps/npm/html/doc/cli/npm-stars.html
+++ b/deps/npm/html/doc/cli/npm-stars.html
@@ -37,5 +37,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-start.html b/deps/npm/html/doc/cli/npm-start.html
index d9360e322ac7a0..406bbc85a5e627 100644
--- a/deps/npm/html/doc/cli/npm-start.html
+++ b/deps/npm/html/doc/cli/npm-start.html
@@ -34,5 +34,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-stop.html b/deps/npm/html/doc/cli/npm-stop.html
index e9011bed4127f2..71c0005bdc1d26 100644
--- a/deps/npm/html/doc/cli/npm-stop.html
+++ b/deps/npm/html/doc/cli/npm-stop.html
@@ -34,5 +34,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-submodule.html b/deps/npm/html/doc/cli/npm-submodule.html
deleted file mode 100644
index 6716c4a11cce24..00000000000000
--- a/deps/npm/html/doc/cli/npm-submodule.html
+++ /dev/null
@@ -1,46 +0,0 @@
-
-
- npm-submodule
-
-
-
-
-
-
-
-
-
Add a package as a git submodule
-
SYNOPSIS
-
npm submodule <pkg>
-DESCRIPTION
-
If the specified package has a git repository url in its package.json
-description, then this command will add it as a git submodule at
-node_modules/<pkg name>.
-
This is a convenience only. From then on, it's up to you to manage
-updates by using the appropriate git commands. npm will stubbornly
-refuse to update, modify, or remove anything with a .git subfolder
-in it.
-
This command also does not install missing dependencies, if the package
-does not include them in its git repository. If npm ls reports that
-things are missing, you can either install, link, or submodule them yourself,
-or you can do npm explore <pkgname> -- npm install to install the
-dependencies into the submodule folder.
-
SEE ALSO
-
-
-
-
-
-
-
diff --git a/deps/npm/html/doc/cli/npm-tag.html b/deps/npm/html/doc/cli/npm-tag.html
index 51bb513e90aac8..f3f581c33ecc75 100644
--- a/deps/npm/html/doc/cli/npm-tag.html
+++ b/deps/npm/html/doc/cli/npm-tag.html
@@ -62,5 +62,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-test.html b/deps/npm/html/doc/cli/npm-test.html
index 1e4a64a43afcc4..87bd2328455a17 100644
--- a/deps/npm/html/doc/cli/npm-test.html
+++ b/deps/npm/html/doc/cli/npm-test.html
@@ -37,5 +37,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-uninstall.html b/deps/npm/html/doc/cli/npm-uninstall.html
index de7d6d632890b2..86a7aaee519e71 100644
--- a/deps/npm/html/doc/cli/npm-uninstall.html
+++ b/deps/npm/html/doc/cli/npm-uninstall.html
@@ -57,5 +57,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/cli/npm-unpublish.html b/deps/npm/html/doc/cli/npm-unpublish.html
index dd214abe9a980b..77276455550b92 100644
--- a/deps/npm/html/doc/cli/npm-unpublish.html
+++ b/deps/npm/html/doc/cli/npm-unpublish.html
@@ -47,5 +47,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-update.html b/deps/npm/html/doc/cli/npm-update.html
index f3f7f9e02768c6..7a090ee49c228e 100644
--- a/deps/npm/html/doc/cli/npm-update.html
+++ b/deps/npm/html/doc/cli/npm-update.html
@@ -119,5 +119,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-version.html b/deps/npm/html/doc/cli/npm-version.html
index b4c1a704e184f0..36f2c1d75d383e 100644
--- a/deps/npm/html/doc/cli/npm-version.html
+++ b/deps/npm/html/doc/cli/npm-version.html
@@ -65,5 +65,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-view.html b/deps/npm/html/doc/cli/npm-view.html
index a9b76c38153efe..bb3104f35aff9c 100644
--- a/deps/npm/html/doc/cli/npm-view.html
+++ b/deps/npm/html/doc/cli/npm-view.html
@@ -82,5 +82,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-whoami.html b/deps/npm/html/doc/cli/npm-whoami.html
index 00112e842117e2..6a84ce4879eff5 100644
--- a/deps/npm/html/doc/cli/npm-whoami.html
+++ b/deps/npm/html/doc/cli/npm-whoami.html
@@ -33,5 +33,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm.html b/deps/npm/html/doc/cli/npm.html
index 04bb47f8f70fe1..fe8a3e85af6e6a 100644
--- a/deps/npm/html/doc/cli/npm.html
+++ b/deps/npm/html/doc/cli/npm.html
@@ -13,7 +13,7 @@ javascript package manager
SYNOPSIS
npm <command> [args]
VERSION
-2.8.3
+2.10.1
DESCRIPTION
npm is the package manager for the Node JavaScript platform. It puts
modules in place so that node can find them, and manages dependency
@@ -110,7 +110,7 @@
CONTRIBUTIONS
the issues list or ask on the mailing list.
BUGS
When you find issues, please report them:
@@ -118,7 +118,7 @@ BUGS
web:
http://github.com/npm/npm/issues
email:
-npm-@googlegroups.com
+npm-@googlegroups.com
Be sure to include all of the output from the npm command that didn't work
as expected. The npm-debug.log file is also helpful to provide.
@@ -128,7 +128,7 @@ AUTHOR
Isaac Z. Schlueter ::
isaacs ::
@izs ::
-i@izs.me
+i@izs.me
SEE ALSO
npm-help(1)
@@ -154,5 +154,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/files/npm-folders.html b/deps/npm/html/doc/files/npm-folders.html
index c87b431c248938..3eac6ce8e82fc9 100644
--- a/deps/npm/html/doc/files/npm-folders.html
+++ b/deps/npm/html/doc/files/npm-folders.html
@@ -184,5 +184,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/files/npm-global.html b/deps/npm/html/doc/files/npm-global.html
index 4ae4a0c35c828a..2ddeeddd507624 100644
--- a/deps/npm/html/doc/files/npm-global.html
+++ b/deps/npm/html/doc/files/npm-global.html
@@ -184,5 +184,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/files/npm-json.html b/deps/npm/html/doc/files/npm-json.html
index 67b5e2cdf6bd28..c99da5c43a3bfc 100644
--- a/deps/npm/html/doc/files/npm-json.html
+++ b/deps/npm/html/doc/files/npm-json.html
@@ -21,14 +21,22 @@ name
them. The name and version together form an identifier that is assumed
to be completely unique. Changes to the package should come along with
changes to the version.
-The name is what your thing is called. Some tips:
+The name is what your thing is called.
+Some rules:
+The name must be shorter than 214 characters. This includes the scope for
+scoped packages.
+The name can't start with a dot or an underscore.
+New packages must not have uppercase letters in the name.
+The name ends up being part of a URL, an argument on the command line, and a
+folder name. Therefore, the name can't contain any non-URL-safe characters.
+
+Some tips:
+
+Don't use the same name as a core Node module.
Don't put "js" or "node" in the name. It's assumed that it's js, since you're
writing a package.json file, and you can specify the engine using the "engines"
field. (See below.)
-The name ends up being part of a URL, an argument on the command line, and a
-folder name. Any name with non-url-safe characters will be rejected.
-Also, it can't start with a dot or an underscore.
The name will probably be passed as an argument to require(), so it should
be something short, but also reasonably descriptive.
You may want to check the npm registry to see if there's something by that name
@@ -72,16 +80,44 @@ bugs
license
You should specify a license for your package so that people know how they are
permitted to use it, and any restrictions you're placing on it.
-The simplest way, assuming you're using a common license such as BSD-3-Clause
-or MIT, is to just specify the standard SPDX ID of the license you're using,
-like this:
+If you're using a common license such as BSD-2-Clause or MIT, add a
+current SPDX license identifier for the license you're using, like this:
{ "license" : "BSD-3-Clause" }
You can check the full list of SPDX license IDs .
Ideally you should pick one that is
OSI approved.
-It's also a good idea to include a LICENSE file at the top level in
-your package.
-people fields: author, contributors
+If your package is licensed under multiple common licenses, use an SPDX license
+expression syntax version 2.0 string , like this:
+{ "license" : "(ISC OR GPL-3.0)" }
+If you are using a license that hasn't been assigned an SPDX identifier, or if
+you are using a custom license, use the following valid SPDX expression:
+{ "license" : "LicenseRef-LICENSE" }
+Then include a LICENSE file at the top level of the package.
+Some old packages used license objects or a "licenses" property containing an
+array of license objects:
+// Not valid metadata
+{ "license" :
+ { "type" : "ISC"
+ , "url" : "http://opensource.org/licenses/ISC"
+ }
+}
+
+// Not valid metadata
+{ "licenses" :
+ [
+ { "type": "MIT"
+ , "url": "http://www.opensource.org/licenses/mit-license.php"
+ }
+ , { "type": "Apache-2.0"
+ , "url": "http://opensource.org/licenses/apache2.0.php"
+ }
+ ]
+}
+Those styles are now deprecated. Instead, use SPDX expressions, like this:
+{ "license": "ISC" }
+
+{ "license": "(MIT OR Apache-2.0)" }
+people fields: author, contributors
The "author" is one person. "contributors" is an array of people. A "person"
is an object with a "name" field and optionally "url" and "email", like this:
{ "name" : "Barney Rubble"
@@ -502,5 +538,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/files/npmrc.html b/deps/npm/html/doc/files/npmrc.html
index 2f494a1a62fd0c..d113aed73b147b 100644
--- a/deps/npm/html/doc/files/npmrc.html
+++ b/deps/npm/html/doc/files/npmrc.html
@@ -21,7 +21,7 @@ FILES
per-project config file (/path/to/my/project/.npmrc)
per-user config file (~/.npmrc)
-global config file ($PREFIX/npmrc)
+global config file ($PREFIX/etc/npmrc)
npm builtin config file (/path/to/npm/npmrc)
All npm config files are an ini-formatted list of key = value
@@ -77,5 +77,5 @@
SEE ALSO
-
+
diff --git a/deps/npm/html/doc/files/package.json.html b/deps/npm/html/doc/files/package.json.html
index 7cf774e6ab9588..e914991e01e5ea 100644
--- a/deps/npm/html/doc/files/package.json.html
+++ b/deps/npm/html/doc/files/package.json.html
@@ -21,14 +21,22 @@ name
them. The name and version together form an identifier that is assumed
to be completely unique. Changes to the package should come along with
changes to the version.
-The name is what your thing is called. Some tips:
+The name is what your thing is called.
+Some rules:
+The name must be shorter than 214 characters. This includes the scope for
+scoped packages.
+The name can't start with a dot or an underscore.
+New packages must not have uppercase letters in the name.
+The name ends up being part of a URL, an argument on the command line, and a
+folder name. Therefore, the name can't contain any non-URL-safe characters.
+
+Some tips:
+
+Don't use the same name as a core Node module.
Don't put "js" or "node" in the name. It's assumed that it's js, since you're
writing a package.json file, and you can specify the engine using the "engines"
field. (See below.)
-The name ends up being part of a URL, an argument on the command line, and a
-folder name. Any name with non-url-safe characters will be rejected.
-Also, it can't start with a dot or an underscore.
The name will probably be passed as an argument to require(), so it should
be something short, but also reasonably descriptive.
You may want to check the npm registry to see if there's something by that name
@@ -72,16 +80,44 @@ bugs
license
You should specify a license for your package so that people know how they are
permitted to use it, and any restrictions you're placing on it.
-The simplest way, assuming you're using a common license such as BSD-3-Clause
-or MIT, is to just specify the standard SPDX ID of the license you're using,
-like this:
+If you're using a common license such as BSD-2-Clause or MIT, add a
+current SPDX license identifier for the license you're using, like this:
{ "license" : "BSD-3-Clause" }
You can check the full list of SPDX license IDs .
Ideally you should pick one that is
OSI approved.
-It's also a good idea to include a LICENSE file at the top level in
-your package.
-people fields: author, contributors
+If your package is licensed under multiple common licenses, use an SPDX license
+expression syntax version 2.0 string , like this:
+{ "license" : "(ISC OR GPL-3.0)" }
+If you are using a license that hasn't been assigned an SPDX identifier, or if
+you are using a custom license, use the following valid SPDX expression:
+{ "license" : "LicenseRef-LICENSE" }
+Then include a LICENSE file at the top level of the package.
+Some old packages used license objects or a "licenses" property containing an
+array of license objects:
+// Not valid metadata
+{ "license" :
+ { "type" : "ISC"
+ , "url" : "http://opensource.org/licenses/ISC"
+ }
+}
+
+// Not valid metadata
+{ "licenses" :
+ [
+ { "type": "MIT"
+ , "url": "http://www.opensource.org/licenses/mit-license.php"
+ }
+ , { "type": "Apache-2.0"
+ , "url": "http://opensource.org/licenses/apache2.0.php"
+ }
+ ]
+}
+Those styles are now deprecated. Instead, use SPDX expressions, like this:
+{ "license": "ISC" }
+
+{ "license": "(MIT OR Apache-2.0)" }
+people fields: author, contributors
The "author" is one person. "contributors" is an array of people. A "person"
is an object with a "name" field and optionally "url" and "email", like this:
{ "name" : "Barney Rubble"
@@ -502,5 +538,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/index.html b/deps/npm/html/doc/index.html
index f75545fe7b1b81..c62dd3509df605 100644
--- a/deps/npm/html/doc/index.html
+++ b/deps/npm/html/doc/index.html
@@ -236,5 +236,5 @@
Joe emails Bob, explaining the situation as respectfully as
possible , and what he would like to do with the module name. He
-adds the npm support staff support@npmjs.com to the CC list of
+adds the npm support staff support@npmjs.com to the CC list of
the email. Mention in the email that Bob can run npm owner add
joe foo to add Joe as an owner of the foo package.
After a reasonable amount of time, if Bob has not responded, or if
Bob and Joe can't come to any sort of resolution, email support
-support@npmjs.com and we'll sort it out. ("Reasonable" is
+support@npmjs.com and we'll sort it out. ("Reasonable" is
usually at least 4 weeks, but extra time is allowed around common
holidays.)
@@ -112,5 +112,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/npm-faq.html b/deps/npm/html/doc/misc/npm-faq.html
index 2a9e3ad208da7c..3754a93a512f1c 100644
--- a/deps/npm/html/doc/misc/npm-faq.html
+++ b/deps/npm/html/doc/misc/npm-faq.html
@@ -236,7 +236,7 @@ I get ECONNREFUSED a lot. What'
To check if the registry is down, open up
https://registry.npmjs.org/ in a web browser. This will also tell
you if you are just unable to access the internet for some reason.
-If the registry IS down, let us know by emailing support@npmjs.com
+
If the registry IS down, let us know by emailing support@npmjs.com
or posting an issue at https://github.com/npm/npm/issues . If it's
down for the world (and not just on your local network) then we're
probably already being pinged about it.
@@ -307,5 +307,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/npm-index.html b/deps/npm/html/doc/misc/npm-index.html
index 6abdedbb1c2136..196fdae4877d3e 100644
--- a/deps/npm/html/doc/misc/npm-index.html
+++ b/deps/npm/html/doc/misc/npm-index.html
@@ -236,5 +236,5 @@ s
-
+
diff --git a/deps/npm/html/doc/misc/npm-registry.html b/deps/npm/html/doc/misc/npm-registry.html
index 71f34b4ef4ba62..9c87ab42dfdf69 100644
--- a/deps/npm/html/doc/misc/npm-registry.html
+++ b/deps/npm/html/doc/misc/npm-registry.html
@@ -32,9 +32,9 @@ Can I run my own private registry?
If you set up continuous replication from the official CouchDB, and then
set your internal CouchDB as the registry config, then you'll be able
to read any published packages, in addition to your private ones, and by
-default will only publish internally. If you then want to publish a
-package for the whole world to see, you can simply override the
---registry config for that command.
+default will only publish internally.
+If you then want to publish a package for the whole world to see, you can
+simply override the --registry option for that publish command.
I don't want my package published in the official registry. It's private.
Set "private": true in your package.json to prevent it from being
published at all, or
@@ -70,5 +70,5 @@
SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/npm-scope.html b/deps/npm/html/doc/misc/npm-scope.html
index b9c84f8fed36b2..cee696187c40b7 100644
--- a/deps/npm/html/doc/misc/npm-scope.html
+++ b/deps/npm/html/doc/misc/npm-scope.html
@@ -18,9 +18,9 @@ DESCRIPTION
@somescope/somepackagename
Scopes are a way of grouping related packages together, and also affect a few
things about the way npm treats the package.
-As of 2014-09-03, scoped packages are not supported by the public npm registry .
-However, the npm client is backwards-compatible with un-scoped registries, so
-it can be used to work with scoped and un-scoped registries at the same time.
+Scoped packages are supported by the public npm registry. The npm
+client is backwards-compatible with un-scoped registries, so it can be
+used to work with scoped and un-scoped registries at the same time.
Installing scoped packages
Scoped packages are installed to a sub-folder of the regular installation
folder, e.g. if your other packages are installed in node_modules/packagename,
@@ -43,10 +43,22 @@
Requiring scoped packages
There is nothing special about the way Node treats scope folders, this is
just specifying to require the module mypackage in the folder called @myorg.
Publishing scoped packages
-Scoped packages can be published to any registry that supports them.
-As of 2014-09-03, the public npm registry does not support scoped packages ,
-so attempting to publish a scoped package to the registry will fail unless
-you have associated that scope with a different registry, see below.
+Scoped packages can be published to any registry that supports them, including
+the public npm registry.
+(As of 2015-04-19, the public npm registry does support scoped packages)
+If you wish, you may associate a scope with a registry; see below.
+Publishing public scoped packages to the public npm registry
+To publish a public scoped package, you must specify --access public with
+the initial publication. This will publish the package and set access
+to public as if you had run npm access public after publishing.
+Publishing private scoped packages to the npm registry
+To publish a private scoped package to the npm registry, you must have
+an npm Private Modules
+account.
+You can then publish the module with npm publish or npm publish
+--access restricted, and it will be present in the npm registry, with
+restricted access. You can then change the access permissions, if
+desired, with npm access or on the npmjs.com website.
Associating a scope with a registry
Scopes can be associated with a separate registry. This allows you to
seamlessly use a mix of packages from the public npm registry and one or more
@@ -65,6 +77,7 @@
SEE ALSO
@@ -78,5 +91,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/npm-scripts.html b/deps/npm/html/doc/misc/npm-scripts.html
index 5ce0cbf6f9dfa8..526459e30ba6e6 100644
--- a/deps/npm/html/doc/misc/npm-scripts.html
+++ b/deps/npm/html/doc/misc/npm-scripts.html
@@ -203,5 +203,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/removing-npm.html b/deps/npm/html/doc/misc/removing-npm.html
index a8e35f198eb49d..21ce3c53b51b5e 100644
--- a/deps/npm/html/doc/misc/removing-npm.html
+++ b/deps/npm/html/doc/misc/removing-npm.html
@@ -57,5 +57,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/semver.html b/deps/npm/html/doc/misc/semver.html
index 0bcd392d0f9d00..ab65a6dc3f610d 100644
--- a/deps/npm/html/doc/misc/semver.html
+++ b/deps/npm/html/doc/misc/semver.html
@@ -282,5 +282,5 @@ Ranges
-
+
diff --git a/deps/npm/html/partial/doc/README.html b/deps/npm/html/partial/doc/README.html
index 0f6c86be068148..303ba9adc71bff 100644
--- a/deps/npm/html/partial/doc/README.html
+++ b/deps/npm/html/partial/doc/README.html
@@ -115,7 +115,7 @@ Legal Stuff
If you have a complaint about a package in the public npm registry,
and cannot resolve it with the package
owner , please email
-support@npmjs.com and explain the situation.
+support@npmjs.com and explain the situation.
Any data published to The npm Registry (including user account
information) may be removed or modified at the sole discretion of the
npm server administrators.
diff --git a/deps/npm/html/partial/doc/api/npm-submodule.html b/deps/npm/html/partial/doc/api/npm-submodule.html
deleted file mode 100644
index 669841402f6b37..00000000000000
--- a/deps/npm/html/partial/doc/api/npm-submodule.html
+++ /dev/null
@@ -1,22 +0,0 @@
-npm-submodule Add a package as a git submodule
-SYNOPSIS
-npm.commands.submodule(packages, callback)
-DESCRIPTION
-For each package specified, npm will check if it has a git repository url
-in its package.json description then add it as a git submodule at
-node_modules/<pkg name>.
-This is a convenience only. From then on, it's up to you to manage
-updates by using the appropriate git commands. npm will stubbornly
-refuse to update, modify, or remove anything with a .git subfolder
-in it.
-This command also does not install missing dependencies, if the package
-does not include them in its git repository. If npm ls reports that
-things are missing, you can either install, link, or submodule them yourself,
-or you can do npm explore <pkgname> -- npm install to install the
-dependencies into the submodule folder.
-SEE ALSO
-
-npm help json
-git help submodule
-
-
diff --git a/deps/npm/html/partial/doc/api/npm.html b/deps/npm/html/partial/doc/api/npm.html
index 50510e797b24c9..044a65624b2f44 100644
--- a/deps/npm/html/partial/doc/api/npm.html
+++ b/deps/npm/html/partial/doc/api/npm.html
@@ -12,7 +12,7 @@ SYNOPSIS
npm.commands.install(["package"], cb)
})
VERSION
-2.8.3
+2.10.1
DESCRIPTION
This is the API documentation for npm.
To find documentation of the command line
diff --git a/deps/npm/html/partial/doc/cli/npm-build.html b/deps/npm/html/partial/doc/cli/npm-build.html
index 51f2e32960e8e4..11ba89b11f8c0a 100644
--- a/deps/npm/html/partial/doc/cli/npm-build.html
+++ b/deps/npm/html/partial/doc/cli/npm-build.html
@@ -7,8 +7,10 @@
SYNOPSIS
DESCRIPTION
This is the plumbing command called by npm link and npm install.
-It should generally not be called directly.
-SEE ALSO
+It should generally be called during installation, but if you need to run it
+directly, run:
+ npm run-script build
+SEE ALSO
npm-install(1)
npm-link(1)
diff --git a/deps/npm/html/partial/doc/cli/npm-install.html b/deps/npm/html/partial/doc/cli/npm-install.html
index 9dc85785dee7a6..1f6aef82499e85 100644
--- a/deps/npm/html/partial/doc/cli/npm-install.html
+++ b/deps/npm/html/partial/doc/cli/npm-install.html
@@ -120,50 +120,50 @@ SYNOPSIS
npm install sax@">=0.1.0 <0.2.0"
npm install @myorg/privatepackage@">=0.1.0 <0.2.0"
-npm install <githubname>/<githubrepo>:
+npm install <git remote url>:
+ Install a package by cloning a git remote url. The format of the git
+ url is:
+ <protocol>://[<user>[:<password>]@]<hostname>[:<port>][:/]<path>[#<commit-ish>]
+ <protocol> is one of git, git+ssh, git+http, or
+ git+https. If no <commit-ish> is specified, then master is
+ used.
+ Examples:
+ git+ssh://git@github.com:npm/npm.git#v1.0.27
+ git+https://isaacs@github.com/npm/npm.git
+ git://github.com/npm/npm.git#v1.0.27
+
+npm install <githubname>/<githubrepo>[#<commit-ish>]:
+
+npm install github:<githubname>/<githubrepo>[#<commit-ish>]:
Install the package at https://github.com/githubname/githubrepo by
attempting to clone it using git.
- Example:
+ If you don't specify a commit-ish then master will be used.
+ Examples:
npm install mygithubuser/myproject
- To reference a package in a generic git repo (not on GitHub), see git remote
- urls below.
-
-npm install github:<githubname>/<githubrepo>:
- The same as the above, but explicitly marked as a GitHub dependency.
- Example:
- npm install github:npm/npm
+ npm install github:mygithubuser/myproject
-npm install gist:[<githubname>/]<gistID>:
+npm install gist:[<githubname>/]<gistID>[#<commit-ish>]:
Install the package at https://gist.github.com/gistID by attempting to
clone it using git. The GitHub username associated with the gist is
optional and will not be saved in package.json if --save is used.
+ If you don't specify a commit-ish then master will be used.
Example:
npm install gist:101a11beef
-npm install bitbucket:<bitbucketname>/<bitbucketrepo>:
+npm install bitbucket:<bitbucketname>/<bitbucketrepo>[#<commit-ish>]:
Install the package at https://bitbucket.org/bitbucketname/bitbucketrepo
by attempting to clone it using git.
+ If you don't specify a commit-ish then master will be used.
Example:
npm install bitbucket:mybitbucketuser/myproject
-npm install gitlab:<gitlabname>/<gitlabrepo>:
+npm install gitlab:<gitlabname>/<gitlabrepo>[#<commit-ish>]:
Install the package at https://gitlab.com/gitlabname/gitlabrepo
by attempting to clone it using git.
+ If you don't specify a commit-ish then master will be used.
Example:
npm install gitlab:mygitlabuser/myproject
-npm install <git remote url>:
- Install a package by cloning a git remote url. The format of the git
- url is:
- <protocol>://[<user>[:<password>]@]<hostname><separator><path>[#<commit-ish>]
- <protocol> is one of git, git+ssh, git+http, or
- git+https. If no <commit-ish> is specified, then master is
- used.
- Examples:
- git+ssh://git@github.com:npm/npm.git#v1.0.27
- git+https://isaacs@github.com/npm/npm.git
- git://github.com/npm/npm.git#v1.0.27
-
You may combine multiple arguments, and even multiple types of arguments.
For example:
diff --git a/deps/npm/html/partial/doc/cli/npm-ls.html b/deps/npm/html/partial/doc/cli/npm-ls.html
index 4971b971ca8a66..448aae4d11bd41 100644
--- a/deps/npm/html/partial/doc/cli/npm-ls.html
+++ b/deps/npm/html/partial/doc/cli/npm-ls.html
@@ -11,7 +11,7 @@ SYNOPSIS
limit the results to only the paths to the packages named. Note that
nested packages will also show the paths to the specified packages.
For example, running npm ls promzard in npm's source tree will show:
-npm@2.8.3 /path/to/npm
+npm@2.10.1 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
It will print out extraneous, missing, and invalid packages.
diff --git a/deps/npm/html/partial/doc/cli/npm-shrinkwrap.html b/deps/npm/html/partial/doc/cli/npm-shrinkwrap.html
index 82bb81f168baec..7857cf85ee8cd3 100644
--- a/deps/npm/html/partial/doc/cli/npm-shrinkwrap.html
+++ b/deps/npm/html/partial/doc/cli/npm-shrinkwrap.html
@@ -4,9 +4,9 @@ SYNOPSIS
DESCRIPTION
This command locks down the versions of a package's dependencies so
that you can control exactly which versions of each dependency will be
-used when your package is installed. The "package.json" file is still
-required if you want to use "npm install".
-By default, "npm install" recursively installs the target's
+used when your package is installed. The package.json file is still
+required if you want to use npm install.
+By default, npm install recursively installs the target's
dependencies (as specified in package.json), choosing the latest
available version that satisfies the dependency's semver pattern. In
some situations, particularly when shipping software where each change
@@ -40,11 +40,11 @@
SYNOPSIS
"version": "0.0.1"
}
If these are the only versions of A, B, and C available in the
-registry, then a normal "npm install A" will install:
+registry, then a normal npm install A will install:
A@0.1.0
`-- B@0.0.1
`-- C@0.0.1
-However, if B@0.0.2 is published, then a fresh "npm install A" will
+
However, if B@0.0.2 is published, then a fresh npm install A will
install:
A@0.1.0
`-- B@0.0.2
@@ -74,7 +74,7 @@ SYNOPSIS
}
}
The shrinkwrap command has locked down the dependencies based on
-what's currently installed in node_modules. When "npm install"
+what's currently installed in node_modules. When npm install
installs a package with a npm-shrinkwrap.json file in the package
root, the shrinkwrap file (rather than package.json files) completely
drives the installation of that package and all of its dependencies
@@ -84,41 +84,41 @@
SYNOPSIS
files.
Using shrinkwrapped packages
Using a shrinkwrapped package is no different than using any other
-package: you can "npm install" it by hand, or add a dependency to your
-package.json file and "npm install" it.
+package: you can npm install it by hand, or add a dependency to your
+package.json file and npm install it.
Building shrinkwrapped packages
To shrinkwrap an existing package:
-Run "npm install" in the package root to install the current
+ Run npm install in the package root to install the current
versions of all dependencies.
Validate that the package works as expected with these versions.
-Run "npm shrinkwrap", add npm-shrinkwrap.json to git, and publish
+ Run npm shrinkwrap, add npm-shrinkwrap.json to git, and publish
your package.
To add or update a dependency in a shrinkwrapped package:
-Run "npm install" in the package root to install the current
+ Run npm install in the package root to install the current
versions of all dependencies.
-Add or update dependencies. "npm install" each new or updated
+ Add or update dependencies. npm install each new or updated
package individually and then update package.json. Note that they
must be explicitly named in order to be installed: running npm
install with no arguments will merely reproduce the existing
shrinkwrap.
Validate that the package works as expected with the new
dependencies.
-Run "npm shrinkwrap", commit the new npm-shrinkwrap.json, and
+ Run npm shrinkwrap, commit the new npm-shrinkwrap.json, and
publish your package.
You can use npm-outdated(1) to view dependencies with newer versions
available.
Other Notes
A shrinkwrap file must be consistent with the package's package.json
-file. "npm shrinkwrap" will fail if required dependencies are not
+file. npm shrinkwrap will fail if required dependencies are not
already installed, since that would result in a shrinkwrap that
wouldn't actually work. Similarly, the command will fail if there are
extraneous packages (not referenced by package.json), since that would
indicate that package.json is not correct.
-Since "npm shrinkwrap" is intended to lock down your dependencies for
+
Since npm shrinkwrap is intended to lock down your dependencies for
production use, devDependencies will not be included unless you
explicitly set the --dev flag when you run npm shrinkwrap. If
installed devDependencies are excluded, then npm will print a
diff --git a/deps/npm/html/partial/doc/cli/npm-submodule.html b/deps/npm/html/partial/doc/cli/npm-submodule.html
deleted file mode 100644
index 1e259e1f2f6911..00000000000000
--- a/deps/npm/html/partial/doc/cli/npm-submodule.html
+++ /dev/null
@@ -1,22 +0,0 @@
-
npm-submodule Add a package as a git submodule
-SYNOPSIS
-npm submodule <pkg>
-DESCRIPTION
-If the specified package has a git repository url in its package.json
-description, then this command will add it as a git submodule at
-node_modules/<pkg name>.
-This is a convenience only. From then on, it's up to you to manage
-updates by using the appropriate git commands. npm will stubbornly
-refuse to update, modify, or remove anything with a .git subfolder
-in it.
-This command also does not install missing dependencies, if the package
-does not include them in its git repository. If npm ls reports that
-things are missing, you can either install, link, or submodule them yourself,
-or you can do npm explore <pkgname> -- npm install to install the
-dependencies into the submodule folder.
-SEE ALSO
-
-
diff --git a/deps/npm/html/partial/doc/cli/npm.html b/deps/npm/html/partial/doc/cli/npm.html
index 104e3cdc02af48..da99841671dd05 100644
--- a/deps/npm/html/partial/doc/cli/npm.html
+++ b/deps/npm/html/partial/doc/cli/npm.html
@@ -2,7 +2,7 @@ npm javascript package manager
SYNOPSIS
npm <command> [args]
VERSION
-2.8.3
+2.10.1
DESCRIPTION
npm is the package manager for the Node JavaScript platform. It puts
modules in place so that node can find them, and manages dependency
@@ -99,7 +99,7 @@
CONTRIBUTIONS
the issues list or ask on the mailing list.
BUGS
When you find issues, please report them:
@@ -107,7 +107,7 @@ BUGS
web:
http://github.com/npm/npm/issues
email:
-npm-@googlegroups.com
+npm-@googlegroups.com
Be sure to include all of the output from the npm command that didn't work
as expected. The npm-debug.log file is also helpful to provide.
@@ -117,7 +117,7 @@ AUTHOR
Isaac Z. Schlueter ::
isaacs ::
@izs ::
-i@izs.me
+i@izs.me
SEE ALSO
npm-help(1)
diff --git a/deps/npm/html/partial/doc/files/npm-json.html b/deps/npm/html/partial/doc/files/npm-json.html
index ac4bfbc98031b4..055f8c486053a3 100644
--- a/deps/npm/html/partial/doc/files/npm-json.html
+++ b/deps/npm/html/partial/doc/files/npm-json.html
@@ -10,14 +10,22 @@ name
them. The name and version together form an identifier that is assumed
to be completely unique. Changes to the package should come along with
changes to the version.
-The name is what your thing is called. Some tips:
+The name is what your thing is called.
+Some rules:
+The name must be shorter than 214 characters. This includes the scope for
+scoped packages.
+The name can't start with a dot or an underscore.
+New packages must not have uppercase letters in the name.
+The name ends up being part of a URL, an argument on the command line, and a
+folder name. Therefore, the name can't contain any non-URL-safe characters.
+
+Some tips:
+
+Don't use the same name as a core Node module.
Don't put "js" or "node" in the name. It's assumed that it's js, since you're
writing a package.json file, and you can specify the engine using the "engines"
field. (See below.)
-The name ends up being part of a URL, an argument on the command line, and a
-folder name. Any name with non-url-safe characters will be rejected.
-Also, it can't start with a dot or an underscore.
The name will probably be passed as an argument to require(), so it should
be something short, but also reasonably descriptive.
You may want to check the npm registry to see if there's something by that name
@@ -61,16 +69,44 @@ bugs
license
You should specify a license for your package so that people know how they are
permitted to use it, and any restrictions you're placing on it.
-The simplest way, assuming you're using a common license such as BSD-3-Clause
-or MIT, is to just specify the standard SPDX ID of the license you're using,
-like this:
+If you're using a common license such as BSD-2-Clause or MIT, add a
+current SPDX license identifier for the license you're using, like this:
{ "license" : "BSD-3-Clause" }
You can check the full list of SPDX license IDs .
Ideally you should pick one that is
OSI approved.
-It's also a good idea to include a LICENSE file at the top level in
-your package.
-people fields: author, contributors
+If your package is licensed under multiple common licenses, use an SPDX license
+expression syntax version 2.0 string , like this:
+{ "license" : "(ISC OR GPL-3.0)" }
+If you are using a license that hasn't been assigned an SPDX identifier, or if
+you are using a custom license, use the following valid SPDX expression:
+{ "license" : "LicenseRef-LICENSE" }
+Then include a LICENSE file at the top level of the package.
+Some old packages used license objects or a "licenses" property containing an
+array of license objects:
+// Not valid metadata
+{ "license" :
+ { "type" : "ISC"
+ , "url" : "http://opensource.org/licenses/ISC"
+ }
+}
+
+// Not valid metadata
+{ "licenses" :
+ [
+ { "type": "MIT"
+ , "url": "http://www.opensource.org/licenses/mit-license.php"
+ }
+ , { "type": "Apache-2.0"
+ , "url": "http://opensource.org/licenses/apache2.0.php"
+ }
+ ]
+}
+Those styles are now deprecated. Instead, use SPDX expressions, like this:
+{ "license": "ISC" }
+
+{ "license": "(MIT OR Apache-2.0)" }
+people fields: author, contributors
The "author" is one person. "contributors" is an array of people. A "person"
is an object with a "name" field and optionally "url" and "email", like this:
{ "name" : "Barney Rubble"
diff --git a/deps/npm/html/partial/doc/files/npmrc.html b/deps/npm/html/partial/doc/files/npmrc.html
index 988920a4896283..1dd7919df17d63 100644
--- a/deps/npm/html/partial/doc/files/npmrc.html
+++ b/deps/npm/html/partial/doc/files/npmrc.html
@@ -10,7 +10,7 @@ FILES
per-project config file (/path/to/my/project/.npmrc)
per-user config file (~/.npmrc)
-global config file ($PREFIX/npmrc)
+global config file ($PREFIX/etc/npmrc)
npm builtin config file (/path/to/npm/npmrc)
All npm config files are an ini-formatted list of key = value
diff --git a/deps/npm/html/partial/doc/files/package.json.html b/deps/npm/html/partial/doc/files/package.json.html
index ac4bfbc98031b4..055f8c486053a3 100644
--- a/deps/npm/html/partial/doc/files/package.json.html
+++ b/deps/npm/html/partial/doc/files/package.json.html
@@ -10,14 +10,22 @@
name
them. The name and version together form an identifier that is assumed
to be completely unique. Changes to the package should come along with
changes to the version.
-The name is what your thing is called. Some tips:
+The name is what your thing is called.
+Some rules:
+The name must be shorter than 214 characters. This includes the scope for
+scoped packages.
+The name can't start with a dot or an underscore.
+New packages must not have uppercase letters in the name.
+The name ends up being part of a URL, an argument on the command line, and a
+folder name. Therefore, the name can't contain any non-URL-safe characters.
+
+Some tips:
+
+Don't use the same name as a core Node module.
Don't put "js" or "node" in the name. It's assumed that it's js, since you're
writing a package.json file, and you can specify the engine using the "engines"
field. (See below.)
-The name ends up being part of a URL, an argument on the command line, and a
-folder name. Any name with non-url-safe characters will be rejected.
-Also, it can't start with a dot or an underscore.
The name will probably be passed as an argument to require(), so it should
be something short, but also reasonably descriptive.
You may want to check the npm registry to see if there's something by that name
@@ -61,16 +69,44 @@ bugs
license
You should specify a license for your package so that people know how they are
permitted to use it, and any restrictions you're placing on it.
-The simplest way, assuming you're using a common license such as BSD-3-Clause
-or MIT, is to just specify the standard SPDX ID of the license you're using,
-like this:
+If you're using a common license such as BSD-2-Clause or MIT, add a
+current SPDX license identifier for the license you're using, like this:
{ "license" : "BSD-3-Clause" }
You can check the full list of SPDX license IDs .
Ideally you should pick one that is
OSI approved.
-It's also a good idea to include a LICENSE file at the top level in
-your package.
-people fields: author, contributors
+If your package is licensed under multiple common licenses, use an SPDX license
+expression syntax version 2.0 string , like this:
+{ "license" : "(ISC OR GPL-3.0)" }
+If you are using a license that hasn't been assigned an SPDX identifier, or if
+you are using a custom license, use the following valid SPDX expression:
+{ "license" : "LicenseRef-LICENSE" }
+Then include a LICENSE file at the top level of the package.
+Some old packages used license objects or a "licenses" property containing an
+array of license objects:
+// Not valid metadata
+{ "license" :
+ { "type" : "ISC"
+ , "url" : "http://opensource.org/licenses/ISC"
+ }
+}
+
+// Not valid metadata
+{ "licenses" :
+ [
+ { "type": "MIT"
+ , "url": "http://www.opensource.org/licenses/mit-license.php"
+ }
+ , { "type": "Apache-2.0"
+ , "url": "http://opensource.org/licenses/apache2.0.php"
+ }
+ ]
+}
+Those styles are now deprecated. Instead, use SPDX expressions, like this:
+{ "license": "ISC" }
+
+{ "license": "(MIT OR Apache-2.0)" }
+people fields: author, contributors
The "author" is one person. "contributors" is an array of people. A "person"
is an object with a "name" field and optionally "url" and "email", like this:
{ "name" : "Barney Rubble"
diff --git a/deps/npm/html/partial/doc/misc/npm-config.html b/deps/npm/html/partial/doc/misc/npm-config.html
index 0c334591db8c70..2694cb8b2d6ea0 100644
--- a/deps/npm/html/partial/doc/misc/npm-config.html
+++ b/deps/npm/html/partial/doc/misc/npm-config.html
@@ -1,6 +1,6 @@
npm-config More than you probably want to know about npm configuration
DESCRIPTION
-npm gets its configuration values from 6 sources, in this priority:
+npm gets its configuration values from the following sources, sorted by priority:
Command Line Flags
Putting --foo bar on the command line sets the foo configuration
parameter to "bar". A -- argument tells the cli parser to stop
@@ -678,6 +678,17 @@
tag
it will install the specified tag.
Also the tag that is added to the package@version specified by the npm
tag command, if no explicit tag is given.
+tag-version-prefix
+
+Default: "v"
+Type: String
+
+If set, alters the prefix used when tagging a new version when performing a
+version increment using npm-version. To remove the prefix altogether, set it
+to the empty string: "".
+Because other tools may rely on the convention that npm version tags look like
+v1.0.0, only use this property if it is absolutely necessary . In
+particular, use care when overriding this setting for public packages.
tmp
Joe emails Bob, explaining the situation as respectfully as
possible , and what he would like to do with the module name. He
-adds the npm support staff support@npmjs.com to the CC list of
+adds the npm support staff support@npmjs.com to the CC list of
the email. Mention in the email that Bob can run npm owner add
joe foo to add Joe as an owner of the foo package.
After a reasonable amount of time, if Bob has not responded, or if
Bob and Joe can't come to any sort of resolution, email support
-support@npmjs.com and we'll sort it out. ("Reasonable" is
+support@npmjs.com and we'll sort it out. ("Reasonable" is
usually at least 4 weeks, but extra time is allowed around common
holidays.)
diff --git a/deps/npm/html/partial/doc/misc/npm-faq.html b/deps/npm/html/partial/doc/misc/npm-faq.html
index 26beeeaed81ac6..5dedebd9d75464 100644
--- a/deps/npm/html/partial/doc/misc/npm-faq.html
+++ b/deps/npm/html/partial/doc/misc/npm-faq.html
@@ -225,7 +225,7 @@ I get ECONNREFUSED a lot. What'
To check if the registry is down, open up
https://registry.npmjs.org/ in a web browser. This will also tell
you if you are just unable to access the internet for some reason.
-If the registry IS down, let us know by emailing support@npmjs.com
+
If the registry IS down, let us know by emailing support@npmjs.com
or posting an issue at https://github.com/npm/npm/issues . If it's
down for the world (and not just on your local network) then we're
probably already being pinged about it.
diff --git a/deps/npm/html/partial/doc/misc/npm-registry.html b/deps/npm/html/partial/doc/misc/npm-registry.html
index 2a7c16014586b6..8dd80d4d25b281 100644
--- a/deps/npm/html/partial/doc/misc/npm-registry.html
+++ b/deps/npm/html/partial/doc/misc/npm-registry.html
@@ -21,9 +21,9 @@ Can I run my own private registry?
If you set up continuous replication from the official CouchDB, and then
set your internal CouchDB as the registry config, then you'll be able
to read any published packages, in addition to your private ones, and by
-default will only publish internally. If you then want to publish a
-package for the whole world to see, you can simply override the
---registry config for that command.
+default will only publish internally.
+If you then want to publish a package for the whole world to see, you can
+simply override the --registry option for that publish command.
I don't want my package published in the official registry. It's private.
Set "private": true in your package.json to prevent it from being
published at all, or
diff --git a/deps/npm/html/partial/doc/misc/npm-scope.html b/deps/npm/html/partial/doc/misc/npm-scope.html
index 5616efdcb8c2ea..f65c44036761fb 100644
--- a/deps/npm/html/partial/doc/misc/npm-scope.html
+++ b/deps/npm/html/partial/doc/misc/npm-scope.html
@@ -7,9 +7,9 @@
DESCRIPTION
@somescope/somepackagename
Scopes are a way of grouping related packages together, and also affect a few
things about the way npm treats the package.
-As of 2014-09-03, scoped packages are not supported by the public npm registry .
-However, the npm client is backwards-compatible with un-scoped registries, so
-it can be used to work with scoped and un-scoped registries at the same time.
+Scoped packages are supported by the public npm registry. The npm
+client is backwards-compatible with un-scoped registries, so it can be
+used to work with scoped and un-scoped registries at the same time.
Installing scoped packages
Scoped packages are installed to a sub-folder of the regular installation
folder, e.g. if your other packages are installed in node_modules/packagename,
@@ -32,10 +32,22 @@
Requiring scoped packages
There is nothing special about the way Node treats scope folders, this is
just specifying to require the module mypackage in the folder called @myorg.
Publishing scoped packages
-Scoped packages can be published to any registry that supports them.
-As of 2014-09-03, the public npm registry does not support scoped packages ,
-so attempting to publish a scoped package to the registry will fail unless
-you have associated that scope with a different registry, see below.
+Scoped packages can be published to any registry that supports them, including
+the public npm registry.
+(As of 2015-04-19, the public npm registry does support scoped packages)
+If you wish, you may associate a scope with a registry; see below.
+Publishing public scoped packages to the public npm registry
+To publish a public scoped package, you must specify --access public with
+the initial publication. This will publish the package and set access
+to public as if you had run npm access public after publishing.
+Publishing private scoped packages to the npm registry
+To publish a private scoped package to the npm registry, you must have
+an npm Private Modules
+account.
+You can then publish the module with npm publish or npm publish
+--access restricted, and it will be present in the npm registry, with
+restricted access. You can then change the access permissions, if
+desired, with npm access or on the npmjs.com website.
Associating a scope with a registry
Scopes can be associated with a separate registry. This allows you to
seamlessly use a mix of packages from the public npm registry and one or more
@@ -54,5 +66,6 @@
SEE ALSO
diff --git a/deps/npm/lib/access.js b/deps/npm/lib/access.js
index a479a971c35462..cf960a67d8b8aa 100644
--- a/deps/npm/lib/access.js
+++ b/deps/npm/lib/access.js
@@ -119,5 +119,5 @@ function ls (args, cb) {
}
function edit (args, cb) {
- return cb(new Error("npm edit ls isn't implemented yet!"))
+ return cb(new Error("npm access edit isn't implemented yet!"))
}
diff --git a/deps/npm/lib/bin.js b/deps/npm/lib/bin.js
index 719e8870dda9c7..5465112d8891e8 100644
--- a/deps/npm/lib/bin.js
+++ b/deps/npm/lib/bin.js
@@ -1,13 +1,14 @@
module.exports = bin
var npm = require("./npm.js")
+var osenv = require("osenv")
bin.usage = "npm bin\nnpm bin -g\n(just prints the bin folder)"
function bin (args, silent, cb) {
if (typeof cb !== "function") cb = silent, silent = false
var b = npm.bin
- , PATH = (process.env.PATH || "").split(":")
+ , PATH = osenv.path()
if (!silent) console.log(b)
process.nextTick(cb.bind(this, null, b))
diff --git a/deps/npm/lib/cache/add-named.js b/deps/npm/lib/cache/add-named.js
index 07d5b7f01b36f9..cd06aa28893dd8 100644
--- a/deps/npm/lib/cache/add-named.js
+++ b/deps/npm/lib/cache/add-named.js
@@ -17,6 +17,18 @@ var path = require("path")
module.exports = addNamed
function getOnceFromRegistry (name, from, next, done) {
+ function fixName(err, data, json, resp) {
+ // this is only necessary until npm/npm-registry-client#80 is fixed
+ if (err && err.pkgid && err.pkgid !== name) {
+ err.message = err.message.replace(
+ new RegExp(': ' + err.pkgid.replace(/(\W)/g, '\\$1') + '$'),
+ ': ' + name
+ )
+ err.pkgid = name
+ }
+ next(err, data, json, resp)
+ }
+
mapToRegistry(name, npm.config, function (er, uri, auth) {
if (er) return done(er)
@@ -25,7 +37,7 @@ function getOnceFromRegistry (name, from, next, done) {
if (!next) return log.verbose(from, key, "already in flight; waiting")
else log.verbose(from, key, "not in flight; fetching")
- npm.registry.get(uri, { auth : auth }, next)
+ npm.registry.get(uri, { auth : auth }, fixName)
})
}
diff --git a/deps/npm/lib/cache/add-remote-git.js b/deps/npm/lib/cache/add-remote-git.js
index 3ec9c46d1e7359..d4281a8aeaef43 100644
--- a/deps/npm/lib/cache/add-remote-git.js
+++ b/deps/npm/lib/cache/add-remote-git.js
@@ -115,7 +115,7 @@ function tryClone (from, combinedURL, silent, cb) {
// ensure that similarly-named remotes don't collide
var repoID = cloneURL.replace(/[^a-zA-Z0-9]+/g, '-') + '-' +
- crypto.createHash('sha1').update(cloneURL).digest('hex').slice(0, 8)
+ crypto.createHash('sha1').update(combinedURL).digest('hex').slice(0, 8)
var cachedRemote = path.join(remotes, repoID)
cb = inflight(repoID, cb)
@@ -267,7 +267,7 @@ function updateRemote (from, cloneURL, treeish, cachedRemote, cb) {
}
// branches and tags are both symbolic labels that can be attached to different
-// commits, so resolve the commitish to the current actual treeish the label
+// commits, so resolve the commit-ish to the current actual treeish the label
// corresponds to
//
// important for shrinkwrap
diff --git a/deps/npm/lib/config/defaults.js b/deps/npm/lib/config/defaults.js
index e5744772ed5d1a..a90d4c22b0a87e 100644
--- a/deps/npm/lib/config/defaults.js
+++ b/deps/npm/lib/config/defaults.js
@@ -196,6 +196,7 @@ Object.defineProperty(exports, "defaults", {get: function () {
, spin: true
, "strict-ssl": true
, tag : "latest"
+ , "tag-version-prefix" : "v"
, tmp : temp
, unicode : true
, "unsafe-perm" : process.platform === "win32"
@@ -308,15 +309,26 @@ exports.types =
, userconfig : path
, umask: Umask
, version : Boolean
+ , "tag-version-prefix" : String
, versions : Boolean
, viewer: String
, _exit : Boolean
}
-function getLocalAddresses() {
- Object.keys(os.networkInterfaces()).map(function (nic) {
- return os.networkInterfaces()[nic].filter(function (addr) {
- return addr.family === "IPv4"
+function getLocalAddresses () {
+ var interfaces
+ // #8094: some environments require elevated permissions to enumerate
+ // interfaces, and synchronously throw EPERM when run without
+ // elevated privileges
+ try {
+ interfaces = os.networkInterfaces()
+ } catch (e) {
+ interfaces = {}
+ }
+
+ return Object.keys(interfaces).map(function (nic) {
+ return interfaces[nic].filter(function (addr) {
+ return addr.family === 'IPv4'
})
.map(function (addr) {
return addr.address
diff --git a/deps/npm/lib/help.js b/deps/npm/lib/help.js
index cb1d5715f9d3e8..942d27b41e17b8 100644
--- a/deps/npm/lib/help.js
+++ b/deps/npm/lib/help.js
@@ -63,8 +63,11 @@ function help (args, cb) {
else if (section === "json") section = "package.json"
// find either /section.n or /npm-section.n
- var compext = "\\.+(gz|bz2|lzma|[FYzZ]|xz)$"
- var f = "+(npm-" + section + "|" + section + ").[0-9]?(" + compext + ")"
+ // The glob is used in the glob. The regexp is used much
+ // further down. Globs and regexps are different
+ var compextglob = ".+(gz|bz2|lzma|[FYzZ]|xz)"
+ var compextre = "\\.(gz|bz2|lzma|[FYzZ]|xz)$"
+ var f = "+(npm-" + section + "|" + section + ").[0-9]?(" + compextglob + ")"
return glob(manroot + "/*/" + f, function (er, mans) {
if (er) return cb(er)
@@ -72,7 +75,7 @@ function help (args, cb) {
mans = mans.map(function (man) {
var ext = path.extname(man)
- if (man.match(new RegExp(compext))) man = path.basename(man, ext)
+ if (man.match(new RegExp(compextre))) man = path.basename(man, ext)
return man
})
diff --git a/deps/npm/lib/init.js b/deps/npm/lib/init.js
index 401e700af3cde2..a889774b02b939 100644
--- a/deps/npm/lib/init.js
+++ b/deps/npm/lib/init.js
@@ -17,7 +17,7 @@ function init (args, cb) {
if (!initJson.yes(npm.config)) {
console.log(
["This utility will walk you through creating a package.json file."
- ,"It only covers the most common items, and tries to guess sane defaults."
+ ,"It only covers the most common items, and tries to guess sensible defaults."
,""
,"See `npm help json` for definitive documentation on these fields"
,"and exactly what they do."
diff --git a/deps/npm/lib/install.js b/deps/npm/lib/install.js
index 799f0de943a109..4ab248a492d5e3 100644
--- a/deps/npm/lib/install.js
+++ b/deps/npm/lib/install.js
@@ -644,7 +644,7 @@ function installManyTop_ (what, where, context, cb) {
return path.resolve(nm, p, "package.json")
}), function (jsonPath, cb) {
log.verbose('installManyTop', 'reading scoped package data from', jsonPath)
- readJson(jsonPath, log.warn, function (er, data) {
+ readJson(jsonPath, log.info, function (er, data) {
if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
if (er) return cb(null, [])
cb(null, [[data.name, data.version]])
@@ -687,8 +687,6 @@ function installMany (what, where, context, cb) {
var parent = data
- var d = data.dependencies || {}
-
// if we're explicitly installing "what" into "where", then the shrinkwrap
// for "where" doesn't apply. This would be the case if someone were adding
// a new package to a shrinkwrapped package. (data.dependencies will not be
@@ -696,10 +694,13 @@ function installMany (what, where, context, cb) {
// there's no harm in using that.)
if (context.explicit) wrap = null
+ var deps = data.dependencies || {}
+ var devDeps = data.devDependencies || {}
+
// what is a list of things.
// resolve each one.
asyncMap( what
- , targetResolver(where, context, d)
+ , targetResolver(where, context, deps, devDeps)
, function (er, targets) {
if (er) return cb(er)
@@ -774,7 +775,7 @@ function installMany (what, where, context, cb) {
})
}
-function targetResolver (where, context, deps) {
+function targetResolver (where, context, deps, devDeps) {
var alreadyInstalledManually = []
, resolveLeft = 0
, nm = path.resolve(where, "node_modules")
@@ -798,7 +799,7 @@ function targetResolver (where, context, deps) {
asyncMap(inst, function (pkg, cb) {
var jsonPath = path.resolve(name, pkg, 'package.json')
log.verbose('targetResolver', 'reading package data from', jsonPath)
- readJson(jsonPath, log.warn, function (er, d) {
+ readJson(jsonPath, log.info, function (er, d) {
if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
// error means it's not a package, most likely.
if (er) return cb(null, [])
@@ -807,7 +808,8 @@ function targetResolver (where, context, deps) {
// otherwise, make sure that it's a semver match with what we want.
var bd = parent.bundleDependencies
var isBundled = bd && bd.indexOf(d.name) !== -1
- var currentIsSatisfactory = semver.satisfies(d.version, deps[d.name] || "*", true)
+ var expectedVersion = deps[d.name] || (devDeps && devDeps[d.name]) || "*"
+ var currentIsSatisfactory = semver.satisfies(d.version, expectedVersion, true)
if (isBundled || currentIsSatisfactory || deps[d.name] === d._resolved) {
return cb(null, d.name)
}
@@ -890,7 +892,8 @@ function targetResolver (where, context, deps) {
return cb(null, [])
}
- var isGit = npa(what).type === "git"
+ var type = npa(what).type
+ var isGit = type === "git" || type === "hosted"
if (!er &&
data &&
@@ -916,7 +919,8 @@ function installOne (target, where, context, cb) {
// the --link flag makes this a "link" command if it's at the
// the top level.
var isGit = false
- if (target && target._from) isGit = npa(target._from).type === 'git'
+ var type = npa(target._from).type
+ if (target && target._from) isGit = type === 'git' || type === 'hosted'
if (where === npm.prefix && npm.config.get("link")
&& !npm.config.get("global") && !isGit) {
@@ -1093,6 +1097,7 @@ function write (target, targetFolder, context, cb_) {
// before continuing to installing dependencies, check for a shrinkwrap.
var opt = { dev: npm.config.get("dev") }
readDependencies(context, targetFolder, opt, function (er, data, wrap) {
+ if (er) return cb(er);
var deps = prepareForInstallMany(data, "dependencies", bundled, wrap,
family)
var depsTargetFolder = targetFolder
diff --git a/deps/npm/lib/ls.js b/deps/npm/lib/ls.js
index 05166ee6ab0aac..c7877b925e6be7 100644
--- a/deps/npm/lib/ls.js
+++ b/deps/npm/lib/ls.js
@@ -293,8 +293,11 @@ function makeArchy_ (data, long, dir, depth, parent, d) {
// add giturl to name@version
if (data._resolved) {
- if (npa(data._resolved).type === "git")
- out.label += " (" + data._resolved + ")"
+ var type = npa(data._resolved).type
+ var isGit = type === 'git' || type === 'hosted'
+ if (isGit) {
+ out.label += ' (' + data._resolved + ')'
+ }
}
if (long) {
diff --git a/deps/npm/lib/outdated.js b/deps/npm/lib/outdated.js
index 192f474da6e6fe..12db7cb7c79376 100644
--- a/deps/npm/lib/outdated.js
+++ b/deps/npm/lib/outdated.js
@@ -301,7 +301,7 @@ function shouldUpdate (args, dir, dep, has, req, depth, cb, type) {
}
if (args.length && args.indexOf(dep) === -1) return skip()
- var parsed = npa(req)
+ var parsed = npa(dep + '@' + req)
if (parsed.type === "git" || (parsed.hosted && parsed.hosted.type === "github")) {
return doIt("git", "git")
}
@@ -313,8 +313,35 @@ function shouldUpdate (args, dir, dep, has, req, depth, cb, type) {
npm.registry.get(uri, { auth : auth }, updateDeps)
})
+ function updateLocalDeps (latestRegistryVersion) {
+ readJson(path.resolve(parsed.spec, 'package.json'), function (er, localDependency) {
+ if (er) return cb()
+
+ var wanted = localDependency.version
+ var latest = localDependency.version
+
+ if (latestRegistryVersion) {
+ latest = latestRegistryVersion
+ if (semver.lt(wanted, latestRegistryVersion)) {
+ wanted = latestRegistryVersion
+ req = dep + '@' + latest
+ }
+ }
+
+ if (curr.version !== wanted) {
+ doIt(wanted, latest)
+ } else {
+ skip()
+ }
+ })
+ }
+
function updateDeps (er, d) {
- if (er) return cb()
+ if (er) {
+ if (parsed.type !== 'local') return cb()
+ return updateLocalDeps()
+ }
+
if (!d || !d["dist-tags"] || !d.versions) return cb()
var l = d.versions[d["dist-tags"].latest]
if (!l) return cb()
@@ -355,6 +382,8 @@ function shouldUpdate (args, dir, dep, has, req, depth, cb, type) {
if (!curr || dFromUrl && cFromUrl && d._from !== curr.from
|| d.version !== curr.version
|| d.version !== l.version) {
+ if (parsed.type === 'local') return updateLocalDeps(l.version)
+
doIt(d.version, l.version)
}
else {
diff --git a/deps/npm/lib/publish.js b/deps/npm/lib/publish.js
index 92a9a9b6715bc9..8f1c73c3c98fe5 100644
--- a/deps/npm/lib/publish.js
+++ b/deps/npm/lib/publish.js
@@ -7,13 +7,12 @@ var npm = require("./npm.js")
, readJson = require("read-package-json")
, lifecycle = require("./utils/lifecycle.js")
, chain = require("slide").chain
- , Conf = require("./config/core.js").Conf
- , CachingRegClient = require("./cache/caching-client.js")
, mapToRegistry = require("./utils/map-to-registry.js")
, cachedPackageRoot = require("./cache/cached-package-root.js")
, createReadStream = require("graceful-fs").createReadStream
, npa = require("npm-package-arg")
, semver = require('semver')
+ , getPublishConfig = require("./utils/get-publish-config.js")
publish.usage = "npm publish [--tag ]"
+ "\nnpm publish [--tag ]"
@@ -83,22 +82,13 @@ function cacheAddPublish (dir, didPre, isRetry, cb) {
function publish_ (arg, data, isRetry, cachedir, cb) {
if (!data) return cb(new Error("no package.json file found"))
- var registry = npm.registry
- var config = npm.config
-
- // check for publishConfig hash
- if (data.publishConfig) {
- config = new Conf(npm.config)
- config.save = npm.config.save.bind(npm.config)
-
- // don't modify the actual publishConfig object, in case we have
- // to set a login token or some other data.
- config.unshift(Object.keys(data.publishConfig).reduce(function (s, k) {
- s[k] = data.publishConfig[k]
- return s
- }, {}))
- registry = new CachingRegClient(config)
- }
+ var mappedConfig = getPublishConfig(
+ data.publishConfig,
+ npm.config,
+ npm.registry
+ )
+ var config = mappedConfig.config
+ var registry = mappedConfig.client
data._npmVersion = npm.version
data._nodeVersion = process.versions.node
diff --git a/deps/npm/lib/run-script.js b/deps/npm/lib/run-script.js
index bf72bf814d1a02..057af2bc69cb73 100644
--- a/deps/npm/lib/run-script.js
+++ b/deps/npm/lib/run-script.js
@@ -13,7 +13,6 @@ runScript.completion = function (opts, cb) {
// see if there's already a package specified.
var argv = opts.conf.argv.remain
- , installedShallow = require("./utils/completion/installed-shallow.js")
if (argv.length >= 4) return cb()
@@ -41,33 +40,11 @@ runScript.completion = function (opts, cb) {
})
}
- // complete against the installed-shallow, and the pwd's scripts.
- // but only packages that have scripts
- var installed
- , scripts
- installedShallow(opts, function (d) {
- return d.scripts
- }, function (er, inst) {
- installed = inst
- next()
- })
-
- if (npm.config.get("global")) {
- scripts = []
- next()
- }
- else readJson(path.join(npm.localPrefix, "package.json"), function (er, d) {
+ readJson(path.join(npm.localPrefix, "package.json"), function (er, d) {
if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
d = d || {}
- scripts = Object.keys(d.scripts || {})
- next()
+ cb(null, Object.keys(d.scripts || {}))
})
-
- function next () {
- if (!installed || !scripts) return
-
- cb(null, scripts.concat(installed))
- }
}
function runScript (args, cb) {
diff --git a/deps/npm/lib/unpublish.js b/deps/npm/lib/unpublish.js
index da03b0dcc86948..111f27aa2d2997 100644
--- a/deps/npm/lib/unpublish.js
+++ b/deps/npm/lib/unpublish.js
@@ -2,11 +2,12 @@
module.exports = unpublish
var log = require("npmlog")
- , npm = require("./npm.js")
- , readJson = require("read-package-json")
- , path = require("path")
- , mapToRegistry = require("./utils/map-to-registry.js")
- , npa = require("npm-package-arg")
+var npm = require("./npm.js")
+var readJson = require("read-package-json")
+var path = require("path")
+var mapToRegistry = require("./utils/map-to-registry.js")
+var npa = require("npm-package-arg")
+var getPublishConfig = require("./utils/get-publish-config.js")
unpublish.usage = "npm unpublish [@]"
@@ -71,19 +72,29 @@ function unpublish (args, cb) {
return readJson(cwdJson, function (er, data) {
if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
if (er) return cb("Usage:\n" + unpublish.usage)
- gotProject(data.name, data.version, cb)
+ log.verbose('unpublish', data)
+ gotProject(data.name, data.version, data.publishConfig, cb)
})
}
return gotProject(project, version, cb)
}
-function gotProject (project, version, cb_) {
+function gotProject (project, version, publishConfig, cb_) {
+ if (typeof cb_ !== 'function') {
+ cb_ = publishConfig
+ publishConfig = null
+ }
+
function cb (er) {
if (er) return cb_(er)
console.log("- " + project + (version ? "@" + version : ""))
cb_()
}
+ var mappedConfig = getPublishConfig(publishConfig, npm.config, npm.registry)
+ var config = mappedConfig.config
+ var registry = mappedConfig.client
+
// remove from the cache first
npm.commands.cache(["clean", project, version], function (er) {
if (er) {
@@ -91,14 +102,14 @@ function gotProject (project, version, cb_) {
return cb(er)
}
- mapToRegistry(project, npm.config, function (er, uri, auth) {
+ mapToRegistry(project, config, function (er, uri, auth) {
if (er) return cb(er)
var params = {
- version : version,
- auth : auth
+ version: version,
+ auth: auth
}
- npm.registry.unpublish(uri, params, cb)
+ registry.unpublish(uri, params, cb)
})
})
}
diff --git a/deps/npm/lib/utils/get-publish-config.js b/deps/npm/lib/utils/get-publish-config.js
new file mode 100644
index 00000000000000..dcbb7b9c0c7bfc
--- /dev/null
+++ b/deps/npm/lib/utils/get-publish-config.js
@@ -0,0 +1,25 @@
+var Conf = require('../config/core.js').Conf
+var CachingRegClient = require('../cache/caching-client.js')
+var log = require('npmlog')
+
+module.exports = getPublishConfig
+
+function getPublishConfig (publishConfig, defaultConfig, defaultClient) {
+ var config = defaultConfig
+ var client = defaultClient
+ log.verbose('getPublishConfig', publishConfig)
+ if (publishConfig) {
+ config = new Conf(defaultConfig)
+ config.save = defaultConfig.save.bind(defaultConfig)
+
+ // don't modify the actual publishConfig object, in case we have
+ // to set a login token or some other data.
+ config.unshift(Object.keys(publishConfig).reduce(function (s, k) {
+ s[k] = publishConfig[k]
+ return s
+ }, {}))
+ client = new CachingRegClient(config)
+ }
+
+ return { config: config, client: client }
+}
diff --git a/deps/npm/lib/version.js b/deps/npm/lib/version.js
index d4af41be5727d2..294cfd620be12a 100644
--- a/deps/npm/lib/version.js
+++ b/deps/npm/lib/version.js
@@ -155,7 +155,7 @@ function commit (version, hasShrinkwrap, cb) {
git.chainableExec([ "add", "package.json" ], options),
hasShrinkwrap && git.chainableExec([ "add", "npm-shrinkwrap.json" ] , options),
git.chainableExec([ "commit", "-m", message ], options),
- git.chainableExec([ "tag", "v" + version, flag, message ], options)
+ git.chainableExec([ "tag", npm.config.get("tag-version-prefix") + version, flag, message ], options)
],
cb
)
diff --git a/deps/npm/man/man1/npm-README.1 b/deps/npm/man/man1/npm-README.1
index 3e34011cf13f51..09ffc12b2fa5cc 100644
--- a/deps/npm/man/man1/npm-README.1
+++ b/deps/npm/man/man1/npm-README.1
@@ -1,4 +1,4 @@
-.TH "NPM" "1" "April 2015" "" ""
+.TH "NPM" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm\fR \- a JavaScript package manager
.P
diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1
index 68c3233b31b587..9c4f414cfd66cb 100644
--- a/deps/npm/man/man1/npm-access.1
+++ b/deps/npm/man/man1/npm-access.1
@@ -1,4 +1,4 @@
-.TH "NPM\-ACCESS" "1" "April 2015" "" ""
+.TH "NPM\-ACCESS" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-access\fR \- Set access level on published packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1
index ba2248a2b13571..74861e5b5960e1 100644
--- a/deps/npm/man/man1/npm-adduser.1
+++ b/deps/npm/man/man1/npm-adduser.1
@@ -1,4 +1,4 @@
-.TH "NPM\-ADDUSER" "1" "April 2015" "" ""
+.TH "NPM\-ADDUSER" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-adduser\fR \- Add a registry user account
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-bin.1 b/deps/npm/man/man1/npm-bin.1
index 830eae6e314d7f..d14219f1b161a6 100644
--- a/deps/npm/man/man1/npm-bin.1
+++ b/deps/npm/man/man1/npm-bin.1
@@ -1,4 +1,4 @@
-.TH "NPM\-BIN" "1" "April 2015" "" ""
+.TH "NPM\-BIN" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-bin\fR \- Display npm bin folder
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1
index 05fdcb1cf18a97..8623d5630e6e56 100644
--- a/deps/npm/man/man1/npm-bugs.1
+++ b/deps/npm/man/man1/npm-bugs.1
@@ -1,4 +1,4 @@
-.TH "NPM\-BUGS" "1" "April 2015" "" ""
+.TH "NPM\-BUGS" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-bugs\fR \- Bugs for a package in a web browser maybe
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-build.1 b/deps/npm/man/man1/npm-build.1
index f40c185af0e128..23c8b1d4076679 100644
--- a/deps/npm/man/man1/npm-build.1
+++ b/deps/npm/man/man1/npm-build.1
@@ -1,4 +1,4 @@
-.TH "NPM\-BUILD" "1" "April 2015" "" ""
+.TH "NPM\-BUILD" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-build\fR \- Build a package
.SH SYNOPSIS
@@ -18,7 +18,14 @@ A folder containing a \fBpackage\.json\fR file in its root\.
.P
This is the plumbing command called by \fBnpm link\fR and \fBnpm install\fR\|\.
.P
-It should generally not be called directly\.
+It should generally be called during installation, but if you need to run it
+directly, run:
+.P
+.RS 2
+.nf
+npm run\-script build
+.fi
+.RE
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-bundle.1 b/deps/npm/man/man1/npm-bundle.1
index b5dad2f4e6fd45..df5d456faab669 100644
--- a/deps/npm/man/man1/npm-bundle.1
+++ b/deps/npm/man/man1/npm-bundle.1
@@ -1,4 +1,4 @@
-.TH "NPM\-BUNDLE" "1" "April 2015" "" ""
+.TH "NPM\-BUNDLE" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-bundle\fR \- REMOVED
.SH DESCRIPTION
diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1
index 84d952c1939694..f6857ee04c020c 100644
--- a/deps/npm/man/man1/npm-cache.1
+++ b/deps/npm/man/man1/npm-cache.1
@@ -1,4 +1,4 @@
-.TH "NPM\-CACHE" "1" "April 2015" "" ""
+.TH "NPM\-CACHE" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-cache\fR \- Manipulates packages cache
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1
index 3dfa1bd8e20d1c..f06e86231d958e 100644
--- a/deps/npm/man/man1/npm-completion.1
+++ b/deps/npm/man/man1/npm-completion.1
@@ -1,4 +1,4 @@
-.TH "NPM\-COMPLETION" "1" "April 2015" "" ""
+.TH "NPM\-COMPLETION" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-completion\fR \- Tab Completion for npm
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1
index 8f6ff03fb57be2..e940055b084ddc 100644
--- a/deps/npm/man/man1/npm-config.1
+++ b/deps/npm/man/man1/npm-config.1
@@ -1,4 +1,4 @@
-.TH "NPM\-CONFIG" "1" "April 2015" "" ""
+.TH "NPM\-CONFIG" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-config\fR \- Manage the npm configuration files
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1
index ab4ad6947569d9..810241f2dcd11d 100644
--- a/deps/npm/man/man1/npm-dedupe.1
+++ b/deps/npm/man/man1/npm-dedupe.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DEDUPE" "1" "April 2015" "" ""
+.TH "NPM\-DEDUPE" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-dedupe\fR \- Reduce duplication
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1
index d9b8c40831cc6e..150c1fa3dece9b 100644
--- a/deps/npm/man/man1/npm-deprecate.1
+++ b/deps/npm/man/man1/npm-deprecate.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DEPRECATE" "1" "April 2015" "" ""
+.TH "NPM\-DEPRECATE" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-deprecate\fR \- Deprecate a version of a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1
index 568b6aeccdde44..6195797a2aa5d6 100644
--- a/deps/npm/man/man1/npm-dist-tag.1
+++ b/deps/npm/man/man1/npm-dist-tag.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DIST\-TAG" "1" "April 2015" "" ""
+.TH "NPM\-DIST\-TAG" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-dist-tag\fR \- Modify package distribution tags
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1
index fc3c6dff56c655..9a7cd896f39584 100644
--- a/deps/npm/man/man1/npm-docs.1
+++ b/deps/npm/man/man1/npm-docs.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DOCS" "1" "April 2015" "" ""
+.TH "NPM\-DOCS" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-docs\fR \- Docs for a package in a web browser maybe
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1
index c8aad7e4108a20..f00673c0be87ff 100644
--- a/deps/npm/man/man1/npm-edit.1
+++ b/deps/npm/man/man1/npm-edit.1
@@ -1,4 +1,4 @@
-.TH "NPM\-EDIT" "1" "April 2015" "" ""
+.TH "NPM\-EDIT" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-edit\fR \- Edit an installed package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1
index 318075d38f297e..7d80c0542f6e8e 100644
--- a/deps/npm/man/man1/npm-explore.1
+++ b/deps/npm/man/man1/npm-explore.1
@@ -1,4 +1,4 @@
-.TH "NPM\-EXPLORE" "1" "April 2015" "" ""
+.TH "NPM\-EXPLORE" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-explore\fR \- Browse an installed package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1
index 80bb25f47217c7..5ed69ca812fb27 100644
--- a/deps/npm/man/man1/npm-help-search.1
+++ b/deps/npm/man/man1/npm-help-search.1
@@ -1,4 +1,4 @@
-.TH "NPM\-HELP\-SEARCH" "1" "April 2015" "" ""
+.TH "NPM\-HELP\-SEARCH" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-help-search\fR \- Search npm help documentation
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1
index 45533fb6d13340..dcfca8009e2889 100644
--- a/deps/npm/man/man1/npm-help.1
+++ b/deps/npm/man/man1/npm-help.1
@@ -1,4 +1,4 @@
-.TH "NPM\-HELP" "1" "April 2015" "" ""
+.TH "NPM\-HELP" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-help\fR \- Get help on npm
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1
index 112e727695a2b2..7adb6e5e16682b 100644
--- a/deps/npm/man/man1/npm-init.1
+++ b/deps/npm/man/man1/npm-init.1
@@ -1,4 +1,4 @@
-.TH "NPM\-INIT" "1" "April 2015" "" ""
+.TH "NPM\-INIT" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-init\fR \- Interactively create a package\.json file
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1
index c27de4cadc2f6d..5e9345cfd926b8 100644
--- a/deps/npm/man/man1/npm-install.1
+++ b/deps/npm/man/man1/npm-install.1
@@ -1,4 +1,4 @@
-.TH "NPM\-INSTALL" "1" "April 2015" "" ""
+.TH "NPM\-INSTALL" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-install\fR \- Install a package
.SH SYNOPSIS
@@ -181,33 +181,48 @@ fetch the package by name if it is not valid\.
.fi
.RE
.IP \(bu 2
-\fBnpm install /\fR:
- Install the package at \fBhttps://github\.com/githubname/githubrepo\fR by
- attempting to clone it using \fBgit\fR\|\.
- Example:
+\fBnpm install \fR:
+ Install a package by cloning a git remote url\. The format of the git
+ url is:
.P
.RS 2
.nf
- npm install mygithubuser/myproject
+ ://[[:]@][:][:/][#]
+.fi
+.RE
+ \fB\fR is one of \fBgit\fR, \fBgit+ssh\fR, \fBgit+http\fR, or
+ \fBgit+https\fR\|\. If no \fB\fR is specified, then \fBmaster\fR is
+ used\.
+ Examples:
+.P
+.RS 2
+.nf
+ git+ssh://git@github\.com:npm/npm\.git#v1\.0\.27
+ git+https://isaacs@github\.com/npm/npm\.git
+ git://github\.com/npm/npm\.git#v1\.0\.27
.fi
.RE
- To reference a package in a generic git repo (not on GitHub), see git remote
- urls below\.
.IP \(bu 2
-\fBnpm install github:/\fR:
- The same as the above, but explicitly marked as a GitHub dependency\.
- Example:
+\fBnpm install /[#]\fR:
+.IP \(bu 2
+\fBnpm install github:/[#]\fR:
+ Install the package at \fBhttps://github\.com/githubname/githubrepo\fR by
+ attempting to clone it using \fBgit\fR\|\.
+ If you don't specify a \fIcommit\-ish\fR then \fBmaster\fR will be used\.
+ Examples:
.P
.RS 2
.nf
- npm install github:npm/npm
+ npm install mygithubuser/myproject
+ npm install github:mygithubuser/myproject
.fi
.RE
.IP \(bu 2
-\fBnpm install gist:[/]\fR:
+\fBnpm install gist:[/][#]\fR:
Install the package at \fBhttps://gist\.github\.com/gistID\fR by attempting to
clone it using \fBgit\fR\|\. The GitHub username associated with the gist is
optional and will not be saved in \fBpackage\.json\fR if \fB\-\-save\fR is used\.
+ If you don't specify a \fIcommit\-ish\fR then \fBmaster\fR will be used\.
Example:
.P
.RS 2
@@ -216,9 +231,10 @@ fetch the package by name if it is not valid\.
.fi
.RE
.IP \(bu 2
-\fBnpm install bitbucket:/\fR:
+\fBnpm install bitbucket:/[#]\fR:
Install the package at \fBhttps://bitbucket\.org/bitbucketname/bitbucketrepo\fR
by attempting to clone it using \fBgit\fR\|\.
+ If you don't specify a \fIcommit\-ish\fR then \fBmaster\fR will be used\.
Example:
.P
.RS 2
@@ -227,9 +243,10 @@ fetch the package by name if it is not valid\.
.fi
.RE
.IP \(bu 2
-\fBnpm install gitlab:/\fR:
+\fBnpm install gitlab:/[#]\fR:
Install the package at \fBhttps://gitlab\.com/gitlabname/gitlabrepo\fR
by attempting to clone it using \fBgit\fR\|\.
+ If you don't specify a \fIcommit\-ish\fR then \fBmaster\fR will be used\.
Example:
.P
.RS 2
@@ -237,28 +254,6 @@ fetch the package by name if it is not valid\.
npm install gitlab:mygitlabuser/myproject
.fi
.RE
-.IP \(bu 2
-\fBnpm install \fR:
- Install a package by cloning a git remote url\. The format of the git
- url is:
-.P
-.RS 2
-.nf
- ://[[:]@][#]
-.fi
-.RE
- \fB\fR is one of \fBgit\fR, \fBgit+ssh\fR, \fBgit+http\fR, or
- \fBgit+https\fR\|\. If no \fB\fR is specified, then \fBmaster\fR is
- used\.
- Examples:
-.P
-.RS 2
-.nf
- git+ssh://git@github\.com:npm/npm\.git#v1\.0\.27
- git+https://isaacs@github\.com/npm/npm\.git
- git://github\.com/npm/npm\.git#v1\.0\.27
-.fi
-.RE
.RE
.P
diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1
index 7a0c1fc183d35a..6663bcc0a82843 100644
--- a/deps/npm/man/man1/npm-link.1
+++ b/deps/npm/man/man1/npm-link.1
@@ -1,4 +1,4 @@
-.TH "NPM\-LINK" "1" "April 2015" "" ""
+.TH "NPM\-LINK" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-link\fR \- Symlink a package folder
.SH SYNOPSIS
@@ -46,7 +46,7 @@ npm link redis # link\-install the package
.P
Now, any changes to ~/projects/node\-redis will be reflected in
~/projects/node\-bloggy/node_modules/node\-redis/\. Note that the link should
-be to the package name, not the directory name for that package\.
+be to the package name, not the directory name for that package\.
.P
You may also shortcut the two steps in one\. For example, to do the
above use\-case in a shorter way:
diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1
index 2f3f82465234a0..1e02887dc5b8ef 100644
--- a/deps/npm/man/man1/npm-logout.1
+++ b/deps/npm/man/man1/npm-logout.1
@@ -1,4 +1,4 @@
-.TH "NPM\-LOGOUT" "1" "April 2015" "" ""
+.TH "NPM\-LOGOUT" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-logout\fR \- Log out of the registry
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1
index 35c5ae46d9634f..32f13d8ef5447c 100644
--- a/deps/npm/man/man1/npm-ls.1
+++ b/deps/npm/man/man1/npm-ls.1
@@ -1,4 +1,4 @@
-.TH "NPM\-LS" "1" "April 2015" "" ""
+.TH "NPM\-LS" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-ls\fR \- List installed packages
.SH SYNOPSIS
@@ -23,7 +23,7 @@ For example, running \fBnpm ls promzard\fR in npm's source tree will show:
.P
.RS 2
.nf
-npm@2.8.3 /path/to/npm
+npm@2.10.1 /path/to/npm
└─┬ init\-package\-json@0\.0\.4
└── promzard@0\.1\.5
.fi
diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1
index 87514f4ef5c24a..59f530e86885a5 100644
--- a/deps/npm/man/man1/npm-outdated.1
+++ b/deps/npm/man/man1/npm-outdated.1
@@ -1,4 +1,4 @@
-.TH "NPM\-OUTDATED" "1" "April 2015" "" ""
+.TH "NPM\-OUTDATED" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-outdated\fR \- Check for outdated packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1
index f60a8e53984a51..a7ef4537bc23d6 100644
--- a/deps/npm/man/man1/npm-owner.1
+++ b/deps/npm/man/man1/npm-owner.1
@@ -1,4 +1,4 @@
-.TH "NPM\-OWNER" "1" "April 2015" "" ""
+.TH "NPM\-OWNER" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-owner\fR \- Manage package owners
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1
index 444e2b5d015748..95136e484c8e17 100644
--- a/deps/npm/man/man1/npm-pack.1
+++ b/deps/npm/man/man1/npm-pack.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PACK" "1" "April 2015" "" ""
+.TH "NPM\-PACK" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-pack\fR \- Create a tarball from a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1
index e8fd211416eb99..ceffef86c4284a 100644
--- a/deps/npm/man/man1/npm-prefix.1
+++ b/deps/npm/man/man1/npm-prefix.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PREFIX" "1" "April 2015" "" ""
+.TH "NPM\-PREFIX" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-prefix\fR \- Display prefix
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1
index db458cc5235b6b..9a3b728245936c 100644
--- a/deps/npm/man/man1/npm-prune.1
+++ b/deps/npm/man/man1/npm-prune.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PRUNE" "1" "April 2015" "" ""
+.TH "NPM\-PRUNE" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-prune\fR \- Remove extraneous packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1
index 496e287a4ae3bf..b83ecbb40ae729 100644
--- a/deps/npm/man/man1/npm-publish.1
+++ b/deps/npm/man/man1/npm-publish.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PUBLISH" "1" "April 2015" "" ""
+.TH "NPM\-PUBLISH" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-publish\fR \- Publish a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1
index 0aa05dbecb6fce..38a735eace00a8 100644
--- a/deps/npm/man/man1/npm-rebuild.1
+++ b/deps/npm/man/man1/npm-rebuild.1
@@ -1,4 +1,4 @@
-.TH "NPM\-REBUILD" "1" "April 2015" "" ""
+.TH "NPM\-REBUILD" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-rebuild\fR \- Rebuild a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1
index 06e4cff9cd5862..8c438d174f3c59 100644
--- a/deps/npm/man/man1/npm-repo.1
+++ b/deps/npm/man/man1/npm-repo.1
@@ -1,4 +1,4 @@
-.TH "NPM\-REPO" "1" "April 2015" "" ""
+.TH "NPM\-REPO" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-repo\fR \- Open package repository page in the browser
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1
index 0213b47f4fe19f..8b06be14be3f31 100644
--- a/deps/npm/man/man1/npm-restart.1
+++ b/deps/npm/man/man1/npm-restart.1
@@ -1,4 +1,4 @@
-.TH "NPM\-RESTART" "1" "April 2015" "" ""
+.TH "NPM\-RESTART" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-restart\fR \- Restart a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-rm.1 b/deps/npm/man/man1/npm-rm.1
index 4fa2d9e32b13de..0268cabd0518b2 100644
--- a/deps/npm/man/man1/npm-rm.1
+++ b/deps/npm/man/man1/npm-rm.1
@@ -1,4 +1,4 @@
-.TH "NPM\-RM" "1" "April 2015" "" ""
+.TH "NPM\-RM" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-rm\fR \- Remove a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1
index 2828c50eb9ce0a..5ff7d8e80b3445 100644
--- a/deps/npm/man/man1/npm-root.1
+++ b/deps/npm/man/man1/npm-root.1
@@ -1,4 +1,4 @@
-.TH "NPM\-ROOT" "1" "April 2015" "" ""
+.TH "NPM\-ROOT" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-root\fR \- Display npm root
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1
index aa1ba4e56fe6a8..6a27a41a5fe0c0 100644
--- a/deps/npm/man/man1/npm-run-script.1
+++ b/deps/npm/man/man1/npm-run-script.1
@@ -1,4 +1,4 @@
-.TH "NPM\-RUN\-SCRIPT" "1" "April 2015" "" ""
+.TH "NPM\-RUN\-SCRIPT" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-run-script\fR \- Run arbitrary package scripts
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1
index f815c5116f9014..5ae42be3151bbd 100644
--- a/deps/npm/man/man1/npm-search.1
+++ b/deps/npm/man/man1/npm-search.1
@@ -1,4 +1,4 @@
-.TH "NPM\-SEARCH" "1" "April 2015" "" ""
+.TH "NPM\-SEARCH" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-search\fR \- Search for packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1
index 40eb480fc795cf..98440e02472f8a 100644
--- a/deps/npm/man/man1/npm-shrinkwrap.1
+++ b/deps/npm/man/man1/npm-shrinkwrap.1
@@ -1,4 +1,4 @@
-.TH "NPM\-SHRINKWRAP" "1" "April 2015" "" ""
+.TH "NPM\-SHRINKWRAP" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-shrinkwrap\fR \- Lock down dependency versions
.SH SYNOPSIS
@@ -12,10 +12,10 @@ npm shrinkwrap
.P
This command locks down the versions of a package's dependencies so
that you can control exactly which versions of each dependency will be
-used when your package is installed\. The "package\.json" file is still
-required if you want to use "npm install"\.
+used when your package is installed\. The \fBpackage\.json\fR file is still
+required if you want to use \fBnpm install\fR\|\.
.P
-By default, "npm install" recursively installs the target's
+By default, \fBnpm install\fR recursively installs the target's
dependencies (as specified in package\.json), choosing the latest
available version that satisfies the dependency's semver pattern\. In
some situations, particularly when shipping software where each change
@@ -68,7 +68,7 @@ and package C:
.RE
.P
If these are the only versions of A, B, and C available in the
-registry, then a normal "npm install A" will install:
+registry, then a normal \fBnpm install A\fR will install:
.P
.RS 2
.nf
@@ -78,7 +78,7 @@ A@0\.1\.0
.fi
.RE
.P
-However, if B@0\.0\.2 is published, then a fresh "npm install A" will
+However, if B@0\.0\.2 is published, then a fresh \fBnpm install A\fR will
install:
.P
.RS 2
@@ -127,7 +127,7 @@ This generates npm\-shrinkwrap\.json, which will look something like this:
.RE
.P
The shrinkwrap command has locked down the dependencies based on
-what's currently installed in node_modules\. When "npm install"
+what's currently installed in node_modules\. When \fBnpm install\fR
installs a package with a npm\-shrinkwrap\.json file in the package
root, the shrinkwrap file (rather than package\.json files) completely
drives the installation of that package and all of its dependencies
@@ -138,19 +138,19 @@ files\.
.SS Using shrinkwrapped packages
.P
Using a shrinkwrapped package is no different than using any other
-package: you can "npm install" it by hand, or add a dependency to your
-package\.json file and "npm install" it\.
+package: you can \fBnpm install\fR it by hand, or add a dependency to your
+package\.json file and \fBnpm install\fR it\.
.SS Building shrinkwrapped packages
.P
To shrinkwrap an existing package:
.RS 0
.IP 1. 3
-Run "npm install" in the package root to install the current
+Run \fBnpm install\fR in the package root to install the current
versions of all dependencies\.
.IP 2. 3
Validate that the package works as expected with these versions\.
.IP 3. 3
-Run "npm shrinkwrap", add npm\-shrinkwrap\.json to git, and publish
+Run \fBnpm shrinkwrap\fR, add npm\-shrinkwrap\.json to git, and publish
your package\.
.RE
@@ -158,10 +158,10 @@ your package\.
To add or update a dependency in a shrinkwrapped package:
.RS 0
.IP 1. 3
-Run "npm install" in the package root to install the current
+Run \fBnpm install\fR in the package root to install the current
versions of all dependencies\.
.IP 2. 3
-Add or update dependencies\. "npm install" each new or updated
+Add or update dependencies\. \fBnpm install\fR each new or updated
package individually and then update package\.json\. Note that they
must be explicitly named in order to be installed: running \fBnpm
install\fR with no arguments will merely reproduce the existing
@@ -170,7 +170,7 @@ shrinkwrap\.
Validate that the package works as expected with the new
dependencies\.
.IP 4. 3
-Run "npm shrinkwrap", commit the new npm\-shrinkwrap\.json, and
+Run \fBnpm shrinkwrap\fR, commit the new npm\-shrinkwrap\.json, and
publish your package\.
.RE
@@ -180,13 +180,13 @@ available\.
.SS Other Notes
.P
A shrinkwrap file must be consistent with the package's package\.json
-file\. "npm shrinkwrap" will fail if required dependencies are not
+file\. \fBnpm shrinkwrap\fR will fail if required dependencies are not
already installed, since that would result in a shrinkwrap that
wouldn't actually work\. Similarly, the command will fail if there are
extraneous packages (not referenced by package\.json), since that would
indicate that package\.json is not correct\.
.P
-Since "npm shrinkwrap" is intended to lock down your dependencies for
+Since \fBnpm shrinkwrap\fR is intended to lock down your dependencies for
production use, \fBdevDependencies\fR will not be included unless you
explicitly set the \fB\-\-dev\fR flag when you run \fBnpm shrinkwrap\fR\|\. If
installed \fBdevDependencies\fR are excluded, then npm will print a
diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1
index a942d6da1bc73f..2a31a261448293 100644
--- a/deps/npm/man/man1/npm-star.1
+++ b/deps/npm/man/man1/npm-star.1
@@ -1,4 +1,4 @@
-.TH "NPM\-STAR" "1" "April 2015" "" ""
+.TH "NPM\-STAR" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-star\fR \- Mark your favorite packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1
index 24ead178b80837..a861db225c121f 100644
--- a/deps/npm/man/man1/npm-stars.1
+++ b/deps/npm/man/man1/npm-stars.1
@@ -1,4 +1,4 @@
-.TH "NPM\-STARS" "1" "April 2015" "" ""
+.TH "NPM\-STARS" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-stars\fR \- View packages marked as favorites
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1
index d14bfa1485f079..0875d0462dfed2 100644
--- a/deps/npm/man/man1/npm-start.1
+++ b/deps/npm/man/man1/npm-start.1
@@ -1,4 +1,4 @@
-.TH "NPM\-START" "1" "April 2015" "" ""
+.TH "NPM\-START" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-start\fR \- Start a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1
index 4de2ed3a58ac09..538aaad9f47e89 100644
--- a/deps/npm/man/man1/npm-stop.1
+++ b/deps/npm/man/man1/npm-stop.1
@@ -1,4 +1,4 @@
-.TH "NPM\-STOP" "1" "April 2015" "" ""
+.TH "NPM\-STOP" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-stop\fR \- Stop a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-submodule.1 b/deps/npm/man/man1/npm-submodule.1
deleted file mode 100644
index 71853335c59984..00000000000000
--- a/deps/npm/man/man1/npm-submodule.1
+++ /dev/null
@@ -1,42 +0,0 @@
-.\" Generated with Ronnjs 0.3.8
-.\" http://github.com/kapouer/ronnjs/
-.
-.TH "NPM\-SUBMODULE" "1" "September 2014" "" ""
-.
-.SH "NAME"
-\fBnpm-submodule\fR \-\- Add a package as a git submodule
-.
-.SH "SYNOPSIS"
-.
-.nf
-npm submodule
-.
-.fi
-.
-.SH "DESCRIPTION"
-If the specified package has a git repository url in its package\.json
-description, then this command will add it as a git submodule at \fBnode_modules/\fR\|\.
-.
-.P
-This is a convenience only\. From then on, it\'s up to you to manage
-updates by using the appropriate git commands\. npm will stubbornly
-refuse to update, modify, or remove anything with a \fB\|\.git\fR subfolder
-in it\.
-.
-.P
-This command also does not install missing dependencies, if the package
-does not include them in its git repository\. If \fBnpm ls\fR reports that
-things are missing, you can either install, link, or submodule them yourself,
-or you can do \fBnpm explore \-\- npm install\fR to install the
-dependencies into the submodule folder\.
-.
-.SH "SEE ALSO"
-.
-.IP "\(bu" 4
-npm help 5 package\.json
-.
-.IP "\(bu" 4
-git help submodule
-.
-.IP "" 0
-
diff --git a/deps/npm/man/man1/npm-tag.1 b/deps/npm/man/man1/npm-tag.1
index 6c31801c4906c1..ecd377742864cc 100644
--- a/deps/npm/man/man1/npm-tag.1
+++ b/deps/npm/man/man1/npm-tag.1
@@ -1,4 +1,4 @@
-.TH "NPM\-TAG" "1" "April 2015" "" ""
+.TH "NPM\-TAG" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-tag\fR \- Tag a published version
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1
index c447088e0ace99..716365dfeaf375 100644
--- a/deps/npm/man/man1/npm-test.1
+++ b/deps/npm/man/man1/npm-test.1
@@ -1,4 +1,4 @@
-.TH "NPM\-TEST" "1" "April 2015" "" ""
+.TH "NPM\-TEST" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-test\fR \- Test a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1
index c81f251ef2a6aa..0544bef5f9f3bb 100644
--- a/deps/npm/man/man1/npm-uninstall.1
+++ b/deps/npm/man/man1/npm-uninstall.1
@@ -1,4 +1,4 @@
-.TH "NPM\-RM" "1" "April 2015" "" ""
+.TH "NPM\-RM" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-rm\fR \- Remove a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1
index 914d02aa5c4337..cd2782c62ab1dc 100644
--- a/deps/npm/man/man1/npm-unpublish.1
+++ b/deps/npm/man/man1/npm-unpublish.1
@@ -1,4 +1,4 @@
-.TH "NPM\-UNPUBLISH" "1" "April 2015" "" ""
+.TH "NPM\-UNPUBLISH" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-unpublish\fR \- Remove a package from the registry
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1
index dd5a4efd53d15b..2dead0b342ecf5 100644
--- a/deps/npm/man/man1/npm-update.1
+++ b/deps/npm/man/man1/npm-update.1
@@ -1,4 +1,4 @@
-.TH "NPM\-UPDATE" "1" "April 2015" "" ""
+.TH "NPM\-UPDATE" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-update\fR \- Update a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1
index 9e8d74c62feaaf..8fbfd4d07180a5 100644
--- a/deps/npm/man/man1/npm-version.1
+++ b/deps/npm/man/man1/npm-version.1
@@ -1,4 +1,4 @@
-.TH "NPM\-VERSION" "1" "April 2015" "" ""
+.TH "NPM\-VERSION" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-version\fR \- Bump a package version
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1
index 2d659ebc398664..db440ee6a2a458 100644
--- a/deps/npm/man/man1/npm-view.1
+++ b/deps/npm/man/man1/npm-view.1
@@ -1,4 +1,4 @@
-.TH "NPM\-VIEW" "1" "April 2015" "" ""
+.TH "NPM\-VIEW" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-view\fR \- View registry info
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1
index b97bc0c449b775..02bb07be8ecdcb 100644
--- a/deps/npm/man/man1/npm-whoami.1
+++ b/deps/npm/man/man1/npm-whoami.1
@@ -1,4 +1,4 @@
-.TH "NPM\-WHOAMI" "1" "April 2015" "" ""
+.TH "NPM\-WHOAMI" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-whoami\fR \- Display npm username
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1
index e11973239ee827..9305fef29d9df1 100644
--- a/deps/npm/man/man1/npm.1
+++ b/deps/npm/man/man1/npm.1
@@ -1,4 +1,4 @@
-.TH "NPM" "1" "April 2015" "" ""
+.TH "NPM" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm\fR \- javascript package manager
.SH SYNOPSIS
@@ -10,7 +10,7 @@ npm [args]
.RE
.SH VERSION
.P
-2.8.3
+2.10.1
.SH DESCRIPTION
.P
npm is the package manager for the Node JavaScript platform\. It puts
diff --git a/deps/npm/man/man3/npm-bin.3 b/deps/npm/man/man3/npm-bin.3
index 9b6f869c85f5e0..11de68684e1d88 100644
--- a/deps/npm/man/man3/npm-bin.3
+++ b/deps/npm/man/man3/npm-bin.3
@@ -1,4 +1,4 @@
-.TH "NPM\-BIN" "3" "April 2015" "" ""
+.TH "NPM\-BIN" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-bin\fR \- Display npm bin folder
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-bugs.3 b/deps/npm/man/man3/npm-bugs.3
index ab93f58e6ca2eb..8bb18b19ba2368 100644
--- a/deps/npm/man/man3/npm-bugs.3
+++ b/deps/npm/man/man3/npm-bugs.3
@@ -1,4 +1,4 @@
-.TH "NPM\-BUGS" "3" "April 2015" "" ""
+.TH "NPM\-BUGS" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-bugs\fR \- Bugs for a package in a web browser maybe
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-cache.3 b/deps/npm/man/man3/npm-cache.3
index 127555dcb079d9..cefdc1df17d40d 100644
--- a/deps/npm/man/man3/npm-cache.3
+++ b/deps/npm/man/man3/npm-cache.3
@@ -1,4 +1,4 @@
-.TH "NPM\-CACHE" "3" "April 2015" "" ""
+.TH "NPM\-CACHE" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-cache\fR \- manage the npm cache programmatically
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-commands.3 b/deps/npm/man/man3/npm-commands.3
index cde69b4c4d3023..14d0a3a791f07b 100644
--- a/deps/npm/man/man3/npm-commands.3
+++ b/deps/npm/man/man3/npm-commands.3
@@ -1,4 +1,4 @@
-.TH "NPM\-COMMANDS" "3" "April 2015" "" ""
+.TH "NPM\-COMMANDS" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-commands\fR \- npm commands
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-config.3 b/deps/npm/man/man3/npm-config.3
index c6a152d9d99743..38640e987848ad 100644
--- a/deps/npm/man/man3/npm-config.3
+++ b/deps/npm/man/man3/npm-config.3
@@ -1,4 +1,4 @@
-.TH "NPM\-CONFIG" "3" "April 2015" "" ""
+.TH "NPM\-CONFIG" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-config\fR \- Manage the npm configuration files
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-deprecate.3 b/deps/npm/man/man3/npm-deprecate.3
index 2c4d22f55ee66d..17d131a6c83d7f 100644
--- a/deps/npm/man/man3/npm-deprecate.3
+++ b/deps/npm/man/man3/npm-deprecate.3
@@ -1,4 +1,4 @@
-.TH "NPM\-DEPRECATE" "3" "April 2015" "" ""
+.TH "NPM\-DEPRECATE" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-deprecate\fR \- Deprecate a version of a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-docs.3 b/deps/npm/man/man3/npm-docs.3
index bcdfe3f705e491..2c94c5d5914b6c 100644
--- a/deps/npm/man/man3/npm-docs.3
+++ b/deps/npm/man/man3/npm-docs.3
@@ -1,4 +1,4 @@
-.TH "NPM\-DOCS" "3" "April 2015" "" ""
+.TH "NPM\-DOCS" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-docs\fR \- Docs for a package in a web browser maybe
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-edit.3 b/deps/npm/man/man3/npm-edit.3
index 5db96fd12c439a..0fa59c83000ca2 100644
--- a/deps/npm/man/man3/npm-edit.3
+++ b/deps/npm/man/man3/npm-edit.3
@@ -1,4 +1,4 @@
-.TH "NPM\-EDIT" "3" "April 2015" "" ""
+.TH "NPM\-EDIT" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-edit\fR \- Edit an installed package
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-explore.3 b/deps/npm/man/man3/npm-explore.3
index c9e1e69b32e916..1e2691946dd336 100644
--- a/deps/npm/man/man3/npm-explore.3
+++ b/deps/npm/man/man3/npm-explore.3
@@ -1,4 +1,4 @@
-.TH "NPM\-EXPLORE" "3" "April 2015" "" ""
+.TH "NPM\-EXPLORE" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-explore\fR \- Browse an installed package
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-help-search.3 b/deps/npm/man/man3/npm-help-search.3
index 42b58d3f47f986..d453c6b694431f 100644
--- a/deps/npm/man/man3/npm-help-search.3
+++ b/deps/npm/man/man3/npm-help-search.3
@@ -1,4 +1,4 @@
-.TH "NPM\-HELP\-SEARCH" "3" "April 2015" "" ""
+.TH "NPM\-HELP\-SEARCH" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-help-search\fR \- Search the help pages
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-init.3 b/deps/npm/man/man3/npm-init.3
index 1fe406f2589dec..4dc3c12e517a60 100644
--- a/deps/npm/man/man3/npm-init.3
+++ b/deps/npm/man/man3/npm-init.3
@@ -1,4 +1,4 @@
-.TH "NPM" "" "April 2015" "" ""
+.TH "NPM" "" "May 2015" "" ""
.SH "NAME"
\fBnpm\fR
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-install.3 b/deps/npm/man/man3/npm-install.3
index 7eaa388536a0fc..bc91fd6388f37d 100644
--- a/deps/npm/man/man3/npm-install.3
+++ b/deps/npm/man/man3/npm-install.3
@@ -1,4 +1,4 @@
-.TH "NPM\-INSTALL" "3" "April 2015" "" ""
+.TH "NPM\-INSTALL" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-install\fR \- install a package programmatically
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-link.3 b/deps/npm/man/man3/npm-link.3
index 050a42b13f3f42..de51faa5579c5c 100644
--- a/deps/npm/man/man3/npm-link.3
+++ b/deps/npm/man/man3/npm-link.3
@@ -1,4 +1,4 @@
-.TH "NPM\-LINK" "3" "April 2015" "" ""
+.TH "NPM\-LINK" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-link\fR \- Symlink a package folder
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-load.3 b/deps/npm/man/man3/npm-load.3
index aa344d025eadbf..f1bc1a867d2628 100644
--- a/deps/npm/man/man3/npm-load.3
+++ b/deps/npm/man/man3/npm-load.3
@@ -1,4 +1,4 @@
-.TH "NPM\-LOAD" "3" "April 2015" "" ""
+.TH "NPM\-LOAD" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-load\fR \- Load config settings
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-ls.3 b/deps/npm/man/man3/npm-ls.3
index df7c3c77ca2eac..31f863d82347a5 100644
--- a/deps/npm/man/man3/npm-ls.3
+++ b/deps/npm/man/man3/npm-ls.3
@@ -1,4 +1,4 @@
-.TH "NPM\-LS" "3" "April 2015" "" ""
+.TH "NPM\-LS" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-ls\fR \- List installed packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-outdated.3 b/deps/npm/man/man3/npm-outdated.3
index 83409223f1fe35..797d47209ccd77 100644
--- a/deps/npm/man/man3/npm-outdated.3
+++ b/deps/npm/man/man3/npm-outdated.3
@@ -1,4 +1,4 @@
-.TH "NPM\-OUTDATED" "3" "April 2015" "" ""
+.TH "NPM\-OUTDATED" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-outdated\fR \- Check for outdated packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-owner.3 b/deps/npm/man/man3/npm-owner.3
index 2508a312f4e689..1c3e979074da98 100644
--- a/deps/npm/man/man3/npm-owner.3
+++ b/deps/npm/man/man3/npm-owner.3
@@ -1,4 +1,4 @@
-.TH "NPM\-OWNER" "3" "April 2015" "" ""
+.TH "NPM\-OWNER" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-owner\fR \- Manage package owners
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-pack.3 b/deps/npm/man/man3/npm-pack.3
index 4a2401136fe5b2..ac959e98b7c418 100644
--- a/deps/npm/man/man3/npm-pack.3
+++ b/deps/npm/man/man3/npm-pack.3
@@ -1,4 +1,4 @@
-.TH "NPM\-PACK" "3" "April 2015" "" ""
+.TH "NPM\-PACK" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-pack\fR \- Create a tarball from a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-prefix.3 b/deps/npm/man/man3/npm-prefix.3
index e0ceb93b3f33f2..c2fca3049fcf69 100644
--- a/deps/npm/man/man3/npm-prefix.3
+++ b/deps/npm/man/man3/npm-prefix.3
@@ -1,4 +1,4 @@
-.TH "NPM\-PREFIX" "3" "April 2015" "" ""
+.TH "NPM\-PREFIX" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-prefix\fR \- Display prefix
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-prune.3 b/deps/npm/man/man3/npm-prune.3
index 024e933b6f5bea..adbef5ad0828bf 100644
--- a/deps/npm/man/man3/npm-prune.3
+++ b/deps/npm/man/man3/npm-prune.3
@@ -1,4 +1,4 @@
-.TH "NPM\-PRUNE" "3" "April 2015" "" ""
+.TH "NPM\-PRUNE" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-prune\fR \- Remove extraneous packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-publish.3 b/deps/npm/man/man3/npm-publish.3
index 2d0d0e0daec439..2b72370143515e 100644
--- a/deps/npm/man/man3/npm-publish.3
+++ b/deps/npm/man/man3/npm-publish.3
@@ -1,4 +1,4 @@
-.TH "NPM\-PUBLISH" "3" "April 2015" "" ""
+.TH "NPM\-PUBLISH" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-publish\fR \- Publish a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-rebuild.3 b/deps/npm/man/man3/npm-rebuild.3
index c94e1d8eaab7f2..707b5d567d777f 100644
--- a/deps/npm/man/man3/npm-rebuild.3
+++ b/deps/npm/man/man3/npm-rebuild.3
@@ -1,4 +1,4 @@
-.TH "NPM\-REBUILD" "3" "April 2015" "" ""
+.TH "NPM\-REBUILD" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-rebuild\fR \- Rebuild a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-repo.3 b/deps/npm/man/man3/npm-repo.3
index 4db47172f9d74f..ff62f017f50a7c 100644
--- a/deps/npm/man/man3/npm-repo.3
+++ b/deps/npm/man/man3/npm-repo.3
@@ -1,4 +1,4 @@
-.TH "NPM\-REPO" "3" "April 2015" "" ""
+.TH "NPM\-REPO" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-repo\fR \- Open package repository page in the browser
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-restart.3 b/deps/npm/man/man3/npm-restart.3
index 876b8b9a54845f..315900e79ca549 100644
--- a/deps/npm/man/man3/npm-restart.3
+++ b/deps/npm/man/man3/npm-restart.3
@@ -1,4 +1,4 @@
-.TH "NPM\-RESTART" "3" "April 2015" "" ""
+.TH "NPM\-RESTART" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-restart\fR \- Restart a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-root.3 b/deps/npm/man/man3/npm-root.3
index f3a3e0fbd12dc6..04eab190ab548f 100644
--- a/deps/npm/man/man3/npm-root.3
+++ b/deps/npm/man/man3/npm-root.3
@@ -1,4 +1,4 @@
-.TH "NPM\-ROOT" "3" "April 2015" "" ""
+.TH "NPM\-ROOT" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-root\fR \- Display npm root
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-run-script.3 b/deps/npm/man/man3/npm-run-script.3
index 8b16b3ce77a711..7c07de1f83a26c 100644
--- a/deps/npm/man/man3/npm-run-script.3
+++ b/deps/npm/man/man3/npm-run-script.3
@@ -1,4 +1,4 @@
-.TH "NPM\-RUN\-SCRIPT" "3" "April 2015" "" ""
+.TH "NPM\-RUN\-SCRIPT" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-run-script\fR \- Run arbitrary package scripts
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-search.3 b/deps/npm/man/man3/npm-search.3
index 7698f74deb7a08..bf518ff14b6b0a 100644
--- a/deps/npm/man/man3/npm-search.3
+++ b/deps/npm/man/man3/npm-search.3
@@ -1,4 +1,4 @@
-.TH "NPM\-SEARCH" "3" "April 2015" "" ""
+.TH "NPM\-SEARCH" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-search\fR \- Search for packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-shrinkwrap.3 b/deps/npm/man/man3/npm-shrinkwrap.3
index 294f3075113edc..b95a07352bca7b 100644
--- a/deps/npm/man/man3/npm-shrinkwrap.3
+++ b/deps/npm/man/man3/npm-shrinkwrap.3
@@ -1,4 +1,4 @@
-.TH "NPM\-SHRINKWRAP" "3" "April 2015" "" ""
+.TH "NPM\-SHRINKWRAP" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-shrinkwrap\fR \- programmatically generate package shrinkwrap file
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-start.3 b/deps/npm/man/man3/npm-start.3
index f3ac79acb9850e..69b68d9fbedbee 100644
--- a/deps/npm/man/man3/npm-start.3
+++ b/deps/npm/man/man3/npm-start.3
@@ -1,4 +1,4 @@
-.TH "NPM\-START" "3" "April 2015" "" ""
+.TH "NPM\-START" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-start\fR \- Start a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-stop.3 b/deps/npm/man/man3/npm-stop.3
index 7ea9ee95e1baaf..5d1b781d99832a 100644
--- a/deps/npm/man/man3/npm-stop.3
+++ b/deps/npm/man/man3/npm-stop.3
@@ -1,4 +1,4 @@
-.TH "NPM\-STOP" "3" "April 2015" "" ""
+.TH "NPM\-STOP" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-stop\fR \- Stop a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-submodule.3 b/deps/npm/man/man3/npm-submodule.3
deleted file mode 100644
index 95739ce3b08428..00000000000000
--- a/deps/npm/man/man3/npm-submodule.3
+++ /dev/null
@@ -1,42 +0,0 @@
-.\" Generated with Ronnjs 0.3.8
-.\" http://github.com/kapouer/ronnjs/
-.
-.TH "NPM\-SUBMODULE" "3" "September 2014" "" ""
-.
-.SH "NAME"
-\fBnpm-submodule\fR \-\- Add a package as a git submodule
-.
-.SH "SYNOPSIS"
-.
-.nf
-npm\.commands\.submodule(packages, callback)
-.
-.fi
-.
-.SH "DESCRIPTION"
-For each package specified, npm will check if it has a git repository url
-in its package\.json description then add it as a git submodule at \fBnode_modules/\fR\|\.
-.
-.P
-This is a convenience only\. From then on, it\'s up to you to manage
-updates by using the appropriate git commands\. npm will stubbornly
-refuse to update, modify, or remove anything with a \fB\|\.git\fR subfolder
-in it\.
-.
-.P
-This command also does not install missing dependencies, if the package
-does not include them in its git repository\. If \fBnpm ls\fR reports that
-things are missing, you can either install, link, or submodule them yourself,
-or you can do \fBnpm explore \-\- npm install\fR to install the
-dependencies into the submodule folder\.
-.
-.SH "SEE ALSO"
-.
-.IP "\(bu" 4
-npm help json
-.
-.IP "\(bu" 4
-git help submodule
-.
-.IP "" 0
-
diff --git a/deps/npm/man/man3/npm-tag.3 b/deps/npm/man/man3/npm-tag.3
index ace3a0a8a1af14..a51c6a6596dc79 100644
--- a/deps/npm/man/man3/npm-tag.3
+++ b/deps/npm/man/man3/npm-tag.3
@@ -1,4 +1,4 @@
-.TH "NPM\-TAG" "3" "April 2015" "" ""
+.TH "NPM\-TAG" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-tag\fR \- Tag a published version
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-test.3 b/deps/npm/man/man3/npm-test.3
index d6aad659c43df8..e14d7da4331b3b 100644
--- a/deps/npm/man/man3/npm-test.3
+++ b/deps/npm/man/man3/npm-test.3
@@ -1,4 +1,4 @@
-.TH "NPM\-TEST" "3" "April 2015" "" ""
+.TH "NPM\-TEST" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-test\fR \- Test a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-uninstall.3 b/deps/npm/man/man3/npm-uninstall.3
index 7b17c8faf0430a..3a3f8ee51c77bd 100644
--- a/deps/npm/man/man3/npm-uninstall.3
+++ b/deps/npm/man/man3/npm-uninstall.3
@@ -1,4 +1,4 @@
-.TH "NPM\-UNINSTALL" "3" "April 2015" "" ""
+.TH "NPM\-UNINSTALL" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-uninstall\fR \- uninstall a package programmatically
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-unpublish.3 b/deps/npm/man/man3/npm-unpublish.3
index cab0bd3d98db8a..7fadd177776243 100644
--- a/deps/npm/man/man3/npm-unpublish.3
+++ b/deps/npm/man/man3/npm-unpublish.3
@@ -1,4 +1,4 @@
-.TH "NPM\-UNPUBLISH" "3" "April 2015" "" ""
+.TH "NPM\-UNPUBLISH" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-unpublish\fR \- Remove a package from the registry
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-update.3 b/deps/npm/man/man3/npm-update.3
index e299ee28424360..d428782f590ac6 100644
--- a/deps/npm/man/man3/npm-update.3
+++ b/deps/npm/man/man3/npm-update.3
@@ -1,4 +1,4 @@
-.TH "NPM\-UPDATE" "3" "April 2015" "" ""
+.TH "NPM\-UPDATE" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-update\fR \- Update a package
.SH SYNOPSIS
@@ -8,7 +8,7 @@
npm\.commands\.update(packages, callback)
.fi
.RE
-.TH "DESCRIPTION" "" "April 2015" "" ""
+.TH "DESCRIPTION" "" "May 2015" "" ""
.SH "NAME"
\fBDESCRIPTION\fR
.P
diff --git a/deps/npm/man/man3/npm-version.3 b/deps/npm/man/man3/npm-version.3
index 43a680592663ef..934d93607adb98 100644
--- a/deps/npm/man/man3/npm-version.3
+++ b/deps/npm/man/man3/npm-version.3
@@ -1,4 +1,4 @@
-.TH "NPM\-VERSION" "3" "April 2015" "" ""
+.TH "NPM\-VERSION" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-version\fR \- Bump a package version
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-view.3 b/deps/npm/man/man3/npm-view.3
index eb6d8d6f1b97a2..bed7694a83af37 100644
--- a/deps/npm/man/man3/npm-view.3
+++ b/deps/npm/man/man3/npm-view.3
@@ -1,4 +1,4 @@
-.TH "NPM\-VIEW" "3" "April 2015" "" ""
+.TH "NPM\-VIEW" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-view\fR \- View registry info
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-whoami.3 b/deps/npm/man/man3/npm-whoami.3
index a7b968fe990dbf..311803c313a9ef 100644
--- a/deps/npm/man/man3/npm-whoami.3
+++ b/deps/npm/man/man3/npm-whoami.3
@@ -1,4 +1,4 @@
-.TH "NPM\-WHOAMI" "3" "April 2015" "" ""
+.TH "NPM\-WHOAMI" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm-whoami\fR \- Display npm username
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm.3 b/deps/npm/man/man3/npm.3
index 22751011eb8b68..7fd9d79b9c9b42 100644
--- a/deps/npm/man/man3/npm.3
+++ b/deps/npm/man/man3/npm.3
@@ -1,4 +1,4 @@
-.TH "NPM" "3" "April 2015" "" ""
+.TH "NPM" "3" "May 2015" "" ""
.SH "NAME"
\fBnpm\fR \- javascript package manager
.SH SYNOPSIS
@@ -20,7 +20,7 @@ npm\.load([configObject, ]function (er, npm) {
.RE
.SH VERSION
.P
-2.8.3
+2.10.1
.SH DESCRIPTION
.P
This is the API documentation for npm\.
diff --git a/deps/npm/man/man5/npm-folders.5 b/deps/npm/man/man5/npm-folders.5
index 454a6950ad4e3e..7346af98f1f99e 100644
--- a/deps/npm/man/man5/npm-folders.5
+++ b/deps/npm/man/man5/npm-folders.5
@@ -1,4 +1,4 @@
-.TH "NPM\-FOLDERS" "5" "April 2015" "" ""
+.TH "NPM\-FOLDERS" "5" "May 2015" "" ""
.SH "NAME"
\fBnpm-folders\fR \- Folder Structures Used by npm
.SH DESCRIPTION
diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5
index 454a6950ad4e3e..7346af98f1f99e 100644
--- a/deps/npm/man/man5/npm-global.5
+++ b/deps/npm/man/man5/npm-global.5
@@ -1,4 +1,4 @@
-.TH "NPM\-FOLDERS" "5" "April 2015" "" ""
+.TH "NPM\-FOLDERS" "5" "May 2015" "" ""
.SH "NAME"
\fBnpm-folders\fR \- Folder Structures Used by npm
.SH DESCRIPTION
diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5
index 088e62bdc302b1..b0cb4e94dd538f 100644
--- a/deps/npm/man/man5/npm-json.5
+++ b/deps/npm/man/man5/npm-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE\.JSON" "5" "April 2015" "" ""
+.TH "PACKAGE\.JSON" "5" "May 2015" "" ""
.SH "NAME"
\fBpackage.json\fR \- Specifics of npm's package\.json handling
.SH DESCRIPTION
@@ -16,17 +16,32 @@ them\. The name and version together form an identifier that is assumed
to be completely unique\. Changes to the package should come along with
changes to the version\.
.P
-The name is what your thing is called\. Some tips:
+The name is what your thing is called\.
+.P
+Some rules:
.RS 0
.IP \(bu 2
+The name must be shorter than 214 characters\. This includes the scope for
+scoped packages\.
+.IP \(bu 2
+The name can't start with a dot or an underscore\.
+.IP \(bu 2
+New packages must not have uppercase letters in the name\.
+.IP \(bu 2
+The name ends up being part of a URL, an argument on the command line, and a
+folder name\. Therefore, the name can't contain any non\-URL\-safe characters\.
+
+.RE
+.P
+Some tips:
+.RS 0
+.IP \(bu 2
+Don't use the same name as a core Node module\.
+.IP \(bu 2
Don't put "js" or "node" in the name\. It's assumed that it's js, since you're
writing a package\.json file, and you can specify the engine using the "engines"
field\. (See below\.)
.IP \(bu 2
-The name ends up being part of a URL, an argument on the command line, and a
-folder name\. Any name with non\-url\-safe characters will be rejected\.
-Also, it can't start with a dot or an underscore\.
-.IP \(bu 2
The name will probably be passed as an argument to require(), so it should
be something short, but also reasonably descriptive\.
.IP \(bu 2
@@ -92,9 +107,8 @@ If a url is provided, it will be used by the \fBnpm bugs\fR command\.
You should specify a license for your package so that people know how they are
permitted to use it, and any restrictions you're placing on it\.
.P
-The simplest way, assuming you're using a common license such as BSD\-3\-Clause
-or MIT, is to just specify the standard SPDX ID of the license you're using,
-like this:
+If you're using a common license such as BSD\-2\-Clause or MIT, add a
+current SPDX license identifier for the license you're using, like this:
.P
.RS 2
.nf
@@ -106,8 +120,61 @@ You can check the full list of SPDX license IDs \fIhttps://spdx\.org/licenses/\f
Ideally you should pick one that is
OSI \fIhttp://opensource\.org/licenses/alphabetical\fR approved\.
.P
-It's also a good idea to include a LICENSE file at the top level in
-your package\.
+If your package is licensed under multiple common licenses, use an SPDX license
+expression syntax version 2\.0 string \fIhttp://npmjs\.com/package/spdx\fR, like this:
+.P
+.RS 2
+.nf
+{ "license" : "(ISC OR GPL\-3\.0)" }
+.fi
+.RE
+.P
+If you are using a license that hasn't been assigned an SPDX identifier, or if
+you are using a custom license, use the following valid SPDX expression:
+.P
+.RS 2
+.nf
+{ "license" : "LicenseRef\-LICENSE" }
+.fi
+.RE
+.P
+Then include a LICENSE file at the top level of the package\.
+.P
+Some old packages used license objects or a "licenses" property containing an
+array of license objects:
+.P
+.RS 2
+.nf
+// Not valid metadata
+{ "license" :
+ { "type" : "ISC"
+ , "url" : "http://opensource\.org/licenses/ISC"
+ }
+}
+
+// Not valid metadata
+{ "licenses" :
+ [
+ { "type": "MIT"
+ , "url": "http://www\.opensource\.org/licenses/mit\-license\.php"
+ }
+ , { "type": "Apache\-2\.0"
+ , "url": "http://opensource\.org/licenses/apache2\.0\.php"
+ }
+ ]
+}
+.fi
+.RE
+.P
+Those styles are now deprecated\. Instead, use SPDX expressions, like this:
+.P
+.RS 2
+.nf
+{ "license": "ISC" }
+
+{ "license": "(MIT OR Apache\-2\.0)" }
+.fi
+.RE
.SH people fields: author, contributors
.P
The "author" is one person\. "contributors" is an array of people\. A "person"
diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5
index 703176aaa53e40..eba9ab61ede304 100644
--- a/deps/npm/man/man5/npmrc.5
+++ b/deps/npm/man/man5/npmrc.5
@@ -1,4 +1,4 @@
-.TH "NPMRC" "5" "April 2015" "" ""
+.TH "NPMRC" "5" "May 2015" "" ""
.SH "NAME"
\fBnpmrc\fR \- The npm config files
.SH DESCRIPTION
@@ -19,7 +19,7 @@ per\-project config file (/path/to/my/project/\.npmrc)
.IP \(bu 2
per\-user config file (~/\.npmrc)
.IP \(bu 2
-global config file ($PREFIX/npmrc)
+global config file ($PREFIX/etc/npmrc)
.IP \(bu 2
npm builtin config file (/path/to/npm/npmrc)
diff --git a/deps/npm/man/man5/package.json.5 b/deps/npm/man/man5/package.json.5
index 088e62bdc302b1..b0cb4e94dd538f 100644
--- a/deps/npm/man/man5/package.json.5
+++ b/deps/npm/man/man5/package.json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE\.JSON" "5" "April 2015" "" ""
+.TH "PACKAGE\.JSON" "5" "May 2015" "" ""
.SH "NAME"
\fBpackage.json\fR \- Specifics of npm's package\.json handling
.SH DESCRIPTION
@@ -16,17 +16,32 @@ them\. The name and version together form an identifier that is assumed
to be completely unique\. Changes to the package should come along with
changes to the version\.
.P
-The name is what your thing is called\. Some tips:
+The name is what your thing is called\.
+.P
+Some rules:
.RS 0
.IP \(bu 2
+The name must be shorter than 214 characters\. This includes the scope for
+scoped packages\.
+.IP \(bu 2
+The name can't start with a dot or an underscore\.
+.IP \(bu 2
+New packages must not have uppercase letters in the name\.
+.IP \(bu 2
+The name ends up being part of a URL, an argument on the command line, and a
+folder name\. Therefore, the name can't contain any non\-URL\-safe characters\.
+
+.RE
+.P
+Some tips:
+.RS 0
+.IP \(bu 2
+Don't use the same name as a core Node module\.
+.IP \(bu 2
Don't put "js" or "node" in the name\. It's assumed that it's js, since you're
writing a package\.json file, and you can specify the engine using the "engines"
field\. (See below\.)
.IP \(bu 2
-The name ends up being part of a URL, an argument on the command line, and a
-folder name\. Any name with non\-url\-safe characters will be rejected\.
-Also, it can't start with a dot or an underscore\.
-.IP \(bu 2
The name will probably be passed as an argument to require(), so it should
be something short, but also reasonably descriptive\.
.IP \(bu 2
@@ -92,9 +107,8 @@ If a url is provided, it will be used by the \fBnpm bugs\fR command\.
You should specify a license for your package so that people know how they are
permitted to use it, and any restrictions you're placing on it\.
.P
-The simplest way, assuming you're using a common license such as BSD\-3\-Clause
-or MIT, is to just specify the standard SPDX ID of the license you're using,
-like this:
+If you're using a common license such as BSD\-2\-Clause or MIT, add a
+current SPDX license identifier for the license you're using, like this:
.P
.RS 2
.nf
@@ -106,8 +120,61 @@ You can check the full list of SPDX license IDs \fIhttps://spdx\.org/licenses/\f
Ideally you should pick one that is
OSI \fIhttp://opensource\.org/licenses/alphabetical\fR approved\.
.P
-It's also a good idea to include a LICENSE file at the top level in
-your package\.
+If your package is licensed under multiple common licenses, use an SPDX license
+expression syntax version 2\.0 string \fIhttp://npmjs\.com/package/spdx\fR, like this:
+.P
+.RS 2
+.nf
+{ "license" : "(ISC OR GPL\-3\.0)" }
+.fi
+.RE
+.P
+If you are using a license that hasn't been assigned an SPDX identifier, or if
+you are using a custom license, use the following valid SPDX expression:
+.P
+.RS 2
+.nf
+{ "license" : "LicenseRef\-LICENSE" }
+.fi
+.RE
+.P
+Then include a LICENSE file at the top level of the package\.
+.P
+Some old packages used license objects or a "licenses" property containing an
+array of license objects:
+.P
+.RS 2
+.nf
+// Not valid metadata
+{ "license" :
+ { "type" : "ISC"
+ , "url" : "http://opensource\.org/licenses/ISC"
+ }
+}
+
+// Not valid metadata
+{ "licenses" :
+ [
+ { "type": "MIT"
+ , "url": "http://www\.opensource\.org/licenses/mit\-license\.php"
+ }
+ , { "type": "Apache\-2\.0"
+ , "url": "http://opensource\.org/licenses/apache2\.0\.php"
+ }
+ ]
+}
+.fi
+.RE
+.P
+Those styles are now deprecated\. Instead, use SPDX expressions, like this:
+.P
+.RS 2
+.nf
+{ "license": "ISC" }
+
+{ "license": "(MIT OR Apache\-2\.0)" }
+.fi
+.RE
.SH people fields: author, contributors
.P
The "author" is one person\. "contributors" is an array of people\. A "person"
diff --git a/deps/npm/man/man7/npm-coding-style.7 b/deps/npm/man/man7/npm-coding-style.7
index b018adbac16489..68367cd6e72688 100644
--- a/deps/npm/man/man7/npm-coding-style.7
+++ b/deps/npm/man/man7/npm-coding-style.7
@@ -1,4 +1,4 @@
-.TH "NPM\-CODING\-STYLE" "7" "April 2015" "" ""
+.TH "NPM\-CODING\-STYLE" "7" "May 2015" "" ""
.SH "NAME"
\fBnpm-coding-style\fR \- npm's "funny" coding style
.SH DESCRIPTION
diff --git a/deps/npm/man/man7/npm-config.7 b/deps/npm/man/man7/npm-config.7
index 821595161bfa50..d7620a176eb835 100644
--- a/deps/npm/man/man7/npm-config.7
+++ b/deps/npm/man/man7/npm-config.7
@@ -1,9 +1,9 @@
-.TH "NPM\-CONFIG" "7" "April 2015" "" ""
+.TH "NPM\-CONFIG" "7" "May 2015" "" ""
.SH "NAME"
\fBnpm-config\fR \- More than you probably want to know about npm configuration
.SH DESCRIPTION
.P
-npm gets its configuration values from 6 sources, in this priority:
+npm gets its configuration values from the following sources, sorted by priority:
.SS Command Line Flags
.P
Putting \fB\-\-foo bar\fR on the command line sets the \fBfoo\fR configuration
@@ -1101,6 +1101,22 @@ it will install the specified tag\.
.P
Also the tag that is added to the package@version specified by the \fBnpm
tag\fR command, if no explicit tag is given\.
+.SS tag\-version\-prefix
+.RS 0
+.IP \(bu 2
+Default: \fB"v"\fR
+.IP \(bu 2
+Type: String
+
+.RE
+.P
+If set, alters the prefix used when tagging a new version when performing a
+version increment using \fBnpm\-version\fR\|\. To remove the prefix altogether, set it
+to the empty string: \fB""\fR\|\.
+.P
+Because other tools may rely on the convention that npm version tags look like
+\fBv1\.0\.0\fR, \fIonly use this property if it is absolutely necessary\fR\|\. In
+particular, use care when overriding this setting for public packages\.
.SS tmp
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man7/npm-developers.7 b/deps/npm/man/man7/npm-developers.7
index f3c85e453bdc76..d41c8f40149a4b 100644
--- a/deps/npm/man/man7/npm-developers.7
+++ b/deps/npm/man/man7/npm-developers.7
@@ -1,4 +1,4 @@
-.TH "NPM\-DEVELOPERS" "7" "April 2015" "" ""
+.TH "NPM\-DEVELOPERS" "7" "May 2015" "" ""
.SH "NAME"
\fBnpm-developers\fR \- Developer Guide
.SH DESCRIPTION
diff --git a/deps/npm/man/man7/npm-disputes.7 b/deps/npm/man/man7/npm-disputes.7
index 0db6f8a4dbd9ec..1fb6b9ce94deb6 100644
--- a/deps/npm/man/man7/npm-disputes.7
+++ b/deps/npm/man/man7/npm-disputes.7
@@ -1,4 +1,4 @@
-.TH "NPM\-DISPUTES" "7" "April 2015" "" ""
+.TH "NPM\-DISPUTES" "7" "May 2015" "" ""
.SH "NAME"
\fBnpm-disputes\fR \- Handling Module Name Disputes
.SH SYNOPSIS
diff --git a/deps/npm/man/man7/npm-faq.7 b/deps/npm/man/man7/npm-faq.7
index db84fbe8d72481..30391e7f2dbb22 100644
--- a/deps/npm/man/man7/npm-faq.7
+++ b/deps/npm/man/man7/npm-faq.7
@@ -1,4 +1,4 @@
-.TH "NPM\-FAQ" "7" "April 2015" "" ""
+.TH "NPM\-FAQ" "7" "May 2015" "" ""
.SH "NAME"
\fBnpm-faq\fR \- Frequently Asked Questions
.SH Where can I find these docs in HTML?
diff --git a/deps/npm/man/man7/npm-index.7 b/deps/npm/man/man7/npm-index.7
index 787668e656eb0a..a0dde122bedfd9 100644
--- a/deps/npm/man/man7/npm-index.7
+++ b/deps/npm/man/man7/npm-index.7
@@ -1,4 +1,4 @@
-.TH "NPM\-INDEX" "7" "April 2015" "" ""
+.TH "NPM\-INDEX" "7" "May 2015" "" ""
.SH "NAME"
\fBnpm-index\fR \- Index of all npm documentation
.SS npm help README
diff --git a/deps/npm/man/man7/npm-registry.7 b/deps/npm/man/man7/npm-registry.7
index dd493a8b1287ed..075b85a7cbd9b4 100644
--- a/deps/npm/man/man7/npm-registry.7
+++ b/deps/npm/man/man7/npm-registry.7
@@ -1,4 +1,4 @@
-.TH "NPM\-REGISTRY" "7" "April 2015" "" ""
+.TH "NPM\-REGISTRY" "7" "May 2015" "" ""
.SH "NAME"
\fBnpm-registry\fR \- The JavaScript Package Registry
.SH DESCRIPTION
@@ -30,9 +30,10 @@ similar) design doc to implement the APIs\.
If you set up continuous replication from the official CouchDB, and then
set your internal CouchDB as the registry config, then you'll be able
to read any published packages, in addition to your private ones, and by
-default will only publish internally\. If you then want to publish a
-package for the whole world to see, you can simply override the
-\fB\-\-registry\fR config for that command\.
+default will only publish internally\.
+.P
+If you then want to publish a package for the whole world to see, you can
+simply override the \fB\-\-registry\fR option for that \fBpublish\fR command\.
.SH I don't want my package published in the official registry\. It's private\.
.P
Set \fB"private": true\fR in your package\.json to prevent it from being
diff --git a/deps/npm/man/man7/npm-scope.7 b/deps/npm/man/man7/npm-scope.7
index dca1c503223a4b..cc6542828ccf9e 100644
--- a/deps/npm/man/man7/npm-scope.7
+++ b/deps/npm/man/man7/npm-scope.7
@@ -1,4 +1,4 @@
-.TH "NPM\-SCOPE" "7" "April 2015" "" ""
+.TH "NPM\-SCOPE" "7" "May 2015" "" ""
.SH "NAME"
\fBnpm-scope\fR \- Scoped packages
.SH DESCRIPTION
@@ -17,9 +17,9 @@ followed by a slash, e\.g\.
Scopes are a way of grouping related packages together, and also affect a few
things about the way npm treats the package\.
.P
-\fBAs of 2014\-09\-03, scoped packages are not supported by the public npm registry\fR\|\.
-However, the npm client is backwards\-compatible with un\-scoped registries, so
-it can be used to work with scoped and un\-scoped registries at the same time\.
+Scoped packages are supported by the public npm registry\. The npm
+client is backwards\-compatible with un\-scoped registries, so it can be
+used to work with scoped and un\-scoped registries at the same time\.
.SH Installing scoped packages
.P
Scoped packages are installed to a sub\-folder of the regular installation
@@ -64,10 +64,27 @@ There is nothing special about the way Node treats scope folders, this is
just specifying to require the module \fBmypackage\fR in the folder called \fB@myorg\fR\|\.
.SH Publishing scoped packages
.P
-Scoped packages can be published to any registry that supports them\.
-\fIAs of 2014\-09\-03, the public npm registry does not support scoped packages\fR,
-so attempting to publish a scoped package to the registry will fail unless
-you have associated that scope with a different registry, see below\.
+Scoped packages can be published to any registry that supports them, including
+the public npm registry\.
+.P
+(As of 2015\-04\-19, the public npm registry \fBdoes\fR support scoped packages)
+.P
+If you wish, you may associate a scope with a registry; see below\.
+.SS Publishing public scoped packages to the public npm registry
+.P
+To publish a public scoped package, you must specify \fB\-\-access public\fR with
+the initial publication\. This will publish the package and set access
+to \fBpublic\fR as if you had run \fBnpm access public\fR after publishing\.
+.SS Publishing private scoped packages to the npm registry
+.P
+To publish a private scoped package to the npm registry, you must have
+an npm Private Modules \fIhttps://www\.npmjs\.com/private\-modules\fR
+account\.
+.P
+You can then publish the module with \fBnpm publish\fR or \fBnpm publish
+\-\-access restricted\fR, and it will be present in the npm registry, with
+restricted access\. You can then change the access permissions, if
+desired, with \fBnpm access\fR or on the npmjs\.com website\.
.SH Associating a scope with a registry
.P
Scopes can be associated with a separate registry\. This allows you to
@@ -103,6 +120,8 @@ that registry instead\.
npm help install
.IP \(bu 2
npm help publish
+.IP \(bu 2
+npm help access
.RE
diff --git a/deps/npm/man/man7/npm-scripts.7 b/deps/npm/man/man7/npm-scripts.7
index 2ca079346a6cdd..46ab09ed4c1641 100644
--- a/deps/npm/man/man7/npm-scripts.7
+++ b/deps/npm/man/man7/npm-scripts.7
@@ -1,4 +1,4 @@
-.TH "NPM\-SCRIPTS" "7" "April 2015" "" ""
+.TH "NPM\-SCRIPTS" "7" "May 2015" "" ""
.SH "NAME"
\fBnpm-scripts\fR \- How npm handles the "scripts" field
.SH DESCRIPTION
diff --git a/deps/npm/man/man7/removing-npm.7 b/deps/npm/man/man7/removing-npm.7
index f3e47cfefccdc3..653e47d6d734f7 100644
--- a/deps/npm/man/man7/removing-npm.7
+++ b/deps/npm/man/man7/removing-npm.7
@@ -1,4 +1,4 @@
-.TH "NPM\-REMOVAL" "1" "April 2015" "" ""
+.TH "NPM\-REMOVAL" "1" "May 2015" "" ""
.SH "NAME"
\fBnpm-removal\fR \- Cleaning the Slate
.SH SYNOPSIS
diff --git a/deps/npm/man/man7/semver.7 b/deps/npm/man/man7/semver.7
index f77a51ee5d2b9c..69d890f9f7bbe3 100644
--- a/deps/npm/man/man7/semver.7
+++ b/deps/npm/man/man7/semver.7
@@ -1,4 +1,4 @@
-.TH "SEMVER" "7" "April 2015" "" ""
+.TH "SEMVER" "7" "May 2015" "" ""
.SH "NAME"
\fBsemver\fR \- The semantic versioner for npm
.SH Usage
diff --git a/deps/npm/node_modules/editor/package.json b/deps/npm/node_modules/editor/package.json
index 963b5d29bc33ca..0e5abd3bfb2607 100644
--- a/deps/npm/node_modules/editor/package.json
+++ b/deps/npm/node_modules/editor/package.json
@@ -1,6 +1,6 @@
{
"name": "editor",
- "version": "0.1.0",
+ "version": "1.0.0",
"description": "launch $EDITOR in your program",
"main": "index.js",
"directories": {
@@ -33,19 +33,22 @@
"engine": {
"node": ">=0.6"
},
+ "gitHead": "15200af2c417c65a4df153f39f32143dcd476375",
"bugs": {
"url": "https://github.com/substack/node-editor/issues"
},
- "_id": "editor@0.1.0",
- "dist": {
- "shasum": "542f4662c6a8c88e862fc11945e204e51981b9a1",
- "tarball": "http://registry.npmjs.org/editor/-/editor-0.1.0.tgz"
- },
- "_from": "editor@latest",
- "_npmVersion": "1.3.21",
+ "_id": "editor@1.0.0",
+ "_shasum": "60c7f87bd62bcc6a894fa8ccd6afb7823a24f742",
+ "_from": "editor@>=1.0.0 <1.1.0",
+ "_npmVersion": "2.7.5",
+ "_nodeVersion": "1.6.3",
"_npmUser": {
"name": "substack",
- "email": "mail@substack.net"
+ "email": "substack@gmail.com"
+ },
+ "dist": {
+ "shasum": "60c7f87bd62bcc6a894fa8ccd6afb7823a24f742",
+ "tarball": "http://registry.npmjs.org/editor/-/editor-1.0.0.tgz"
},
"maintainers": [
{
@@ -53,6 +56,5 @@
"email": "mail@substack.net"
}
],
- "_shasum": "542f4662c6a8c88e862fc11945e204e51981b9a1",
- "_resolved": "https://registry.npmjs.org/editor/-/editor-0.1.0.tgz"
+ "_resolved": "https://registry.npmjs.org/editor/-/editor-1.0.0.tgz"
}
diff --git a/deps/npm/node_modules/fstream/.travis.yml b/deps/npm/node_modules/fstream/.travis.yml
index 2d26206d58cbb7..a092c82b26fc41 100644
--- a/deps/npm/node_modules/fstream/.travis.yml
+++ b/deps/npm/node_modules/fstream/.travis.yml
@@ -1,3 +1,9 @@
language: node_js
node_js:
- - 0.6
+ - iojs
+ - 0.12
+ - 0.10
+ - 0.8
+before_install:
+ - "npm config set spin false"
+ - "npm install -g npm/npm"
diff --git a/deps/npm/node_modules/fstream/LICENSE b/deps/npm/node_modules/fstream/LICENSE
index 0c44ae716db8f3..19129e315fe593 100644
--- a/deps/npm/node_modules/fstream/LICENSE
+++ b/deps/npm/node_modules/fstream/LICENSE
@@ -1,27 +1,15 @@
-Copyright (c) Isaac Z. Schlueter ("Author")
-All rights reserved.
+The ISC License
-The BSD License
+Copyright (c) Isaac Z. Schlueter and Contributors
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
-1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS
-BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
-BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
-OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
-IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/fstream/examples/filter-pipe.js b/deps/npm/node_modules/fstream/examples/filter-pipe.js
index 983649bb9d5814..fc59cfc7abae44 100644
--- a/deps/npm/node_modules/fstream/examples/filter-pipe.js
+++ b/deps/npm/node_modules/fstream/examples/filter-pipe.js
@@ -1,77 +1,78 @@
-var fstream = require("../fstream.js")
-var path = require("path")
-
-var r = fstream.Reader({ path: path.dirname(__dirname)
- , filter: function () {
- return !this.basename.match(/^\./) &&
- !this.basename.match(/^node_modules$/) &&
- !this.basename.match(/^deep-copy$/) &&
- !this.basename.match(/^filter-copy$/)
- }
- })
+var fstream = require('../fstream.js')
+var path = require('path')
+
+var r = fstream.Reader({
+ path: path.dirname(__dirname),
+ filter: function () {
+ return !this.basename.match(/^\./) &&
+ !this.basename.match(/^node_modules$/) &&
+ !this.basename.match(/^deep-copy$/) &&
+ !this.basename.match(/^filter-copy$/)
+ }
+})
// this writer will only write directories
-var w = fstream.Writer({ path: path.resolve(__dirname, "filter-copy")
- , type: "Directory"
- , filter: function () {
- return this.type === "Directory"
- }
- })
+var w = fstream.Writer({
+ path: path.resolve(__dirname, 'filter-copy'),
+ type: 'Directory',
+ filter: function () {
+ return this.type === 'Directory'
+ }
+})
-var indent = ""
-var escape = {}
+var indent = ''
-r.on("entry", appears)
-r.on("ready", function () {
- console.error("ready to begin!", r.path)
+r.on('entry', appears)
+r.on('ready', function () {
+ console.error('ready to begin!', r.path)
})
function appears (entry) {
- console.error(indent + "a %s appears!", entry.type, entry.basename, typeof entry.basename)
+ console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename)
if (foggy) {
- console.error("FOGGY!")
+ console.error('FOGGY!')
var p = entry
do {
console.error(p.depth, p.path, p._paused)
- } while (p = p.parent)
+ p = p.parent
+ } while (p)
- throw new Error("\033[mshould not have entries while foggy")
+ throw new Error('\u001b[mshould not have entries while foggy')
}
- indent += "\t"
- entry.on("data", missile(entry))
- entry.on("end", runaway(entry))
- entry.on("entry", appears)
+ indent += '\t'
+ entry.on('data', missile(entry))
+ entry.on('end', runaway(entry))
+ entry.on('entry', appears)
}
var foggy
function missile (entry) {
- if (entry.type === "Directory") {
+ function liftFog (who) {
+ if (!foggy) return
+ if (who) {
+ console.error('%s breaks the spell!', who && who.path)
+ } else {
+ console.error('the spell expires!')
+ }
+ console.error('\u001b[mthe fog lifts!\n')
+ clearTimeout(foggy)
+ foggy = null
+ if (entry._paused) entry.resume()
+ }
+
+ if (entry.type === 'Directory') {
var ended = false
- entry.once("end", function () { ended = true })
+ entry.once('end', function () { ended = true })
return function (c) {
// throw in some pathological pause()/resume() behavior
// just for extra fun.
process.nextTick(function () {
if (!foggy && !ended) { // && Math.random() < 0.3) {
- console.error(indent +"%s casts a spell", entry.basename)
- console.error("\na slowing fog comes over the battlefield...\n\033[32m")
+ console.error(indent + '%s casts a spell', entry.basename)
+ console.error('\na slowing fog comes over the battlefield...\n\u001b[32m')
entry.pause()
- entry.once("resume", liftFog)
+ entry.once('resume', liftFog)
foggy = setTimeout(liftFog, 1000)
-
- function liftFog (who) {
- if (!foggy) return
- if (who) {
- console.error("%s breaks the spell!", who && who.path)
- } else {
- console.error("the spell expires!")
- }
- console.error("\033[mthe fog lifts!\n")
- clearTimeout(foggy)
- foggy = null
- if (entry._paused) entry.resume()
- }
-
}
})
}
@@ -79,53 +80,54 @@ function missile (entry) {
return function (c) {
var e = Math.random() < 0.5
- console.error(indent + "%s %s for %d damage!",
+ console.error(indent + '%s %s for %d damage!',
entry.basename,
- e ? "is struck" : "fires a chunk",
+ e ? 'is struck' : 'fires a chunk',
c.length)
}
}
-function runaway (entry) { return function () {
- var e = Math.random() < 0.5
- console.error(indent + "%s %s",
- entry.basename,
- e ? "turns to flee" : "is vanquished!")
- indent = indent.slice(0, -1)
-}}
-
+function runaway (entry) {
+ return function () {
+ var e = Math.random() < 0.5
+ console.error(indent + '%s %s',
+ entry.basename,
+ e ? 'turns to flee' : 'is vanquished!')
+ indent = indent.slice(0, -1)
+ }
+}
-w.on("entry", attacks)
-//w.on("ready", function () { attacks(w) })
+w.on('entry', attacks)
+// w.on('ready', function () { attacks(w) })
function attacks (entry) {
- console.error(indent + "%s %s!", entry.basename,
- entry.type === "Directory" ? "calls for backup" : "attacks")
- entry.on("entry", attacks)
+ console.error(indent + '%s %s!', entry.basename,
+ entry.type === 'Directory' ? 'calls for backup' : 'attacks')
+ entry.on('entry', attacks)
}
-ended = false
+var ended = false
var i = 1
-r.on("end", function () {
+r.on('end', function () {
if (foggy) clearTimeout(foggy)
- console.error("\033[mIT'S OVER!!")
- console.error("A WINNAR IS YOU!")
+ console.error("\u001b[mIT'S OVER!!")
+ console.error('A WINNAR IS YOU!')
- console.log("ok " + (i ++) + " A WINNAR IS YOU")
+ console.log('ok ' + (i++) + ' A WINNAR IS YOU')
ended = true
// now go through and verify that everything in there is a dir.
- var p = path.resolve(__dirname, "filter-copy")
+ var p = path.resolve(__dirname, 'filter-copy')
var checker = fstream.Reader({ path: p })
checker.checker = true
- checker.on("child", function (e) {
- var ok = e.type === "Directory"
- console.log((ok ? "" : "not ") + "ok " + (i ++) +
- " should be a dir: " +
+ checker.on('child', function (e) {
+ var ok = e.type === 'Directory'
+ console.log((ok ? '' : 'not ') + 'ok ' + (i++) +
+ ' should be a dir: ' +
e.path.substr(checker.path.length + 1))
})
})
-process.on("exit", function () {
- console.log((ended ? "" : "not ") + "ok " + (i ++) + " ended")
+process.on('exit', function () {
+ console.log((ended ? '' : 'not ') + 'ok ' + (i++) + ' ended')
})
r.pipe(w)
diff --git a/deps/npm/node_modules/fstream/examples/pipe.js b/deps/npm/node_modules/fstream/examples/pipe.js
index 0bad122f9bd6cc..c611dd5c47b507 100644
--- a/deps/npm/node_modules/fstream/examples/pipe.js
+++ b/deps/npm/node_modules/fstream/examples/pipe.js
@@ -1,72 +1,73 @@
-var fstream = require("../fstream.js")
-var path = require("path")
-
-var r = fstream.Reader({ path: path.dirname(__dirname)
- , filter: function () {
- return !this.basename.match(/^\./) &&
- !this.basename.match(/^node_modules$/) &&
- !this.basename.match(/^deep-copy$/)
- }
- })
+var fstream = require('../fstream.js')
+var path = require('path')
+
+var r = fstream.Reader({
+ path: path.dirname(__dirname),
+ filter: function () {
+ return !this.basename.match(/^\./) &&
+ !this.basename.match(/^node_modules$/) &&
+ !this.basename.match(/^deep-copy$/)
+ }
+})
-var w = fstream.Writer({ path: path.resolve(__dirname, "deep-copy")
- , type: "Directory"
- })
+var w = fstream.Writer({
+ path: path.resolve(__dirname, 'deep-copy'),
+ type: 'Directory'
+})
-var indent = ""
-var escape = {}
+var indent = ''
-r.on("entry", appears)
-r.on("ready", function () {
- console.error("ready to begin!", r.path)
+r.on('entry', appears)
+r.on('ready', function () {
+ console.error('ready to begin!', r.path)
})
function appears (entry) {
- console.error(indent + "a %s appears!", entry.type, entry.basename, typeof entry.basename, entry)
+ console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename, entry)
if (foggy) {
- console.error("FOGGY!")
+ console.error('FOGGY!')
var p = entry
do {
console.error(p.depth, p.path, p._paused)
- } while (p = p.parent)
+ p = p.parent
+ } while (p)
- throw new Error("\033[mshould not have entries while foggy")
+ throw new Error('\u001b[mshould not have entries while foggy')
}
- indent += "\t"
- entry.on("data", missile(entry))
- entry.on("end", runaway(entry))
- entry.on("entry", appears)
+ indent += '\t'
+ entry.on('data', missile(entry))
+ entry.on('end', runaway(entry))
+ entry.on('entry', appears)
}
var foggy
function missile (entry) {
- if (entry.type === "Directory") {
+ function liftFog (who) {
+ if (!foggy) return
+ if (who) {
+ console.error('%s breaks the spell!', who && who.path)
+ } else {
+ console.error('the spell expires!')
+ }
+ console.error('\u001b[mthe fog lifts!\n')
+ clearTimeout(foggy)
+ foggy = null
+ if (entry._paused) entry.resume()
+ }
+
+ if (entry.type === 'Directory') {
var ended = false
- entry.once("end", function () { ended = true })
+ entry.once('end', function () { ended = true })
return function (c) {
// throw in some pathological pause()/resume() behavior
// just for extra fun.
process.nextTick(function () {
if (!foggy && !ended) { // && Math.random() < 0.3) {
- console.error(indent +"%s casts a spell", entry.basename)
- console.error("\na slowing fog comes over the battlefield...\n\033[32m")
+ console.error(indent + '%s casts a spell', entry.basename)
+ console.error('\na slowing fog comes over the battlefield...\n\u001b[32m')
entry.pause()
- entry.once("resume", liftFog)
+ entry.once('resume', liftFog)
foggy = setTimeout(liftFog, 10)
-
- function liftFog (who) {
- if (!foggy) return
- if (who) {
- console.error("%s breaks the spell!", who && who.path)
- } else {
- console.error("the spell expires!")
- }
- console.error("\033[mthe fog lifts!\n")
- clearTimeout(foggy)
- foggy = null
- if (entry._paused) entry.resume()
- }
-
}
})
}
@@ -74,42 +75,43 @@ function missile (entry) {
return function (c) {
var e = Math.random() < 0.5
- console.error(indent + "%s %s for %d damage!",
+ console.error(indent + '%s %s for %d damage!',
entry.basename,
- e ? "is struck" : "fires a chunk",
+ e ? 'is struck' : 'fires a chunk',
c.length)
}
}
-function runaway (entry) { return function () {
- var e = Math.random() < 0.5
- console.error(indent + "%s %s",
- entry.basename,
- e ? "turns to flee" : "is vanquished!")
- indent = indent.slice(0, -1)
-}}
-
+function runaway (entry) {
+ return function () {
+ var e = Math.random() < 0.5
+ console.error(indent + '%s %s',
+ entry.basename,
+ e ? 'turns to flee' : 'is vanquished!')
+ indent = indent.slice(0, -1)
+ }
+}
-w.on("entry", attacks)
-//w.on("ready", function () { attacks(w) })
+w.on('entry', attacks)
+// w.on('ready', function () { attacks(w) })
function attacks (entry) {
- console.error(indent + "%s %s!", entry.basename,
- entry.type === "Directory" ? "calls for backup" : "attacks")
- entry.on("entry", attacks)
+ console.error(indent + '%s %s!', entry.basename,
+ entry.type === 'Directory' ? 'calls for backup' : 'attacks')
+ entry.on('entry', attacks)
}
-ended = false
-r.on("end", function () {
+var ended = false
+r.on('end', function () {
if (foggy) clearTimeout(foggy)
- console.error("\033[mIT'S OVER!!")
- console.error("A WINNAR IS YOU!")
+ console.error("\u001b[mIT'S OVER!!")
+ console.error('A WINNAR IS YOU!')
- console.log("ok 1 A WINNAR IS YOU")
+ console.log('ok 1 A WINNAR IS YOU')
ended = true
})
-process.on("exit", function () {
- console.log((ended ? "" : "not ") + "ok 2 ended")
+process.on('exit', function () {
+ console.log((ended ? '' : 'not ') + 'ok 2 ended')
})
r.pipe(w)
diff --git a/deps/npm/node_modules/fstream/examples/reader.js b/deps/npm/node_modules/fstream/examples/reader.js
index 3787ae376ffe1e..19affbe7e6e23d 100644
--- a/deps/npm/node_modules/fstream/examples/reader.js
+++ b/deps/npm/node_modules/fstream/examples/reader.js
@@ -1,68 +1,68 @@
-var fstream = require("../fstream.js")
-var tap = require("tap")
-var fs = require("fs")
-var path = require("path")
+var fstream = require('../fstream.js')
+var tap = require('tap')
+var fs = require('fs')
+var path = require('path')
var dir = path.dirname(__dirname)
-tap.test("reader test", function (t) {
+tap.test('reader test', function (t) {
var children = -1
var gotReady = false
var ended = false
- var r = fstream.Reader({ path: dir
- , filter: function () {
- // return this.parent === r
- return this.parent === r || this === r
- }
- })
+ var r = fstream.Reader({
+ path: dir,
+ filter: function () {
+ // return this.parent === r
+ return this.parent === r || this === r
+ }
+ })
- r.on("ready", function () {
+ r.on('ready', function () {
gotReady = true
children = fs.readdirSync(dir).length
- console.error("Setting expected children to "+children)
- t.equal(r.type, "Directory", "should be a directory")
+ console.error('Setting expected children to ' + children)
+ t.equal(r.type, 'Directory', 'should be a directory')
})
- r.on("entry", function (entry) {
- children --
+ r.on('entry', function (entry) {
+ children--
if (!gotReady) {
- t.fail("children before ready!")
+ t.fail('children before ready!')
}
- t.equal(entry.dirname, r.path, "basename is parent dir")
+ t.equal(entry.dirname, r.path, 'basename is parent dir')
})
- r.on("error", function (er) {
+ r.on('error', function (er) {
t.fail(er)
t.end()
process.exit(1)
})
- r.on("end", function () {
- t.equal(children, 0, "should have seen all children")
+ r.on('end', function () {
+ t.equal(children, 0, 'should have seen all children')
ended = true
})
var closed = false
- r.on("close", function () {
- t.ok(ended, "saw end before close")
- t.notOk(closed, "close should only happen once")
+ r.on('close', function () {
+ t.ok(ended, 'saw end before close')
+ t.notOk(closed, 'close should only happen once')
closed = true
t.end()
})
-
})
-tap.test("reader error test", function (t) {
+tap.test('reader error test', function (t) {
// assumes non-root on a *nix system
var r = fstream.Reader({ path: '/etc/shadow' })
- r.once("error", function (er) {
- t.ok(true);
+ r.once('error', function (er) {
+ t.ok(true)
t.end()
})
- r.on("end", function () {
- t.fail("reader ended without error");
+ r.on('end', function () {
+ t.fail('reader ended without error')
t.end()
})
})
diff --git a/deps/npm/node_modules/fstream/examples/symlink-write.js b/deps/npm/node_modules/fstream/examples/symlink-write.js
index d7816d24dddec5..f6f51099bc25cb 100644
--- a/deps/npm/node_modules/fstream/examples/symlink-write.js
+++ b/deps/npm/node_modules/fstream/examples/symlink-write.js
@@ -1,24 +1,25 @@
-var fstream = require("../fstream.js")
- , closed = false
+var fstream = require('../fstream.js')
+var notOpen = false
fstream
- .Writer({ path: "path/to/symlink"
- , linkpath: "./file"
- , isSymbolicLink: true
- , mode: "0755" // octal strings supported
- })
- .on("close", function () {
- closed = true
- var fs = require("fs")
- var s = fs.lstatSync("path/to/symlink")
+ .Writer({
+ path: 'path/to/symlink',
+ linkpath: './file',
+ isSymbolicLink: true,
+ mode: '0755' // octal strings supported
+ })
+ .on('close', function () {
+ notOpen = true
+ var fs = require('fs')
+ var s = fs.lstatSync('path/to/symlink')
var isSym = s.isSymbolicLink()
- console.log((isSym?"":"not ") +"ok 1 should be symlink")
- var t = fs.readlinkSync("path/to/symlink")
- var isTarget = t === "./file"
- console.log((isTarget?"":"not ") +"ok 2 should link to ./file")
+ console.log((isSym ? '' : 'not ') + 'ok 1 should be symlink')
+ var t = fs.readlinkSync('path/to/symlink')
+ var isTarget = t === './file'
+ console.log((isTarget ? '' : 'not ') + 'ok 2 should link to ./file')
})
.end()
-process.on("exit", function () {
- console.log((closed?"":"not ")+"ok 3 should be closed")
+process.on('exit', function () {
+ console.log((notOpen ? '' : 'not ') + 'ok 3 should be closed')
})
diff --git a/deps/npm/node_modules/fstream/fstream.js b/deps/npm/node_modules/fstream/fstream.js
index c66d26f519bbb1..c0eb3bea788036 100644
--- a/deps/npm/node_modules/fstream/fstream.js
+++ b/deps/npm/node_modules/fstream/fstream.js
@@ -1,22 +1,26 @@
-exports.Abstract = require("./lib/abstract.js")
-exports.Reader = require("./lib/reader.js")
-exports.Writer = require("./lib/writer.js")
+exports.Abstract = require('./lib/abstract.js')
+exports.Reader = require('./lib/reader.js')
+exports.Writer = require('./lib/writer.js')
-exports.File =
- { Reader: require("./lib/file-reader.js")
- , Writer: require("./lib/file-writer.js") }
+exports.File = {
+ Reader: require('./lib/file-reader.js'),
+ Writer: require('./lib/file-writer.js')
+}
-exports.Dir =
- { Reader : require("./lib/dir-reader.js")
- , Writer : require("./lib/dir-writer.js") }
+exports.Dir = {
+ Reader: require('./lib/dir-reader.js'),
+ Writer: require('./lib/dir-writer.js')
+}
-exports.Link =
- { Reader : require("./lib/link-reader.js")
- , Writer : require("./lib/link-writer.js") }
+exports.Link = {
+ Reader: require('./lib/link-reader.js'),
+ Writer: require('./lib/link-writer.js')
+}
-exports.Proxy =
- { Reader : require("./lib/proxy-reader.js")
- , Writer : require("./lib/proxy-writer.js") }
+exports.Proxy = {
+ Reader: require('./lib/proxy-reader.js'),
+ Writer: require('./lib/proxy-writer.js')
+}
exports.Reader.Dir = exports.DirReader = exports.Dir.Reader
exports.Reader.File = exports.FileReader = exports.File.Reader
@@ -28,4 +32,4 @@ exports.Writer.File = exports.FileWriter = exports.File.Writer
exports.Writer.Link = exports.LinkWriter = exports.Link.Writer
exports.Writer.Proxy = exports.ProxyWriter = exports.Proxy.Writer
-exports.collect = require("./lib/collect.js")
+exports.collect = require('./lib/collect.js')
diff --git a/deps/npm/node_modules/fstream/lib/abstract.js b/deps/npm/node_modules/fstream/lib/abstract.js
index 11ef0e28fb3e2f..94af1ae08656dd 100644
--- a/deps/npm/node_modules/fstream/lib/abstract.js
+++ b/deps/npm/node_modules/fstream/lib/abstract.js
@@ -2,8 +2,8 @@
module.exports = Abstract
-var Stream = require("stream").Stream
- , inherits = require("inherits")
+var Stream = require('stream').Stream
+var inherits = require('inherits')
function Abstract () {
Stream.call(this)
@@ -12,7 +12,7 @@ function Abstract () {
inherits(Abstract, Stream)
Abstract.prototype.on = function (ev, fn) {
- if (ev === "ready" && this.ready) {
+ if (ev === 'ready' && this.ready) {
process.nextTick(fn.bind(this))
} else {
Stream.prototype.on.call(this, ev, fn)
@@ -22,24 +22,24 @@ Abstract.prototype.on = function (ev, fn) {
Abstract.prototype.abort = function () {
this._aborted = true
- this.emit("abort")
+ this.emit('abort')
}
Abstract.prototype.destroy = function () {}
Abstract.prototype.warn = function (msg, code) {
- var me = this
- , er = decorate(msg, code, me)
- if (!me.listeners("warn")) {
- console.error("%s %s\n" +
- "path = %s\n" +
- "syscall = %s\n" +
- "fstream_type = %s\n" +
- "fstream_path = %s\n" +
- "fstream_unc_path = %s\n" +
- "fstream_class = %s\n" +
- "fstream_stack =\n%s\n",
- code || "UNKNOWN",
+ var self = this
+ var er = decorate(msg, code, self)
+ if (!self.listeners('warn')) {
+ console.error('%s %s\n' +
+ 'path = %s\n' +
+ 'syscall = %s\n' +
+ 'fstream_type = %s\n' +
+ 'fstream_path = %s\n' +
+ 'fstream_unc_path = %s\n' +
+ 'fstream_class = %s\n' +
+ 'fstream_stack =\n%s\n',
+ code || 'UNKNOWN',
er.stack,
er.path,
er.syscall,
@@ -47,38 +47,38 @@ Abstract.prototype.warn = function (msg, code) {
er.fstream_path,
er.fstream_unc_path,
er.fstream_class,
- er.fstream_stack.join("\n"))
+ er.fstream_stack.join('\n'))
} else {
- me.emit("warn", er)
+ self.emit('warn', er)
}
}
Abstract.prototype.info = function (msg, code) {
- this.emit("info", msg, code)
+ this.emit('info', msg, code)
}
Abstract.prototype.error = function (msg, code, th) {
var er = decorate(msg, code, this)
if (th) throw er
- else this.emit("error", er)
+ else this.emit('error', er)
}
-function decorate (er, code, me) {
+function decorate (er, code, self) {
if (!(er instanceof Error)) er = new Error(er)
er.code = er.code || code
- er.path = er.path || me.path
- er.fstream_type = er.fstream_type || me.type
- er.fstream_path = er.fstream_path || me.path
- if (me._path !== me.path) {
- er.fstream_unc_path = er.fstream_unc_path || me._path
+ er.path = er.path || self.path
+ er.fstream_type = er.fstream_type || self.type
+ er.fstream_path = er.fstream_path || self.path
+ if (self._path !== self.path) {
+ er.fstream_unc_path = er.fstream_unc_path || self._path
}
- if (me.linkpath) {
- er.fstream_linkpath = er.fstream_linkpath || me.linkpath
+ if (self.linkpath) {
+ er.fstream_linkpath = er.fstream_linkpath || self.linkpath
}
- er.fstream_class = er.fstream_class || me.constructor.name
+ er.fstream_class = er.fstream_class || self.constructor.name
er.fstream_stack = er.fstream_stack ||
new Error().stack.split(/\n/).slice(3).map(function (s) {
- return s.replace(/^ at /, "")
+ return s.replace(/^ {4}at /, '')
})
return er
diff --git a/deps/npm/node_modules/fstream/lib/collect.js b/deps/npm/node_modules/fstream/lib/collect.js
index a36f780eb2c6b3..6245e6ce492d20 100644
--- a/deps/npm/node_modules/fstream/lib/collect.js
+++ b/deps/npm/node_modules/fstream/lib/collect.js
@@ -6,62 +6,63 @@ function collect (stream) {
stream._collected = true
stream.pause()
- stream.on("data", save)
- stream.on("end", save)
+ stream.on('data', save)
+ stream.on('end', save)
var buf = []
function save (b) {
- if (typeof b === "string") b = new Buffer(b)
+ if (typeof b === 'string') b = new Buffer(b)
if (Buffer.isBuffer(b) && !b.length) return
buf.push(b)
}
- stream.on("entry", saveEntry)
+ stream.on('entry', saveEntry)
var entryBuffer = []
function saveEntry (e) {
collect(e)
entryBuffer.push(e)
}
- stream.on("proxy", proxyPause)
+ stream.on('proxy', proxyPause)
function proxyPause (p) {
p.pause()
}
-
// replace the pipe method with a new version that will
// unlock the buffered stuff. if you just call .pipe()
// without a destination, then it'll re-play the events.
- stream.pipe = (function (orig) { return function (dest) {
- // console.error(" === open the pipes", dest && dest.path)
+ stream.pipe = (function (orig) {
+ return function (dest) {
+ // console.error(' === open the pipes', dest && dest.path)
- // let the entries flow through one at a time.
- // Once they're all done, then we can resume completely.
- var e = 0
- ;(function unblockEntry () {
- var entry = entryBuffer[e++]
- // console.error(" ==== unblock entry", entry && entry.path)
- if (!entry) return resume()
- entry.on("end", unblockEntry)
- if (dest) dest.add(entry)
- else stream.emit("entry", entry)
- })()
+ // let the entries flow through one at a time.
+ // Once they're all done, then we can resume completely.
+ var e = 0
+ ;(function unblockEntry () {
+ var entry = entryBuffer[e++]
+ // console.error(" ==== unblock entry", entry && entry.path)
+ if (!entry) return resume()
+ entry.on('end', unblockEntry)
+ if (dest) dest.add(entry)
+ else stream.emit('entry', entry)
+ })()
- function resume () {
- stream.removeListener("entry", saveEntry)
- stream.removeListener("data", save)
- stream.removeListener("end", save)
+ function resume () {
+ stream.removeListener('entry', saveEntry)
+ stream.removeListener('data', save)
+ stream.removeListener('end', save)
- stream.pipe = orig
- if (dest) stream.pipe(dest)
+ stream.pipe = orig
+ if (dest) stream.pipe(dest)
- buf.forEach(function (b) {
- if (b) stream.emit("data", b)
- else stream.emit("end")
- })
+ buf.forEach(function (b) {
+ if (b) stream.emit('data', b)
+ else stream.emit('end')
+ })
- stream.resume()
- }
+ stream.resume()
+ }
- return dest
- }})(stream.pipe)
+ return dest
+ }
+ })(stream.pipe)
}
diff --git a/deps/npm/node_modules/fstream/lib/dir-reader.js b/deps/npm/node_modules/fstream/lib/dir-reader.js
index 346ac2b8317716..820cdc85a8e9c4 100644
--- a/deps/npm/node_modules/fstream/lib/dir-reader.js
+++ b/deps/npm/node_modules/fstream/lib/dir-reader.js
@@ -4,31 +4,29 @@
module.exports = DirReader
-var fs = require("graceful-fs")
- , fstream = require("../fstream.js")
- , Reader = fstream.Reader
- , inherits = require("inherits")
- , mkdir = require("mkdirp")
- , path = require("path")
- , Reader = require("./reader.js")
- , assert = require("assert").ok
+var fs = require('graceful-fs')
+var inherits = require('inherits')
+var path = require('path')
+var Reader = require('./reader.js')
+var assert = require('assert').ok
inherits(DirReader, Reader)
function DirReader (props) {
- var me = this
- if (!(me instanceof DirReader)) throw new Error(
- "DirReader must be called as constructor.")
+ var self = this
+ if (!(self instanceof DirReader)) {
+ throw new Error('DirReader must be called as constructor.')
+ }
// should already be established as a Directory type
- if (props.type !== "Directory" || !props.Directory) {
- throw new Error("Non-directory type "+ props.type)
+ if (props.type !== 'Directory' || !props.Directory) {
+ throw new Error('Non-directory type ' + props.type)
}
- me.entries = null
- me._index = -1
- me._paused = false
- me._length = -1
+ self.entries = null
+ self._index = -1
+ self._paused = false
+ self._length = -1
if (props.sort) {
this.sort = props.sort
@@ -38,49 +36,49 @@ function DirReader (props) {
}
DirReader.prototype._getEntries = function () {
- var me = this
+ var self = this
// race condition. might pause() before calling _getEntries,
// and then resume, and try to get them a second time.
- if (me._gotEntries) return
- me._gotEntries = true
+ if (self._gotEntries) return
+ self._gotEntries = true
- fs.readdir(me._path, function (er, entries) {
- if (er) return me.error(er)
+ fs.readdir(self._path, function (er, entries) {
+ if (er) return self.error(er)
- me.entries = entries
+ self.entries = entries
- me.emit("entries", entries)
- if (me._paused) me.once("resume", processEntries)
+ self.emit('entries', entries)
+ if (self._paused) self.once('resume', processEntries)
else processEntries()
function processEntries () {
- me._length = me.entries.length
- if (typeof me.sort === "function") {
- me.entries = me.entries.sort(me.sort.bind(me))
+ self._length = self.entries.length
+ if (typeof self.sort === 'function') {
+ self.entries = self.entries.sort(self.sort.bind(self))
}
- me._read()
+ self._read()
}
})
}
// start walking the dir, and emit an "entry" event for each one.
DirReader.prototype._read = function () {
- var me = this
+ var self = this
- if (!me.entries) return me._getEntries()
+ if (!self.entries) return self._getEntries()
- if (me._paused || me._currentEntry || me._aborted) {
- // console.error("DR paused=%j, current=%j, aborted=%j", me._paused, !!me._currentEntry, me._aborted)
+ if (self._paused || self._currentEntry || self._aborted) {
+ // console.error('DR paused=%j, current=%j, aborted=%j', self._paused, !!self._currentEntry, self._aborted)
return
}
- me._index ++
- if (me._index >= me.entries.length) {
- if (!me._ended) {
- me._ended = true
- me.emit("end")
- me.emit("close")
+ self._index++
+ if (self._index >= self.entries.length) {
+ if (!self._ended) {
+ self._ended = true
+ self.emit('end')
+ self.emit('close')
}
return
}
@@ -88,21 +86,21 @@ DirReader.prototype._read = function () {
// ok, handle this one, then.
// save creating a proxy, by stat'ing the thing now.
- var p = path.resolve(me._path, me.entries[me._index])
- assert(p !== me._path)
- assert(me.entries[me._index])
+ var p = path.resolve(self._path, self.entries[self._index])
+ assert(p !== self._path)
+ assert(self.entries[self._index])
// set this to prevent trying to _read() again in the stat time.
- me._currentEntry = p
- fs[ me.props.follow ? "stat" : "lstat" ](p, function (er, stat) {
- if (er) return me.error(er)
+ self._currentEntry = p
+ fs[ self.props.follow ? 'stat' : 'lstat' ](p, function (er, stat) {
+ if (er) return self.error(er)
- var who = me._proxy || me
+ var who = self._proxy || self
stat.path = p
stat.basename = path.basename(p)
stat.dirname = path.dirname(p)
- var childProps = me.getChildProps.call(who, stat)
+ var childProps = self.getChildProps.call(who, stat)
childProps.path = p
childProps.basename = path.basename(p)
childProps.dirname = path.dirname(p)
@@ -111,141 +109,144 @@ DirReader.prototype._read = function () {
// console.error("DR Entry", p, stat.size)
- me._currentEntry = entry
+ self._currentEntry = entry
// "entry" events are for direct entries in a specific dir.
// "child" events are for any and all children at all levels.
// This nomenclature is not completely final.
- entry.on("pause", function (who) {
- if (!me._paused && !entry._disowned) {
- me.pause(who)
+ entry.on('pause', function (who) {
+ if (!self._paused && !entry._disowned) {
+ self.pause(who)
}
})
- entry.on("resume", function (who) {
- if (me._paused && !entry._disowned) {
- me.resume(who)
+ entry.on('resume', function (who) {
+ if (self._paused && !entry._disowned) {
+ self.resume(who)
}
})
- entry.on("stat", function (props) {
- me.emit("_entryStat", entry, props)
+ entry.on('stat', function (props) {
+ self.emit('_entryStat', entry, props)
if (entry._aborted) return
- if (entry._paused) entry.once("resume", function () {
- me.emit("entryStat", entry, props)
- })
- else me.emit("entryStat", entry, props)
+ if (entry._paused) {
+ entry.once('resume', function () {
+ self.emit('entryStat', entry, props)
+ })
+ } else self.emit('entryStat', entry, props)
})
- entry.on("ready", function EMITCHILD () {
+ entry.on('ready', function EMITCHILD () {
// console.error("DR emit child", entry._path)
- if (me._paused) {
+ if (self._paused) {
// console.error(" DR emit child - try again later")
// pause the child, and emit the "entry" event once we drain.
// console.error("DR pausing child entry")
- entry.pause(me)
- return me.once("resume", EMITCHILD)
+ entry.pause(self)
+ return self.once('resume', EMITCHILD)
}
// skip over sockets. they can't be piped around properly,
// so there's really no sense even acknowledging them.
// if someone really wants to see them, they can listen to
// the "socket" events.
- if (entry.type === "Socket") {
- me.emit("socket", entry)
+ if (entry.type === 'Socket') {
+ self.emit('socket', entry)
} else {
- me.emitEntry(entry)
+ self.emitEntry(entry)
}
})
var ended = false
- entry.on("close", onend)
- entry.on("disown", onend)
+ entry.on('close', onend)
+ entry.on('disown', onend)
function onend () {
if (ended) return
ended = true
- me.emit("childEnd", entry)
- me.emit("entryEnd", entry)
- me._currentEntry = null
- if (!me._paused) {
- me._read()
+ self.emit('childEnd', entry)
+ self.emit('entryEnd', entry)
+ self._currentEntry = null
+ if (!self._paused) {
+ self._read()
}
}
// XXX Remove this. Works in node as of 0.6.2 or so.
// Long filenames should not break stuff.
- entry.on("error", function (er) {
+ entry.on('error', function (er) {
if (entry._swallowErrors) {
- me.warn(er)
- entry.emit("end")
- entry.emit("close")
+ self.warn(er)
+ entry.emit('end')
+ entry.emit('close')
} else {
- me.emit("error", er)
+ self.emit('error', er)
}
})
// proxy up some events.
- ; [ "child"
- , "childEnd"
- , "warn"
- ].forEach(function (ev) {
- entry.on(ev, me.emit.bind(me, ev))
- })
+ ;[
+ 'child',
+ 'childEnd',
+ 'warn'
+ ].forEach(function (ev) {
+ entry.on(ev, self.emit.bind(self, ev))
+ })
})
}
DirReader.prototype.disown = function (entry) {
- entry.emit("beforeDisown")
+ entry.emit('beforeDisown')
entry._disowned = true
entry.parent = entry.root = null
if (entry === this._currentEntry) {
this._currentEntry = null
}
- entry.emit("disown")
+ entry.emit('disown')
}
-DirReader.prototype.getChildProps = function (stat) {
- return { depth: this.depth + 1
- , root: this.root || this
- , parent: this
- , follow: this.follow
- , filter: this.filter
- , sort: this.props.sort
- , hardlinks: this.props.hardlinks
- }
+DirReader.prototype.getChildProps = function () {
+ return {
+ depth: this.depth + 1,
+ root: this.root || this,
+ parent: this,
+ follow: this.follow,
+ filter: this.filter,
+ sort: this.props.sort,
+ hardlinks: this.props.hardlinks
+ }
}
DirReader.prototype.pause = function (who) {
- var me = this
- if (me._paused) return
- who = who || me
- me._paused = true
- if (me._currentEntry && me._currentEntry.pause) {
- me._currentEntry.pause(who)
+ var self = this
+ if (self._paused) return
+ who = who || self
+ self._paused = true
+ if (self._currentEntry && self._currentEntry.pause) {
+ self._currentEntry.pause(who)
}
- me.emit("pause", who)
+ self.emit('pause', who)
}
DirReader.prototype.resume = function (who) {
- var me = this
- if (!me._paused) return
- who = who || me
-
- me._paused = false
- // console.error("DR Emit Resume", me._path)
- me.emit("resume", who)
- if (me._paused) {
- // console.error("DR Re-paused", me._path)
+ var self = this
+ if (!self._paused) return
+ who = who || self
+
+ self._paused = false
+ // console.error('DR Emit Resume', self._path)
+ self.emit('resume', who)
+ if (self._paused) {
+ // console.error('DR Re-paused', self._path)
return
}
- if (me._currentEntry) {
- if (me._currentEntry.resume) me._currentEntry.resume(who)
- } else me._read()
+ if (self._currentEntry) {
+ if (self._currentEntry.resume) self._currentEntry.resume(who)
+ } else self._read()
}
DirReader.prototype.emitEntry = function (entry) {
- this.emit("entry", entry)
- this.emit("child", entry)
+ this.emit('entry', entry)
+ this.emit('child', entry)
}
diff --git a/deps/npm/node_modules/fstream/lib/dir-writer.js b/deps/npm/node_modules/fstream/lib/dir-writer.js
index 7073b883ea282e..aed9e4db146d77 100644
--- a/deps/npm/node_modules/fstream/lib/dir-writer.js
+++ b/deps/npm/node_modules/fstream/lib/dir-writer.js
@@ -6,38 +6,37 @@
module.exports = DirWriter
-var fs = require("graceful-fs")
- , fstream = require("../fstream.js")
- , Writer = require("./writer.js")
- , inherits = require("inherits")
- , mkdir = require("mkdirp")
- , path = require("path")
- , collect = require("./collect.js")
+var Writer = require('./writer.js')
+var inherits = require('inherits')
+var mkdir = require('mkdirp')
+var path = require('path')
+var collect = require('./collect.js')
inherits(DirWriter, Writer)
function DirWriter (props) {
- var me = this
- if (!(me instanceof DirWriter)) me.error(
- "DirWriter must be called as constructor.", null, true)
+ var self = this
+ if (!(self instanceof DirWriter)) {
+ self.error('DirWriter must be called as constructor.', null, true)
+ }
// should already be established as a Directory type
- if (props.type !== "Directory" || !props.Directory) {
- me.error("Non-directory type "+ props.type + " " +
- JSON.stringify(props), null, true)
+ if (props.type !== 'Directory' || !props.Directory) {
+ self.error('Non-directory type ' + props.type + ' ' +
+ JSON.stringify(props), null, true)
}
Writer.call(this, props)
}
DirWriter.prototype._create = function () {
- var me = this
- mkdir(me._path, Writer.dirmode, function (er) {
- if (er) return me.error(er)
+ var self = this
+ mkdir(self._path, Writer.dirmode, function (er) {
+ if (er) return self.error(er)
// ready to start getting entries!
- me.ready = true
- me.emit("ready")
- me._process()
+ self.ready = true
+ self.emit('ready')
+ self._process()
})
}
@@ -55,78 +54,82 @@ DirWriter.prototype.end = function () {
}
DirWriter.prototype.add = function (entry) {
- var me = this
+ var self = this
- // console.error("\tadd", entry._path, "->", me._path)
+ // console.error('\tadd', entry._path, '->', self._path)
collect(entry)
- if (!me.ready || me._currentEntry) {
- me._buffer.push(entry)
+ if (!self.ready || self._currentEntry) {
+ self._buffer.push(entry)
return false
}
// create a new writer, and pipe the incoming entry into it.
- if (me._ended) {
- return me.error("add after end")
+ if (self._ended) {
+ return self.error('add after end')
}
- me._buffer.push(entry)
- me._process()
+ self._buffer.push(entry)
+ self._process()
- return 0 === this._buffer.length
+ return this._buffer.length === 0
}
DirWriter.prototype._process = function () {
- var me = this
+ var self = this
- // console.error("DW Process p=%j", me._processing, me.basename)
+ // console.error('DW Process p=%j', self._processing, self.basename)
- if (me._processing) return
+ if (self._processing) return
- var entry = me._buffer.shift()
+ var entry = self._buffer.shift()
if (!entry) {
// console.error("DW Drain")
- me.emit("drain")
- if (me._ended) me._finish()
+ self.emit('drain')
+ if (self._ended) self._finish()
return
}
- me._processing = true
+ self._processing = true
// console.error("DW Entry", entry._path)
- me.emit("entry", entry)
+ self.emit('entry', entry)
// ok, add this entry
//
// don't allow recursive copying
var p = entry
+ var pp
do {
- var pp = p._path || p.path
- if (pp === me.root._path || pp === me._path ||
- (pp && pp.indexOf(me._path) === 0)) {
- // console.error("DW Exit (recursive)", entry.basename, me._path)
- me._processing = false
+ pp = p._path || p.path
+ if (pp === self.root._path || pp === self._path ||
+ (pp && pp.indexOf(self._path) === 0)) {
+ // console.error('DW Exit (recursive)', entry.basename, self._path)
+ self._processing = false
if (entry._collected) entry.pipe()
- return me._process()
+ return self._process()
}
- } while (p = p.parent)
+ p = p.parent
+ } while (p)
// console.error("DW not recursive")
// chop off the entry's root dir, replace with ours
- var props = { parent: me
- , root: me.root || me
- , type: entry.type
- , depth: me.depth + 1 }
+ var props = {
+ parent: self,
+ root: self.root || self,
+ type: entry.type,
+ depth: self.depth + 1
+ }
- var p = entry._path || entry.path || entry.props.path
+ pp = entry._path || entry.path || entry.props.path
if (entry.parent) {
- p = p.substr(entry.parent._path.length + 1)
+ pp = pp.substr(entry.parent._path.length + 1)
}
// get rid of any ../../ shenanigans
- props.path = path.join(me.path, path.join("/", p))
+ props.path = path.join(self.path, path.join('/', pp))
// if i have a filter, the child should inherit it.
- props.filter = me.filter
+ props.filter = self.filter
// all the rest of the stuff, copy over from the source.
Object.keys(entry.props).forEach(function (k) {
@@ -136,8 +139,8 @@ DirWriter.prototype._process = function () {
})
// not sure at this point what kind of writer this is.
- var child = me._currentChild = new Writer(props)
- child.on("ready", function () {
+ var child = self._currentChild = new Writer(props)
+ child.on('ready', function () {
// console.error("DW Child Ready", child.type, child._path)
// console.error(" resuming", entry._path)
entry.pipe(child)
@@ -146,26 +149,26 @@ DirWriter.prototype._process = function () {
// XXX Make this work in node.
// Long filenames should not break stuff.
- child.on("error", function (er) {
+ child.on('error', function (er) {
if (child._swallowErrors) {
- me.warn(er)
- child.emit("end")
- child.emit("close")
+ self.warn(er)
+ child.emit('end')
+ child.emit('close')
} else {
- me.emit("error", er)
+ self.emit('error', er)
}
})
// we fire _end internally *after* end, so that we don't move on
// until any "end" listeners have had their chance to do stuff.
- child.on("close", onend)
+ child.on('close', onend)
var ended = false
function onend () {
if (ended) return
ended = true
// console.error("* DW Child end", child.basename)
- me._currentChild = null
- me._processing = false
- me._process()
+ self._currentChild = null
+ self._processing = false
+ self._process()
}
}
diff --git a/deps/npm/node_modules/fstream/lib/file-reader.js b/deps/npm/node_modules/fstream/lib/file-reader.js
index 4720cd86a4aa38..0757b286b50bed 100644
--- a/deps/npm/node_modules/fstream/lib/file-reader.js
+++ b/deps/npm/node_modules/fstream/lib/file-reader.js
@@ -2,150 +2,149 @@
module.exports = FileReader
-var fs = require("graceful-fs")
- , fstream = require("../fstream.js")
- , Reader = fstream.Reader
- , inherits = require("inherits")
- , mkdir = require("mkdirp")
- , Reader = require("./reader.js")
- , EOF = {EOF: true}
- , CLOSE = {CLOSE: true}
+var fs = require('graceful-fs')
+var inherits = require('inherits')
+var Reader = require('./reader.js')
+var EOF = {EOF: true}
+var CLOSE = {CLOSE: true}
inherits(FileReader, Reader)
function FileReader (props) {
// console.error(" FR create", props.path, props.size, new Error().stack)
- var me = this
- if (!(me instanceof FileReader)) throw new Error(
- "FileReader must be called as constructor.")
+ var self = this
+ if (!(self instanceof FileReader)) {
+ throw new Error('FileReader must be called as constructor.')
+ }
// should already be established as a File type
// XXX Todo: preserve hardlinks by tracking dev+inode+nlink,
// with a HardLinkReader class.
- if (!((props.type === "Link" && props.Link) ||
- (props.type === "File" && props.File))) {
- throw new Error("Non-file type "+ props.type)
+ if (!((props.type === 'Link' && props.Link) ||
+ (props.type === 'File' && props.File))) {
+ throw new Error('Non-file type ' + props.type)
}
- me._buffer = []
- me._bytesEmitted = 0
- Reader.call(me, props)
+ self._buffer = []
+ self._bytesEmitted = 0
+ Reader.call(self, props)
}
FileReader.prototype._getStream = function () {
- var me = this
- , stream = me._stream = fs.createReadStream(me._path, me.props)
+ var self = this
+ var stream = self._stream = fs.createReadStream(self._path, self.props)
- if (me.props.blksize) {
- stream.bufferSize = me.props.blksize
+ if (self.props.blksize) {
+ stream.bufferSize = self.props.blksize
}
- stream.on("open", me.emit.bind(me, "open"))
+ stream.on('open', self.emit.bind(self, 'open'))
- stream.on("data", function (c) {
- // console.error("\t\t%d %s", c.length, me.basename)
- me._bytesEmitted += c.length
+ stream.on('data', function (c) {
+ // console.error('\t\t%d %s', c.length, self.basename)
+ self._bytesEmitted += c.length
// no point saving empty chunks
- if (!c.length) return
- else if (me._paused || me._buffer.length) {
- me._buffer.push(c)
- me._read()
- } else me.emit("data", c)
+ if (!c.length) {
+ return
+ } else if (self._paused || self._buffer.length) {
+ self._buffer.push(c)
+ self._read()
+ } else self.emit('data', c)
})
- stream.on("end", function () {
- if (me._paused || me._buffer.length) {
- // console.error("FR Buffering End", me._path)
- me._buffer.push(EOF)
- me._read()
+ stream.on('end', function () {
+ if (self._paused || self._buffer.length) {
+ // console.error('FR Buffering End', self._path)
+ self._buffer.push(EOF)
+ self._read()
} else {
- me.emit("end")
+ self.emit('end')
}
- if (me._bytesEmitted !== me.props.size) {
- me.error("Didn't get expected byte count\n"+
- "expect: "+me.props.size + "\n" +
- "actual: "+me._bytesEmitted)
+ if (self._bytesEmitted !== self.props.size) {
+ self.error("Didn't get expected byte count\n" +
+ 'expect: ' + self.props.size + '\n' +
+ 'actual: ' + self._bytesEmitted)
}
})
- stream.on("close", function () {
- if (me._paused || me._buffer.length) {
- // console.error("FR Buffering Close", me._path)
- me._buffer.push(CLOSE)
- me._read()
+ stream.on('close', function () {
+ if (self._paused || self._buffer.length) {
+ // console.error('FR Buffering Close', self._path)
+ self._buffer.push(CLOSE)
+ self._read()
} else {
- // console.error("FR close 1", me._path)
- me.emit("close")
+ // console.error('FR close 1', self._path)
+ self.emit('close')
}
})
- stream.on("error", function (e) {
- me.emit("error", e);
- });
+ stream.on('error', function (e) {
+ self.emit('error', e)
+ })
- me._read()
+ self._read()
}
FileReader.prototype._read = function () {
- var me = this
- // console.error("FR _read", me._path)
- if (me._paused) {
- // console.error("FR _read paused", me._path)
+ var self = this
+ // console.error('FR _read', self._path)
+ if (self._paused) {
+ // console.error('FR _read paused', self._path)
return
}
- if (!me._stream) {
- // console.error("FR _getStream calling", me._path)
- return me._getStream()
+ if (!self._stream) {
+ // console.error('FR _getStream calling', self._path)
+ return self._getStream()
}
// clear out the buffer, if there is one.
- if (me._buffer.length) {
- // console.error("FR _read has buffer", me._buffer.length, me._path)
- var buf = me._buffer
- for (var i = 0, l = buf.length; i < l; i ++) {
+ if (self._buffer.length) {
+ // console.error('FR _read has buffer', self._buffer.length, self._path)
+ var buf = self._buffer
+ for (var i = 0, l = buf.length; i < l; i++) {
var c = buf[i]
if (c === EOF) {
- // console.error("FR Read emitting buffered end", me._path)
- me.emit("end")
+ // console.error('FR Read emitting buffered end', self._path)
+ self.emit('end')
} else if (c === CLOSE) {
- // console.error("FR Read emitting buffered close", me._path)
- me.emit("close")
+ // console.error('FR Read emitting buffered close', self._path)
+ self.emit('close')
} else {
- // console.error("FR Read emitting buffered data", me._path)
- me.emit("data", c)
+ // console.error('FR Read emitting buffered data', self._path)
+ self.emit('data', c)
}
- if (me._paused) {
- // console.error("FR Read Re-pausing at "+i, me._path)
- me._buffer = buf.slice(i)
+ if (self._paused) {
+ // console.error('FR Read Re-pausing at '+i, self._path)
+ self._buffer = buf.slice(i)
return
}
}
- me._buffer.length = 0
+ self._buffer.length = 0
}
// console.error("FR _read done")
// that's about all there is to it.
}
FileReader.prototype.pause = function (who) {
- var me = this
- // console.error("FR Pause", me._path)
- if (me._paused) return
- who = who || me
- me._paused = true
- if (me._stream) me._stream.pause()
- me.emit("pause", who)
+ var self = this
+ // console.error('FR Pause', self._path)
+ if (self._paused) return
+ who = who || self
+ self._paused = true
+ if (self._stream) self._stream.pause()
+ self.emit('pause', who)
}
FileReader.prototype.resume = function (who) {
- var me = this
- // console.error("FR Resume", me._path)
- if (!me._paused) return
- who = who || me
- me.emit("resume", who)
- me._paused = false
- if (me._stream) me._stream.resume()
- me._read()
+ var self = this
+ // console.error('FR Resume', self._path)
+ if (!self._paused) return
+ who = who || self
+ self.emit('resume', who)
+ self._paused = false
+ if (self._stream) self._stream.resume()
+ self._read()
}
diff --git a/deps/npm/node_modules/fstream/lib/file-writer.js b/deps/npm/node_modules/fstream/lib/file-writer.js
index 5e9902a6316f02..4c803d8d68d2f5 100644
--- a/deps/npm/node_modules/fstream/lib/file-writer.js
+++ b/deps/npm/node_modules/fstream/lib/file-writer.js
@@ -1,104 +1,107 @@
module.exports = FileWriter
-var fs = require("graceful-fs")
- , mkdir = require("mkdirp")
- , Writer = require("./writer.js")
- , inherits = require("inherits")
- , EOF = {}
+var fs = require('graceful-fs')
+var Writer = require('./writer.js')
+var inherits = require('inherits')
+var EOF = {}
inherits(FileWriter, Writer)
function FileWriter (props) {
- var me = this
- if (!(me instanceof FileWriter)) throw new Error(
- "FileWriter must be called as constructor.")
+ var self = this
+ if (!(self instanceof FileWriter)) {
+ throw new Error('FileWriter must be called as constructor.')
+ }
// should already be established as a File type
- if (props.type !== "File" || !props.File) {
- throw new Error("Non-file type "+ props.type)
+ if (props.type !== 'File' || !props.File) {
+ throw new Error('Non-file type ' + props.type)
}
- me._buffer = []
- me._bytesWritten = 0
+ self._buffer = []
+ self._bytesWritten = 0
Writer.call(this, props)
}
FileWriter.prototype._create = function () {
- var me = this
- if (me._stream) return
+ var self = this
+ if (self._stream) return
var so = {}
- if (me.props.flags) so.flags = me.props.flags
+ if (self.props.flags) so.flags = self.props.flags
so.mode = Writer.filemode
- if (me._old && me._old.blksize) so.bufferSize = me._old.blksize
+ if (self._old && self._old.blksize) so.bufferSize = self._old.blksize
- me._stream = fs.createWriteStream(me._path, so)
+ self._stream = fs.createWriteStream(self._path, so)
- me._stream.on("open", function (fd) {
- // console.error("FW open", me._buffer, me._path)
- me.ready = true
- me._buffer.forEach(function (c) {
- if (c === EOF) me._stream.end()
- else me._stream.write(c)
+ self._stream.on('open', function () {
+ // console.error("FW open", self._buffer, self._path)
+ self.ready = true
+ self._buffer.forEach(function (c) {
+ if (c === EOF) self._stream.end()
+ else self._stream.write(c)
})
- me.emit("ready")
+ self.emit('ready')
// give this a kick just in case it needs it.
- me.emit("drain")
+ self.emit('drain')
})
- me._stream.on("drain", function () { me.emit("drain") })
+ self._stream.on('error', function (er) { self.emit('error', er) })
+
+ self._stream.on('drain', function () { self.emit('drain') })
- me._stream.on("close", function () {
- // console.error("\n\nFW Stream Close", me._path, me.size)
- me._finish()
+ self._stream.on('close', function () {
+ // console.error('\n\nFW Stream Close', self._path, self.size)
+ self._finish()
})
}
FileWriter.prototype.write = function (c) {
- var me = this
+ var self = this
- me._bytesWritten += c.length
+ self._bytesWritten += c.length
- if (!me.ready) {
- if (!Buffer.isBuffer(c) && typeof c !== 'string')
+ if (!self.ready) {
+ if (!Buffer.isBuffer(c) && typeof c !== 'string') {
throw new Error('invalid write data')
- me._buffer.push(c)
+ }
+ self._buffer.push(c)
return false
}
- var ret = me._stream.write(c)
- // console.error("\t-- fw wrote, _stream says", ret, me._stream._queue.length)
+ var ret = self._stream.write(c)
+ // console.error('\t-- fw wrote, _stream says', ret, self._stream._queue.length)
// allow 2 buffered writes, because otherwise there's just too
// much stop and go bs.
- if (ret === false && me._stream._queue) {
- return me._stream._queue.length <= 2;
+ if (ret === false && self._stream._queue) {
+ return self._stream._queue.length <= 2
} else {
- return ret;
+ return ret
}
}
FileWriter.prototype.end = function (c) {
- var me = this
+ var self = this
- if (c) me.write(c)
+ if (c) self.write(c)
- if (!me.ready) {
- me._buffer.push(EOF)
+ if (!self.ready) {
+ self._buffer.push(EOF)
return false
}
- return me._stream.end()
+ return self._stream.end()
}
FileWriter.prototype._finish = function () {
- var me = this
- if (typeof me.size === "number" && me._bytesWritten != me.size) {
- me.error(
- "Did not get expected byte count.\n" +
- "expect: " + me.size + "\n" +
- "actual: " + me._bytesWritten)
+ var self = this
+ if (typeof self.size === 'number' && self._bytesWritten !== self.size) {
+ self.error(
+ 'Did not get expected byte count.\n' +
+ 'expect: ' + self.size + '\n' +
+ 'actual: ' + self._bytesWritten)
}
- Writer.prototype._finish.call(me)
+ Writer.prototype._finish.call(self)
}
diff --git a/deps/npm/node_modules/fstream/lib/get-type.js b/deps/npm/node_modules/fstream/lib/get-type.js
index cd65c41d8bcf9e..19f6a657db8475 100644
--- a/deps/npm/node_modules/fstream/lib/get-type.js
+++ b/deps/npm/node_modules/fstream/lib/get-type.js
@@ -1,26 +1,27 @@
module.exports = getType
function getType (st) {
- var types =
- [ "Directory"
- , "File"
- , "SymbolicLink"
- , "Link" // special for hardlinks from tarballs
- , "BlockDevice"
- , "CharacterDevice"
- , "FIFO"
- , "Socket" ]
- , type
+ var types = [
+ 'Directory',
+ 'File',
+ 'SymbolicLink',
+ 'Link', // special for hardlinks from tarballs
+ 'BlockDevice',
+ 'CharacterDevice',
+ 'FIFO',
+ 'Socket'
+ ]
+ var type
- if (st.type && -1 !== types.indexOf(st.type)) {
+ if (st.type && types.indexOf(st.type) !== -1) {
st[st.type] = true
return st.type
}
- for (var i = 0, l = types.length; i < l; i ++) {
+ for (var i = 0, l = types.length; i < l; i++) {
type = types[i]
- var is = st[type] || st["is" + type]
- if (typeof is === "function") is = is.call(st)
+ var is = st[type] || st['is' + type]
+ if (typeof is === 'function') is = is.call(st)
if (is) {
st[type] = true
st.type = type
diff --git a/deps/npm/node_modules/fstream/lib/link-reader.js b/deps/npm/node_modules/fstream/lib/link-reader.js
index 7e7ab6ce5c0e07..a44dd39d7cdb98 100644
--- a/deps/npm/node_modules/fstream/lib/link-reader.js
+++ b/deps/npm/node_modules/fstream/lib/link-reader.js
@@ -6,25 +6,24 @@
module.exports = LinkReader
-var fs = require("graceful-fs")
- , fstream = require("../fstream.js")
- , inherits = require("inherits")
- , mkdir = require("mkdirp")
- , Reader = require("./reader.js")
+var fs = require('graceful-fs')
+var inherits = require('inherits')
+var Reader = require('./reader.js')
inherits(LinkReader, Reader)
function LinkReader (props) {
- var me = this
- if (!(me instanceof LinkReader)) throw new Error(
- "LinkReader must be called as constructor.")
+ var self = this
+ if (!(self instanceof LinkReader)) {
+ throw new Error('LinkReader must be called as constructor.')
+ }
- if (!((props.type === "Link" && props.Link) ||
- (props.type === "SymbolicLink" && props.SymbolicLink))) {
- throw new Error("Non-link type "+ props.type)
+ if (!((props.type === 'Link' && props.Link) ||
+ (props.type === 'SymbolicLink' && props.SymbolicLink))) {
+ throw new Error('Non-link type ' + props.type)
}
- Reader.call(me, props)
+ Reader.call(self, props)
}
// When piping a LinkReader into a LinkWriter, we have to
@@ -32,23 +31,23 @@ function LinkReader (props) {
// happen *before* the "ready" event, which means we need to
// override the _stat method.
LinkReader.prototype._stat = function (currentStat) {
- var me = this
- fs.readlink(me._path, function (er, linkpath) {
- if (er) return me.error(er)
- me.linkpath = me.props.linkpath = linkpath
- me.emit("linkpath", linkpath)
- Reader.prototype._stat.call(me, currentStat)
+ var self = this
+ fs.readlink(self._path, function (er, linkpath) {
+ if (er) return self.error(er)
+ self.linkpath = self.props.linkpath = linkpath
+ self.emit('linkpath', linkpath)
+ Reader.prototype._stat.call(self, currentStat)
})
}
LinkReader.prototype._read = function () {
- var me = this
- if (me._paused) return
+ var self = this
+ if (self._paused) return
// basically just a no-op, since we got all the info we need
// from the _stat method
- if (!me._ended) {
- me.emit("end")
- me.emit("close")
- me._ended = true
+ if (!self._ended) {
+ self.emit('end')
+ self.emit('close')
+ self._ended = true
}
}
diff --git a/deps/npm/node_modules/fstream/lib/link-writer.js b/deps/npm/node_modules/fstream/lib/link-writer.js
index 5c8f1e7012899d..07a9abf7e6f9cd 100644
--- a/deps/npm/node_modules/fstream/lib/link-writer.js
+++ b/deps/npm/node_modules/fstream/lib/link-writer.js
@@ -1,28 +1,28 @@
-
module.exports = LinkWriter
-var fs = require("graceful-fs")
- , Writer = require("./writer.js")
- , inherits = require("inherits")
- , path = require("path")
- , rimraf = require("rimraf")
+var fs = require('graceful-fs')
+var Writer = require('./writer.js')
+var inherits = require('inherits')
+var path = require('path')
+var rimraf = require('rimraf')
inherits(LinkWriter, Writer)
function LinkWriter (props) {
- var me = this
- if (!(me instanceof LinkWriter)) throw new Error(
- "LinkWriter must be called as constructor.")
+ var self = this
+ if (!(self instanceof LinkWriter)) {
+ throw new Error('LinkWriter must be called as constructor.')
+ }
// should already be established as a Link type
- if (!((props.type === "Link" && props.Link) ||
- (props.type === "SymbolicLink" && props.SymbolicLink))) {
- throw new Error("Non-link type "+ props.type)
+ if (!((props.type === 'Link' && props.Link) ||
+ (props.type === 'SymbolicLink' && props.SymbolicLink))) {
+ throw new Error('Non-link type ' + props.type)
}
- if (props.linkpath === "") props.linkpath = "."
+ if (props.linkpath === '') props.linkpath = '.'
if (!props.linkpath) {
- me.error("Need linkpath property to create " + props.type)
+ self.error('Need linkpath property to create ' + props.type)
}
Writer.call(this, props)
@@ -30,32 +30,32 @@ function LinkWriter (props) {
LinkWriter.prototype._create = function () {
// console.error(" LW _create")
- var me = this
- , hard = me.type === "Link" || process.platform === "win32"
- , link = hard ? "link" : "symlink"
- , lp = hard ? path.resolve(me.dirname, me.linkpath) : me.linkpath
+ var self = this
+ var hard = self.type === 'Link' || process.platform === 'win32'
+ var link = hard ? 'link' : 'symlink'
+ var lp = hard ? path.resolve(self.dirname, self.linkpath) : self.linkpath
// can only change the link path by clobbering
// For hard links, let's just assume that's always the case, since
// there's no good way to read them if we don't already know.
- if (hard) return clobber(me, lp, link)
+ if (hard) return clobber(self, lp, link)
- fs.readlink(me._path, function (er, p) {
+ fs.readlink(self._path, function (er, p) {
// only skip creation if it's exactly the same link
- if (p && p === lp) return finish(me)
- clobber(me, lp, link)
+ if (p && p === lp) return finish(self)
+ clobber(self, lp, link)
})
}
-function clobber (me, lp, link) {
- rimraf(me._path, function (er) {
- if (er) return me.error(er)
- create(me, lp, link)
+function clobber (self, lp, link) {
+ rimraf(self._path, function (er) {
+ if (er) return self.error(er)
+ create(self, lp, link)
})
}
-function create (me, lp, link) {
- fs[link](lp, me._path, function (er) {
+function create (self, lp, link) {
+ fs[link](lp, self._path, function (er) {
// if this is a hard link, and we're in the process of writing out a
// directory, it's very possible that the thing we're linking to
// doesn't exist yet (especially if it was intended as a symlink),
@@ -65,24 +65,24 @@ function create (me, lp, link) {
// A better solution would be to have fs.symlink be supported on
// windows in some nice fashion.
if (er) {
- if ((er.code === "ENOENT" ||
- er.code === "EACCES" ||
- er.code === "EPERM" ) && process.platform === "win32") {
- me.ready = true
- me.emit("ready")
- me.emit("end")
- me.emit("close")
- me.end = me._finish = function () {}
- } else return me.error(er)
+ if ((er.code === 'ENOENT' ||
+ er.code === 'EACCES' ||
+ er.code === 'EPERM') && process.platform === 'win32') {
+ self.ready = true
+ self.emit('ready')
+ self.emit('end')
+ self.emit('close')
+ self.end = self._finish = function () {}
+ } else return self.error(er)
}
- finish(me)
+ finish(self)
})
}
-function finish (me) {
- me.ready = true
- me.emit("ready")
- if (me._ended && !me._finished) me._finish()
+function finish (self) {
+ self.ready = true
+ self.emit('ready')
+ if (self._ended && !self._finished) self._finish()
}
LinkWriter.prototype.end = function () {
diff --git a/deps/npm/node_modules/fstream/lib/proxy-reader.js b/deps/npm/node_modules/fstream/lib/proxy-reader.js
index a0ece34a26828d..4f431c9d9e27d3 100644
--- a/deps/npm/node_modules/fstream/lib/proxy-reader.js
+++ b/deps/npm/node_modules/fstream/lib/proxy-reader.js
@@ -3,82 +3,84 @@
module.exports = ProxyReader
-var Reader = require("./reader.js")
- , getType = require("./get-type.js")
- , inherits = require("inherits")
- , fs = require("graceful-fs")
+var Reader = require('./reader.js')
+var getType = require('./get-type.js')
+var inherits = require('inherits')
+var fs = require('graceful-fs')
inherits(ProxyReader, Reader)
function ProxyReader (props) {
- var me = this
- if (!(me instanceof ProxyReader)) throw new Error(
- "ProxyReader must be called as constructor.")
+ var self = this
+ if (!(self instanceof ProxyReader)) {
+ throw new Error('ProxyReader must be called as constructor.')
+ }
- me.props = props
- me._buffer = []
- me.ready = false
+ self.props = props
+ self._buffer = []
+ self.ready = false
- Reader.call(me, props)
+ Reader.call(self, props)
}
ProxyReader.prototype._stat = function () {
- var me = this
- , props = me.props
- // stat the thing to see what the proxy should be.
- , stat = props.follow ? "stat" : "lstat"
+ var self = this
+ var props = self.props
+ // stat the thing to see what the proxy should be.
+ var stat = props.follow ? 'stat' : 'lstat'
fs[stat](props.path, function (er, current) {
var type
if (er || !current) {
- type = "File"
+ type = 'File'
} else {
type = getType(current)
}
props[type] = true
- props.type = me.type = type
+ props.type = self.type = type
- me._old = current
- me._addProxy(Reader(props, current))
+ self._old = current
+ self._addProxy(Reader(props, current))
})
}
ProxyReader.prototype._addProxy = function (proxy) {
- var me = this
- if (me._proxyTarget) {
- return me.error("proxy already set")
+ var self = this
+ if (self._proxyTarget) {
+ return self.error('proxy already set')
}
- me._proxyTarget = proxy
- proxy._proxy = me
-
- ; [ "error"
- , "data"
- , "end"
- , "close"
- , "linkpath"
- , "entry"
- , "entryEnd"
- , "child"
- , "childEnd"
- , "warn"
- , "stat"
- ].forEach(function (ev) {
- // console.error("~~ proxy event", ev, me.path)
- proxy.on(ev, me.emit.bind(me, ev))
- })
-
- me.emit("proxy", proxy)
-
- proxy.on("ready", function () {
- // console.error("~~ proxy is ready!", me.path)
- me.ready = true
- me.emit("ready")
+ self._proxyTarget = proxy
+ proxy._proxy = self
+
+ ;[
+ 'error',
+ 'data',
+ 'end',
+ 'close',
+ 'linkpath',
+ 'entry',
+ 'entryEnd',
+ 'child',
+ 'childEnd',
+ 'warn',
+ 'stat'
+ ].forEach(function (ev) {
+ // console.error('~~ proxy event', ev, self.path)
+ proxy.on(ev, self.emit.bind(self, ev))
+ })
+
+ self.emit('proxy', proxy)
+
+ proxy.on('ready', function () {
+ // console.error("~~ proxy is ready!", self.path)
+ self.ready = true
+ self.emit('ready')
})
- var calls = me._buffer
- me._buffer.length = 0
+ var calls = self._buffer
+ self._buffer.length = 0
calls.forEach(function (c) {
proxy[c[0]].apply(proxy, c[1])
})
diff --git a/deps/npm/node_modules/fstream/lib/proxy-writer.js b/deps/npm/node_modules/fstream/lib/proxy-writer.js
index b0476633a3a922..a6544621bfbe75 100644
--- a/deps/npm/node_modules/fstream/lib/proxy-writer.js
+++ b/deps/npm/node_modules/fstream/lib/proxy-writer.js
@@ -7,74 +7,76 @@
module.exports = ProxyWriter
-var Writer = require("./writer.js")
- , getType = require("./get-type.js")
- , inherits = require("inherits")
- , collect = require("./collect.js")
- , fs = require("fs")
+var Writer = require('./writer.js')
+var getType = require('./get-type.js')
+var inherits = require('inherits')
+var collect = require('./collect.js')
+var fs = require('fs')
inherits(ProxyWriter, Writer)
function ProxyWriter (props) {
- var me = this
- if (!(me instanceof ProxyWriter)) throw new Error(
- "ProxyWriter must be called as constructor.")
+ var self = this
+ if (!(self instanceof ProxyWriter)) {
+ throw new Error('ProxyWriter must be called as constructor.')
+ }
- me.props = props
- me._needDrain = false
+ self.props = props
+ self._needDrain = false
- Writer.call(me, props)
+ Writer.call(self, props)
}
ProxyWriter.prototype._stat = function () {
- var me = this
- , props = me.props
- // stat the thing to see what the proxy should be.
- , stat = props.follow ? "stat" : "lstat"
+ var self = this
+ var props = self.props
+ // stat the thing to see what the proxy should be.
+ var stat = props.follow ? 'stat' : 'lstat'
fs[stat](props.path, function (er, current) {
var type
if (er || !current) {
- type = "File"
+ type = 'File'
} else {
type = getType(current)
}
props[type] = true
- props.type = me.type = type
+ props.type = self.type = type
- me._old = current
- me._addProxy(Writer(props, current))
+ self._old = current
+ self._addProxy(Writer(props, current))
})
}
ProxyWriter.prototype._addProxy = function (proxy) {
// console.error("~~ set proxy", this.path)
- var me = this
- if (me._proxy) {
- return me.error("proxy already set")
+ var self = this
+ if (self._proxy) {
+ return self.error('proxy already set')
}
- me._proxy = proxy
- ; [ "ready"
- , "error"
- , "close"
- , "pipe"
- , "drain"
- , "warn"
- ].forEach(function (ev) {
- proxy.on(ev, me.emit.bind(me, ev))
- })
+ self._proxy = proxy
+ ;[
+ 'ready',
+ 'error',
+ 'close',
+ 'pipe',
+ 'drain',
+ 'warn'
+ ].forEach(function (ev) {
+ proxy.on(ev, self.emit.bind(self, ev))
+ })
- me.emit("proxy", proxy)
+ self.emit('proxy', proxy)
- var calls = me._buffer
+ var calls = self._buffer
calls.forEach(function (c) {
// console.error("~~ ~~ proxy buffered call", c[0], c[1])
proxy[c[0]].apply(proxy, c[1])
})
- me._buffer.length = 0
- if (me._needsDrain) me.emit("drain")
+ self._buffer.length = 0
+ if (self._needsDrain) self.emit('drain')
}
ProxyWriter.prototype.add = function (entry) {
@@ -82,7 +84,7 @@ ProxyWriter.prototype.add = function (entry) {
collect(entry)
if (!this._proxy) {
- this._buffer.push(["add", [entry]])
+ this._buffer.push(['add', [entry]])
this._needDrain = true
return false
}
@@ -90,9 +92,9 @@ ProxyWriter.prototype.add = function (entry) {
}
ProxyWriter.prototype.write = function (c) {
- // console.error("~~ proxy write")
+ // console.error('~~ proxy write')
if (!this._proxy) {
- this._buffer.push(["write", [c]])
+ this._buffer.push(['write', [c]])
this._needDrain = true
return false
}
@@ -100,9 +102,9 @@ ProxyWriter.prototype.write = function (c) {
}
ProxyWriter.prototype.end = function (c) {
- // console.error("~~ proxy end")
+ // console.error('~~ proxy end')
if (!this._proxy) {
- this._buffer.push(["end", [c]])
+ this._buffer.push(['end', [c]])
return false
}
return this._proxy.end(c)
diff --git a/deps/npm/node_modules/fstream/lib/reader.js b/deps/npm/node_modules/fstream/lib/reader.js
index 0edb794d38f907..1d007ee211c1ab 100644
--- a/deps/npm/node_modules/fstream/lib/reader.js
+++ b/deps/npm/node_modules/fstream/lib/reader.js
@@ -1,33 +1,28 @@
-
module.exports = Reader
-var fs = require("graceful-fs")
- , Stream = require("stream").Stream
- , inherits = require("inherits")
- , path = require("path")
- , getType = require("./get-type.js")
- , hardLinks = Reader.hardLinks = {}
- , Abstract = require("./abstract.js")
+var fs = require('graceful-fs')
+var Stream = require('stream').Stream
+var inherits = require('inherits')
+var path = require('path')
+var getType = require('./get-type.js')
+var hardLinks = Reader.hardLinks = {}
+var Abstract = require('./abstract.js')
// Must do this *before* loading the child classes
inherits(Reader, Abstract)
-var DirReader = require("./dir-reader.js")
- , FileReader = require("./file-reader.js")
- , LinkReader = require("./link-reader.js")
- , SocketReader = require("./socket-reader.js")
- , ProxyReader = require("./proxy-reader.js")
+var LinkReader = require('./link-reader.js')
function Reader (props, currentStat) {
- var me = this
- if (!(me instanceof Reader)) return new Reader(props, currentStat)
+ var self = this
+ if (!(self instanceof Reader)) return new Reader(props, currentStat)
- if (typeof props === "string") {
+ if (typeof props === 'string') {
props = { path: props }
}
if (!props.path) {
- me.error("Must provide a path", null, true)
+ self.error('Must provide a path', null, true)
}
// polymorphism.
@@ -36,11 +31,10 @@ function Reader (props, currentStat) {
// to be the *normal* state of affairs, since we rarely know
// the type of a file prior to reading it.
-
var type
- , ClassType
+ var ClassType
- if (props.type && typeof props.type === "function") {
+ if (props.type && typeof props.type === 'function') {
type = props.type
ClassType = type
} else {
@@ -55,11 +49,11 @@ function Reader (props, currentStat) {
}
switch (type) {
- case "Directory":
- ClassType = DirReader
+ case 'Directory':
+ ClassType = require('./dir-reader.js')
break
- case "Link":
+ case 'Link':
// XXX hard links are just files.
// However, it would be good to keep track of files' dev+inode
// and nlink values, and create a HardLinkReader that emits
@@ -68,66 +62,66 @@ function Reader (props, currentStat) {
// ClassType = HardLinkReader
// break
- case "File":
- ClassType = FileReader
+ case 'File':
+ ClassType = require('./file-reader.js')
break
- case "SymbolicLink":
+ case 'SymbolicLink':
ClassType = LinkReader
break
- case "Socket":
- ClassType = SocketReader
+ case 'Socket':
+ ClassType = require('./socket-reader.js')
break
case null:
- ClassType = ProxyReader
+ ClassType = require('./proxy-reader.js')
break
}
- if (!(me instanceof ClassType)) {
+ if (!(self instanceof ClassType)) {
return new ClassType(props)
}
- Abstract.call(me)
+ Abstract.call(self)
- me.readable = true
- me.writable = false
+ self.readable = true
+ self.writable = false
- me.type = type
- me.props = props
- me.depth = props.depth = props.depth || 0
- me.parent = props.parent || null
- me.root = props.root || (props.parent && props.parent.root) || me
+ self.type = type
+ self.props = props
+ self.depth = props.depth = props.depth || 0
+ self.parent = props.parent || null
+ self.root = props.root || (props.parent && props.parent.root) || self
- me._path = me.path = path.resolve(props.path)
- if (process.platform === "win32") {
- me.path = me._path = me.path.replace(/\?/g, "_")
- if (me._path.length >= 260) {
+ self._path = self.path = path.resolve(props.path)
+ if (process.platform === 'win32') {
+ self.path = self._path = self.path.replace(/\?/g, '_')
+ if (self._path.length >= 260) {
// how DOES one create files on the moon?
// if the path has spaces in it, then UNC will fail.
- me._swallowErrors = true
- //if (me._path.indexOf(" ") === -1) {
- me._path = "\\\\?\\" + me.path.replace(/\//g, "\\")
- //}
+ self._swallowErrors = true
+ // if (self._path.indexOf(" ") === -1) {
+ self._path = '\\\\?\\' + self.path.replace(/\//g, '\\')
+ // }
}
}
- me.basename = props.basename = path.basename(me.path)
- me.dirname = props.dirname = path.dirname(me.path)
+ self.basename = props.basename = path.basename(self.path)
+ self.dirname = props.dirname = path.dirname(self.path)
// these have served their purpose, and are now just noisy clutter
props.parent = props.root = null
// console.error("\n\n\n%s setting size to", props.path, props.size)
- me.size = props.size
- me.filter = typeof props.filter === "function" ? props.filter : null
- if (props.sort === "alpha") props.sort = alphasort
+ self.size = props.size
+ self.filter = typeof props.filter === 'function' ? props.filter : null
+ if (props.sort === 'alpha') props.sort = alphasort
// start the ball rolling.
- // this will stat the thing, and then call me._read()
+ // this will stat the thing, and then call self._read()
// to start reading whatever it is.
// console.error("calling stat", props.path, currentStat)
- me._stat(currentStat)
+ self._stat(currentStat)
}
function alphasort (a, b) {
@@ -139,100 +133,100 @@ function alphasort (a, b) {
}
Reader.prototype._stat = function (currentStat) {
- var me = this
- , props = me.props
- , stat = props.follow ? "stat" : "lstat"
- // console.error("Reader._stat", me._path, currentStat)
+ var self = this
+ var props = self.props
+ var stat = props.follow ? 'stat' : 'lstat'
+ // console.error("Reader._stat", self._path, currentStat)
if (currentStat) process.nextTick(statCb.bind(null, null, currentStat))
- else fs[stat](me._path, statCb)
-
+ else fs[stat](self._path, statCb)
function statCb (er, props_) {
- // console.error("Reader._stat, statCb", me._path, props_, props_.nlink)
- if (er) return me.error(er)
+ // console.error("Reader._stat, statCb", self._path, props_, props_.nlink)
+ if (er) return self.error(er)
Object.keys(props_).forEach(function (k) {
props[k] = props_[k]
})
// if it's not the expected size, then abort here.
- if (undefined !== me.size && props.size !== me.size) {
- return me.error("incorrect size")
+ if (undefined !== self.size && props.size !== self.size) {
+ return self.error('incorrect size')
}
- me.size = props.size
+ self.size = props.size
var type = getType(props)
var handleHardlinks = props.hardlinks !== false
-
+
// special little thing for handling hardlinks.
- if (handleHardlinks && type !== "Directory" && props.nlink && props.nlink > 1) {
- var k = props.dev + ":" + props.ino
- // console.error("Reader has nlink", me._path, k)
- if (hardLinks[k] === me._path || !hardLinks[k]) hardLinks[k] = me._path
- else {
+ if (handleHardlinks && type !== 'Directory' && props.nlink && props.nlink > 1) {
+ var k = props.dev + ':' + props.ino
+ // console.error("Reader has nlink", self._path, k)
+ if (hardLinks[k] === self._path || !hardLinks[k]) {
+ hardLinks[k] = self._path
+ } else {
// switch into hardlink mode.
- type = me.type = me.props.type = "Link"
- me.Link = me.props.Link = true
- me.linkpath = me.props.linkpath = hardLinks[k]
- // console.error("Hardlink detected, switching mode", me._path, me.linkpath)
+ type = self.type = self.props.type = 'Link'
+ self.Link = self.props.Link = true
+ self.linkpath = self.props.linkpath = hardLinks[k]
+ // console.error("Hardlink detected, switching mode", self._path, self.linkpath)
// Setting __proto__ would arguably be the "correct"
// approach here, but that just seems too wrong.
- me._stat = me._read = LinkReader.prototype._read
+ self._stat = self._read = LinkReader.prototype._read
}
}
- if (me.type && me.type !== type) {
- me.error("Unexpected type: " + type)
+ if (self.type && self.type !== type) {
+ self.error('Unexpected type: ' + type)
}
// if the filter doesn't pass, then just skip over this one.
// still have to emit end so that dir-walking can move on.
- if (me.filter) {
- var who = me._proxy || me
+ if (self.filter) {
+ var who = self._proxy || self
// special handling for ProxyReaders
- if (!me.filter.call(who, who, props)) {
- if (!me._disowned) {
- me.abort()
- me.emit("end")
- me.emit("close")
+ if (!self.filter.call(who, who, props)) {
+ if (!self._disowned) {
+ self.abort()
+ self.emit('end')
+ self.emit('close')
}
return
}
}
// last chance to abort or disown before the flow starts!
- var events = ["_stat", "stat", "ready"]
+ var events = ['_stat', 'stat', 'ready']
var e = 0
;(function go () {
- if (me._aborted) {
- me.emit("end")
- me.emit("close")
+ if (self._aborted) {
+ self.emit('end')
+ self.emit('close')
return
}
- if (me._paused && me.type !== "Directory") {
- me.once("resume", go)
+ if (self._paused && self.type !== 'Directory') {
+ self.once('resume', go)
return
}
- var ev = events[e ++]
+ var ev = events[e++]
if (!ev) {
- return me._read()
+ return self._read()
}
- me.emit(ev, props)
+ self.emit(ev, props)
go()
})()
}
}
-Reader.prototype.pipe = function (dest, opts) {
- var me = this
- if (typeof dest.add === "function") {
+Reader.prototype.pipe = function (dest) {
+ var self = this
+ if (typeof dest.add === 'function') {
// piping to a multi-compatible, and we've got directory entries.
- me.on("entry", function (entry) {
+ self.on('entry', function (entry) {
var ret = dest.add(entry)
- if (false === ret) {
- me.pause()
+ if (ret === false) {
+ self.pause()
}
})
}
@@ -244,19 +238,18 @@ Reader.prototype.pipe = function (dest, opts) {
Reader.prototype.pause = function (who) {
this._paused = true
who = who || this
- this.emit("pause", who)
+ this.emit('pause', who)
if (this._stream) this._stream.pause(who)
}
Reader.prototype.resume = function (who) {
this._paused = false
who = who || this
- this.emit("resume", who)
+ this.emit('resume', who)
if (this._stream) this._stream.resume(who)
this._read()
}
Reader.prototype._read = function () {
- this.error("Cannot read unknown type: "+this.type)
+ this.error('Cannot read unknown type: ' + this.type)
}
-
diff --git a/deps/npm/node_modules/fstream/lib/socket-reader.js b/deps/npm/node_modules/fstream/lib/socket-reader.js
index e89c1731aa9c3d..e0456ba890ede8 100644
--- a/deps/npm/node_modules/fstream/lib/socket-reader.js
+++ b/deps/npm/node_modules/fstream/lib/socket-reader.js
@@ -5,34 +5,32 @@
module.exports = SocketReader
-var fs = require("graceful-fs")
- , fstream = require("../fstream.js")
- , inherits = require("inherits")
- , mkdir = require("mkdirp")
- , Reader = require("./reader.js")
+var inherits = require('inherits')
+var Reader = require('./reader.js')
inherits(SocketReader, Reader)
function SocketReader (props) {
- var me = this
- if (!(me instanceof SocketReader)) throw new Error(
- "SocketReader must be called as constructor.")
+ var self = this
+ if (!(self instanceof SocketReader)) {
+ throw new Error('SocketReader must be called as constructor.')
+ }
- if (!(props.type === "Socket" && props.Socket)) {
- throw new Error("Non-socket type "+ props.type)
+ if (!(props.type === 'Socket' && props.Socket)) {
+ throw new Error('Non-socket type ' + props.type)
}
- Reader.call(me, props)
+ Reader.call(self, props)
}
SocketReader.prototype._read = function () {
- var me = this
- if (me._paused) return
+ var self = this
+ if (self._paused) return
// basically just a no-op, since we got all the info we have
// from the _stat method
- if (!me._ended) {
- me.emit("end")
- me.emit("close")
- me._ended = true
+ if (!self._ended) {
+ self.emit('end')
+ self.emit('close')
+ self._ended = true
}
}
diff --git a/deps/npm/node_modules/fstream/lib/writer.js b/deps/npm/node_modules/fstream/lib/writer.js
index 0700813b6797fc..25a608def22eff 100644
--- a/deps/npm/node_modules/fstream/lib/writer.js
+++ b/deps/npm/node_modules/fstream/lib/writer.js
@@ -1,233 +1,232 @@
-
module.exports = Writer
-var fs = require("graceful-fs")
- , inherits = require("inherits")
- , rimraf = require("rimraf")
- , mkdir = require("mkdirp")
- , path = require("path")
- , umask = process.platform === "win32" ? 0 : process.umask()
- , getType = require("./get-type.js")
- , Abstract = require("./abstract.js")
+var fs = require('graceful-fs')
+var inherits = require('inherits')
+var rimraf = require('rimraf')
+var mkdir = require('mkdirp')
+var path = require('path')
+var umask = process.platform === 'win32' ? 0 : process.umask()
+var getType = require('./get-type.js')
+var Abstract = require('./abstract.js')
// Must do this *before* loading the child classes
inherits(Writer, Abstract)
-Writer.dirmode = 0777 & (~umask)
-Writer.filemode = 0666 & (~umask)
+Writer.dirmode = parseInt('0777', 8) & (~umask)
+Writer.filemode = parseInt('0666', 8) & (~umask)
-var DirWriter = require("./dir-writer.js")
- , LinkWriter = require("./link-writer.js")
- , FileWriter = require("./file-writer.js")
- , ProxyWriter = require("./proxy-writer.js")
+var DirWriter = require('./dir-writer.js')
+var LinkWriter = require('./link-writer.js')
+var FileWriter = require('./file-writer.js')
+var ProxyWriter = require('./proxy-writer.js')
// props is the desired state. current is optionally the current stat,
// provided here so that subclasses can avoid statting the target
// more than necessary.
function Writer (props, current) {
- var me = this
+ var self = this
- if (typeof props === "string") {
+ if (typeof props === 'string') {
props = { path: props }
}
- if (!props.path) me.error("Must provide a path", null, true)
+ if (!props.path) self.error('Must provide a path', null, true)
// polymorphism.
// call fstream.Writer(dir) to get a DirWriter object, etc.
var type = getType(props)
- , ClassType = Writer
+ var ClassType = Writer
switch (type) {
- case "Directory":
+ case 'Directory':
ClassType = DirWriter
break
- case "File":
+ case 'File':
ClassType = FileWriter
break
- case "Link":
- case "SymbolicLink":
+ case 'Link':
+ case 'SymbolicLink':
ClassType = LinkWriter
break
case null:
+ default:
// Don't know yet what type to create, so we wrap in a proxy.
ClassType = ProxyWriter
break
}
- if (!(me instanceof ClassType)) return new ClassType(props)
+ if (!(self instanceof ClassType)) return new ClassType(props)
// now get down to business.
- Abstract.call(me)
+ Abstract.call(self)
// props is what we want to set.
// set some convenience properties as well.
- me.type = props.type
- me.props = props
- me.depth = props.depth || 0
- me.clobber = false === props.clobber ? props.clobber : true
- me.parent = props.parent || null
- me.root = props.root || (props.parent && props.parent.root) || me
-
- me._path = me.path = path.resolve(props.path)
- if (process.platform === "win32") {
- me.path = me._path = me.path.replace(/\?/g, "_")
- if (me._path.length >= 260) {
- me._swallowErrors = true
- me._path = "\\\\?\\" + me.path.replace(/\//g, "\\")
+ self.type = props.type
+ self.props = props
+ self.depth = props.depth || 0
+ self.clobber = props.clobber === false ? props.clobber : true
+ self.parent = props.parent || null
+ self.root = props.root || (props.parent && props.parent.root) || self
+
+ self._path = self.path = path.resolve(props.path)
+ if (process.platform === 'win32') {
+ self.path = self._path = self.path.replace(/\?/g, '_')
+ if (self._path.length >= 260) {
+ self._swallowErrors = true
+ self._path = '\\\\?\\' + self.path.replace(/\//g, '\\')
}
}
- me.basename = path.basename(props.path)
- me.dirname = path.dirname(props.path)
- me.linkpath = props.linkpath || null
+ self.basename = path.basename(props.path)
+ self.dirname = path.dirname(props.path)
+ self.linkpath = props.linkpath || null
props.parent = props.root = null
// console.error("\n\n\n%s setting size to", props.path, props.size)
- me.size = props.size
+ self.size = props.size
- if (typeof props.mode === "string") {
+ if (typeof props.mode === 'string') {
props.mode = parseInt(props.mode, 8)
}
- me.readable = false
- me.writable = true
+ self.readable = false
+ self.writable = true
// buffer until ready, or while handling another entry
- me._buffer = []
- me.ready = false
+ self._buffer = []
+ self.ready = false
- me.filter = typeof props.filter === "function" ? props.filter: null
+ self.filter = typeof props.filter === 'function' ? props.filter : null
// start the ball rolling.
// this checks what's there already, and then calls
- // me._create() to call the impl-specific creation stuff.
- me._stat(current)
+ // self._create() to call the impl-specific creation stuff.
+ self._stat(current)
}
// Calling this means that it's something we can't create.
// Just assert that it's already there, otherwise raise a warning.
Writer.prototype._create = function () {
- var me = this
- fs[me.props.follow ? "stat" : "lstat"](me._path, function (er, current) {
+ var self = this
+ fs[self.props.follow ? 'stat' : 'lstat'](self._path, function (er) {
if (er) {
- return me.warn("Cannot create " + me._path + "\n" +
- "Unsupported type: "+me.type, "ENOTSUP")
+ return self.warn('Cannot create ' + self._path + '\n' +
+ 'Unsupported type: ' + self.type, 'ENOTSUP')
}
- me._finish()
+ self._finish()
})
}
Writer.prototype._stat = function (current) {
- var me = this
- , props = me.props
- , stat = props.follow ? "stat" : "lstat"
- , who = me._proxy || me
+ var self = this
+ var props = self.props
+ var stat = props.follow ? 'stat' : 'lstat'
+ var who = self._proxy || self
if (current) statCb(null, current)
- else fs[stat](me._path, statCb)
+ else fs[stat](self._path, statCb)
function statCb (er, current) {
- if (me.filter && !me.filter.call(who, who, current)) {
- me._aborted = true
- me.emit("end")
- me.emit("close")
+ if (self.filter && !self.filter.call(who, who, current)) {
+ self._aborted = true
+ self.emit('end')
+ self.emit('close')
return
}
// if it's not there, great. We'll just create it.
// if it is there, then we'll need to change whatever differs
if (er || !current) {
- return create(me)
+ return create(self)
}
- me._old = current
+ self._old = current
var currentType = getType(current)
// if it's a type change, then we need to clobber or error.
// if it's not a type change, then let the impl take care of it.
- if (currentType !== me.type) {
- return rimraf(me._path, function (er) {
- if (er) return me.error(er)
- me._old = null
- create(me)
+ if (currentType !== self.type) {
+ return rimraf(self._path, function (er) {
+ if (er) return self.error(er)
+ self._old = null
+ create(self)
})
}
// otherwise, just handle in the app-specific way
// this creates a fs.WriteStream, or mkdir's, or whatever
- create(me)
+ create(self)
}
}
-function create (me) {
- // console.error("W create", me._path, Writer.dirmode)
+function create (self) {
+ // console.error("W create", self._path, Writer.dirmode)
// XXX Need to clobber non-dirs that are in the way,
// unless { clobber: false } in the props.
- mkdir(path.dirname(me._path), Writer.dirmode, function (er, made) {
- // console.error("W created", path.dirname(me._path), er)
- if (er) return me.error(er)
+ mkdir(path.dirname(self._path), Writer.dirmode, function (er, made) {
+ // console.error("W created", path.dirname(self._path), er)
+ if (er) return self.error(er)
// later on, we have to set the mode and owner for these
- me._madeDir = made
- return me._create()
+ self._madeDir = made
+ return self._create()
})
}
-function endChmod (me, want, current, path, cb) {
- var wantMode = want.mode
- , chmod = want.follow || me.type !== "SymbolicLink"
- ? "chmod" : "lchmod"
+function endChmod (self, want, current, path, cb) {
+ var wantMode = want.mode
+ var chmod = want.follow || self.type !== 'SymbolicLink'
+ ? 'chmod' : 'lchmod'
if (!fs[chmod]) return cb()
- if (typeof wantMode !== "number") return cb()
+ if (typeof wantMode !== 'number') return cb()
- var curMode = current.mode & 0777
- wantMode = wantMode & 0777
+ var curMode = current.mode & parseInt('0777', 8)
+ wantMode = wantMode & parseInt('0777', 8)
if (wantMode === curMode) return cb()
fs[chmod](path, wantMode, cb)
}
-
-function endChown (me, want, current, path, cb) {
+function endChown (self, want, current, path, cb) {
// Don't even try it unless root. Too easy to EPERM.
- if (process.platform === "win32") return cb()
+ if (process.platform === 'win32') return cb()
if (!process.getuid || process.getuid() !== 0) return cb()
- if (typeof want.uid !== "number" &&
- typeof want.gid !== "number" ) return cb()
+ if (typeof want.uid !== 'number' &&
+ typeof want.gid !== 'number') return cb()
if (current.uid === want.uid &&
current.gid === want.gid) return cb()
- var chown = (me.props.follow || me.type !== "SymbolicLink")
- ? "chown" : "lchown"
+ var chown = (self.props.follow || self.type !== 'SymbolicLink')
+ ? 'chown' : 'lchown'
if (!fs[chown]) return cb()
- if (typeof want.uid !== "number") want.uid = current.uid
- if (typeof want.gid !== "number") want.gid = current.gid
+ if (typeof want.uid !== 'number') want.uid = current.uid
+ if (typeof want.gid !== 'number') want.gid = current.gid
fs[chown](path, want.uid, want.gid, cb)
}
-function endUtimes (me, want, current, path, cb) {
- if (!fs.utimes || process.platform === "win32") return cb()
+function endUtimes (self, want, current, path, cb) {
+ if (!fs.utimes || process.platform === 'win32') return cb()
- var utimes = (want.follow || me.type !== "SymbolicLink")
- ? "utimes" : "lutimes"
+ var utimes = (want.follow || self.type !== 'SymbolicLink')
+ ? 'utimes' : 'lutimes'
- if (utimes === "lutimes" && !fs[utimes]) {
- utimes = "utimes"
+ if (utimes === 'lutimes' && !fs[utimes]) {
+ utimes = 'utimes'
}
if (!fs[utimes]) return cb()
var curA = current.atime
- , curM = current.mtime
- , meA = want.atime
- , meM = want.mtime
+ var curM = current.mtime
+ var meA = want.atime
+ var meM = want.mtime
if (meA === undefined) meA = curA
if (meM === undefined) meM = curM
@@ -241,15 +240,14 @@ function endUtimes (me, want, current, path, cb) {
fs[utimes](path, meA, meM, cb)
}
-
// XXX This function is beastly. Break it up!
Writer.prototype._finish = function () {
- var me = this
+ var self = this
- if (me._finishing) return
- me._finishing = true
+ if (self._finishing) return
+ self._finishing = true
- // console.error(" W Finish", me._path, me.size)
+ // console.error(" W Finish", self._path, self.size)
// set up all the things.
// At this point, we're already done writing whatever we've gotta write,
@@ -258,35 +256,35 @@ Writer.prototype._finish = function () {
var errState = null
var done = false
- if (me._old) {
+ if (self._old) {
// the times will almost *certainly* have changed.
// adds the utimes syscall, but remove another stat.
- me._old.atime = new Date(0)
- me._old.mtime = new Date(0)
- // console.error(" W Finish Stale Stat", me._path, me.size)
- setProps(me._old)
+ self._old.atime = new Date(0)
+ self._old.mtime = new Date(0)
+ // console.error(" W Finish Stale Stat", self._path, self.size)
+ setProps(self._old)
} else {
- var stat = me.props.follow ? "stat" : "lstat"
- // console.error(" W Finish Stating", me._path, me.size)
- fs[stat](me._path, function (er, current) {
- // console.error(" W Finish Stated", me._path, me.size, current)
+ var stat = self.props.follow ? 'stat' : 'lstat'
+ // console.error(" W Finish Stating", self._path, self.size)
+ fs[stat](self._path, function (er, current) {
+ // console.error(" W Finish Stated", self._path, self.size, current)
if (er) {
// if we're in the process of writing out a
// directory, it's very possible that the thing we're linking to
// doesn't exist yet (especially if it was intended as a symlink),
// so swallow ENOENT errors here and just soldier on.
- if (er.code === "ENOENT" &&
- (me.type === "Link" || me.type === "SymbolicLink") &&
- process.platform === "win32") {
- me.ready = true
- me.emit("ready")
- me.emit("end")
- me.emit("close")
- me.end = me._finish = function () {}
+ if (er.code === 'ENOENT' &&
+ (self.type === 'Link' || self.type === 'SymbolicLink') &&
+ process.platform === 'win32') {
+ self.ready = true
+ self.emit('ready')
+ self.emit('end')
+ self.emit('close')
+ self.end = self._finish = function () {}
return
- } else return me.error(er)
+ } else return self.error(er)
}
- setProps(me._old = current)
+ setProps(self._old = current)
})
}
@@ -294,9 +292,9 @@ Writer.prototype._finish = function () {
function setProps (current) {
todo += 3
- endChmod(me, me.props, current, me._path, next("chmod"))
- endChown(me, me.props, current, me._path, next("chown"))
- endUtimes(me, me.props, current, me._path, next("utimes"))
+ endChmod(self, self.props, current, self._path, next('chmod'))
+ endChown(self, self.props, current, self._path, next('chown'))
+ endUtimes(self, self.props, current, self._path, next('utimes'))
}
function next (what) {
@@ -305,7 +303,7 @@ Writer.prototype._finish = function () {
if (errState) return
if (er) {
er.fstream_finish_call = what
- return me.error(errState = er)
+ return self.error(errState = er)
}
if (--todo > 0) return
if (done) return
@@ -313,61 +311,61 @@ Writer.prototype._finish = function () {
// we may still need to set the mode/etc. on some parent dirs
// that were created previously. delay end/close until then.
- if (!me._madeDir) return end()
- else endMadeDir(me, me._path, end)
+ if (!self._madeDir) return end()
+ else endMadeDir(self, self._path, end)
function end (er) {
if (er) {
- er.fstream_finish_call = "setupMadeDir"
- return me.error(er)
+ er.fstream_finish_call = 'setupMadeDir'
+ return self.error(er)
}
// all the props have been set, so we're completely done.
- me.emit("end")
- me.emit("close")
+ self.emit('end')
+ self.emit('close')
}
}
}
}
-function endMadeDir (me, p, cb) {
- var made = me._madeDir
- // everything *between* made and path.dirname(me._path)
+function endMadeDir (self, p, cb) {
+ var made = self._madeDir
+ // everything *between* made and path.dirname(self._path)
// needs to be set up. Note that this may just be one dir.
var d = path.dirname(p)
- endMadeDir_(me, d, function (er) {
+ endMadeDir_(self, d, function (er) {
if (er) return cb(er)
if (d === made) {
return cb()
}
- endMadeDir(me, d, cb)
+ endMadeDir(self, d, cb)
})
}
-function endMadeDir_ (me, p, cb) {
+function endMadeDir_ (self, p, cb) {
var dirProps = {}
- Object.keys(me.props).forEach(function (k) {
- dirProps[k] = me.props[k]
+ Object.keys(self.props).forEach(function (k) {
+ dirProps[k] = self.props[k]
// only make non-readable dirs if explicitly requested.
- if (k === "mode" && me.type !== "Directory") {
- dirProps[k] = dirProps[k] | 0111
+ if (k === 'mode' && self.type !== 'Directory') {
+ dirProps[k] = dirProps[k] | parseInt('0111', 8)
}
})
var todo = 3
- , errState = null
+ var errState = null
fs.stat(p, function (er, current) {
if (er) return cb(errState = er)
- endChmod(me, dirProps, current, p, next)
- endChown(me, dirProps, current, p, next)
- endUtimes(me, dirProps, current, p, next)
+ endChmod(self, dirProps, current, p, next)
+ endChown(self, dirProps, current, p, next)
+ endUtimes(self, dirProps, current, p, next)
})
function next (er) {
if (errState) return
if (er) return cb(errState = er)
- if (-- todo === 0) return cb()
+ if (--todo === 0) return cb()
}
}
@@ -376,7 +374,7 @@ Writer.prototype.pipe = function () {
}
Writer.prototype.add = function () {
- this.error("Cannot add to non-Directory type")
+ this.error("Can't add to non-Directory type")
}
Writer.prototype.write = function () {
@@ -387,6 +385,6 @@ function objectToString (d) {
return Object.prototype.toString.call(d)
}
-function isDate(d) {
- return typeof d === 'object' && objectToString(d) === '[object Date]';
+function isDate (d) {
+ return typeof d === 'object' && objectToString(d) === '[object Date]'
}
diff --git a/deps/npm/node_modules/fstream/package.json b/deps/npm/node_modules/fstream/package.json
index 6b2de73a2fdaed..aa6bc1cf3e494b 100644
--- a/deps/npm/node_modules/fstream/package.json
+++ b/deps/npm/node_modules/fstream/package.json
@@ -6,7 +6,7 @@
},
"name": "fstream",
"description": "Advanced file system stream things",
- "version": "1.0.4",
+ "version": "1.0.6",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/fstream.git"
@@ -22,20 +22,45 @@
"rimraf": "2"
},
"devDependencies": {
- "tap": ""
+ "tap": "0",
+ "standard": "^2.3.2"
},
"scripts": {
- "test": "tap examples/*.js"
+ "test": "standard && tap examples/*.js"
},
- "license": "BSD",
- "readme": "Like FS streams, but with stat on them, and supporting directories and\nsymbolic links, as well as normal files. Also, you can use this to set\nthe stats on a file, even if you don't change its contents, or to create\na symlink, etc.\n\nSo, for example, you can \"write\" a directory, and it'll call `mkdir`. You\ncan specify a uid and gid, and it'll call `chown`. You can specify a\n`mtime` and `atime`, and it'll call `utimes`. You can call it a symlink\nand provide a `linkpath` and it'll call `symlink`.\n\nNote that it won't automatically resolve symbolic links. So, if you\ncall `fstream.Reader('/some/symlink')` then you'll get an object\nthat stats and then ends immediately (since it has no data). To follow\nsymbolic links, do this: `fstream.Reader({path:'/some/symlink', follow:\ntrue })`.\n\nThere are various checks to make sure that the bytes emitted are the\nsame as the intended size, if the size is set.\n\n## Examples\n\n```javascript\nfstream\n .Writer({ path: \"path/to/file\"\n , mode: 0755\n , size: 6\n })\n .write(\"hello\\n\")\n .end()\n```\n\nThis will create the directories if they're missing, and then write\n`hello\\n` into the file, chmod it to 0755, and assert that 6 bytes have\nbeen written when it's done.\n\n```javascript\nfstream\n .Writer({ path: \"path/to/file\"\n , mode: 0755\n , size: 6\n , flags: \"a\"\n })\n .write(\"hello\\n\")\n .end()\n```\n\nYou can pass flags in, if you want to append to a file.\n\n```javascript\nfstream\n .Writer({ path: \"path/to/symlink\"\n , linkpath: \"./file\"\n , SymbolicLink: true\n , mode: \"0755\" // octal strings supported\n })\n .end()\n```\n\nIf isSymbolicLink is a function, it'll be called, and if it returns\ntrue, then it'll treat it as a symlink. 
If it's not a function, then\nany truish value will make a symlink, or you can set `type:\n'SymbolicLink'`, which does the same thing.\n\nNote that the linkpath is relative to the symbolic link location, not\nthe parent dir or cwd.\n\n```javascript\nfstream\n .Reader(\"path/to/dir\")\n .pipe(fstream.Writer(\"path/to/other/dir\"))\n```\n\nThis will do like `cp -Rp path/to/dir path/to/other/dir`. If the other\ndir exists and isn't a directory, then it'll emit an error. It'll also\nset the uid, gid, mode, etc. to be identical. In this way, it's more\nlike `rsync -a` than simply a copy.\n",
- "readmeFilename": "README.md",
- "gitHead": "0bdcf1db6f9b04755b644f8268fc3726875367a6",
+ "license": "ISC",
+ "gitHead": "e0c0024379c5a94ca228d232e2794b6ffb0d3caf",
"bugs": {
"url": "https://github.com/isaacs/fstream/issues"
},
- "homepage": "https://github.com/isaacs/fstream",
- "_id": "fstream@1.0.4",
- "_shasum": "6c52298473fd6351fd22fc4bf9254fcfebe80f2b",
- "_from": "fstream@>=1.0.4 <1.1.0"
+ "homepage": "https://github.com/isaacs/fstream#readme",
+ "_id": "fstream@1.0.6",
+ "_shasum": "817e50312fb4ed90da865c8eb5ecd1d1d7aed0ec",
+ "_from": "fstream@>=1.0.6 <1.1.0",
+ "_npmVersion": "2.9.0",
+ "_nodeVersion": "2.0.0",
+ "_npmUser": {
+ "name": "iarna",
+ "email": "me@re-becca.org"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "isaacs@npmjs.com"
+ },
+ {
+ "name": "othiym23",
+ "email": "ogd@aoaioxxysz.net"
+ },
+ {
+ "name": "iarna",
+ "email": "me@re-becca.org"
+ }
+ ],
+ "dist": {
+ "shasum": "817e50312fb4ed90da865c8eb5ecd1d1d7aed0ec",
+ "tarball": "http://registry.npmjs.org/fstream/-/fstream-1.0.6.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.6.tgz"
}
diff --git a/deps/npm/node_modules/glob/node_modules/path-is-absolute/package.json b/deps/npm/node_modules/glob/node_modules/path-is-absolute/package.json
index fb42bcb3454301..39372636f3fb4f 100644
--- a/deps/npm/node_modules/glob/node_modules/path-is-absolute/package.json
+++ b/deps/npm/node_modules/glob/node_modules/path-is-absolute/package.json
@@ -5,7 +5,7 @@
"license": "MIT",
"repository": {
"type": "git",
- "url": "https://github.com/sindresorhus/path-is-absolute"
+ "url": "git+https://github.com/sindresorhus/path-is-absolute.git"
},
"author": {
"name": "Sindre Sorhus",
@@ -65,5 +65,6 @@
"tarball": "http://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz"
+ "_resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/glob/package.json b/deps/npm/node_modules/glob/package.json
index e60f438d3d700c..d2276e1b2b1bc4 100644
--- a/deps/npm/node_modules/glob/package.json
+++ b/deps/npm/node_modules/glob/package.json
@@ -6,7 +6,7 @@
},
"name": "glob",
"description": "a little globber",
- "version": "5.0.5",
+ "version": "5.0.6",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-glob.git"
@@ -30,7 +30,7 @@
"devDependencies": {
"mkdirp": "0",
"rimraf": "^2.2.8",
- "tap": "^0.5.0",
+ "tap": "^1.0.3",
"tick": "0.0.6"
},
"scripts": {
@@ -43,19 +43,23 @@
"benchclean": "bash benchclean.sh"
},
"license": "ISC",
- "gitHead": "9db1a83b44da0c60f5fdd31b28b1f9917ee6316d",
+ "gitHead": "7a0d65d7ed11871be6b5a68dc6f15e3f4b3fb93d",
"bugs": {
"url": "https://github.com/isaacs/node-glob/issues"
},
- "homepage": "https://github.com/isaacs/node-glob",
- "_id": "glob@5.0.5",
- "_shasum": "784431e4e29a900ae0d47fba6aa1c7f16a8e7df7",
- "_from": "glob@>=5.0.5 <5.1.0",
- "_npmVersion": "2.7.6",
- "_nodeVersion": "1.4.2",
+ "homepage": "https://github.com/isaacs/node-glob#readme",
+ "_id": "glob@5.0.6",
+ "_shasum": "51f1377c8d5ba36015997655d22bd7d20246accd",
+ "_from": "glob@>=5.0.6 <5.1.0",
+ "_npmVersion": "2.9.1",
+ "_nodeVersion": "2.0.1",
"_npmUser": {
"name": "isaacs",
- "email": "i@izs.me"
+ "email": "isaacs@npmjs.com"
+ },
+ "dist": {
+ "shasum": "51f1377c8d5ba36015997655d22bd7d20246accd",
+ "tarball": "http://registry.npmjs.org/glob/-/glob-5.0.6.tgz"
},
"maintainers": [
{
@@ -63,10 +67,6 @@
"email": "i@izs.me"
}
],
- "dist": {
- "shasum": "784431e4e29a900ae0d47fba6aa1c7f16a8e7df7",
- "tarball": "http://registry.npmjs.org/glob/-/glob-5.0.5.tgz"
- },
"directories": {},
- "_resolved": "https://registry.npmjs.org/glob/-/glob-5.0.5.tgz"
+ "_resolved": "https://registry.npmjs.org/glob/-/glob-5.0.6.tgz"
}
diff --git a/deps/npm/node_modules/hosted-git-info/README.md b/deps/npm/node_modules/hosted-git-info/README.md
index 8647c755f73fab..55b5dbcbb5050b 100644
--- a/deps/npm/node_modules/hosted-git-info/README.md
+++ b/deps/npm/node_modules/hosted-git-info/README.md
@@ -96,3 +96,4 @@ SSH connect strings will be normalized into `git+ssh` URLs.
Currently this supports Github, Bitbucket and Gitlab. Pull requests for
additional hosts welcome.
+
diff --git a/deps/npm/node_modules/init-package-json/.travis.yml b/deps/npm/node_modules/init-package-json/.travis.yml
new file mode 100644
index 00000000000000..05d299e6764496
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/.travis.yml
@@ -0,0 +1,4 @@
+language: node_js
+node_js:
+ - "0.10"
+ - "0.11"
diff --git a/deps/npm/node_modules/init-package-json/README.md b/deps/npm/node_modules/init-package-json/README.md
index 3bdd35f6aa30b0..2cc79c4bf78100 100644
--- a/deps/npm/node_modules/init-package-json/README.md
+++ b/deps/npm/node_modules/init-package-json/README.md
@@ -2,6 +2,8 @@
A node module to get your node module started.
+[](http://travis-ci.org/npm/init-package-json)
+
## Usage
```javascript
diff --git a/deps/npm/node_modules/init-package-json/default-input.js b/deps/npm/node_modules/init-package-json/default-input.js
index 7ae892229ca4f4..fc65f2e056e344 100644
--- a/deps/npm/node_modules/init-package-json/default-input.js
+++ b/deps/npm/node_modules/init-package-json/default-input.js
@@ -1,7 +1,9 @@
var fs = require('fs')
var glob = require('glob')
var path = require('path')
-var validateName = require("validate-npm-package-name")
+var validateLicense = require('validate-npm-package-license')
+var validateName = require('validate-npm-package-name')
+var npa = require('npm-package-arg')
// more popular packages should go here, maybe?
function isTestPkg (p) {
@@ -40,15 +42,21 @@ function readDeps (test) { return function (cb) {
}}
var name = package.name || basename
+var spec = npa(name)
var scope = config.get('scope')
if (scope) {
if (scope.charAt(0) !== '@') scope = '@' + scope
- name = scope + '/' + name
+ if (spec.scope) {
+ name = scope + '/' + spec.name.split('/')[1]
+ } else {
+ name = scope + '/' + name
+ }
}
exports.name = yes ? name : prompt('name', name, function (data) {
var its = validateName(data)
if (its.validForNewPackages) return data
- var er = new Error('Sorry, ' + its.errors.join(' and ') + '.')
+ var errors = (its.errors || []).concat(its.warnings || [])
+ var er = new Error('Sorry, ' + errors.join(' and ') + '.')
er.notValid = true
return er
})
@@ -202,11 +210,18 @@ if (!package.author) {
"url" : config.get('init.author.url') ||
config.get('init-author-url')
}
- : prompt('author')
+ : yes ? '' : prompt('author')
}
var license = package.license ||
config.get('init.license') ||
config.get('init-license') ||
'ISC'
-exports.license = yes ? license : prompt('license', license)
+exports.license = yes ? license : prompt('license', license, function (data) {
+ var its = validateLicense(data)
+ if (its.validForNewPackages) return data
+ var errors = (its.errors || []).concat(its.warnings || [])
+ var er = new Error('Sorry, ' + errors.join(' and ') + '.')
+ er.notValid = true
+ return er
+})
diff --git a/deps/npm/node_modules/init-package-json/example/example-basic.js b/deps/npm/node_modules/init-package-json/example/example-basic.js
index 29b0c818d1d7d3..0d13bda064e59c 100644
--- a/deps/npm/node_modules/init-package-json/example/example-basic.js
+++ b/deps/npm/node_modules/init-package-json/example/example-basic.js
@@ -1,5 +1,4 @@
var init = require('../init-package-json.js')
-var path = require('path')
var dir = process.cwd()
var initFile = require.resolve('./init/basic-init.js')
diff --git a/deps/npm/node_modules/init-package-json/example/example-default.js b/deps/npm/node_modules/init-package-json/example/example-default.js
index f3aea518c5baaa..29a819906888a1 100644
--- a/deps/npm/node_modules/init-package-json/example/example-default.js
+++ b/deps/npm/node_modules/init-package-json/example/example-default.js
@@ -1,5 +1,4 @@
var init = require('../init-package-json.js')
-var path = require('path')
var dir = process.cwd()
init(dir, 'file that does not exist', function (err, data) {
diff --git a/deps/npm/node_modules/init-package-json/example/example-npm.js b/deps/npm/node_modules/init-package-json/example/example-npm.js
index b394eeabc04b5a..292da6a7a7a23b 100644
--- a/deps/npm/node_modules/init-package-json/example/example-npm.js
+++ b/deps/npm/node_modules/init-package-json/example/example-npm.js
@@ -1,5 +1,4 @@
var init = require('../init-package-json.js')
-var path = require('path')
var dir = process.cwd()
var npm = require('npm')
@@ -10,4 +9,3 @@ npm.load(function (er, npm) {
console.log('written successfully')
})
})
-
diff --git a/deps/npm/node_modules/init-package-json/node_modules/promzard/package.json b/deps/npm/node_modules/init-package-json/node_modules/promzard/package.json
index 1007cdde44352d..1407e97be584d7 100644
--- a/deps/npm/node_modules/init-package-json/node_modules/promzard/package.json
+++ b/deps/npm/node_modules/init-package-json/node_modules/promzard/package.json
@@ -8,7 +8,7 @@
"description": "prompting wizardly",
"version": "0.3.0",
"repository": {
- "url": "git://github.com/isaacs/promzard"
+ "url": "git://github.com/isaacs/promzard.git"
},
"dependencies": {
"read": "1"
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/LICENSE.md b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/LICENSE.md
new file mode 100644
index 00000000000000..2180a8c1a3676e
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/LICENSE.md
@@ -0,0 +1,7 @@
+Copyright Kyle E. Mitchell
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/README.md b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/README.md
new file mode 100644
index 00000000000000..904f74b9d3b810
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/README.md
@@ -0,0 +1,29 @@
+npm-validate-package-license
+============================
+
+Give me a string and I'll tell you if it's a valid npm package license.
+
+*This package is not endorsed or approved by npm. It is part of a proposal to add license field validation to the npm command-line interface.*
+
+
+
+```js
+var validResult = {
+ validForNewPackages: true,
+ validForOldPackages: true
+};
+
+valid('Apache-2.0'); // => validResult
+valid('GPL-3.0 OR BSD-2-Clause'); // => validResult
+
+var invalidResult = {
+ validForOldPackages: false,
+ validForNewPackages: false,
+ warnings: [
+ 'license should be a valid SPDX license expression',
+ 'license is similar to the valid expression "Apache-2.0"'
+ ]
+};
+
+valid('Apache 2.0'); // => invalidResult
+```
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/index.js b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/index.js
new file mode 100644
index 00000000000000..c8407a5203792f
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/index.js
@@ -0,0 +1,26 @@
+var spdx = require('spdx');
+var correct = require('spdx-correct');
+
+module.exports = function(argument) {
+ if (spdx.valid(argument)) {
+ return {
+ validForNewPackages: true,
+ validForOldPackages: true
+ };
+ } else {
+ var warnings = [
+ 'license should be a valid SPDX license expression'
+ ];
+ var corrected = correct(argument);
+ if (corrected) {
+ warnings.push(
+ 'license is similar to the valid expression "' + corrected + '"'
+ );
+ }
+ return {
+ validForOldPackages: false,
+ validForNewPackages: false,
+ warnings: warnings
+ };
+ }
+};
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/.npmignore b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/.npmignore
new file mode 100644
index 00000000000000..5229acdc80bc4c
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/.npmignore
@@ -0,0 +1,5 @@
+.gitignore
+.jscsrc
+.jshintrc
+test
+.travis.yml
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/README.md b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/README.md
new file mode 100644
index 00000000000000..05cd9947d22ccd
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/README.md
@@ -0,0 +1,21 @@
+spdx-correct.js
+===============
+
+[](https://www.npmjs.com/package/spdx-correct)
+[](http://www.apache.org/licenses/LICENSE-2.0)
+[](http://travis-ci.org/kemitchell/spdx-correct.js)
+
+
+Correct invalid SPDX identifiers.
+
+
+
+```js
+correct('mit'); // => 'MIT'
+
+correct('Apache 2'); // => 'Apache-2.0'
+
+correct('No idea what license'); // => null
+```
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/package.json b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/package.json
new file mode 100644
index 00000000000000..e3d291b602d25b
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/package.json
@@ -0,0 +1,63 @@
+{
+ "name": "spdx-correct",
+ "description": "correct invalid SPDX identifiers",
+ "version": "1.0.0-prerelease-3",
+ "author": {
+ "name": "Kyle Mitchell",
+ "email": "kyle@kemitchell.com",
+ "url": "http://kemitchell.com"
+ },
+ "bugs": {
+ "url": "https://github.com/kemitchell/spdx-correct/issues"
+ },
+ "dependencies": {
+ "spdx": "^0.4.0"
+ },
+ "devDependencies": {
+ "jscs": "^1.13.0",
+ "jshint": "^2.7.0",
+ "jsmd": "^0.3.0",
+ "tap": "^0.7.1"
+ },
+ "homepage": "https://github.com/kemitchell/spdx-correct",
+ "keywords": [
+ "SPDX",
+ "law",
+ "legal",
+ "license",
+ "metadata"
+ ],
+ "license": "Apache-2.0",
+ "main": "spdx-correct.js",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/kemitchell/spdx-correct.git"
+ },
+ "scripts": {
+ "lint": "jshint spdx-correct.js test && jscs spdx-correct.js test",
+ "precommit": "npm run lint && npm run test",
+ "test": "jsmd README.md && tap test"
+ },
+ "gitHead": "0289b9068391d4a1db571137083e0beb18a2faef",
+ "_id": "spdx-correct@1.0.0-prerelease-3",
+ "_shasum": "5706cc6ce05b928a65564c76e1d6809ba033ac7e",
+ "_from": "spdx-correct@1.0.0-prerelease-3",
+ "_npmVersion": "1.4.28",
+ "_npmUser": {
+ "name": "kemitchell",
+ "email": "kyle@kemitchell.com"
+ },
+ "maintainers": [
+ {
+ "name": "kemitchell",
+ "email": "kyle@kemitchell.com"
+ }
+ ],
+ "dist": {
+ "shasum": "5706cc6ce05b928a65564c76e1d6809ba033ac7e",
+ "tarball": "http://registry.npmjs.org/spdx-correct/-/spdx-correct-1.0.0-prerelease-3.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-1.0.0-prerelease-3.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/spdx-correct.js b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/spdx-correct.js
new file mode 100644
index 00000000000000..094712d62361c8
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/spdx-correct.js
@@ -0,0 +1,235 @@
+var spdx = require('spdx');
+
+var valid = spdx.valid.bind(spdx);
+
+// Common transpositions of license identifier acronyms
+var transpositions = [
+ ['APGL', 'AGPL'],
+ ['Gpl', 'GPL'],
+ ['GLP', 'GPL'],
+ ['APL', 'Apache'],
+ ['ISD', 'ISC'],
+ ['GLP', 'GPL'],
+ ['IST', 'ISC'],
+ ['Claude', 'Clause'],
+ [' or later', '+'],
+ [' International', ''],
+ ['GNU', 'GPL'],
+ ['GUN', 'GPL'],
+ ['+', ''],
+ ['GNU GPL', 'GPL'],
+ ['GNU/GPL', 'GPL'],
+ ['GNU GLP', 'GPL'],
+ ['GNU General Public License', 'GPL'],
+ ['Gnu public license', 'GPL'],
+ ['GNU Public License', 'GPL'],
+ ['GNU GENERAL PUBLIC LICENSE', 'GPL'],
+ ['MTI', 'MIT'],
+ ['Mozilla Public License', 'MPL'],
+ ['WTH', 'WTF'],
+ ['-License', '']
+];
+
+var TRANSPOSED = 0;
+var CORRECT = 1;
+
+// Simple corrections to nearly valid identifiers.
+var transforms = [
+ // e.g. 'mit'
+ function(argument) {
+ return argument.toUpperCase();
+ },
+ // e.g. 'MIT '
+ function(argument) {
+ return argument.trim();
+ },
+ // e.g. 'M.I.T.'
+ function(argument) {
+ return argument.replace(/\./g, '');
+ },
+ // e.g. 'Apache- 2.0'
+ function(argument) {
+ return argument.replace(/\s+/g, '');
+ },
+ // e.g. 'CC BY 4.0''
+ function(argument) {
+ return argument.replace(/\s+/g, '-');
+ },
+ // e.g. 'LGPLv2.1'
+ function(argument) {
+ return argument.replace('v', '-');
+ },
+ // e.g. 'Apache 2.0'
+ function(argument) {
+ return argument.replace(/,?\s*(\d)/, '-$1');
+ },
+ // e.g. 'GPL 2'
+ function(argument) {
+ return argument.replace(/,?\s*(\d)/, '-$1.0');
+ },
+ // e.g. 'Apache Version 2.0'
+ function(argument) {
+ return argument.replace(/,?\s*(V\.|v\.|V|v|Version|version)\s*(\d)/, '-$2');
+ },
+ // e.g. 'Apache Version 2'
+ function(argument) {
+ return argument.replace(/,?\s*(V\.|v\.|V|v|Version|version)\s*(\d)/, '-$2.0');
+ },
+ // e.g. 'ZLIB'
+ function(argument) {
+ return argument[0].toUpperCase() + argument.slice(1);
+ },
+ // e.g. 'MPL/2.0'
+ function(argument) {
+ return argument.replace('/', '-');
+ },
+ // e.g. 'Apache 2'
+ function(argument) {
+ return argument
+ .replace(/\s*V\s*(\d)/, '-$1')
+ .replace(/(\d)$/, '$1.0');
+ },
+ // e.g. 'GPL-2.0-'
+ function(argument) {
+ return argument.slice(0, argument.length - 1);
+ },
+ // e.g. 'GPL2'
+ function(argument) {
+ return argument.replace(/(\d)$/, '-$1.0');
+ },
+ // e.g. 'BSD 3'
+ function(argument) {
+ return argument.replace(/(-| )?(\d)$/, '-$2-Clause');
+ },
+ // e.g. 'BSD clause 3'
+ function(argument) {
+ return argument.replace(/(-| )clause(-| )(\d)/, '-$3-Clause');
+ },
+ // e.g. 'BY-NC-4.0'
+ function(argument) {
+ return 'CC-' + argument;
+ },
+ // e.g. 'BY-NC'
+ function(argument) {
+ return 'CC-' + argument + '-4.0';
+ },
+ // e.g. 'Attribution-NonCommercial'
+ function(argument) {
+ return argument
+ .replace('Attribution', 'BY')
+ .replace('NonCommercial', 'NC')
+ .replace('NoDerivatives', 'ND')
+ .replace(/ (\d)/, '-$1')
+ .replace(/ ?International/, '');
+ },
+ // e.g. 'Attribution-NonCommercial'
+ function(argument) {
+ return 'CC-' +
+ argument
+ .replace('Attribution', 'BY')
+ .replace('NonCommercial', 'NC')
+ .replace('NoDerivatives', 'ND')
+ .replace(/ (\d)/, '-$1')
+ .replace(/ ?International/, '') +
+ '-4.0';
+ }
+];
+
+// If all else fails, guess that strings containing certain substrings
+// meant to identify certain licenses.
+var lastResorts = [
+ ['UNLI', 'Unlicense'],
+ ['WTF', 'WTFPL'],
+ ['2 CLAUSE', 'BSD-2-Clause'],
+ ['2-CLAUSE', 'BSD-2-Clause'],
+ ['3 CLAUSE', 'BSD-3-Clause'],
+ ['3-CLAUSE', 'BSD-3-Clause'],
+ ['AFFERO', 'AGPL-3.0'],
+ ['AGPL', 'AGPL-3.0'],
+ ['APACHE', 'Apache-2.0'],
+ ['ARTISTIC', 'Artistic-2.0'],
+ ['Affero', 'AGPL-3.0'],
+ ['BEER', 'Beerware'],
+ ['BOOST', 'BSL-1.0'],
+ ['BSD', 'BSD-2-Clause'],
+ ['ECLIPSE', 'EPL-1.0'],
+ ['FUCK', 'WTFPL'],
+ ['GNU', 'GPL-3.0'],
+ ['LGPL', 'LGPL-3.0'],
+ ['GPL', 'GPL-3.0'],
+ ['MIT', 'MIT'],
+ ['MPL', 'MPL-2.0'],
+ ['X11', 'X11'],
+ ['ZLIB', 'Zlib']
+];
+
+var SUBSTRING = 0;
+var IDENTIFIER = 1;
+
+var validTransformation = function(identifier) {
+ for (var i = 0; i < transforms.length; i++) {
+ var transformed = transforms[i](identifier);
+ if (transformed !== identifier && valid(transformed)) {
+ return transformed;
+ }
+ }
+ return null;
+};
+
+var validLastResort = function(identifier) {
+ var upperCased = identifier.toUpperCase();
+ for (var i = 0; i < lastResorts.length; i++) {
+ var lastResort = lastResorts[i];
+ if (upperCased.indexOf(lastResort[SUBSTRING]) > -1) {
+ return lastResort[IDENTIFIER];
+ }
+ }
+ return null;
+};
+
+var anyCorrection = function(identifier, check) {
+ for (var i = 0; i < transpositions.length; i++) {
+ var transposition = transpositions[i];
+ var transposed = transposition[TRANSPOSED];
+ if (identifier.indexOf(transposed) > -1) {
+ var corrected = identifier.replace(
+ transposed,
+ transposition[CORRECT]
+ );
+ var checked = check(corrected);
+ if (checked !== null) {
+ return checked;
+ }
+ }
+ }
+ return null;
+};
+
+module.exports = function(identifier) {
+ identifier = identifier.replace(/\+$/, '');
+ if (valid(identifier)) {
+ return identifier;
+ }
+ var transformed = validTransformation(identifier);
+ if (transformed !== null) {
+ return transformed;
+ }
+ transformed = anyCorrection(identifier, function(argument) {
+ if (valid(argument)) {
+ return argument;
+ }
+ return validTransformation(argument);
+ });
+ if (transformed !== null) {
+ return transformed;
+ }
+ transformed = validLastResort(identifier);
+ if (transformed !== null) {
+ return transformed;
+ }
+ transformed = anyCorrection(identifier, validLastResort);
+ if (transformed !== null) {
+ return transformed;
+ }
+ return null;
+};
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/package.json b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/package.json
new file mode 100644
index 00000000000000..4aaad362ad322d
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/package.json
@@ -0,0 +1,59 @@
+{
+ "name": "validate-npm-package-license",
+ "description": "Give me a string and I'll tell you if it's a valid npm package license",
+ "version": "1.0.0-prerelease-2",
+ "author": {
+ "name": "Kyle E. Mitchell",
+ "email": "kyle@kemitchell.com",
+ "url": "http://kemitchell.com"
+ },
+ "bugs": {
+ "url": "https://github.com/kemitchell/npm-valid-package-license/issues"
+ },
+ "dependencies": {
+ "spdx": "^0.4.0",
+ "spdx-correct": "1.0.0-prerelease-3"
+ },
+ "devDependencies": {
+ "jsmd": "^0.3.0"
+ },
+ "homepage": "https://github.com/kemitchell/npm-valid-package-license",
+ "keywords": [
+ "license",
+ "npm",
+ "package",
+ "validation"
+ ],
+ "license": "Apache-2.0",
+ "main": "index.js",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/kemitchell/npm-valid-package-license.git"
+ },
+ "scripts": {
+ "precommit": "npm run test",
+ "test": "jsmd README.md"
+ },
+ "gitHead": "d6043c5bf03a71409daae8d584ba74281e3c55c3",
+ "_id": "validate-npm-package-license@1.0.0-prerelease-2",
+ "_shasum": "0f45adce1728091b289597035c1ad25a5ba549be",
+ "_from": "validate-npm-package-license@1.0.0-prerelease-2",
+ "_npmVersion": "1.4.28",
+ "_npmUser": {
+ "name": "kemitchell",
+ "email": "kyle@kemitchell.com"
+ },
+ "maintainers": [
+ {
+ "name": "kemitchell",
+ "email": "kyle@kemitchell.com"
+ }
+ ],
+ "dist": {
+ "shasum": "0f45adce1728091b289597035c1ad25a5ba549be",
+ "tarball": "http://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-1.0.0-prerelease-2.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-1.0.0-prerelease-2.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/README.md b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/README.md
index 97e7fc5207c20a..91a963b760e613 100644
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/README.md
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/README.md
@@ -58,7 +58,7 @@ a change in the value of `validForNewPackages` property, and a warnings array
will be present:
```js
-validate("cRaZY-paCkAgE-with-mixed-case-and-more-than-fifty-characters")
+validate("cRaZY-paCkAgE-with-mixed-case-and-more-than-214-characters-----------------------------------------------------------------------------------------------------------------------------------------------------------")
```
returns:
@@ -69,7 +69,7 @@ returns:
validForOldPackages: true,
warnings: [
"name can no longer contain capital letters",
- "name can no longer contain more than 50 characters"
+ "name can no longer contain more than 214 characters"
]
}
```
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/index.js b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/index.js
index fde5b7e9170711..66a1d47326b03d 100644
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/index.js
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/index.js
@@ -58,8 +58,9 @@ var validate = module.exports = function(name) {
})
// really-long-package-names-------------------------------such--length-----many---wow
- if (name.length > 50) {
- warnings.push("name can no longer contain more than 50 characters")
+ // the thisisareallyreallylongpackagenameitshouldpublishdowenowhavealimittothelengthofpackagenames-poch.
+ if (name.length > 214) {
+ warnings.push("name can no longer contain more than 214 characters")
}
// mIxeD CaSe nAMEs
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/node_modules/builtins/History.md b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/node_modules/builtins/History.md
index e9837a5068ae0e..0eb45c420775eb 100644
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/node_modules/builtins/History.md
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/node_modules/builtins/History.md
@@ -1,10 +1,10 @@
-0.0.7 / 2014-09-01
+0.0.7 / 2014-09-01
==================
* update .repository
-0.0.6 / 2014-09-01
+0.0.6 / 2014-09-01
==================
* add travis
@@ -22,17 +22,17 @@
* add timers
-0.0.3 / 2014-02-22
+0.0.3 / 2014-02-22
==================
* add buffer
-0.0.2 / 2014-02-11
+0.0.2 / 2014-02-11
==================
* add assert
-0.0.1 / 2014-02-11
+0.0.1 / 2014-02-11
==================
* add main
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/node_modules/builtins/package.json b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/node_modules/builtins/package.json
index 459d0793b79dcb..c5ec77f136b8a2 100644
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/node_modules/builtins/package.json
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/node_modules/builtins/package.json
@@ -4,7 +4,7 @@
"description": "List of node.js builtin modules",
"repository": {
"type": "git",
- "url": "git://github.com/juliangruber/builtins"
+ "url": "git://github.com/juliangruber/builtins.git"
},
"license": "MIT",
"main": "builtins.json",
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/package.json b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/package.json
index 68bd5451b612cb..65a4587c664529 100644
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/package.json
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/package.json
@@ -1,6 +1,6 @@
{
"name": "validate-npm-package-name",
- "version": "2.0.1",
+ "version": "2.2.0",
"description": "Give me a string and I'll tell you if it's a valid npm package name",
"main": "index.js",
"directories": {
@@ -17,7 +17,7 @@
},
"repository": {
"type": "git",
- "url": "https://github.com/npm/validate-npm-package-name"
+ "url": "git+https://github.com/npm/validate-npm-package-name.git"
},
"keywords": [
"npm",
@@ -33,26 +33,30 @@
"url": "https://github.com/npm/validate-npm-package-name/issues"
},
"homepage": "https://github.com/npm/validate-npm-package-name",
- "gitHead": "69f5c629525feeccfc941a6f1b9280ceed8a4b3a",
- "_id": "validate-npm-package-name@2.0.1",
- "_shasum": "ca006761b2b325f107fab172fb0cfcfc5e412c58",
+ "gitHead": "acef1219c13a0cf4cf6b8706d65f606d82a7d472",
+ "_id": "validate-npm-package-name@2.2.0",
+ "_shasum": "4cb6ff120bd7afb0b5681406cfaea8df2d763477",
"_from": "validate-npm-package-name@>=2.0.1 <3.0.0",
- "_npmVersion": "2.2.0",
- "_nodeVersion": "0.10.31",
+ "_npmVersion": "2.7.6",
+ "_nodeVersion": "1.6.2",
"_npmUser": {
- "name": "zeke",
- "email": "zeke@npmjs.com"
+ "name": "bcoe",
+ "email": "ben@npmjs.com"
+ },
+ "dist": {
+ "shasum": "4cb6ff120bd7afb0b5681406cfaea8df2d763477",
+ "tarball": "http://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-2.2.0.tgz"
},
"maintainers": [
{
"name": "zeke",
- "email": "zeke@sikelianos.com"
+ "email": "zeke@npmjs.com"
+ },
+ {
+ "name": "bcoe",
+ "email": "ben@npmjs.com"
}
],
- "dist": {
- "shasum": "ca006761b2b325f107fab172fb0cfcfc5e412c58",
- "tarball": "http://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-2.0.1.tgz"
- },
- "_resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-2.0.1.tgz",
+ "_resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-2.2.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/test/index.js b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/test/index.js
index 0a976dcc8262a6..1a7dca5de6fded 100644
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/test/index.js
+++ b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-name/test/index.js
@@ -80,10 +80,15 @@ test("validate-npm-package-name", function (t) {
// Long Package Names
- t.deepEqual(validate("1234567890123456789012345678901234567890-more-than-fifty"), {
+ t.deepEqual(validate("ifyouwanttogetthesumoftwonumberswherethosetwonumbersarechosenbyfindingthelargestoftwooutofthreenumbersandsquaringthemwhichismultiplyingthembyitselfthenyoushouldinputthreenumbersintothisfunctionanditwilldothatforyou-"), {
validForNewPackages: false,
validForOldPackages: true,
- warnings: ["name can no longer contain more than 50 characters"]
+ warnings: ["name can no longer contain more than 214 characters"]
+ })
+
+ t.deepEqual(validate("ifyouwanttogetthesumoftwonumberswherethosetwonumbersarechosenbyfindingthelargestoftwooutofthreenumbersandsquaringthemwhichismultiplyingthembyitselfthenyoushouldinputthreenumbersintothisfunctionanditwilldothatforyou"), {
+ validForNewPackages: true,
+ validForOldPackages: true
})
// Legacy Mixed-Case
diff --git a/deps/npm/node_modules/init-package-json/package.json b/deps/npm/node_modules/init-package-json/package.json
index 6cd75e79dac364..4ac14cb67b614b 100644
--- a/deps/npm/node_modules/init-package-json/package.json
+++ b/deps/npm/node_modules/init-package-json/package.json
@@ -1,13 +1,13 @@
{
"name": "init-package-json",
- "version": "1.4.0",
+ "version": "1.5.0",
"main": "init-package-json.js",
"scripts": {
"test": "tap test/*.js"
},
"repository": {
"type": "git",
- "url": "git://github.com/isaacs/init-package-json"
+ "url": "git://github.com/isaacs/init-package-json.git"
},
"author": {
"name": "Isaac Z. Schlueter",
@@ -18,10 +18,12 @@
"description": "A node module to get your node module started",
"dependencies": {
"glob": "^5.0.3",
+ "npm-package-arg": "^4.0.0",
"promzard": "^0.3.0",
"read": "~1.0.1",
"read-package-json": "1 || 2",
"semver": "2.x || 3.x || 4",
+ "validate-npm-package-license": "1.0.0-prerelease-2",
"validate-npm-package-name": "^2.0.1"
},
"devDependencies": {
@@ -39,38 +41,14 @@
"prompt",
"start"
],
- "gitHead": "c422f6b38ab02d0859d757ec381e473657d4d195",
+ "readme": "# init-package-json\n\nA node module to get your node module started.\n\n[](http://travis-ci.org/npm/init-package-json)\n\n## Usage\n\n```javascript\nvar init = require('init-package-json')\nvar path = require('path')\n\n// a path to a promzard module. In the event that this file is\n// not found, one will be provided for you.\nvar initFile = path.resolve(process.env.HOME, '.npm-init')\n\n// the dir where we're doin stuff.\nvar dir = process.cwd()\n\n// extra stuff that gets put into the PromZard module's context.\n// In npm, this is the resolved config object. Exposed as 'config'\n// Optional.\nvar configData = { some: 'extra stuff' }\n\n// Any existing stuff from the package.json file is also exposed in the\n// PromZard module as the `package` object. There will also be free\n// vars for:\n// * `filename` path to the package.json file\n// * `basename` the tip of the package dir\n// * `dirname` the parent of the package dir\n\ninit(dir, initFile, configData, function (er, data) {\n // the data's already been written to {dir}/package.json\n // now you can do stuff with it\n})\n```\n\nOr from the command line:\n\n```\n$ npm-init\n```\n\nSee [PromZard](https://github.com/isaacs/promzard) for details about\nwhat can go in the config file.\n",
+ "readmeFilename": "README.md",
+ "gitHead": "17721cb55112690da3dc41b21d58354e89836067",
"bugs": {
"url": "https://github.com/isaacs/init-package-json/issues"
},
- "homepage": "https://github.com/isaacs/init-package-json",
- "_id": "init-package-json@1.4.0",
- "_shasum": "50b49cbe284cb7a48e037f36d03817af1022f070",
- "_from": "init-package-json@1.4.0",
- "_npmVersion": "2.7.5",
- "_nodeVersion": "1.6.2",
- "_npmUser": {
- "name": "iarna",
- "email": "me@re-becca.org"
- },
- "maintainers": [
- {
- "name": "isaacs",
- "email": "i@izs.me"
- },
- {
- "name": "othiym23",
- "email": "ogd@aoaioxxysz.net"
- },
- {
- "name": "iarna",
- "email": "me@re-becca.org"
- }
- ],
- "dist": {
- "shasum": "50b49cbe284cb7a48e037f36d03817af1022f070",
- "tarball": "http://registry.npmjs.org/init-package-json/-/init-package-json-1.4.0.tgz"
- },
- "directories": {},
- "_resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-1.4.0.tgz"
+ "homepage": "https://github.com/isaacs/init-package-json#readme",
+ "_id": "init-package-json@1.5.0",
+ "_shasum": "85b701b81463593d61da8bb66b46e352f4f2e298",
+ "_from": "init-package-json@>=1.5.0 <1.6.0"
}
diff --git a/deps/npm/node_modules/init-package-json/test/basic.js b/deps/npm/node_modules/init-package-json/test/basic.js
index 409c16978c6fcb..f07f435bcd2902 100644
--- a/deps/npm/node_modules/init-package-json/test/basic.js
+++ b/deps/npm/node_modules/init-package-json/test/basic.js
@@ -1,35 +1,33 @@
-var tap = require('tap')
+var common = require('./lib/common')
var init = require('../')
+var path = require('path')
var rimraf = require('rimraf')
+var test = require('tap').test
-tap.test('the basics', function (t) {
- var i = __dirname + '/basic.input'
- var dir = __dirname
- init(dir, i, {foo:'bar'}, function (er, data) {
+test('the basics', function (t) {
+ var i = path.join(__dirname, 'basic.input')
+ init(__dirname, i, { foo: 'bar' }, function (er, data) {
if (er) throw er
- var expect =
- { name: 'the-name',
- version: '1.2.5',
- description: 'description',
- author: 'npmbot (http://npm.im)',
- scripts: { test: 'make test' },
- main: 'main.js',
- config: { foo: 'bar' },
- package: {} }
+ var expect = {
+ name: 'the-name',
+ version: '1.2.5',
+ description: 'description',
+ author: 'npmbot (http://npm.im)',
+ scripts: { test: 'make test' },
+ main: 'main.js',
+ config: { foo: 'bar' },
+ package: {}
+ }
t.same(data, expect)
t.end()
})
- setTimeout(function () {
- process.stdin.emit('data', 'the-name\n')
- }, 50)
- setTimeout(function () {
- process.stdin.emit('data', 'description\n')
- }, 100)
- setTimeout(function () {
- process.stdin.emit('data', 'yes\n')
- }, 150)
+ common.drive([
+ 'the-name\n',
+ 'description\n',
+ 'yes\n'
+ ])
})
-tap.test('teardown', function (t) {
+test('teardown', function (t) {
rimraf(__dirname + '/package.json', t.end.bind(t))
})
diff --git a/deps/npm/node_modules/init-package-json/test/lib/common.js b/deps/npm/node_modules/init-package-json/test/lib/common.js
new file mode 100644
index 00000000000000..de45089f9b2b32
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/test/lib/common.js
@@ -0,0 +1,24 @@
+module.exports.drive = drive
+
+var semver = require('semver')
+
+function drive (input) {
+ var stdin = process.stdin
+ function emit (chunk, ms) {
+ setTimeout(function () {
+ stdin.emit('data', chunk)
+ }, ms)
+ }
+ if (semver.gte(process.versions.node, '0.11.0')) {
+ input.forEach(function (chunk) {
+ stdin.push(chunk)
+ })
+ } else {
+ stdin.once('readable', function () {
+ var ms = 0
+ input.forEach(function (chunk) {
+ emit(chunk, ms += 50)
+ })
+ })
+ }
+}
diff --git a/deps/npm/node_modules/init-package-json/test/license.js b/deps/npm/node_modules/init-package-json/test/license.js
new file mode 100644
index 00000000000000..87333fbf8f18ff
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/test/license.js
@@ -0,0 +1,38 @@
+var test = require('tap').test
+var init = require('../')
+var rimraf = require('rimraf')
+var common = require('./lib/common')
+
+test('license', function (t) {
+ init(__dirname, '', {}, function (er, data) {
+ t.ok(!er, 'should not error')
+ var wanted = {
+ name: 'the-name',
+ version: '1.0.0',
+ description: '',
+ scripts: { test: 'echo "Error: no test specified" && exit 1' },
+ license: 'Apache-2.0',
+ author: '',
+ main: 'basic.js'
+ }
+ t.same(data, wanted)
+ t.end()
+ })
+ common.drive([
+ 'the-name\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ 'Apache\n',
+ 'Apache-2.0\n',
+ 'yes\n'
+ ])
+})
+
+test('teardown', function (t) {
+ rimraf(__dirname + '/package.json', t.end.bind(t))
+})
diff --git a/deps/npm/node_modules/init-package-json/test/name-spaces.js b/deps/npm/node_modules/init-package-json/test/name-spaces.js
new file mode 100644
index 00000000000000..dee974fc8aa82f
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/test/name-spaces.js
@@ -0,0 +1,38 @@
+var test = require('tap').test
+var init = require('../')
+var rimraf = require('rimraf')
+var common = require('./lib/common')
+
+test('spaces', function (t) {
+ init(__dirname, '', {}, function (er, data) {
+ t.ok(!er, 'should not error')
+ var wanted = {
+ name: 'the-name',
+ version: '1.0.0',
+ description: '',
+ scripts: { test: 'echo "Error: no test specified" && exit 1' },
+ license: 'ISC',
+ author: '',
+ main: 'basic.js'
+ }
+ t.same(data, wanted)
+ t.end()
+ })
+ common.drive([
+ 'the name\n',
+ 'the-name\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ 'yes\n'
+ ])
+})
+
+test('teardown', function (t) {
+ rimraf(__dirname + '/package.json', t.end.bind(t))
+})
diff --git a/deps/npm/node_modules/init-package-json/test/name-uppercase.js b/deps/npm/node_modules/init-package-json/test/name-uppercase.js
new file mode 100644
index 00000000000000..ddedc30e524aef
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/test/name-uppercase.js
@@ -0,0 +1,38 @@
+var test = require('tap').test
+var init = require('../')
+var rimraf = require('rimraf')
+var common = require('./lib/common')
+
+test('uppercase', function (t) {
+ init(__dirname, '', {}, function (er, data) {
+ t.ok(!er, 'should not error')
+ var wanted = {
+ name: 'the-name',
+ version: '1.0.0',
+ description: '',
+ scripts: { test: 'echo "Error: no test specified" && exit 1' },
+ license: 'ISC',
+ author: '',
+ main: 'basic.js'
+ }
+ t.same(data, wanted)
+ t.end()
+ })
+ common.drive([
+ 'THE-NAME\n',
+ 'the-name\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ '\n',
+ 'yes\n'
+ ])
+})
+
+test('teardown', function (t) {
+ rimraf(__dirname + '/package.json', t.end.bind(t))
+})
diff --git a/deps/npm/node_modules/init-package-json/test/npm-defaults.js b/deps/npm/node_modules/init-package-json/test/npm-defaults.js
index 666e9a1c49661c..292b9623ae96f0 100644
--- a/deps/npm/node_modules/init-package-json/test/npm-defaults.js
+++ b/deps/npm/node_modules/init-package-json/test/npm-defaults.js
@@ -1,109 +1,109 @@
-var test = require("tap").test
-var rimraf = require("rimraf")
-var resolve = require("path").resolve
+var test = require('tap').test
+var rimraf = require('rimraf')
+var resolve = require('path').resolve
-var npm = require("npm")
-var init = require("../")
+var npm = require('npm')
+var init = require('../')
var EXPECTED = {
- name : "test",
- version : "3.1.4",
- description : "",
- main : "basic.js",
- scripts : {
- test : 'echo "Error: no test specified" && exit 1'
+ name: 'test',
+ version: '3.1.4',
+ description: '',
+ main: 'basic.js',
+ scripts: {
+ test: 'echo "Error: no test specified" && exit 1'
},
- keywords : [],
- author : "npmbot (http://npm.im/)",
- license : "WTFPL"
+ keywords: [],
+ author: 'npmbot (http://npm.im/)',
+ license: 'WTFPL'
}
-test("npm configuration values pulled from environment", function (t) {
+test('npm configuration values pulled from environment', function (t) {
/*eslint camelcase:0 */
- process.env.npm_config_yes = "yes"
+ process.env.npm_config_yes = 'yes'
- process.env.npm_config_init_author_name = "npmbot"
- process.env.npm_config_init_author_email = "n@p.m"
- process.env.npm_config_init_author_url = "http://npm.im"
+ process.env.npm_config_init_author_name = 'npmbot'
+ process.env.npm_config_init_author_email = 'n@p.m'
+ process.env.npm_config_init_author_url = 'http://npm.im'
process.env.npm_config_init_license = EXPECTED.license
process.env.npm_config_init_version = EXPECTED.version
npm.load({}, function (err) {
- t.ifError(err, "npm loaded successfully")
+ t.ifError(err, 'npm loaded successfully')
// clear out dotted names from test environment
- npm.config.del("init.author.name")
- npm.config.del("init.author.email")
- npm.config.del("init.author.url")
+ npm.config.del('init.author.name')
+ npm.config.del('init.author.email')
+ npm.config.del('init.author.url')
// the following have npm defaults, and need to be explicitly overridden
- npm.config.set("init.license", "")
- npm.config.set("init.version", "")
+ npm.config.set('init.license', '')
+ npm.config.set('init.version', '')
process.chdir(resolve(__dirname))
init(__dirname, __dirname, npm.config, function (er, data) {
- t.ifError(err, "init ran successfully")
+ t.ifError(err, 'init ran successfully')
- t.same(data, EXPECTED, "got the package data from the environment")
+ t.same(data, EXPECTED, 'got the package data from the environment')
t.end()
})
})
})
-test("npm configuration values pulled from dotted config", function (t) {
+test('npm configuration values pulled from dotted config', function (t) {
/*eslint camelcase:0 */
var config = {
- yes : "yes",
+ yes: 'yes',
- "init.author.name" : "npmbot",
- "init.author.email" : "n@p.m",
- "init.author.url" : "http://npm.im",
+ 'init.author.name': 'npmbot',
+ 'init.author.email': 'n@p.m',
+ 'init.author.url': 'http://npm.im',
- "init.license" : EXPECTED.license,
- "init.version" : EXPECTED.version
+ 'init.license': EXPECTED.license,
+ 'init.version': EXPECTED.version
}
npm.load(config, function (err) {
- t.ifError(err, "npm loaded successfully")
+ t.ifError(err, 'npm loaded successfully')
process.chdir(resolve(__dirname))
init(__dirname, __dirname, npm.config, function (er, data) {
- t.ifError(err, "init ran successfully")
+ t.ifError(err, 'init ran successfully')
- t.same(data, EXPECTED, "got the package data from the config")
+ t.same(data, EXPECTED, 'got the package data from the config')
t.end()
})
})
})
-test("npm configuration values pulled from dashed config", function (t) {
+test('npm configuration values pulled from dashed config', function (t) {
/*eslint camelcase:0 */
var config = {
- yes : "yes",
+ yes: 'yes',
- "init-author-name" : "npmbot",
- "init-author-email" : "n@p.m",
- "init-author-url" : "http://npm.im",
+ 'init-author-name': 'npmbot',
+ 'init-author-email': 'n@p.m',
+ 'init-author-url': 'http://npm.im',
- "init-license" : EXPECTED.license,
- "init-version" : EXPECTED.version
+ 'init-license': EXPECTED.license,
+ 'init-version': EXPECTED.version
}
npm.load(config, function (err) {
- t.ifError(err, "npm loaded successfully")
+ t.ifError(err, 'npm loaded successfully')
process.chdir(resolve(__dirname))
init(__dirname, __dirname, npm.config, function (er, data) {
- t.ifError(err, "init ran successfully")
+ t.ifError(err, 'init ran successfully')
- t.same(data, EXPECTED, "got the package data from the config")
+ t.same(data, EXPECTED, 'got the package data from the config')
t.end()
})
})
})
-test("cleanup", function (t) {
- rimraf.sync(resolve(__dirname, "package.json"))
- t.pass("cleaned up")
+test('cleanup', function (t) {
+ rimraf.sync(resolve(__dirname, 'package.json'))
+ t.pass('cleaned up')
t.end()
})
diff --git a/deps/npm/node_modules/init-package-json/test/scope-in-config.js b/deps/npm/node_modules/init-package-json/test/scope-in-config.js
new file mode 100644
index 00000000000000..1fa83d9c13a2d6
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/test/scope-in-config.js
@@ -0,0 +1,47 @@
+var fs = require('fs')
+var path = require('path')
+
+var rimraf = require('rimraf')
+var tap = require('tap')
+
+var init = require('../')
+
+var EXPECT = {
+ name: '@scoped/test',
+ version: '1.0.0',
+ description: '',
+ author: '',
+ scripts: { test: 'echo \"Error: no test specified\" && exit 1' },
+ main: 'basic.js',
+ keywords: [],
+ license: 'ISC'
+}
+
+tap.test('--yes with scope', function (t) {
+ init(__dirname, __dirname, { yes: 'yes', scope: '@scoped' }, function (er, data) {
+ if (er) throw er
+
+ t.same(EXPECT, data)
+ t.end()
+ })
+})
+
+var json = {
+ name: '@already/scoped',
+ version: '1.0.0'
+}
+
+tap.test('with existing package.json', function (t) {
+ fs.writeFileSync(path.join(__dirname, 'package.json'), JSON.stringify(json, null, 2))
+ init(__dirname, __dirname, { yes: 'yes', scope: '@still' }, function (er, data) {
+ if (er) throw er
+
+ t.equal(data.name, '@still/scoped', 'new scope is added, basic name is kept')
+ t.end()
+ })
+})
+
+tap.test('teardown', function (t) {
+ rimraf.sync(path.join(__dirname, 'package.json'))
+ t.end()
+})
diff --git a/deps/npm/node_modules/init-package-json/test/scope.js b/deps/npm/node_modules/init-package-json/test/scope.js
index 05968fcc052bf5..971916f2d8603b 100644
--- a/deps/npm/node_modules/init-package-json/test/scope.js
+++ b/deps/npm/node_modules/init-package-json/test/scope.js
@@ -1,38 +1,38 @@
-var tap = require("tap")
-var init = require("../")
-var rimraf = require("rimraf")
+var tap = require('tap')
+var init = require('../')
+var rimraf = require('rimraf')
var EXPECT = {
- name: "@foo/test"
- , version: "1.2.5"
- , description: "description"
- , author: "npmbot (http://npm.im)"
- , scripts: { test: "make test" }
- , main: "main.js"
- , config: { scope: "@foo" }
- , package: {}
+ name: '@foo/test',
+ version: '1.2.5',
+ description: 'description',
+ author: 'npmbot (http://npm.im)',
+ scripts: { test: 'make test' },
+ main: 'main.js',
+ config: { scope: '@foo' },
+ package: {}
}
-tap.test("the scope", function (t) {
- var i = __dirname + "/basic.input"
+tap.test('the scope', function (t) {
+ var i = __dirname + '/basic.input'
var dir = __dirname
- init(dir, i, {scope: "@foo"}, function (er, data) {
+ init(dir, i, {scope: '@foo'}, function (er, data) {
if (er) throw er
t.same(EXPECT, data)
t.end()
})
setTimeout(function () {
- process.stdin.emit("data", "@foo/test\n")
+ process.stdin.emit('data', '@foo/test\n')
}, 50)
setTimeout(function () {
- process.stdin.emit("data", "description\n")
+ process.stdin.emit('data', 'description\n')
}, 100)
setTimeout(function () {
- process.stdin.emit("data", "yes\n")
+ process.stdin.emit('data', 'yes\n')
}, 150)
})
-tap.test("teardown", function (t) {
- rimraf(__dirname + "/package.json", t.end.bind(t))
+tap.test('teardown', function (t) {
+ rimraf(__dirname + '/package.json', t.end.bind(t))
})
diff --git a/deps/npm/node_modules/init-package-json/test/yes-defaults.js b/deps/npm/node_modules/init-package-json/test/yes-defaults.js
new file mode 100644
index 00000000000000..747ab38fdd21f9
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/test/yes-defaults.js
@@ -0,0 +1,27 @@
+var tap = require('tap')
+var init = require('../')
+var rimraf = require('rimraf')
+
+var EXPECT = {
+ name: 'test',
+ version: '1.0.0',
+ description: '',
+ author: '',
+ scripts: { test: 'echo "Error: no test specified" && exit 1' },
+ main: 'basic.js',
+ keywords: [],
+ license: 'ISC'
+}
+
+tap.test('--yes defaults', function (t) {
+ init(__dirname, __dirname, {yes: 'yes'}, function (er, data) {
+ if (er) throw er
+
+ t.same(EXPECT, data, 'used the default data')
+ t.end()
+ })
+})
+
+tap.test('teardown', function (t) {
+ rimraf(__dirname + '/package.json', t.end.bind(t))
+})
diff --git a/deps/npm/node_modules/lru-cache/README.md b/deps/npm/node_modules/lru-cache/README.md
index 03ee0f98502374..82a6dabd5313ca 100644
--- a/deps/npm/node_modules/lru-cache/README.md
+++ b/deps/npm/node_modules/lru-cache/README.md
@@ -54,11 +54,12 @@ away.
## API
-* `set(key, value)`
+* `set(key, value, maxAge)`
* `get(key) => value`
Both of these will update the "recently used"-ness of the key.
- They do what you think.
+ They do what you think. `maxAge` is optional and overrides the
+ cache `maxAge` option if provided.
* `peek(key)`
@@ -95,3 +96,14 @@ away.
* `values()`
Return an array of the values in the cache.
+
+* `length()`
+
+ Return total length of objects in cache taking into account
+ the `length` options function.
+
+* `itemCount()`
+
+ Return total quantity of objects currently in cache. Note that
+ `stale` (see options) items are returned as part of this item
+ count.
diff --git a/deps/npm/node_modules/lru-cache/lib/lru-cache.js b/deps/npm/node_modules/lru-cache/lib/lru-cache.js
index 7d3b04f9ddc1a2..d66e7a2382f176 100644
--- a/deps/npm/node_modules/lru-cache/lib/lru-cache.js
+++ b/deps/npm/node_modules/lru-cache/lib/lru-cache.js
@@ -86,11 +86,13 @@ Object.defineProperty(LRUCache.prototype, "itemCount",
LRUCache.prototype.forEach = function (fn, thisp) {
thisp = thisp || this
- var i = 0;
- for (var k = this._mru - 1; k >= 0 && i < this._itemCount; k--) if (this._lruList[k]) {
+ var i = 0
+ var itemCount = this._itemCount
+
+ for (var k = this._mru - 1; k >= 0 && i < itemCount; k--) if (this._lruList[k]) {
i++
var hit = this._lruList[k]
- if (this._maxAge && (Date.now() - hit.now > this._maxAge)) {
+ if (isStale(this, hit)) {
del(this, hit)
if (!this._allowStale) hit = undefined
}
@@ -145,19 +147,24 @@ LRUCache.prototype.dumpLru = function () {
return this._lruList
}
-LRUCache.prototype.set = function (key, value) {
+LRUCache.prototype.set = function (key, value, maxAge) {
+ maxAge = maxAge || this._maxAge
+ var now = maxAge ? Date.now() : 0
+
if (hOP(this._cache, key)) {
// dispose of the old one before overwriting
- if (this._dispose) this._dispose(key, this._cache[key].value)
- if (this._maxAge) this._cache[key].now = Date.now()
+ if (this._dispose)
+ this._dispose(key, this._cache[key].value)
+
+ this._cache[key].now = now
+ this._cache[key].maxAge = maxAge
this._cache[key].value = value
this.get(key)
return true
}
var len = this._lengthCalculator(value)
- var age = this._maxAge ? Date.now() : 0
- var hit = new Entry(key, value, this._mru++, len, age)
+ var hit = new Entry(key, value, this._mru++, len, now, maxAge)
// oversized objects fall out of cache automatically.
if (hit.length > this._max) {
@@ -169,14 +176,16 @@ LRUCache.prototype.set = function (key, value) {
this._lruList[hit.lu] = this._cache[key] = hit
this._itemCount ++
- if (this._length > this._max) trim(this)
+ if (this._length > this._max)
+ trim(this)
+
return true
}
LRUCache.prototype.has = function (key) {
if (!hOP(this._cache, key)) return false
var hit = this._cache[key]
- if (this._maxAge && (Date.now() - hit.now > this._maxAge)) {
+ if (isStale(this, hit)) {
return false
}
return true
@@ -203,7 +212,7 @@ LRUCache.prototype.del = function (key) {
function get (self, key, doUse) {
var hit = self._cache[key]
if (hit) {
- if (self._maxAge && (Date.now() - hit.now > self._maxAge)) {
+ if (isStale(self, hit)) {
del(self, hit)
if (!self._allowStale) hit = undefined
} else {
@@ -214,10 +223,21 @@ function get (self, key, doUse) {
return hit
}
+function isStale(self, hit) {
+ if (!hit || (!hit.maxAge && !self._maxAge)) return false
+ var stale = false;
+ var diff = Date.now() - hit.now
+ if (hit.maxAge) {
+ stale = diff > hit.maxAge
+ } else {
+ stale = self._maxAge && (diff > self._maxAge)
+ }
+ return stale;
+}
+
function use (self, hit) {
shiftLU(self, hit)
hit.lu = self._mru ++
- if (self._maxAge) hit.now = Date.now()
self._lruList[hit.lu] = hit
}
@@ -242,12 +262,13 @@ function del (self, hit) {
}
// classy, since V8 prefers predictable objects.
-function Entry (key, value, lu, length, now) {
+function Entry (key, value, lu, length, now, maxAge) {
this.key = key
this.value = value
this.lu = lu
this.length = length
this.now = now
+ if (maxAge) this.maxAge = maxAge
}
})()
diff --git a/deps/npm/node_modules/lru-cache/package.json b/deps/npm/node_modules/lru-cache/package.json
index 9e2d81f48f9996..1e8b1a754a6162 100644
--- a/deps/npm/node_modules/lru-cache/package.json
+++ b/deps/npm/node_modules/lru-cache/package.json
@@ -1,11 +1,16 @@
{
"name": "lru-cache",
"description": "A cache object that deletes the least-recently-used items.",
- "version": "2.5.2",
+ "version": "2.6.3",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me"
},
+ "keywords": [
+ "mru",
+ "lru",
+ "cache"
+ ],
"scripts": {
"test": "tap test --gc"
},
@@ -22,19 +27,23 @@
"type": "MIT",
"url": "http://github.com/isaacs/node-lru-cache/raw/master/LICENSE"
},
- "gitHead": "ec01cc48ac06ee07b2b56a219d5aa931f899b21b",
+ "gitHead": "0654ce0b1f2d676a0cfc1f3001a097af9e7b0dfb",
"bugs": {
"url": "https://github.com/isaacs/node-lru-cache/issues"
},
- "homepage": "https://github.com/isaacs/node-lru-cache",
- "_id": "lru-cache@2.5.2",
- "_shasum": "1fddad938aae1263ce138680be1b3f591c0ab41c",
- "_from": "lru-cache@>=2.5.2 <2.6.0",
- "_npmVersion": "2.7.6",
- "_nodeVersion": "1.4.2",
+ "homepage": "https://github.com/isaacs/node-lru-cache#readme",
+ "_id": "lru-cache@2.6.3",
+ "_shasum": "51ccd0b4fc0c843587d7a5709ce4d3b7629bedc5",
+ "_from": "lru-cache@>=2.6.3 <2.7.0",
+ "_npmVersion": "2.10.0",
+ "_nodeVersion": "2.0.1",
"_npmUser": {
"name": "isaacs",
- "email": "i@izs.me"
+ "email": "isaacs@npmjs.com"
+ },
+ "dist": {
+ "shasum": "51ccd0b4fc0c843587d7a5709ce4d3b7629bedc5",
+ "tarball": "http://registry.npmjs.org/lru-cache/-/lru-cache-2.6.3.tgz"
},
"maintainers": [
{
@@ -42,11 +51,6 @@
"email": "i@izs.me"
}
],
- "dist": {
- "shasum": "1fddad938aae1263ce138680be1b3f591c0ab41c",
- "tarball": "http://registry.npmjs.org/lru-cache/-/lru-cache-2.5.2.tgz"
- },
"directories": {},
- "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.5.2.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.6.3.tgz"
}
diff --git a/deps/npm/node_modules/lru-cache/t.js b/deps/npm/node_modules/lru-cache/t.js
deleted file mode 100644
index 08e51809a0afc1..00000000000000
--- a/deps/npm/node_modules/lru-cache/t.js
+++ /dev/null
@@ -1,25 +0,0 @@
-var LRU = require("./");
-
-var cache = LRU( {
- max: 1,
- maxAge: 1000
-} );
-
-cache.set( "1234", 1 );
-
-setTimeout( function() {
- cache.set( "1234", 2 );
- console.log( "testing after 5s: " + cache.get( "1234" ) );
-}, 500 );
-
-setTimeout( function() {
- console.log( "testing after 9s: " + cache.get( "1234" ) );
-}, 900 );
-
-setTimeout( function() {
- console.log( "testing after 11s: " + cache.get( "1234" ) );
-}, 1100 );
-
-setTimeout( function() {
- console.log( "testing after 16s: " + cache.get( "1234" ) );
-}, 1600 );
diff --git a/deps/npm/node_modules/lru-cache/test/basic.js b/deps/npm/node_modules/lru-cache/test/basic.js
index 799e72dfb1e688..949113e9ce8bd7 100644
--- a/deps/npm/node_modules/lru-cache/test/basic.js
+++ b/deps/npm/node_modules/lru-cache/test/basic.js
@@ -228,6 +228,32 @@ test("drop the old items", function(t) {
}, 155)
})
+test("individual item can have it's own maxAge", function(t) {
+ var cache = new LRU({
+ max: 5,
+ maxAge: 50
+ })
+
+ cache.set("a", "A", 20)
+ setTimeout(function () {
+ t.notOk(cache.get("a"))
+ t.end()
+ }, 25)
+})
+
+test("individual item can have it's own maxAge > cache's", function(t) {
+ var cache = new LRU({
+ max: 5,
+ maxAge: 20
+ })
+
+ cache.set("a", "A", 50)
+ setTimeout(function () {
+ t.equal(cache.get("a"), "A")
+ t.end()
+ }, 25)
+})
+
test("disposal function", function(t) {
var disposed = false
var cache = new LRU({
diff --git a/deps/npm/node_modules/lru-cache/test/foreach.js b/deps/npm/node_modules/lru-cache/test/foreach.js
index eefb80d9d15644..429ebc12426b72 100644
--- a/deps/npm/node_modules/lru-cache/test/foreach.js
+++ b/deps/npm/node_modules/lru-cache/test/foreach.js
@@ -28,6 +28,7 @@ test('forEach', function (t) {
t.equal(key, j.toString())
t.equal(val, j.toString(2))
})
+ t.equal(i, order.length);
t.end()
})
@@ -50,3 +51,71 @@ test('keys() and values()', function (t) {
t.end()
})
+
+test('all entries are iterated over', function(t) {
+ var l = new LRU(5)
+ for (var i = 0; i < 10; i ++) {
+ l.set(i.toString(), i.toString(2))
+ }
+
+ var i = 0
+ l.forEach(function (val, key, cache) {
+ if (i > 0) {
+ cache.del(key)
+ }
+ i += 1
+ })
+
+ t.equal(i, 5)
+ t.equal(l.keys().length, 1)
+
+ t.end()
+})
+
+test('all stale entries are removed', function(t) {
+ var l = new LRU({ max: 5, maxAge: -5, stale: true })
+ for (var i = 0; i < 10; i ++) {
+ l.set(i.toString(), i.toString(2))
+ }
+
+ var i = 0
+ l.forEach(function () {
+ i += 1
+ })
+
+ t.equal(i, 5)
+ t.equal(l.keys().length, 0)
+
+ t.end()
+})
+
+test('expires', function (t) {
+ var l = new LRU({
+ max: 10,
+ maxAge: 50
+ })
+ for (var i = 0; i < 10; i++) {
+ l.set(i.toString(), i.toString(2), ((i % 2) ? 25 : undefined))
+ }
+
+ var i = 0
+ var order = [ 8, 6, 4, 2, 0 ]
+ setTimeout(function () {
+ l.forEach(function (val, key, cache) {
+ var j = order[i++]
+ t.equal(cache, l)
+ t.equal(key, j.toString())
+ t.equal(val, j.toString(2))
+ })
+ t.equal(i, order.length);
+ t.end()
+
+ setTimeout(function () {
+ var count = 0;
+ l.forEach(function (val, key, cache) { count++; })
+ t.equal(0, count);
+ t.end()
+ }, 25)
+
+ }, 26)
+})
diff --git a/deps/npm/node_modules/lru-cache/test/timeout.js b/deps/npm/node_modules/lru-cache/test/timeout.js
deleted file mode 100644
index 5dce62a9e3c595..00000000000000
--- a/deps/npm/node_modules/lru-cache/test/timeout.js
+++ /dev/null
@@ -1,21 +0,0 @@
-var test = require("tap").test
-var LRU = require("../")
-
-var cache = LRU( {
- max: 1,
- maxAge: 500
-} );
-
-test('set the key', function (t) {
- cache.set( "1234", 1 );
- t.end()
-})
-
-for (var i = 0; i < 10; i ++) {
- test('get after ' + i + '00ms', function (t) {
- setTimeout(function () {
- t.equal(cache.get('1234'), 1)
- t.end()
- }, 100)
- })
-}
diff --git a/deps/npm/node_modules/minimatch/browser.js b/deps/npm/node_modules/minimatch/browser.js
index cf58a3f60cd850..967b45c0d67eba 100644
--- a/deps/npm/node_modules/minimatch/browser.js
+++ b/deps/npm/node_modules/minimatch/browser.js
@@ -2,35 +2,36 @@
module.exports = minimatch
minimatch.Minimatch = Minimatch
-var isWindows = false
-if (typeof process !== 'undefined' && process.platform === 'win32')
- isWindows = true
+var path = { sep: '/' }
+try {
+ path = require('path')
+} catch (er) {}
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
- , expand = require("brace-expansion")
+var expand = require('brace-expansion')
- // any single thing other than /
- // don't need to escape / when using new RegExp()
- , qmark = "[^/]"
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+var qmark = '[^/]'
- // * => any number of characters
- , star = qmark + "*?"
+// * => any number of characters
+var star = qmark + '*?'
- // ** when dots are allowed. Anything goes, except .. and .
- // not (^ or / followed by one or two dots followed by $ or /),
- // followed by anything, any number of times.
- , twoStarDot = "(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?"
+// ** when dots are allowed. Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
- // not a ^ or / followed by a dot,
- // followed by anything, any number of times.
- , twoStarNoDot = "(?:(?!(?:\\\/|^)\\.).)*?"
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
- // characters that need to be escaped in RegExp.
- , reSpecials = charSet("().*{}+?[]^$\\!")
+// characters that need to be escaped in RegExp.
+var reSpecials = charSet('().*{}+?[]^$\\!')
// "abc" -> { a:true, b:true, c:true }
function charSet (s) {
- return s.split("").reduce(function (set, c) {
+ return s.split('').reduce(function (set, c) {
set[c] = true
return set
}, {})
@@ -81,21 +82,20 @@ Minimatch.defaults = function (def) {
return minimatch.defaults(def).Minimatch
}
-
function minimatch (p, pattern, options) {
- if (typeof pattern !== "string") {
- throw new TypeError("glob pattern string required")
+ if (typeof pattern !== 'string') {
+ throw new TypeError('glob pattern string required')
}
if (!options) options = {}
// shortcut: comments match nothing.
- if (!options.nocomment && pattern.charAt(0) === "#") {
+ if (!options.nocomment && pattern.charAt(0) === '#') {
return false
}
// "" only matches ""
- if (pattern.trim() === "") return p === ""
+ if (pattern.trim() === '') return p === ''
return new Minimatch(pattern, options).match(p)
}
@@ -105,16 +105,17 @@ function Minimatch (pattern, options) {
return new Minimatch(pattern, options)
}
- if (typeof pattern !== "string") {
- throw new TypeError("glob pattern string required")
+ if (typeof pattern !== 'string') {
+ throw new TypeError('glob pattern string required')
}
if (!options) options = {}
pattern = pattern.trim()
// windows support: need to use /, not \
- if (isWindows)
- pattern = pattern.split("\\").join("/")
+ if (path.sep !== '/') {
+ pattern = pattern.split(path.sep).join('/')
+ }
this.options = options
this.set = []
@@ -128,7 +129,7 @@ function Minimatch (pattern, options) {
this.make()
}
-Minimatch.prototype.debug = function() {}
+Minimatch.prototype.debug = function () {}
Minimatch.prototype.make = make
function make () {
@@ -139,7 +140,7 @@ function make () {
var options = this.options
// empty patterns and comments match nothing.
- if (!options.nocomment && pattern.charAt(0) === "#") {
+ if (!options.nocomment && pattern.charAt(0) === '#') {
this.comment = true
return
}
@@ -178,7 +179,7 @@ function make () {
// filter out everything that didn't compile properly.
set = set.filter(function (s) {
- return -1 === s.indexOf(false)
+ return s.indexOf(false) === -1
})
this.debug(this.pattern, set)
@@ -189,17 +190,17 @@ function make () {
Minimatch.prototype.parseNegate = parseNegate
function parseNegate () {
var pattern = this.pattern
- , negate = false
- , options = this.options
- , negateOffset = 0
+ var negate = false
+ var options = this.options
+ var negateOffset = 0
if (options.nonegate) return
- for ( var i = 0, l = pattern.length
- ; i < l && pattern.charAt(i) === "!"
- ; i ++) {
+ for (var i = 0, l = pattern.length
+ ; i < l && pattern.charAt(i) === '!'
+ ; i++) {
negate = !negate
- negateOffset ++
+ negateOffset++
}
if (negateOffset) this.pattern = pattern.substr(negateOffset)
@@ -224,21 +225,22 @@ Minimatch.prototype.braceExpand = braceExpand
function braceExpand (pattern, options) {
if (!options) {
- if (this instanceof Minimatch)
+ if (this instanceof Minimatch) {
options = this.options
- else
+ } else {
options = {}
+ }
}
- pattern = typeof pattern === "undefined"
+ pattern = typeof pattern === 'undefined'
? this.pattern : pattern
- if (typeof pattern === "undefined") {
- throw new Error("undefined pattern")
+ if (typeof pattern === 'undefined') {
+ throw new Error('undefined pattern')
}
if (options.nobrace ||
- !pattern.match(/\{.*\}/)) {
+ !pattern.match(/\{.*\}/)) {
// shortcut. no need to expand.
return [pattern]
}
@@ -263,87 +265,86 @@ function parse (pattern, isSub) {
var options = this.options
// shortcuts
- if (!options.noglobstar && pattern === "**") return GLOBSTAR
- if (pattern === "") return ""
-
- var re = ""
- , hasMagic = !!options.nocase
- , escaping = false
- // ? => one single character
- , patternListStack = []
- , plType
- , stateChar
- , inClass = false
- , reClassStart = -1
- , classStart = -1
- // . and .. never match anything that doesn't start with .,
- // even when options.dot is set.
- , patternStart = pattern.charAt(0) === "." ? "" // anything
- // not (start or / followed by . or .. followed by / or end)
- : options.dot ? "(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))"
- : "(?!\\.)"
- , self = this
+ if (!options.noglobstar && pattern === '**') return GLOBSTAR
+ if (pattern === '') return ''
+
+ var re = ''
+ var hasMagic = !!options.nocase
+ var escaping = false
+ // ? => one single character
+ var patternListStack = []
+ var plType
+ var stateChar
+ var inClass = false
+ var reClassStart = -1
+ var classStart = -1
+ // . and .. never match anything that doesn't start with .,
+ // even when options.dot is set.
+ var patternStart = pattern.charAt(0) === '.' ? '' // anything
+ // not (start or / followed by . or .. followed by / or end)
+ : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
+ : '(?!\\.)'
+ var self = this
function clearStateChar () {
if (stateChar) {
// we had some state-tracking character
// that wasn't consumed by this pass.
switch (stateChar) {
- case "*":
+ case '*':
re += star
hasMagic = true
- break
- case "?":
+ break
+ case '?':
re += qmark
hasMagic = true
- break
+ break
default:
- re += "\\"+stateChar
- break
+ re += '\\' + stateChar
+ break
}
self.debug('clearStateChar %j %j', stateChar, re)
stateChar = false
}
}
- for ( var i = 0, len = pattern.length, c
- ; (i < len) && (c = pattern.charAt(i))
- ; i ++ ) {
-
- this.debug("%s\t%s %s %j", pattern, i, re, c)
+ for (var i = 0, len = pattern.length, c
+ ; (i < len) && (c = pattern.charAt(i))
+ ; i++) {
+ this.debug('%s\t%s %s %j', pattern, i, re, c)
// skip over any that are escaped.
if (escaping && reSpecials[c]) {
- re += "\\" + c
+ re += '\\' + c
escaping = false
continue
}
- SWITCH: switch (c) {
- case "/":
+ switch (c) {
+ case '/':
// completely not allowed, even escaped.
// Should already be path-split by now.
return false
- case "\\":
+ case '\\':
clearStateChar()
escaping = true
- continue
+ continue
// the various stateChar values
// for the "extglob" stuff.
- case "?":
- case "*":
- case "+":
- case "@":
- case "!":
- this.debug("%s\t%s %s %j <-- stateChar", pattern, i, re, c)
+ case '?':
+ case '*':
+ case '+':
+ case '@':
+ case '!':
+ this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
// all of those are literals inside a class, except that
// the glob [!a] means [^a] in regexp
if (inClass) {
this.debug(' in class')
- if (c === "!" && i === classStart + 1) c = "^"
+ if (c === '!' && i === classStart + 1) c = '^'
re += c
continue
}
@@ -358,70 +359,70 @@ function parse (pattern, isSub) {
// just clear the statechar *now*, rather than even diving into
// the patternList stuff.
if (options.noext) clearStateChar()
- continue
+ continue
- case "(":
+ case '(':
if (inClass) {
- re += "("
+ re += '('
continue
}
if (!stateChar) {
- re += "\\("
+ re += '\\('
continue
}
plType = stateChar
- patternListStack.push({ type: plType
- , start: i - 1
- , reStart: re.length })
+ patternListStack.push({ type: plType, start: i - 1, reStart: re.length })
// negation is (?:(?!js)[^/]*)
- re += stateChar === "!" ? "(?:(?!" : "(?:"
+ re += stateChar === '!' ? '(?:(?!' : '(?:'
this.debug('plType %j %j', stateChar, re)
stateChar = false
- continue
+ continue
- case ")":
+ case ')':
if (inClass || !patternListStack.length) {
- re += "\\)"
+ re += '\\)'
continue
}
clearStateChar()
hasMagic = true
- re += ")"
+ re += ')'
plType = patternListStack.pop().type
// negation is (?:(?!js)[^/]*)
// The others are (?:)
switch (plType) {
- case "!":
- re += "[^/]*?)"
+ case '!':
+ re += '[^/]*?)'
break
- case "?":
- case "+":
- case "*": re += plType
- case "@": break // the default anyway
+ case '?':
+ case '+':
+ case '*':
+ re += plType
+ break
+ case '@': break // the default anyway
}
- continue
+ continue
- case "|":
+ case '|':
if (inClass || !patternListStack.length || escaping) {
- re += "\\|"
+ re += '\\|'
escaping = false
continue
}
clearStateChar()
- re += "|"
- continue
+ re += '|'
+ continue
// these are mostly the same in regexp and glob
- case "[":
+ case '[':
// swallow any state-tracking char before the [
clearStateChar()
if (inClass) {
- re += "\\" + c
+ re += '\\' + c
continue
}
@@ -429,15 +430,15 @@ function parse (pattern, isSub) {
classStart = i
reClassStart = re.length
re += c
- continue
+ continue
- case "]":
+ case ']':
// a right bracket shall lose its special
// meaning and represent itself in
// a bracket expression if it occurs
// first in the list. -- POSIX.2 2.8.3.2
if (i === classStart + 1 || !inClass) {
- re += "\\" + c
+ re += '\\' + c
escaping = false
continue
}
@@ -454,11 +455,11 @@ function parse (pattern, isSub) {
// to do safely. For now, this is safe and works.
var cs = pattern.substring(classStart + 1, i)
try {
- new RegExp('[' + cs + ']')
+ RegExp('[' + cs + ']')
} catch (er) {
// not a valid class!
var sp = this.parse(cs, SUBPARSE)
- re = re.substr(0, reClassStart) + "\\[" + sp[0] + '\\]'
+ re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
hasMagic = hasMagic || sp[1]
inClass = false
continue
@@ -469,7 +470,7 @@ function parse (pattern, isSub) {
hasMagic = true
inClass = false
re += c
- continue
+ continue
default:
// swallow any state char that wasn't consumed
@@ -479,8 +480,8 @@ function parse (pattern, isSub) {
// no need
escaping = false
} else if (reSpecials[c]
- && !(c === "^" && inClass)) {
- re += "\\"
+ && !(c === '^' && inClass)) {
+ re += '\\'
}
re += c
@@ -488,7 +489,6 @@ function parse (pattern, isSub) {
} // switch
} // for
-
// handle the case where we left a class open.
// "[abc" is valid, equivalent to "\[abc"
if (inClass) {
@@ -496,9 +496,9 @@ function parse (pattern, isSub) {
// this is a huge pita. We now have to re-walk
// the contents of the would-be class to re-translate
// any characters that were passed through as-is
- var cs = pattern.substr(classStart + 1)
- , sp = this.parse(cs, SUBPARSE)
- re = re.substr(0, reClassStart) + "\\[" + sp[0]
+ cs = pattern.substr(classStart + 1)
+ sp = this.parse(cs, SUBPARSE)
+ re = re.substr(0, reClassStart) + '\\[' + sp[0]
hasMagic = hasMagic || sp[1]
}
@@ -508,14 +508,13 @@ function parse (pattern, isSub) {
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
- var pl
- while (pl = patternListStack.pop()) {
+ for (var pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
var tail = re.slice(pl.reStart + 3)
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail.replace(/((?:\\{2})*)(\\?)\|/g, function (_, $1, $2) {
if (!$2) {
// the | isn't already escaped, so escape it.
- $2 = "\\"
+ $2 = '\\'
}
// need to escape all those slashes *again*, without escaping the
@@ -524,46 +523,44 @@ function parse (pattern, isSub) {
// it exactly after itself. That's why this trick works.
//
// I am sorry that you have to see this.
- return $1 + $1 + $2 + "|"
+ return $1 + $1 + $2 + '|'
})
- this.debug("tail=%j\n %s", tail, tail)
- var t = pl.type === "*" ? star
- : pl.type === "?" ? qmark
- : "\\" + pl.type
+ this.debug('tail=%j\n %s', tail, tail)
+ var t = pl.type === '*' ? star
+ : pl.type === '?' ? qmark
+ : '\\' + pl.type
hasMagic = true
- re = re.slice(0, pl.reStart)
- + t + "\\("
- + tail
+ re = re.slice(0, pl.reStart) + t + '\\(' + tail
}
// handle trailing things that only matter at the very end.
clearStateChar()
if (escaping) {
// trailing \\
- re += "\\\\"
+ re += '\\\\'
}
// only need to apply the nodot start if the re starts with
// something that could conceivably capture a dot
var addPatternStart = false
switch (re.charAt(0)) {
- case ".":
- case "[":
- case "(": addPatternStart = true
+ case '.':
+ case '[':
+ case '(': addPatternStart = true
}
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
- if (re !== "" && hasMagic) re = "(?=.)" + re
+ if (re !== '' && hasMagic) re = '(?=.)' + re
if (addPatternStart) re = patternStart + re
// parsing just a piece of a larger pattern.
if (isSub === SUBPARSE) {
- return [ re, hasMagic ]
+ return [re, hasMagic]
}
// skip the regexp for non-magical patterns
@@ -573,8 +570,8 @@ function parse (pattern, isSub) {
return globUnescape(pattern)
}
- var flags = options.nocase ? "i" : ""
- , regExp = new RegExp("^" + re + "$", flags)
+ var flags = options.nocase ? 'i' : ''
+ var regExp = new RegExp('^' + re + '$', flags)
regExp._glob = pattern
regExp._src = re
@@ -598,34 +595,38 @@ function makeRe () {
// when you just want to work with a regex.
var set = this.set
- if (!set.length) return this.regexp = false
+ if (!set.length) {
+ this.regexp = false
+ return this.regexp
+ }
var options = this.options
var twoStar = options.noglobstar ? star
- : options.dot ? twoStarDot
- : twoStarNoDot
- , flags = options.nocase ? "i" : ""
+ : options.dot ? twoStarDot
+ : twoStarNoDot
+ var flags = options.nocase ? 'i' : ''
var re = set.map(function (pattern) {
return pattern.map(function (p) {
return (p === GLOBSTAR) ? twoStar
- : (typeof p === "string") ? regExpEscape(p)
- : p._src
- }).join("\\\/")
- }).join("|")
+ : (typeof p === 'string') ? regExpEscape(p)
+ : p._src
+ }).join('\\\/')
+ }).join('|')
// must match entire pattern
// ending in a * or ** will make it less strict.
- re = "^(?:" + re + ")$"
+ re = '^(?:' + re + ')$'
// can match anything, as long as it's not this.
- if (this.negate) re = "^(?!" + re + ").*$"
+ if (this.negate) re = '^(?!' + re + ').*$'
try {
- return this.regexp = new RegExp(re, flags)
+ this.regexp = new RegExp(re, flags)
} catch (ex) {
- return this.regexp = false
+ this.regexp = false
}
+ return this.regexp
}
minimatch.match = function (list, pattern, options) {
@@ -642,23 +643,24 @@ minimatch.match = function (list, pattern, options) {
Minimatch.prototype.match = match
function match (f, partial) {
- this.debug("match", f, this.pattern)
+ this.debug('match', f, this.pattern)
// short-circuit in the case of busted things.
// comments, etc.
if (this.comment) return false
- if (this.empty) return f === ""
+ if (this.empty) return f === ''
- if (f === "/" && partial) return true
+ if (f === '/' && partial) return true
var options = this.options
// windows: need to use /, not \
- if (isWindows)
- f = f.split("\\").join("/")
+ if (path.sep !== '/') {
+ f = f.split(path.sep).join('/')
+ }
// treat the test path as a set of pathparts.
f = f.split(slashSplit)
- this.debug(this.pattern, "split", f)
+ this.debug(this.pattern, 'split', f)
// just ONE of the pattern sets in this.set needs to match
// in order for it to be valid. If negating, then just one
@@ -666,17 +668,19 @@ function match (f, partial) {
// Either way, return on the first hit.
var set = this.set
- this.debug(this.pattern, "set", set)
+ this.debug(this.pattern, 'set', set)
// Find the basename of the path by looking for the last non-empty segment
- var filename;
- for (var i = f.length - 1; i >= 0; i--) {
+ var filename
+ var i
+ for (i = f.length - 1; i >= 0; i--) {
filename = f[i]
if (filename) break
}
- for (var i = 0, l = set.length; i < l; i ++) {
- var pattern = set[i], file = f
+ for (i = 0; i < set.length; i++) {
+ var pattern = set[i]
+ var file = f
if (options.matchBase && pattern.length === 1) {
file = [filename]
}
@@ -701,23 +705,20 @@ function match (f, partial) {
Minimatch.prototype.matchOne = function (file, pattern, partial) {
var options = this.options
- this.debug("matchOne",
- { "this": this
- , file: file
- , pattern: pattern })
+ this.debug('matchOne',
+ { 'this': this, file: file, pattern: pattern })
- this.debug("matchOne", file.length, pattern.length)
+ this.debug('matchOne', file.length, pattern.length)
- for ( var fi = 0
- , pi = 0
- , fl = file.length
- , pl = pattern.length
+ for (var fi = 0,
+ pi = 0,
+ fl = file.length,
+ pl = pattern.length
; (fi < fl) && (pi < pl)
- ; fi ++, pi ++ ) {
-
- this.debug("matchOne loop")
+ ; fi++, pi++) {
+ this.debug('matchOne loop')
var p = pattern[pi]
- , f = file[fi]
+ var f = file[fi]
this.debug(pattern, p, f)
@@ -751,7 +752,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// - matchOne(z/c, c) -> no
// - matchOne(c, c) yes, hit
var fr = fi
- , pr = pi + 1
+ var pr = pi + 1
if (pr === pl) {
this.debug('** at the end')
// a ** at the end will just swallow the rest.
@@ -760,19 +761,18 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// options.dot is set.
// . and .. are *never* matched by **, for explosively
// exponential reasons.
- for ( ; fi < fl; fi ++) {
- if (file[fi] === "." || file[fi] === ".." ||
- (!options.dot && file[fi].charAt(0) === ".")) return false
+ for (; fi < fl; fi++) {
+ if (file[fi] === '.' || file[fi] === '..' ||
+ (!options.dot && file[fi].charAt(0) === '.')) return false
}
return true
}
// ok, let's see if we can swallow whatever we can.
- WHILE: while (fr < fl) {
+ while (fr < fl) {
var swallowee = file[fr]
- this.debug('\nglobstar while',
- file, fr, pattern, pr, swallowee)
+ this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
// XXX remove this slice. Just pass the start index.
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
@@ -782,23 +782,24 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
} else {
// can't swallow "." or ".." ever.
// can only swallow ".foo" when explicitly asked.
- if (swallowee === "." || swallowee === ".." ||
- (!options.dot && swallowee.charAt(0) === ".")) {
- this.debug("dot detected!", file, fr, pattern, pr)
- break WHILE
+ if (swallowee === '.' || swallowee === '..' ||
+ (!options.dot && swallowee.charAt(0) === '.')) {
+ this.debug('dot detected!', file, fr, pattern, pr)
+ break
}
// ** swallows a segment, and continue.
this.debug('globstar swallow a segment, and continue')
- fr ++
+ fr++
}
}
+
// no match was found.
// However, in partial mode, we can't say this is necessarily over.
// If there's more *pattern* left, then
if (partial) {
// ran out of file
- this.debug("\n>>> no match, partial?", file, fr, pattern, pr)
+ this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
if (fr === fl) return true
}
return false
@@ -808,16 +809,16 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// non-magic patterns just have to match exactly
// patterns with magic have been turned into regexps.
var hit
- if (typeof p === "string") {
+ if (typeof p === 'string') {
if (options.nocase) {
hit = f.toLowerCase() === p.toLowerCase()
} else {
hit = f === p
}
- this.debug("string match", p, f, hit)
+ this.debug('string match', p, f, hit)
} else {
hit = f.match(p)
- this.debug("pattern match", p, f, hit)
+ this.debug('pattern match', p, f, hit)
}
if (!hit) return false
@@ -849,26 +850,24 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash.
// a/* should match a/b/
- var emptyFileEnd = (fi === fl - 1) && (file[fi] === "")
+ var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
return emptyFileEnd
}
// should be unreachable.
- throw new Error("wtf?")
+ throw new Error('wtf?')
}
-
// replace stuff like \* with *
function globUnescape (s) {
- return s.replace(/\\(.)/g, "$1")
+ return s.replace(/\\(.)/g, '$1')
}
-
function regExpEscape (s) {
- return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&")
+ return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
}
-},{"brace-expansion":2}],2:[function(require,module,exports){
+},{"brace-expansion":2,"path":undefined}],2:[function(require,module,exports){
var concatMap = require('concat-map');
var balanced = require('balanced-match');
diff --git a/deps/npm/node_modules/minimatch/minimatch.js b/deps/npm/node_modules/minimatch/minimatch.js
index 2bfdf62b7435a3..5e13d6d5b2e62b 100644
--- a/deps/npm/node_modules/minimatch/minimatch.js
+++ b/deps/npm/node_modules/minimatch/minimatch.js
@@ -1,35 +1,36 @@
module.exports = minimatch
minimatch.Minimatch = Minimatch
-var isWindows = false
-if (typeof process !== 'undefined' && process.platform === 'win32')
- isWindows = true
+var path = { sep: '/' }
+try {
+ path = require('path')
+} catch (er) {}
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
- , expand = require("brace-expansion")
+var expand = require('brace-expansion')
- // any single thing other than /
- // don't need to escape / when using new RegExp()
- , qmark = "[^/]"
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+var qmark = '[^/]'
- // * => any number of characters
- , star = qmark + "*?"
+// * => any number of characters
+var star = qmark + '*?'
- // ** when dots are allowed. Anything goes, except .. and .
- // not (^ or / followed by one or two dots followed by $ or /),
- // followed by anything, any number of times.
- , twoStarDot = "(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?"
+// ** when dots are allowed. Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
- // not a ^ or / followed by a dot,
- // followed by anything, any number of times.
- , twoStarNoDot = "(?:(?!(?:\\\/|^)\\.).)*?"
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
- // characters that need to be escaped in RegExp.
- , reSpecials = charSet("().*{}+?[]^$\\!")
+// characters that need to be escaped in RegExp.
+var reSpecials = charSet('().*{}+?[]^$\\!')
// "abc" -> { a:true, b:true, c:true }
function charSet (s) {
- return s.split("").reduce(function (set, c) {
+ return s.split('').reduce(function (set, c) {
set[c] = true
return set
}, {})
@@ -80,21 +81,20 @@ Minimatch.defaults = function (def) {
return minimatch.defaults(def).Minimatch
}
-
function minimatch (p, pattern, options) {
- if (typeof pattern !== "string") {
- throw new TypeError("glob pattern string required")
+ if (typeof pattern !== 'string') {
+ throw new TypeError('glob pattern string required')
}
if (!options) options = {}
// shortcut: comments match nothing.
- if (!options.nocomment && pattern.charAt(0) === "#") {
+ if (!options.nocomment && pattern.charAt(0) === '#') {
return false
}
// "" only matches ""
- if (pattern.trim() === "") return p === ""
+ if (pattern.trim() === '') return p === ''
return new Minimatch(pattern, options).match(p)
}
@@ -104,16 +104,17 @@ function Minimatch (pattern, options) {
return new Minimatch(pattern, options)
}
- if (typeof pattern !== "string") {
- throw new TypeError("glob pattern string required")
+ if (typeof pattern !== 'string') {
+ throw new TypeError('glob pattern string required')
}
if (!options) options = {}
pattern = pattern.trim()
// windows support: need to use /, not \
- if (isWindows)
- pattern = pattern.split("\\").join("/")
+ if (path.sep !== '/') {
+ pattern = pattern.split(path.sep).join('/')
+ }
this.options = options
this.set = []
@@ -127,7 +128,7 @@ function Minimatch (pattern, options) {
this.make()
}
-Minimatch.prototype.debug = function() {}
+Minimatch.prototype.debug = function () {}
Minimatch.prototype.make = make
function make () {
@@ -138,7 +139,7 @@ function make () {
var options = this.options
// empty patterns and comments match nothing.
- if (!options.nocomment && pattern.charAt(0) === "#") {
+ if (!options.nocomment && pattern.charAt(0) === '#') {
this.comment = true
return
}
@@ -177,7 +178,7 @@ function make () {
// filter out everything that didn't compile properly.
set = set.filter(function (s) {
- return -1 === s.indexOf(false)
+ return s.indexOf(false) === -1
})
this.debug(this.pattern, set)
@@ -188,17 +189,17 @@ function make () {
Minimatch.prototype.parseNegate = parseNegate
function parseNegate () {
var pattern = this.pattern
- , negate = false
- , options = this.options
- , negateOffset = 0
+ var negate = false
+ var options = this.options
+ var negateOffset = 0
if (options.nonegate) return
- for ( var i = 0, l = pattern.length
- ; i < l && pattern.charAt(i) === "!"
- ; i ++) {
+ for (var i = 0, l = pattern.length
+ ; i < l && pattern.charAt(i) === '!'
+ ; i++) {
negate = !negate
- negateOffset ++
+ negateOffset++
}
if (negateOffset) this.pattern = pattern.substr(negateOffset)
@@ -223,21 +224,22 @@ Minimatch.prototype.braceExpand = braceExpand
function braceExpand (pattern, options) {
if (!options) {
- if (this instanceof Minimatch)
+ if (this instanceof Minimatch) {
options = this.options
- else
+ } else {
options = {}
+ }
}
- pattern = typeof pattern === "undefined"
+ pattern = typeof pattern === 'undefined'
? this.pattern : pattern
- if (typeof pattern === "undefined") {
- throw new Error("undefined pattern")
+ if (typeof pattern === 'undefined') {
+ throw new Error('undefined pattern')
}
if (options.nobrace ||
- !pattern.match(/\{.*\}/)) {
+ !pattern.match(/\{.*\}/)) {
// shortcut. no need to expand.
return [pattern]
}
@@ -262,87 +264,86 @@ function parse (pattern, isSub) {
var options = this.options
// shortcuts
- if (!options.noglobstar && pattern === "**") return GLOBSTAR
- if (pattern === "") return ""
-
- var re = ""
- , hasMagic = !!options.nocase
- , escaping = false
- // ? => one single character
- , patternListStack = []
- , plType
- , stateChar
- , inClass = false
- , reClassStart = -1
- , classStart = -1
- // . and .. never match anything that doesn't start with .,
- // even when options.dot is set.
- , patternStart = pattern.charAt(0) === "." ? "" // anything
- // not (start or / followed by . or .. followed by / or end)
- : options.dot ? "(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))"
- : "(?!\\.)"
- , self = this
+ if (!options.noglobstar && pattern === '**') return GLOBSTAR
+ if (pattern === '') return ''
+
+ var re = ''
+ var hasMagic = !!options.nocase
+ var escaping = false
+ // ? => one single character
+ var patternListStack = []
+ var plType
+ var stateChar
+ var inClass = false
+ var reClassStart = -1
+ var classStart = -1
+ // . and .. never match anything that doesn't start with .,
+ // even when options.dot is set.
+ var patternStart = pattern.charAt(0) === '.' ? '' // anything
+ // not (start or / followed by . or .. followed by / or end)
+ : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
+ : '(?!\\.)'
+ var self = this
function clearStateChar () {
if (stateChar) {
// we had some state-tracking character
// that wasn't consumed by this pass.
switch (stateChar) {
- case "*":
+ case '*':
re += star
hasMagic = true
- break
- case "?":
+ break
+ case '?':
re += qmark
hasMagic = true
- break
+ break
default:
- re += "\\"+stateChar
- break
+ re += '\\' + stateChar
+ break
}
self.debug('clearStateChar %j %j', stateChar, re)
stateChar = false
}
}
- for ( var i = 0, len = pattern.length, c
- ; (i < len) && (c = pattern.charAt(i))
- ; i ++ ) {
-
- this.debug("%s\t%s %s %j", pattern, i, re, c)
+ for (var i = 0, len = pattern.length, c
+ ; (i < len) && (c = pattern.charAt(i))
+ ; i++) {
+ this.debug('%s\t%s %s %j', pattern, i, re, c)
// skip over any that are escaped.
if (escaping && reSpecials[c]) {
- re += "\\" + c
+ re += '\\' + c
escaping = false
continue
}
- SWITCH: switch (c) {
- case "/":
+ switch (c) {
+ case '/':
// completely not allowed, even escaped.
// Should already be path-split by now.
return false
- case "\\":
+ case '\\':
clearStateChar()
escaping = true
- continue
+ continue
// the various stateChar values
// for the "extglob" stuff.
- case "?":
- case "*":
- case "+":
- case "@":
- case "!":
- this.debug("%s\t%s %s %j <-- stateChar", pattern, i, re, c)
+ case '?':
+ case '*':
+ case '+':
+ case '@':
+ case '!':
+ this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
// all of those are literals inside a class, except that
// the glob [!a] means [^a] in regexp
if (inClass) {
this.debug(' in class')
- if (c === "!" && i === classStart + 1) c = "^"
+ if (c === '!' && i === classStart + 1) c = '^'
re += c
continue
}
@@ -357,70 +358,70 @@ function parse (pattern, isSub) {
// just clear the statechar *now*, rather than even diving into
// the patternList stuff.
if (options.noext) clearStateChar()
- continue
+ continue
- case "(":
+ case '(':
if (inClass) {
- re += "("
+ re += '('
continue
}
if (!stateChar) {
- re += "\\("
+ re += '\\('
continue
}
plType = stateChar
- patternListStack.push({ type: plType
- , start: i - 1
- , reStart: re.length })
+ patternListStack.push({ type: plType, start: i - 1, reStart: re.length })
// negation is (?:(?!js)[^/]*)
- re += stateChar === "!" ? "(?:(?!" : "(?:"
+ re += stateChar === '!' ? '(?:(?!' : '(?:'
this.debug('plType %j %j', stateChar, re)
stateChar = false
- continue
+ continue
- case ")":
+ case ')':
if (inClass || !patternListStack.length) {
- re += "\\)"
+ re += '\\)'
continue
}
clearStateChar()
hasMagic = true
- re += ")"
+ re += ')'
plType = patternListStack.pop().type
// negation is (?:(?!js)[^/]*)
// The others are (?:)
switch (plType) {
- case "!":
- re += "[^/]*?)"
+ case '!':
+ re += '[^/]*?)'
break
- case "?":
- case "+":
- case "*": re += plType
- case "@": break // the default anyway
+ case '?':
+ case '+':
+ case '*':
+ re += plType
+ break
+ case '@': break // the default anyway
}
- continue
+ continue
- case "|":
+ case '|':
if (inClass || !patternListStack.length || escaping) {
- re += "\\|"
+ re += '\\|'
escaping = false
continue
}
clearStateChar()
- re += "|"
- continue
+ re += '|'
+ continue
// these are mostly the same in regexp and glob
- case "[":
+ case '[':
// swallow any state-tracking char before the [
clearStateChar()
if (inClass) {
- re += "\\" + c
+ re += '\\' + c
continue
}
@@ -428,15 +429,15 @@ function parse (pattern, isSub) {
classStart = i
reClassStart = re.length
re += c
- continue
+ continue
- case "]":
+ case ']':
// a right bracket shall lose its special
// meaning and represent itself in
// a bracket expression if it occurs
// first in the list. -- POSIX.2 2.8.3.2
if (i === classStart + 1 || !inClass) {
- re += "\\" + c
+ re += '\\' + c
escaping = false
continue
}
@@ -453,11 +454,11 @@ function parse (pattern, isSub) {
// to do safely. For now, this is safe and works.
var cs = pattern.substring(classStart + 1, i)
try {
- new RegExp('[' + cs + ']')
+ RegExp('[' + cs + ']')
} catch (er) {
// not a valid class!
var sp = this.parse(cs, SUBPARSE)
- re = re.substr(0, reClassStart) + "\\[" + sp[0] + '\\]'
+ re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
hasMagic = hasMagic || sp[1]
inClass = false
continue
@@ -468,7 +469,7 @@ function parse (pattern, isSub) {
hasMagic = true
inClass = false
re += c
- continue
+ continue
default:
// swallow any state char that wasn't consumed
@@ -478,8 +479,8 @@ function parse (pattern, isSub) {
// no need
escaping = false
} else if (reSpecials[c]
- && !(c === "^" && inClass)) {
- re += "\\"
+ && !(c === '^' && inClass)) {
+ re += '\\'
}
re += c
@@ -487,7 +488,6 @@ function parse (pattern, isSub) {
} // switch
} // for
-
// handle the case where we left a class open.
// "[abc" is valid, equivalent to "\[abc"
if (inClass) {
@@ -495,9 +495,9 @@ function parse (pattern, isSub) {
// this is a huge pita. We now have to re-walk
// the contents of the would-be class to re-translate
// any characters that were passed through as-is
- var cs = pattern.substr(classStart + 1)
- , sp = this.parse(cs, SUBPARSE)
- re = re.substr(0, reClassStart) + "\\[" + sp[0]
+ cs = pattern.substr(classStart + 1)
+ sp = this.parse(cs, SUBPARSE)
+ re = re.substr(0, reClassStart) + '\\[' + sp[0]
hasMagic = hasMagic || sp[1]
}
@@ -507,14 +507,13 @@ function parse (pattern, isSub) {
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
- var pl
- while (pl = patternListStack.pop()) {
+ for (var pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
var tail = re.slice(pl.reStart + 3)
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail.replace(/((?:\\{2})*)(\\?)\|/g, function (_, $1, $2) {
if (!$2) {
// the | isn't already escaped, so escape it.
- $2 = "\\"
+ $2 = '\\'
}
// need to escape all those slashes *again*, without escaping the
@@ -523,46 +522,44 @@ function parse (pattern, isSub) {
// it exactly after itself. That's why this trick works.
//
// I am sorry that you have to see this.
- return $1 + $1 + $2 + "|"
+ return $1 + $1 + $2 + '|'
})
- this.debug("tail=%j\n %s", tail, tail)
- var t = pl.type === "*" ? star
- : pl.type === "?" ? qmark
- : "\\" + pl.type
+ this.debug('tail=%j\n %s', tail, tail)
+ var t = pl.type === '*' ? star
+ : pl.type === '?' ? qmark
+ : '\\' + pl.type
hasMagic = true
- re = re.slice(0, pl.reStart)
- + t + "\\("
- + tail
+ re = re.slice(0, pl.reStart) + t + '\\(' + tail
}
// handle trailing things that only matter at the very end.
clearStateChar()
if (escaping) {
// trailing \\
- re += "\\\\"
+ re += '\\\\'
}
// only need to apply the nodot start if the re starts with
// something that could conceivably capture a dot
var addPatternStart = false
switch (re.charAt(0)) {
- case ".":
- case "[":
- case "(": addPatternStart = true
+ case '.':
+ case '[':
+ case '(': addPatternStart = true
}
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
- if (re !== "" && hasMagic) re = "(?=.)" + re
+ if (re !== '' && hasMagic) re = '(?=.)' + re
if (addPatternStart) re = patternStart + re
// parsing just a piece of a larger pattern.
if (isSub === SUBPARSE) {
- return [ re, hasMagic ]
+ return [re, hasMagic]
}
// skip the regexp for non-magical patterns
@@ -572,8 +569,8 @@ function parse (pattern, isSub) {
return globUnescape(pattern)
}
- var flags = options.nocase ? "i" : ""
- , regExp = new RegExp("^" + re + "$", flags)
+ var flags = options.nocase ? 'i' : ''
+ var regExp = new RegExp('^' + re + '$', flags)
regExp._glob = pattern
regExp._src = re
@@ -597,34 +594,38 @@ function makeRe () {
// when you just want to work with a regex.
var set = this.set
- if (!set.length) return this.regexp = false
+ if (!set.length) {
+ this.regexp = false
+ return this.regexp
+ }
var options = this.options
var twoStar = options.noglobstar ? star
- : options.dot ? twoStarDot
- : twoStarNoDot
- , flags = options.nocase ? "i" : ""
+ : options.dot ? twoStarDot
+ : twoStarNoDot
+ var flags = options.nocase ? 'i' : ''
var re = set.map(function (pattern) {
return pattern.map(function (p) {
return (p === GLOBSTAR) ? twoStar
- : (typeof p === "string") ? regExpEscape(p)
- : p._src
- }).join("\\\/")
- }).join("|")
+ : (typeof p === 'string') ? regExpEscape(p)
+ : p._src
+ }).join('\\\/')
+ }).join('|')
// must match entire pattern
// ending in a * or ** will make it less strict.
- re = "^(?:" + re + ")$"
+ re = '^(?:' + re + ')$'
// can match anything, as long as it's not this.
- if (this.negate) re = "^(?!" + re + ").*$"
+ if (this.negate) re = '^(?!' + re + ').*$'
try {
- return this.regexp = new RegExp(re, flags)
+ this.regexp = new RegExp(re, flags)
} catch (ex) {
- return this.regexp = false
+ this.regexp = false
}
+ return this.regexp
}
minimatch.match = function (list, pattern, options) {
@@ -641,23 +642,24 @@ minimatch.match = function (list, pattern, options) {
Minimatch.prototype.match = match
function match (f, partial) {
- this.debug("match", f, this.pattern)
+ this.debug('match', f, this.pattern)
// short-circuit in the case of busted things.
// comments, etc.
if (this.comment) return false
- if (this.empty) return f === ""
+ if (this.empty) return f === ''
- if (f === "/" && partial) return true
+ if (f === '/' && partial) return true
var options = this.options
// windows: need to use /, not \
- if (isWindows)
- f = f.split("\\").join("/")
+ if (path.sep !== '/') {
+ f = f.split(path.sep).join('/')
+ }
// treat the test path as a set of pathparts.
f = f.split(slashSplit)
- this.debug(this.pattern, "split", f)
+ this.debug(this.pattern, 'split', f)
// just ONE of the pattern sets in this.set needs to match
// in order for it to be valid. If negating, then just one
@@ -665,17 +667,19 @@ function match (f, partial) {
// Either way, return on the first hit.
var set = this.set
- this.debug(this.pattern, "set", set)
+ this.debug(this.pattern, 'set', set)
// Find the basename of the path by looking for the last non-empty segment
- var filename;
- for (var i = f.length - 1; i >= 0; i--) {
+ var filename
+ var i
+ for (i = f.length - 1; i >= 0; i--) {
filename = f[i]
if (filename) break
}
- for (var i = 0, l = set.length; i < l; i ++) {
- var pattern = set[i], file = f
+ for (i = 0; i < set.length; i++) {
+ var pattern = set[i]
+ var file = f
if (options.matchBase && pattern.length === 1) {
file = [filename]
}
@@ -700,23 +704,20 @@ function match (f, partial) {
Minimatch.prototype.matchOne = function (file, pattern, partial) {
var options = this.options
- this.debug("matchOne",
- { "this": this
- , file: file
- , pattern: pattern })
+ this.debug('matchOne',
+ { 'this': this, file: file, pattern: pattern })
- this.debug("matchOne", file.length, pattern.length)
+ this.debug('matchOne', file.length, pattern.length)
- for ( var fi = 0
- , pi = 0
- , fl = file.length
- , pl = pattern.length
+ for (var fi = 0,
+ pi = 0,
+ fl = file.length,
+ pl = pattern.length
; (fi < fl) && (pi < pl)
- ; fi ++, pi ++ ) {
-
- this.debug("matchOne loop")
+ ; fi++, pi++) {
+ this.debug('matchOne loop')
var p = pattern[pi]
- , f = file[fi]
+ var f = file[fi]
this.debug(pattern, p, f)
@@ -750,7 +751,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// - matchOne(z/c, c) -> no
// - matchOne(c, c) yes, hit
var fr = fi
- , pr = pi + 1
+ var pr = pi + 1
if (pr === pl) {
this.debug('** at the end')
// a ** at the end will just swallow the rest.
@@ -759,19 +760,18 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// options.dot is set.
// . and .. are *never* matched by **, for explosively
// exponential reasons.
- for ( ; fi < fl; fi ++) {
- if (file[fi] === "." || file[fi] === ".." ||
- (!options.dot && file[fi].charAt(0) === ".")) return false
+ for (; fi < fl; fi++) {
+ if (file[fi] === '.' || file[fi] === '..' ||
+ (!options.dot && file[fi].charAt(0) === '.')) return false
}
return true
}
// ok, let's see if we can swallow whatever we can.
- WHILE: while (fr < fl) {
+ while (fr < fl) {
var swallowee = file[fr]
- this.debug('\nglobstar while',
- file, fr, pattern, pr, swallowee)
+ this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
// XXX remove this slice. Just pass the start index.
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
@@ -781,23 +781,24 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
} else {
// can't swallow "." or ".." ever.
// can only swallow ".foo" when explicitly asked.
- if (swallowee === "." || swallowee === ".." ||
- (!options.dot && swallowee.charAt(0) === ".")) {
- this.debug("dot detected!", file, fr, pattern, pr)
- break WHILE
+ if (swallowee === '.' || swallowee === '..' ||
+ (!options.dot && swallowee.charAt(0) === '.')) {
+ this.debug('dot detected!', file, fr, pattern, pr)
+ break
}
// ** swallows a segment, and continue.
this.debug('globstar swallow a segment, and continue')
- fr ++
+ fr++
}
}
+
// no match was found.
// However, in partial mode, we can't say this is necessarily over.
// If there's more *pattern* left, then
if (partial) {
// ran out of file
- this.debug("\n>>> no match, partial?", file, fr, pattern, pr)
+ this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
if (fr === fl) return true
}
return false
@@ -807,16 +808,16 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// non-magic patterns just have to match exactly
// patterns with magic have been turned into regexps.
var hit
- if (typeof p === "string") {
+ if (typeof p === 'string') {
if (options.nocase) {
hit = f.toLowerCase() === p.toLowerCase()
} else {
hit = f === p
}
- this.debug("string match", p, f, hit)
+ this.debug('string match', p, f, hit)
} else {
hit = f.match(p)
- this.debug("pattern match", p, f, hit)
+ this.debug('pattern match', p, f, hit)
}
if (!hit) return false
@@ -848,21 +849,19 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash.
// a/* should match a/b/
- var emptyFileEnd = (fi === fl - 1) && (file[fi] === "")
+ var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
return emptyFileEnd
}
// should be unreachable.
- throw new Error("wtf?")
+ throw new Error('wtf?')
}
-
// replace stuff like \* with *
function globUnescape (s) {
- return s.replace(/\\(.)/g, "$1")
+ return s.replace(/\\(.)/g, '$1')
}
-
function regExpEscape (s) {
- return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&")
+ return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
}
diff --git a/deps/npm/node_modules/minimatch/package.json b/deps/npm/node_modules/minimatch/package.json
index 4d37a83ebfc535..0b23c287d512a5 100644
--- a/deps/npm/node_modules/minimatch/package.json
+++ b/deps/npm/node_modules/minimatch/package.json
@@ -6,13 +6,14 @@
},
"name": "minimatch",
"description": "a glob matcher in javascript",
- "version": "2.0.4",
+ "version": "2.0.7",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/minimatch.git"
},
"main": "minimatch.js",
"scripts": {
+ "pretest": "standard minimatch.js test/*.js",
"test": "tap test/*.js",
"prepublish": "browserify -o browser.js -e minimatch.js --bare"
},
@@ -24,6 +25,7 @@
},
"devDependencies": {
"browserify": "^9.0.3",
+ "standard": "^3.7.2",
"tap": ""
},
"license": {
@@ -34,19 +36,23 @@
"minimatch.js",
"browser.js"
],
- "gitHead": "c75d17c23df3b6050338ee654a58490255b36ebc",
+ "gitHead": "4bd6dc22c248c7ea07cc49d63181fe6f6aafae9c",
"bugs": {
"url": "https://github.com/isaacs/minimatch/issues"
},
"homepage": "https://github.com/isaacs/minimatch",
- "_id": "minimatch@2.0.4",
- "_shasum": "83bea115803e7a097a78022427287edb762fafed",
- "_from": "minimatch@>=2.0.4 <2.1.0",
- "_npmVersion": "2.7.1",
- "_nodeVersion": "1.4.2",
+ "_id": "minimatch@2.0.7",
+ "_shasum": "d23652ab10e663e7d914602e920e21f9f66492be",
+ "_from": "minimatch@>=2.0.7 <2.1.0",
+ "_npmVersion": "2.7.6",
+ "_nodeVersion": "1.7.1",
"_npmUser": {
"name": "isaacs",
- "email": "i@izs.me"
+ "email": "isaacs@npmjs.com"
+ },
+ "dist": {
+ "shasum": "d23652ab10e663e7d914602e920e21f9f66492be",
+ "tarball": "http://registry.npmjs.org/minimatch/-/minimatch-2.0.7.tgz"
},
"maintainers": [
{
@@ -54,10 +60,7 @@
"email": "i@izs.me"
}
],
- "dist": {
- "shasum": "83bea115803e7a097a78022427287edb762fafed",
- "tarball": "http://registry.npmjs.org/minimatch/-/minimatch-2.0.4.tgz"
- },
- "_resolved": "https://registry.npmjs.org/minimatch/-/minimatch-2.0.4.tgz",
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/minimatch/-/minimatch-2.0.7.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/mkdirp/.travis.yml b/deps/npm/node_modules/mkdirp/.travis.yml
index c693a939df9809..74c57bf15e2391 100644
--- a/deps/npm/node_modules/mkdirp/.travis.yml
+++ b/deps/npm/node_modules/mkdirp/.travis.yml
@@ -1,5 +1,8 @@
language: node_js
node_js:
- - 0.6
- - 0.8
+ - "0.8"
- "0.10"
+ - "0.12"
+ - "iojs"
+before_install:
+ - npm install -g npm@~1.4.6
diff --git a/deps/npm/node_modules/mkdirp/index.js b/deps/npm/node_modules/mkdirp/index.js
index a1742b20694fe3..6ce241b58c100f 100644
--- a/deps/npm/node_modules/mkdirp/index.js
+++ b/deps/npm/node_modules/mkdirp/index.js
@@ -1,5 +1,6 @@
var path = require('path');
var fs = require('fs');
+var _0777 = parseInt('0777', 8);
module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP;
@@ -16,7 +17,7 @@ function mkdirP (p, opts, f, made) {
var xfs = opts.fs || fs;
if (mode === undefined) {
- mode = 0777 & (~process.umask());
+ mode = _0777 & (~process.umask());
}
if (!made) made = null;
@@ -60,7 +61,7 @@ mkdirP.sync = function sync (p, opts, made) {
var xfs = opts.fs || fs;
if (mode === undefined) {
- mode = 0777 & (~process.umask());
+ mode = _0777 & (~process.umask());
}
if (!made) made = null;
diff --git a/deps/npm/node_modules/mkdirp/node_modules/minimist/package.json b/deps/npm/node_modules/mkdirp/node_modules/minimist/package.json
index 09e9ec44107f63..7cd80f4f41ac5a 100644
--- a/deps/npm/node_modules/mkdirp/node_modules/minimist/package.json
+++ b/deps/npm/node_modules/mkdirp/node_modules/minimist/package.json
@@ -62,6 +62,5 @@
],
"directories": {},
"_shasum": "857fcabfc3397d2625b8228262e86aa7a011b05d",
- "_resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz"
}
diff --git a/deps/npm/node_modules/mkdirp/package.json b/deps/npm/node_modules/mkdirp/package.json
index adb67f6a671866..24411034800529 100644
--- a/deps/npm/node_modules/mkdirp/package.json
+++ b/deps/npm/node_modules/mkdirp/package.json
@@ -1,20 +1,20 @@
{
"name": "mkdirp",
"description": "Recursively mkdir, like `mkdir -p`",
- "version": "0.5.0",
+ "version": "0.5.1",
"author": {
"name": "James Halliday",
"email": "mail@substack.net",
"url": "http://substack.net"
},
- "main": "./index",
+ "main": "index.js",
"keywords": [
"mkdir",
"directory"
],
"repository": {
"type": "git",
- "url": "https://github.com/substack/node-mkdirp.git"
+ "url": "git+https://github.com/substack/node-mkdirp.git"
},
"scripts": {
"test": "tap test/*.js"
@@ -23,27 +23,30 @@
"minimist": "0.0.8"
},
"devDependencies": {
- "tap": "~0.4.0",
- "mock-fs": "~2.2.0"
+ "tap": "1",
+ "mock-fs": "2 >=2.7.0"
},
"bin": {
"mkdirp": "bin/cmd.js"
},
"license": "MIT",
+ "gitHead": "d4eff0f06093aed4f387e88e9fc301cb76beedc7",
"bugs": {
"url": "https://github.com/substack/node-mkdirp/issues"
},
- "homepage": "https://github.com/substack/node-mkdirp",
- "_id": "mkdirp@0.5.0",
- "dist": {
- "shasum": "1d73076a6df986cd9344e15e71fcc05a4c9abf12",
- "tarball": "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.0.tgz"
- },
- "_from": "mkdirp@latest",
- "_npmVersion": "1.4.3",
+ "homepage": "https://github.com/substack/node-mkdirp#readme",
+ "_id": "mkdirp@0.5.1",
+ "_shasum": "30057438eac6cf7f8c4767f38648d6697d75c903",
+ "_from": "mkdirp@>=0.5.1 <0.6.0",
+ "_npmVersion": "2.9.0",
+ "_nodeVersion": "2.0.0",
"_npmUser": {
"name": "substack",
- "email": "mail@substack.net"
+ "email": "substack@gmail.com"
+ },
+ "dist": {
+ "shasum": "30057438eac6cf7f8c4767f38648d6697d75c903",
+ "tarball": "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz"
},
"maintainers": [
{
@@ -52,7 +55,5 @@
}
],
"directories": {},
- "_shasum": "1d73076a6df986cd9344e15e71fcc05a4c9abf12",
- "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.0.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz"
}
diff --git a/deps/npm/node_modules/mkdirp/test/chmod.js b/deps/npm/node_modules/mkdirp/test/chmod.js
index 520dcb8e9b5a75..6a404b932f941f 100644
--- a/deps/npm/node_modules/mkdirp/test/chmod.js
+++ b/deps/npm/node_modules/mkdirp/test/chmod.js
@@ -2,6 +2,9 @@ var mkdirp = require('../').mkdirp;
var path = require('path');
var fs = require('fs');
var test = require('tap').test;
+var _0777 = parseInt('0777', 8);
+var _0755 = parseInt('0755', 8);
+var _0744 = parseInt('0744', 8);
var ps = [ '', 'tmp' ];
@@ -13,20 +16,20 @@ for (var i = 0; i < 25; i++) {
var file = ps.join('/');
test('chmod-pre', function (t) {
- var mode = 0744
+ var mode = _0744
mkdirp(file, mode, function (er) {
t.ifError(er, 'should not error');
fs.stat(file, function (er, stat) {
t.ifError(er, 'should exist');
t.ok(stat && stat.isDirectory(), 'should be directory');
- t.equal(stat && stat.mode & 0777, mode, 'should be 0744');
+ t.equal(stat && stat.mode & _0777, mode, 'should be 0744');
t.end();
});
});
});
test('chmod', function (t) {
- var mode = 0755
+ var mode = _0755
mkdirp(file, mode, function (er) {
t.ifError(er, 'should not error');
fs.stat(file, function (er, stat) {
diff --git a/deps/npm/node_modules/mkdirp/test/clobber.js b/deps/npm/node_modules/mkdirp/test/clobber.js
index 0eb709987002f0..2433b9ad548557 100644
--- a/deps/npm/node_modules/mkdirp/test/clobber.js
+++ b/deps/npm/node_modules/mkdirp/test/clobber.js
@@ -2,6 +2,7 @@ var mkdirp = require('../').mkdirp;
var path = require('path');
var fs = require('fs');
var test = require('tap').test;
+var _0755 = parseInt('0755', 8);
var ps = [ '', 'tmp' ];
@@ -29,7 +30,7 @@ test('clobber-pre', function (t) {
test('clobber', function (t) {
t.plan(2);
- mkdirp(file, 0755, function (err) {
+ mkdirp(file, _0755, function (err) {
t.ok(err);
t.equal(err.code, 'ENOTDIR');
t.end();
diff --git a/deps/npm/node_modules/mkdirp/test/mkdirp.js b/deps/npm/node_modules/mkdirp/test/mkdirp.js
index 3b624ddbeb0db8..eaa8921c7f1488 100644
--- a/deps/npm/node_modules/mkdirp/test/mkdirp.js
+++ b/deps/npm/node_modules/mkdirp/test/mkdirp.js
@@ -3,6 +3,8 @@ var path = require('path');
var fs = require('fs');
var exists = fs.exists || path.exists;
var test = require('tap').test;
+var _0777 = parseInt('0777', 8);
+var _0755 = parseInt('0755', 8);
test('woo', function (t) {
t.plan(5);
@@ -12,13 +14,13 @@ test('woo', function (t) {
var file = '/tmp/' + [x,y,z].join('/');
- mkdirp(file, 0755, function (err) {
+ mkdirp(file, _0755, function (err) {
t.ifError(err);
exists(file, function (ex) {
t.ok(ex, 'file created');
fs.stat(file, function (err, stat) {
t.ifError(err);
- t.equal(stat.mode & 0777, 0755);
+ t.equal(stat.mode & _0777, _0755);
t.ok(stat.isDirectory(), 'target not a directory');
})
})
diff --git a/deps/npm/node_modules/mkdirp/test/opts_fs.js b/deps/npm/node_modules/mkdirp/test/opts_fs.js
index f1fbeca146d128..97186b62e0bb90 100644
--- a/deps/npm/node_modules/mkdirp/test/opts_fs.js
+++ b/deps/npm/node_modules/mkdirp/test/opts_fs.js
@@ -2,6 +2,8 @@ var mkdirp = require('../');
var path = require('path');
var test = require('tap').test;
var mockfs = require('mock-fs');
+var _0777 = parseInt('0777', 8);
+var _0755 = parseInt('0755', 8);
test('opts.fs', function (t) {
t.plan(5);
@@ -13,13 +15,13 @@ test('opts.fs', function (t) {
var file = '/beep/boop/' + [x,y,z].join('/');
var xfs = mockfs.fs();
- mkdirp(file, { fs: xfs, mode: 0755 }, function (err) {
+ mkdirp(file, { fs: xfs, mode: _0755 }, function (err) {
t.ifError(err);
xfs.exists(file, function (ex) {
t.ok(ex, 'created file');
xfs.stat(file, function (err, stat) {
t.ifError(err);
- t.equal(stat.mode & 0777, 0755);
+ t.equal(stat.mode & _0777, _0755);
t.ok(stat.isDirectory(), 'target not a directory');
});
});
diff --git a/deps/npm/node_modules/mkdirp/test/opts_fs_sync.js b/deps/npm/node_modules/mkdirp/test/opts_fs_sync.js
index 224b50642feafd..6c370aa6e93a20 100644
--- a/deps/npm/node_modules/mkdirp/test/opts_fs_sync.js
+++ b/deps/npm/node_modules/mkdirp/test/opts_fs_sync.js
@@ -2,6 +2,8 @@ var mkdirp = require('../');
var path = require('path');
var test = require('tap').test;
var mockfs = require('mock-fs');
+var _0777 = parseInt('0777', 8);
+var _0755 = parseInt('0755', 8);
test('opts.fs sync', function (t) {
t.plan(4);
@@ -13,12 +15,12 @@ test('opts.fs sync', function (t) {
var file = '/beep/boop/' + [x,y,z].join('/');
var xfs = mockfs.fs();
- mkdirp.sync(file, { fs: xfs, mode: 0755 });
+ mkdirp.sync(file, { fs: xfs, mode: _0755 });
xfs.exists(file, function (ex) {
t.ok(ex, 'created file');
xfs.stat(file, function (err, stat) {
t.ifError(err);
- t.equal(stat.mode & 0777, 0755);
+ t.equal(stat.mode & _0777, _0755);
t.ok(stat.isDirectory(), 'target not a directory');
});
});
diff --git a/deps/npm/node_modules/mkdirp/test/perm.js b/deps/npm/node_modules/mkdirp/test/perm.js
index 2c975905204646..fbce44b82bafee 100644
--- a/deps/npm/node_modules/mkdirp/test/perm.js
+++ b/deps/npm/node_modules/mkdirp/test/perm.js
@@ -3,18 +3,20 @@ var path = require('path');
var fs = require('fs');
var exists = fs.exists || path.exists;
var test = require('tap').test;
+var _0777 = parseInt('0777', 8);
+var _0755 = parseInt('0755', 8);
test('async perm', function (t) {
t.plan(5);
var file = '/tmp/' + (Math.random() * (1<<30)).toString(16);
- mkdirp(file, 0755, function (err) {
+ mkdirp(file, _0755, function (err) {
t.ifError(err);
exists(file, function (ex) {
t.ok(ex, 'file created');
fs.stat(file, function (err, stat) {
t.ifError(err);
- t.equal(stat.mode & 0777, 0755);
+ t.equal(stat.mode & _0777, _0755);
t.ok(stat.isDirectory(), 'target not a directory');
})
})
@@ -22,7 +24,7 @@ test('async perm', function (t) {
});
test('async root perm', function (t) {
- mkdirp('/tmp', 0755, function (err) {
+ mkdirp('/tmp', _0755, function (err) {
if (err) t.fail(err);
t.end();
});
diff --git a/deps/npm/node_modules/mkdirp/test/perm_sync.js b/deps/npm/node_modules/mkdirp/test/perm_sync.js
index 327e54b2e951f2..398229fe54ab04 100644
--- a/deps/npm/node_modules/mkdirp/test/perm_sync.js
+++ b/deps/npm/node_modules/mkdirp/test/perm_sync.js
@@ -3,17 +3,19 @@ var path = require('path');
var fs = require('fs');
var exists = fs.exists || path.exists;
var test = require('tap').test;
+var _0777 = parseInt('0777', 8);
+var _0755 = parseInt('0755', 8);
test('sync perm', function (t) {
t.plan(4);
var file = '/tmp/' + (Math.random() * (1<<30)).toString(16) + '.json';
- mkdirp.sync(file, 0755);
+ mkdirp.sync(file, _0755);
exists(file, function (ex) {
t.ok(ex, 'file created');
fs.stat(file, function (err, stat) {
t.ifError(err);
- t.equal(stat.mode & 0777, 0755);
+ t.equal(stat.mode & _0777, _0755);
t.ok(stat.isDirectory(), 'target not a directory');
});
});
@@ -23,7 +25,7 @@ test('sync root perm', function (t) {
t.plan(3);
var file = '/tmp';
- mkdirp.sync(file, 0755);
+ mkdirp.sync(file, _0755);
exists(file, function (ex) {
t.ok(ex, 'file created');
fs.stat(file, function (err, stat) {
diff --git a/deps/npm/node_modules/mkdirp/test/race.js b/deps/npm/node_modules/mkdirp/test/race.js
index 7c295f410de2cc..b0b9e183c902fb 100644
--- a/deps/npm/node_modules/mkdirp/test/race.js
+++ b/deps/npm/node_modules/mkdirp/test/race.js
@@ -3,9 +3,11 @@ var path = require('path');
var fs = require('fs');
var exists = fs.exists || path.exists;
var test = require('tap').test;
+var _0777 = parseInt('0777', 8);
+var _0755 = parseInt('0755', 8);
test('race', function (t) {
- t.plan(6);
+ t.plan(10);
var ps = [ '', 'tmp' ];
for (var i = 0; i < 25; i++) {
@@ -15,24 +17,19 @@ test('race', function (t) {
var file = ps.join('/');
var res = 2;
- mk(file, function () {
- if (--res === 0) t.end();
- });
+ mk(file);
- mk(file, function () {
- if (--res === 0) t.end();
- });
+ mk(file);
function mk (file, cb) {
- mkdirp(file, 0755, function (err) {
+ mkdirp(file, _0755, function (err) {
t.ifError(err);
exists(file, function (ex) {
t.ok(ex, 'file created');
fs.stat(file, function (err, stat) {
t.ifError(err);
- t.equal(stat.mode & 0777, 0755);
+ t.equal(stat.mode & _0777, _0755);
t.ok(stat.isDirectory(), 'target not a directory');
- if (cb) cb();
});
})
});
diff --git a/deps/npm/node_modules/mkdirp/test/rel.js b/deps/npm/node_modules/mkdirp/test/rel.js
index d1f175c2406543..4ddb34276a72a2 100644
--- a/deps/npm/node_modules/mkdirp/test/rel.js
+++ b/deps/npm/node_modules/mkdirp/test/rel.js
@@ -3,6 +3,8 @@ var path = require('path');
var fs = require('fs');
var exists = fs.exists || path.exists;
var test = require('tap').test;
+var _0777 = parseInt('0777', 8);
+var _0755 = parseInt('0755', 8);
test('rel', function (t) {
t.plan(5);
@@ -15,14 +17,14 @@ test('rel', function (t) {
var file = [x,y,z].join('/');
- mkdirp(file, 0755, function (err) {
+ mkdirp(file, _0755, function (err) {
t.ifError(err);
exists(file, function (ex) {
t.ok(ex, 'file created');
fs.stat(file, function (err, stat) {
t.ifError(err);
process.chdir(cwd);
- t.equal(stat.mode & 0777, 0755);
+ t.equal(stat.mode & _0777, _0755);
t.ok(stat.isDirectory(), 'target not a directory');
})
})
diff --git a/deps/npm/node_modules/mkdirp/test/root.js b/deps/npm/node_modules/mkdirp/test/root.js
index 97ad7a2f358e93..9e7d079d9fc757 100644
--- a/deps/npm/node_modules/mkdirp/test/root.js
+++ b/deps/npm/node_modules/mkdirp/test/root.js
@@ -2,12 +2,13 @@ var mkdirp = require('../');
var path = require('path');
var fs = require('fs');
var test = require('tap').test;
+var _0755 = parseInt('0755', 8);
test('root', function (t) {
// '/' on unix, 'c:/' on windows.
var file = path.resolve('/');
- mkdirp(file, 0755, function (err) {
+ mkdirp(file, _0755, function (err) {
if (err) throw err
fs.stat(file, function (er, stat) {
if (er) throw er
diff --git a/deps/npm/node_modules/mkdirp/test/sync.js b/deps/npm/node_modules/mkdirp/test/sync.js
index 88fa4324ee1901..8c8dc938c8b4a8 100644
--- a/deps/npm/node_modules/mkdirp/test/sync.js
+++ b/deps/npm/node_modules/mkdirp/test/sync.js
@@ -3,6 +3,8 @@ var path = require('path');
var fs = require('fs');
var exists = fs.exists || path.exists;
var test = require('tap').test;
+var _0777 = parseInt('0777', 8);
+var _0755 = parseInt('0755', 8);
test('sync', function (t) {
t.plan(4);
@@ -13,7 +15,7 @@ test('sync', function (t) {
var file = '/tmp/' + [x,y,z].join('/');
try {
- mkdirp.sync(file, 0755);
+ mkdirp.sync(file, _0755);
} catch (err) {
t.fail(err);
return t.end();
@@ -23,7 +25,7 @@ test('sync', function (t) {
t.ok(ex, 'file created');
fs.stat(file, function (err, stat) {
t.ifError(err);
- t.equal(stat.mode & 0777, 0755);
+ t.equal(stat.mode & _0777, _0755);
t.ok(stat.isDirectory(), 'target not a directory');
});
});
diff --git a/deps/npm/node_modules/mkdirp/test/umask.js b/deps/npm/node_modules/mkdirp/test/umask.js
index 82c393a006a8f5..2033c63a414efa 100644
--- a/deps/npm/node_modules/mkdirp/test/umask.js
+++ b/deps/npm/node_modules/mkdirp/test/umask.js
@@ -3,6 +3,8 @@ var path = require('path');
var fs = require('fs');
var exists = fs.exists || path.exists;
var test = require('tap').test;
+var _0777 = parseInt('0777', 8);
+var _0755 = parseInt('0755', 8);
test('implicit mode from umask', function (t) {
t.plan(5);
@@ -18,7 +20,7 @@ test('implicit mode from umask', function (t) {
t.ok(ex, 'file created');
fs.stat(file, function (err, stat) {
t.ifError(err);
- t.equal(stat.mode & 0777, 0777 & (~process.umask()));
+ t.equal(stat.mode & _0777, _0777 & (~process.umask()));
t.ok(stat.isDirectory(), 'target not a directory');
});
})
diff --git a/deps/npm/node_modules/mkdirp/test/umask_sync.js b/deps/npm/node_modules/mkdirp/test/umask_sync.js
index e537fbe4beebc2..11a76147496a52 100644
--- a/deps/npm/node_modules/mkdirp/test/umask_sync.js
+++ b/deps/npm/node_modules/mkdirp/test/umask_sync.js
@@ -3,6 +3,8 @@ var path = require('path');
var fs = require('fs');
var exists = fs.exists || path.exists;
var test = require('tap').test;
+var _0777 = parseInt('0777', 8);
+var _0755 = parseInt('0755', 8);
test('umask sync modes', function (t) {
t.plan(4);
@@ -23,7 +25,7 @@ test('umask sync modes', function (t) {
t.ok(ex, 'file created');
fs.stat(file, function (err, stat) {
t.ifError(err);
- t.equal(stat.mode & 0777, (0777 & (~process.umask())));
+ t.equal(stat.mode & _0777, (_0777 & (~process.umask())));
t.ok(stat.isDirectory(), 'target not a directory');
});
});
diff --git a/deps/npm/node_modules/node-gyp/addon.gypi b/deps/npm/node_modules/node-gyp/addon.gypi
index 1604f248caad46..1fe142f70da367 100644
--- a/deps/npm/node_modules/node-gyp/addon.gypi
+++ b/deps/npm/node_modules/node-gyp/addon.gypi
@@ -1,7 +1,7 @@
{
'target_defaults': {
'type': 'loadable_module',
- 'win_delay_load_hook': 'false',
+ 'win_delay_load_hook': 'true',
'product_prefix': '',
'include_dirs': [
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/README.md b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/README.md
index 9b5c61797460f7..62bc7bae3fed28 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/README.md
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/README.md
@@ -1,6 +1,6 @@
# brace-expansion
-[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
+[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
as known from sh/bash, in JavaScript.
[](http://travis-ci.org/juliangruber/brace-expansion)
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/example.js b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/example.js
index 36cde4de5c114b..60ecfc74d41618 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/example.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/example.js
@@ -5,3 +5,4 @@ console.log(expand('http://www.numericals.com/file{1..100..10}.txt'));
console.log(expand('http://www.letters.com/file{a..z..2}.txt'));
console.log(expand('mkdir /usr/local/src/bash/{old,new,dist,bugs}'));
console.log(expand('chown root /usr/{ucb/{ex,edit},lib/{ex?.?*,how_ex}}'));
+
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/index.js b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/index.js
index f8d40f79acde0a..a23104e9550173 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/index.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/index.js
@@ -188,3 +188,4 @@ function expand(str, isTop) {
return expansions;
}
+
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/Makefile b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/Makefile
index dd2730cfde0cab..fa5da71a6d0d34 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/Makefile
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/Makefile
@@ -3,3 +3,4 @@ test:
@node_modules/.bin/tape test/*.js
.PHONY: test
+
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/example.js b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/example.js
index 9ce76f480a4321..c02ad348e69aec 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/example.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/example.js
@@ -2,3 +2,4 @@ var balanced = require('./');
console.log(balanced('{', '}', 'pre{in{nested}}post'));
console.log(balanced('{', '}', 'pre{first}between{second}post'));
+
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/empty-option.js b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/empty-option.js
index fd1132826a00c9..e429121eab8059 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/empty-option.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/empty-option.js
@@ -7,3 +7,4 @@ test('empty option', function(t) {
]);
t.end();
});
+
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/nested.js b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/nested.js
index 54d1e88f6865bf..0862dc51f90aee 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/nested.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/nested.js
@@ -13,3 +13,4 @@ test('nested', function(t) {
]);
t.end();
});
+
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/order.js b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/order.js
index d5f62566c95d6d..c00ad155fe6760 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/order.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/order.js
@@ -7,3 +7,4 @@ test('order', function(t) {
]);
t.end();
});
+
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/pad.js b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/pad.js
index f830f7c46621bb..e4158775f1bd06 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/pad.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/pad.js
@@ -10,3 +10,4 @@ test('pad', function(t) {
]);
t.end();
});
+
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/sequence.js b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/sequence.js
index 18ee665fb99cd9..f73a9579ab398b 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/sequence.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/test/sequence.js
@@ -47,3 +47,4 @@ test('alphabetic sequences with step count', function(t) {
]);
t.end();
});
+
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/examples/reader.js b/deps/npm/node_modules/node-gyp/node_modules/tar/examples/reader.js
index 8d113ad30d05e9..39f3f0888a2cfd 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/examples/reader.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/examples/reader.js
@@ -33,3 +33,4 @@ fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
console.error(" <<
static FARPROC WINAPI load_exe_hook(unsigned int event, DelayLoadInfo* info) {
+ HMODULE m;
if (event != dliNotePreLoadLibrary)
return NULL;
@@ -23,7 +24,7 @@ static FARPROC WINAPI load_exe_hook(unsigned int event, DelayLoadInfo* info) {
_stricmp(info->szDll, "node.exe") != 0)
return NULL;
- HMODULE m = GetModuleHandle(NULL);
+ m = GetModuleHandle(NULL);
return (FARPROC) m;
}
diff --git a/deps/npm/node_modules/normalize-package-data/README.md b/deps/npm/node_modules/normalize-package-data/README.md
index 4b159126d98f12..0b9d7b5b423c47 100644
--- a/deps/npm/node_modules/normalize-package-data/README.md
+++ b/deps/npm/node_modules/normalize-package-data/README.md
@@ -93,6 +93,10 @@ If `name` field is given, the value of the name field must be a string. The stri
If `version` field is given, the value of the version field must be a valid *semver* string, as determined by the `semver.valid` method. See [documentation for the semver module](https://github.com/isaacs/node-semver).
+### Rules for license field
+
+The `license` field should be a valid *SDPDX license expression* string, as determined by the `spdx.valid` method. See [documentation for the spdx module](https://github.com/kemitchell/spdx.js).
+
## Credits
This package contains code based on read-package-json written by Isaac Z. Schlueter. Used with permisson.
diff --git a/deps/npm/node_modules/normalize-package-data/lib/fixer.js b/deps/npm/node_modules/normalize-package-data/lib/fixer.js
index 59cd05f755795e..e8f917d86aad62 100644
--- a/deps/npm/node_modules/normalize-package-data/lib/fixer.js
+++ b/deps/npm/node_modules/normalize-package-data/lib/fixer.js
@@ -1,4 +1,5 @@
var semver = require("semver")
+var spdx = require('spdx');
var hostedGitInfo = require("hosted-git-info")
var depTypes = ["dependencies","devDependencies","optionalDependencies"]
var extractDescription = require("./extract_description")
@@ -283,6 +284,18 @@ var fixer = module.exports = {
data.homepage = "https://" + data.homepage
}
}
+
+, fixLicenseField: function(data) {
+ if (!data.license) {
+ return this.warn("missingLicense")
+ } else if (
+ typeof(data.license) !== 'string' ||
+ data.license.length < 1 ||
+ !spdx.valid(data.license)
+ ) {
+ this.warn("nonSPDXLicense")
+ }
+ }
}
function isValidScopedPackageName(spec) {
diff --git a/deps/npm/node_modules/normalize-package-data/lib/normalize.js b/deps/npm/node_modules/normalize-package-data/lib/normalize.js
index 7e6beefdae8f02..c43a9263789e3b 100644
--- a/deps/npm/node_modules/normalize-package-data/lib/normalize.js
+++ b/deps/npm/node_modules/normalize-package-data/lib/normalize.js
@@ -4,7 +4,7 @@ var fixer = require("./fixer")
var makeWarning = require("./make_warning")
var fieldsToFix = ['name','version','description','repository','modules','scripts'
- ,'files','bin','man','bugs','keywords','readme','homepage']
+ ,'files','bin','man','bugs','keywords','readme','homepage','license']
var otherThingsToFix = ['dependencies','people', 'typos']
var thingsToFix = fieldsToFix.map(function(fieldName) {
diff --git a/deps/npm/node_modules/normalize-package-data/lib/warning_messages.json b/deps/npm/node_modules/normalize-package-data/lib/warning_messages.json
index 1877fe5de390a0..3bfce72cd4dd4a 100644
--- a/deps/npm/node_modules/normalize-package-data/lib/warning_messages.json
+++ b/deps/npm/node_modules/normalize-package-data/lib/warning_messages.json
@@ -19,11 +19,13 @@
,"nonStringDescription": "'description' field should be a string"
,"missingDescription": "No description"
,"missingReadme": "No README data"
+ ,"missingLicense": "No license field."
,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}"
,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted."
,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted."
,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted."
,"nonUrlHomepage": "homepage field must be a string url. Deleted."
+ ,"nonSPDXLicense": "license should be a valid SPDX license expression"
,"missingProtocolHomepage": "homepage field must start with a protocol."
,"typo": "%s should probably be %s."
}
diff --git a/deps/npm/node_modules/normalize-package-data/package.json b/deps/npm/node_modules/normalize-package-data/package.json
index 8f4aeadca2c23b..5dbc3af3986d2b 100644
--- a/deps/npm/node_modules/normalize-package-data/package.json
+++ b/deps/npm/node_modules/normalize-package-data/package.json
@@ -1,6 +1,6 @@
{
"name": "normalize-package-data",
- "version": "2.0.0",
+ "version": "2.1.0",
"author": {
"name": "Meryn Stol",
"email": "merynstol@gmail.com"
@@ -16,7 +16,8 @@
},
"dependencies": {
"hosted-git-info": "^2.0.2",
- "semver": "2 || 3 || 4"
+ "semver": "2 || 3 || 4",
+ "spdx": "^0.4.0"
},
"devDependencies": {
"tap": "~0.2.5",
@@ -37,43 +38,14 @@
"email": "rok@kowalski.gd"
}
],
- "gitHead": "ea0b959633e4803685bae2283d3d79a0115e6f8a",
+ "readme": "# normalize-package-data [](https://travis-ci.org/npm/normalize-package-data)\n\nnormalize-package data exports a function that normalizes package metadata. This data is typically found in a package.json file, but in principle could come from any source - for example the npm registry.\n\nnormalize-package-data is used by [read-package-json](https://npmjs.org/package/read-package-json) to normalize the data it reads from a package.json file. In turn, read-package-json is used by [npm](https://npmjs.org/package/npm) and various npm-related tools.\n\n## Installation\n\n```\nnpm install normalize-package-data\n```\n\n## Usage\n\nBasic usage is really simple. You call the function that normalize-package-data exports. Let's call it `normalizeData`.\n\n```javascript\nnormalizeData = require('normalize-package-data')\npackageData = fs.readFileSync(\"package.json\")\nnormalizeData(packageData)\n// packageData is now normalized\n```\n\n#### Strict mode\n\nYou may activate strict validation by passing true as the second argument.\n\n```javascript\nnormalizeData = require('normalize-package-data')\npackageData = fs.readFileSync(\"package.json\")\nwarnFn = function(msg) { console.error(msg) }\nnormalizeData(packageData, true)\n// packageData is now normalized\n```\n\nIf strict mode is activated, only Semver 2.0 version strings are accepted. Otherwise, Semver 1.0 strings are accepted as well. Packages must have a name, and the name field must not have contain leading or trailing whitespace.\n\n#### Warnings\n\nOptionally, you may pass a \"warning\" function. It gets called whenever the `normalizeData` function encounters something that doesn't look right. 
It indicates less than perfect input data.\n\n```javascript\nnormalizeData = require('normalize-package-data')\npackageData = fs.readFileSync(\"package.json\")\nwarnFn = function(msg) { console.error(msg) }\nnormalizeData(packageData, warnFn)\n// packageData is now normalized. Any number of warnings may have been logged.\n```\n\nYou may combine strict validation with warnings by passing `true` as the second argument, and `warnFn` as third.\n\nWhen `private` field is set to `true`, warnings will be suppressed.\n\n### Potential exceptions\n\nIf the supplied data has an invalid name or version vield, `normalizeData` will throw an error. Depending on where you call `normalizeData`, you may want to catch these errors so can pass them to a callback.\n\n## What normalization (currently) entails\n\n* The value of `name` field gets trimmed (unless in strict mode).\n* The value of the `version` field gets cleaned by `semver.clean`. See [documentation for the semver module](https://github.com/isaacs/node-semver).\n* If `name` and/or `version` fields are missing, they are set to empty strings.\n* If `files` field is not an array, it will be removed.\n* If `bin` field is a string, then `bin` field will become an object with `name` set to the value of the `name` field, and `bin` set to the original string value.\n* If `man` field is a string, it will become an array with the original string as its sole member.\n* If `keywords` field is string, it is considered to be a list of keywords separated by one or more white-space characters. 
It gets converted to an array by splitting on `\\s+`.\n* All people fields (`author`, `maintainers`, `contributors`) get converted into objects with name, email and url properties.\n* If `bundledDependencies` field (a typo) exists and `bundleDependencies` field does not, `bundledDependencies` will get renamed to `bundleDependencies`.\n* If the value of any of the dependencies fields (`dependencies`, `devDependencies`, `optionalDependencies`) is a string, it gets converted into an object with familiar `name=>value` pairs.\n* The values in `optionalDependencies` get added to `dependencies`. The `optionalDependencies` array is left untouched.\n* As of v2: Dependencies that point at known hosted git providers (currently: github, bitbucket, gitlab) will have their URLs canonicalized, but protocols will be preserved.\n* As of v2: Dependencies that use shortcuts for hosted git providers (`org/proj`, `github:org/proj`, `bitbucket:org/proj`, `gitlab:org/proj`, `gist:docid`) will have the shortcut left in place. (In the case of github, the `org/proj` form will be expanded to `github:org/proj`.) 
THIS MARKS A BREAKING CHANGE FROM V1, where the shorcut was previously expanded to a URL.\n* If `description` field does not exist, but `readme` field does, then (more or less) the first paragraph of text that's found in the readme is taken as value for `description`.\n* If `repository` field is a string, it will become an object with `url` set to the original string value, and `type` set to `\"git\"`.\n* If `repository.url` is not a valid url, but in the style of \"[owner-name]/[repo-name]\", `repository.url` will be set to https://github.com/[owner-name]/[repo-name]\n* If `bugs` field is a string, the value of `bugs` field is changed into an object with `url` set to the original string value.\n* If `bugs` field does not exist, but `repository` field points to a repository hosted on GitHub, the value of the `bugs` field gets set to an url in the form of https://github.com/[owner-name]/[repo-name]/issues . If the repository field points to a GitHub Gist repo url, the associated http url is chosen.\n* If `bugs` field is an object, the resulting value only has email and url properties. If email and url properties are not strings, they are ignored. If no valid values for either email or url is found, bugs field will be removed.\n* If `homepage` field is not a string, it will be removed.\n* If the url in the `homepage` field does not specify a protocol, then http is assumed. For example, `myproject.org` will be changed to `http://myproject.org`.\n* If `homepage` field does not exist, but `repository` field points to a repository hosted on GitHub, the value of the `homepage` field gets set to an url in the form of https://github.com/[owner-name]/[repo-name]/ . 
If the repository field points to a GitHub Gist repo url, the associated http url is chosen.\n\n### Rules for name field\n\nIf `name` field is given, the value of the name field must be a string. The string may not:\n\n* start with a period.\n* contain the following characters: `/@\\s+%`\n* contain and characters that would need to be encoded for use in urls.\n* resemble the word `node_modules` or `favicon.ico` (case doesn't matter).\n\n### Rules for version field\n\nIf `version` field is given, the value of the version field must be a valid *semver* string, as determined by the `semver.valid` method. See [documentation for the semver module](https://github.com/isaacs/node-semver).\n\n### Rules for license field\n\nThe `license` field should be a valid *SDPDX license expression* string, as determined by the `spdx.valid` method. See [documentation for the spdx module](https://github.com/kemitchell/spdx.js).\n\n## Credits\n\nThis package contains code based on read-package-json written by Isaac Z. Schlueter. Used with permisson.\n\n## License\n\nnormalize-package-data is released under the [BSD 2-Clause License](http://opensource.org/licenses/MIT). \nCopyright (c) 2013 Meryn Stol \n",
+ "readmeFilename": "README.md",
+ "gitHead": "a168f6153570465db33d96601576b612a63ed446",
"bugs": {
"url": "https://github.com/npm/normalize-package-data/issues"
},
- "homepage": "https://github.com/npm/normalize-package-data",
- "_id": "normalize-package-data@2.0.0",
- "_shasum": "8795d0d5c70c0e9ca36f419548aac0abf1f638bc",
- "_from": "normalize-package-data@>=2.0.0 <2.1.0",
- "_npmVersion": "2.7.5",
- "_nodeVersion": "1.6.2",
- "_npmUser": {
- "name": "iarna",
- "email": "me@re-becca.org"
- },
- "maintainers": [
- {
- "name": "meryn",
- "email": "merynstol@gmail.com"
- },
- {
- "name": "isaacs",
- "email": "i@izs.me"
- },
- {
- "name": "othiym23",
- "email": "ogd@aoaioxxysz.net"
- },
- {
- "name": "iarna",
- "email": "me@re-becca.org"
- }
- ],
- "dist": {
- "shasum": "8795d0d5c70c0e9ca36f419548aac0abf1f638bc",
- "tarball": "http://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.0.0.tgz"
- },
- "directories": {},
- "_resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.0.0.tgz",
- "readme": "ERROR: No README data found!"
+ "homepage": "https://github.com/npm/normalize-package-data#readme",
+ "_id": "normalize-package-data@2.1.0",
+ "_shasum": "31381afb0567a8ab0ae730230c8652ff8a0cdd11",
+ "_from": "normalize-package-data@>=2.1.0 <2.2.0"
}
diff --git a/deps/npm/node_modules/normalize-package-data/test/fixtures/read-package-json.json b/deps/npm/node_modules/normalize-package-data/test/fixtures/read-package-json.json
index f4a2b96f47caf8..7d0dae1d57a4e1 100644
--- a/deps/npm/node_modules/normalize-package-data/test/fixtures/read-package-json.json
+++ b/deps/npm/node_modules/normalize-package-data/test/fixtures/read-package-json.json
@@ -7,6 +7,7 @@
"type": "git",
"url": "git://github.com/isaacs/read-package-json.git"
},
+ "license": "MIT",
"main": "read-json.js",
"scripts": {
"test": "tap test/*.js"
@@ -24,4 +25,4 @@
"npmlog": "0",
"graceful-fs": "~1.1.8"
}
-}
\ No newline at end of file
+}
diff --git a/deps/npm/node_modules/normalize-package-data/test/normalize.js b/deps/npm/node_modules/normalize-package-data/test/normalize.js
index 96b2544533a004..3dce3c0577189d 100644
--- a/deps/npm/node_modules/normalize-package-data/test/normalize.js
+++ b/deps/npm/node_modules/normalize-package-data/test/normalize.js
@@ -53,7 +53,8 @@ tap.test("empty object", function(t) {
t.same(warnings, [
warningMessages.missingDescription,
warningMessages.missingRepository,
- warningMessages.missingReadme
+ warningMessages.missingReadme,
+ warningMessages.missingLicense
])
t.end()
})
@@ -76,7 +77,8 @@ tap.test("core module name", function(t) {
safeFormat(warningMessages.conflictingName, 'http'),
warningMessages.nonEmailUrlBugsString,
warningMessages.emptyNormalizedBugs,
- warningMessages.nonUrlHomepage
+ warningMessages.nonUrlHomepage,
+ warningMessages.missingLicense
]
t.same(warnings, expect)
t.end()
@@ -110,9 +112,11 @@ tap.test("urls required", function(t) {
warningMessages.nonEmailBugsEmailField,
warningMessages.emptyNormalizedBugs,
warningMessages.missingReadme,
+ warningMessages.missingLicense,
warningMessages.nonEmailUrlBugsString,
warningMessages.emptyNormalizedBugs,
- warningMessages.nonUrlHomepage ]
+ warningMessages.nonUrlHomepage,
+ warningMessages.missingLicense]
t.same(warnings, expect)
t.end()
})
@@ -133,12 +137,34 @@ tap.test("homepage field must start with a protocol.", function(t) {
[ warningMessages.missingDescription,
warningMessages.missingRepository,
warningMessages.missingReadme,
- warningMessages.missingProtocolHomepage ]
+ warningMessages.missingProtocolHomepage,
+ warningMessages.missingLicense]
t.same(warnings, expect)
t.same(a.homepage, 'http://example.org')
t.end()
})
+tap.test("license field should be a valid SPDX expression", function(t) {
+ var warnings = []
+ function warn(w) {
+ warnings.push(w)
+ }
+ var a
+ normalize(a={
+ license: 'Apache 2'
+ }, warn)
+
+ console.error(a)
+
+ var expect =
+ [ warningMessages.missingDescription,
+ warningMessages.missingRepository,
+ warningMessages.missingReadme,
+ warningMessages.nonSPDXLicense]
+ t.same(warnings, expect)
+ t.end()
+})
+
tap.test("gist bugs url", function(t) {
var d = {
repository: "git@gist.github.com:123456.git"
@@ -159,14 +185,14 @@ tap.test("singularize repositories", function(t) {
tap.test("treat visionmedia/express as github repo", function(t) {
var d = {repository: {type: "git", url: "visionmedia/express"}}
normalize(d)
- t.same(d.repository, { type: "git", url: "https://github.com/visionmedia/express.git" })
+ t.same(d.repository, { type: "git", url: "git+https://github.com/visionmedia/express.git" })
t.end()
});
tap.test("treat isaacs/node-graceful-fs as github repo", function(t) {
var d = {repository: {type: "git", url: "isaacs/node-graceful-fs"}}
normalize(d)
- t.same(d.repository, { type: "git", url: "https://github.com/isaacs/node-graceful-fs.git" })
+ t.same(d.repository, { type: "git", url: "git+https://github.com/isaacs/node-graceful-fs.git" })
t.end()
});
diff --git a/deps/npm/node_modules/normalize-package-data/test/typo.js b/deps/npm/node_modules/normalize-package-data/test/typo.js
index dfa2b90e53016d..0cd3eb49d00ccf 100644
--- a/deps/npm/node_modules/normalize-package-data/test/typo.js
+++ b/deps/npm/node_modules/normalize-package-data/test/typo.js
@@ -15,6 +15,7 @@ test('typos', function(t) {
var expect =
[ warningMessages.missingRepository,
+ warningMessages.missingLicense,
typoMessage('dependancies', 'dependencies'),
typoMessage('dependecies', 'dependencies'),
typoMessage('depdenencies', 'dependencies'),
@@ -66,7 +67,8 @@ test('typos', function(t) {
typoMessage("bugs['name']", "bugs['url']"),
warningMessages.nonUrlBugsUrlField,
warningMessages.emptyNormalizedBugs,
- warningMessages.missingReadme ]
+ warningMessages.missingReadme,
+ warningMessages.missingLicense]
normalize({name:"name"
,version:"1.2.5"
@@ -79,6 +81,7 @@ test('typos', function(t) {
[ warningMessages.missingDescription,
warningMessages.missingRepository,
warningMessages.missingReadme,
+ warningMessages.missingLicense,
typoMessage('script', 'scripts') ]
normalize({name:"name"
@@ -93,7 +96,8 @@ test('typos', function(t) {
warningMessages.missingRepository,
typoMessage("scripts['server']", "scripts['start']"),
typoMessage("scripts['tests']", "scripts['test']"),
- warningMessages.missingReadme ]
+ warningMessages.missingReadme,
+ warningMessages.missingLicense]
normalize({name:"name"
,version:"1.2.5"
@@ -105,7 +109,8 @@ test('typos', function(t) {
expect =
[ warningMessages.missingDescription,
warningMessages.missingRepository,
- warningMessages.missingReadme ]
+ warningMessages.missingReadme,
+ warningMessages.missingLicense]
normalize({name:"name"
,version:"1.2.5"
diff --git a/deps/npm/node_modules/npm-registry-client/README.md b/deps/npm/node_modules/npm-registry-client/README.md
index 702ae2ea1d31fd..fb3226cf401b5b 100644
--- a/deps/npm/node_modules/npm-registry-client/README.md
+++ b/deps/npm/node_modules/npm-registry-client/README.md
@@ -9,7 +9,7 @@ It handles all the caching and HTTP calls.
```javascript
var RegClient = require('npm-registry-client')
var client = new RegClient(config)
-var uri = "npm://registry.npmjs.org/npm"
+var uri = "https://registry.npmjs.org/npm"
var params = {timeout: 1000}
client.get(uri, params, function (error, data, raw, res) {
diff --git a/deps/npm/node_modules/npm-registry-client/lib/request.js b/deps/npm/node_modules/npm-registry-client/lib/request.js
index 963eabaec0548b..168a9d160af215 100644
--- a/deps/npm/node_modules/npm-registry-client/lib/request.js
+++ b/deps/npm/node_modules/npm-registry-client/lib/request.js
@@ -232,7 +232,7 @@ function requestDone (method, where, cb) {
var name
if (!w.match(/^-/)) {
w = w.split('/')
- name = w[w.indexOf('_rewrite') + 1]
+ name = decodeURIComponent(w[w.indexOf('_rewrite') + 1])
}
if (!parsed.error) {
@@ -245,7 +245,7 @@ function requestDone (method, where, cb) {
er = new Error('404 Not Found: ' + name)
} else {
er = new Error(
- parsed.error + ' ' + (parsed.reason || '') + ': ' + w
+ parsed.error + ' ' + (parsed.reason || '') + ': ' + (name || w)
)
}
if (name) er.pkgid = name
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json
index 466dfdfe0139b3..b67333380c265e 100644
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json
@@ -29,25 +29,9 @@
},
"readme": "# core-util-is\n\nThe `util.is*` functions introduced in Node v0.12.\n",
"readmeFilename": "README.md",
- "homepage": "https://github.com/isaacs/core-util-is",
+ "homepage": "https://github.com/isaacs/core-util-is#readme",
"_id": "core-util-is@1.0.1",
- "dist": {
- "shasum": "6b07085aef9a3ccac6ee53bf9d3df0c1521a5538",
- "tarball": "http://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz"
- },
- "_from": "core-util-is@>=1.0.0 <1.1.0",
- "_npmVersion": "1.3.23",
- "_npmUser": {
- "name": "isaacs",
- "email": "i@izs.me"
- },
- "maintainers": [
- {
- "name": "isaacs",
- "email": "i@izs.me"
- }
- ],
- "directories": {},
"_shasum": "6b07085aef9a3ccac6ee53bf9d3df0c1521a5538",
- "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz"
+ "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz",
+ "_from": "core-util-is@>=1.0.0 <1.1.0"
}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json
index 19228ab6fdcaaf..fb1eb3786d8168 100644
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json
@@ -26,28 +26,13 @@
"url": "http://juliangruber.com"
},
"license": "MIT",
- "_id": "isarray@0.0.1",
- "dist": {
- "shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
- "tarball": "http://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz"
- },
- "_from": "isarray@0.0.1",
- "_npmVersion": "1.2.18",
- "_npmUser": {
- "name": "juliangruber",
- "email": "julian@juliangruber.com"
- },
- "maintainers": [
- {
- "name": "juliangruber",
- "email": "julian@juliangruber.com"
- }
- ],
- "directories": {},
- "_shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
- "_resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
+ "readme": "\n# isarray\n\n`Array#isArray` for older browsers.\n\n## Usage\n\n```js\nvar isArray = require('isarray');\n\nconsole.log(isArray([])); // => true\nconsole.log(isArray({})); // => false\n```\n\n## Installation\n\nWith [npm](http://npmjs.org) do\n\n```bash\n$ npm install isarray\n```\n\nThen bundle for the browser with\n[browserify](https://github.com/substack/browserify).\n\nWith [component](http://component.io) do\n\n```bash\n$ component install juliangruber/isarray\n```\n\n## License\n\n(MIT)\n\nCopyright (c) 2013 Julian Gruber <julian@juliangruber.com>\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\nof the Software, and to permit persons to whom the Software is furnished to do\nso, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n",
+ "readmeFilename": "README.md",
"bugs": {
"url": "https://github.com/juliangruber/isarray/issues"
},
- "readme": "ERROR: No README data found!"
+ "_id": "isarray@0.0.1",
+ "_shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
+ "_resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
+ "_from": "isarray@0.0.1"
}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json
index 0364d54ba46af6..ee70702359198d 100644
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json
@@ -22,33 +22,13 @@
"browserify"
],
"license": "MIT",
- "gitHead": "d46d4fd87cf1d06e031c23f1ba170ca7d4ade9a0",
+ "readme": "**string_decoder.js** (`require('string_decoder')`) from Node.js core\n\nCopyright Joyent, Inc. and other Node contributors. See LICENCE file for details.\n\nVersion numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.**\n\nThe *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version.",
+ "readmeFilename": "README.md",
"bugs": {
"url": "https://github.com/rvagg/string_decoder/issues"
},
"_id": "string_decoder@0.10.31",
"_shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94",
- "_from": "string_decoder@>=0.10.0 <0.11.0",
- "_npmVersion": "1.4.23",
- "_npmUser": {
- "name": "rvagg",
- "email": "rod@vagg.org"
- },
- "maintainers": [
- {
- "name": "substack",
- "email": "mail@substack.net"
- },
- {
- "name": "rvagg",
- "email": "rod@vagg.org"
- }
- ],
- "dist": {
- "shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94",
- "tarball": "http://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz"
- },
- "directories": {},
"_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
- "readme": "ERROR: No README data found!"
+ "_from": "string_decoder@>=0.10.0 <0.11.0"
}
diff --git a/deps/npm/node_modules/npm-registry-client/package.json b/deps/npm/node_modules/npm-registry-client/package.json
index c46ed80ed2913b..31a644ccb5e2d4 100644
--- a/deps/npm/node_modules/npm-registry-client/package.json
+++ b/deps/npm/node_modules/npm-registry-client/package.json
@@ -6,7 +6,7 @@
},
"name": "npm-registry-client",
"description": "Client for the npm registry",
- "version": "6.3.2",
+ "version": "6.3.3",
"repository": {
"url": "git://github.com/isaacs/npm-registry-client.git"
},
@@ -39,14 +39,14 @@
"npmlog": ""
},
"license": "ISC",
- "readme": "# npm-registry-client\n\nThe code that npm uses to talk to the registry.\n\nIt handles all the caching and HTTP calls.\n\n## Usage\n\n```javascript\nvar RegClient = require('npm-registry-client')\nvar client = new RegClient(config)\nvar uri = \"npm://registry.npmjs.org/npm\"\nvar params = {timeout: 1000}\n\nclient.get(uri, params, function (error, data, raw, res) {\n // error is an error if there was a problem.\n // data is the parsed data object\n // raw is the json string\n // res is the response from couch\n})\n```\n\n# Registry URLs\n\nThe registry calls take either a full URL pointing to a resource in the\nregistry, or a base URL for the registry as a whole (including the registry\npath – but be sure to terminate the path with `/`). `http` and `https` URLs are\nthe only ones supported.\n\n## Using the client\n\nEvery call to the client follows the same pattern:\n\n* `uri` {String} The *fully-qualified* URI of the registry API method being\n invoked.\n* `params` {Object} Per-request parameters.\n* `callback` {Function} Callback to be invoked when the call is complete.\n\n### Credentials\n\nMany requests to the registry can by authenticated, and require credentials\nfor authorization. These credentials always look the same:\n\n* `username` {String}\n* `password` {String}\n* `email` {String}\n* `alwaysAuth` {Boolean} Whether calls to the target registry are always\n authed.\n\n**or**\n\n* `token` {String}\n* `alwaysAuth` {Boolean} Whether calls to the target registry are always\n authed.\n\n## API\n\n### client.access(uri, params, cb)\n\n* `uri` {String} Registry URL for the package's access API endpoint.\n Looks like `/-/package//access`.\n* `params` {Object} Object containing per-request properties.\n * `access` {String} New access level for the package. Can be either\n `public` or `restricted`. 
Registry will raise an error if trying\n to change the access level of an unscoped package.\n * `auth` {Credentials}\n\nSet the access level for scoped packages. For now, there are only two\naccess levels: \"public\" and \"restricted\".\n\n### client.adduser(uri, params, cb)\n\n* `uri` {String} Base registry URL.\n* `params` {Object} Object containing per-request properties.\n * `auth` {Credentials}\n* `cb` {Function}\n * `error` {Error | null}\n * `data` {Object} the parsed data object\n * `raw` {String} the json\n * `res` {Response Object} response from couch\n\nAdd a user account to the registry, or verify the credentials.\n\n### client.deprecate(uri, params, cb)\n\n* `uri` {String} Full registry URI for the deprecated package.\n* `params` {Object} Object containing per-request properties.\n * `version` {String} Semver version range.\n * `message` {String} The message to use as a deprecation warning.\n * `auth` {Credentials}\n* `cb` {Function}\n\nDeprecate a version of a package in the registry.\n\n### client.distTags.fetch(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `auth` {Credentials}\n* `cb` {Function}\n\nFetch all of the `dist-tags` for the named package.\n\n### client.distTags.add(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `distTag` {String} Name of the new `dist-tag`.\n * `version` {String} Exact version to be mapped to the `dist-tag`.\n * `auth` {Credentials}\n* `cb` {Function}\n\nAdd (or replace) a single dist-tag onto the named package.\n\n### client.distTags.set(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `distTags` {Object} Object containing a map from tag names 
to package\n versions.\n * `auth` {Credentials}\n* `cb` {Function}\n\nSet all of the `dist-tags` for the named package at once, creating any\n`dist-tags` that do not already exit. Any `dist-tags` not included in the\n`distTags` map will be removed.\n\n### client.distTags.update(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `distTags` {Object} Object containing a map from tag names to package\n versions.\n * `auth` {Credentials}\n* `cb` {Function}\n\nUpdate the values of multiple `dist-tags`, creating any `dist-tags` that do\nnot already exist. Any pre-existing `dist-tags` not included in the `distTags`\nmap will be left alone.\n\n### client.distTags.rm(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `distTag` {String} Name of the new `dist-tag`.\n * `auth` {Credentials}\n* `cb` {Function}\n\nRemove a single `dist-tag` from the named package.\n\n### client.get(uri, params, cb)\n\n* `uri` {String} The complete registry URI to fetch\n* `params` {Object} Object containing per-request properties.\n * `timeout` {Number} Duration before the request times out. Optional\n (default: never).\n * `follow` {Boolean} Follow 302/301 responses. Optional (default: true).\n * `staleOk` {Boolean} If there's cached data available, then return that to\n the callback quickly, and update the cache the background. 
Optional\n (default: false).\n * `auth` {Credentials} Optional.\n* `cb` {Function}\n\nFetches data from the registry via a GET request, saving it in the cache folder\nwith the ETag or the \"Last Modified\" timestamp.\n\n### client.publish(uri, params, cb)\n\n* `uri` {String} The registry URI for the package to publish.\n* `params` {Object} Object containing per-request properties.\n * `metadata` {Object} Package metadata.\n * `access` {String} Access for the package. Can be `public` or `restricted` (no default).\n * `body` {Stream} Stream of the package body / tarball.\n * `auth` {Credentials}\n* `cb` {Function}\n\nPublish a package to the registry.\n\nNote that this does not create the tarball from a folder.\n\n### client.star(uri, params, cb)\n\n* `uri` {String} The complete registry URI for the package to star.\n* `params` {Object} Object containing per-request properties.\n * `starred` {Boolean} True to star the package, false to unstar it. Optional\n (default: false).\n * `auth` {Credentials}\n* `cb` {Function}\n\nStar or unstar a package.\n\nNote that the user does not have to be the package owner to star or unstar a\npackage, though other writes do require that the user be the package owner.\n\n### client.stars(uri, params, cb)\n\n* `uri` {String} The base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `username` {String} Name of user to fetch starred packages for. 
Optional\n (default: user in `auth`).\n * `auth` {Credentials} Optional (required if `username` is omitted).\n* `cb` {Function}\n\nView your own or another user's starred packages.\n\n### client.tag(uri, params, cb)\n\n* `uri` {String} The complete registry URI to tag\n* `params` {Object} Object containing per-request properties.\n * `version` {String} Version to tag.\n * `tag` {String} Tag name to apply.\n * `auth` {Credentials}\n* `cb` {Function}\n\nMark a version in the `dist-tags` hash, so that `pkg@tag` will fetch the\nspecified version.\n\n### client.unpublish(uri, params, cb)\n\n* `uri` {String} The complete registry URI of the package to unpublish.\n* `params` {Object} Object containing per-request properties.\n * `version` {String} version to unpublish. Optional – omit to unpublish all\n versions.\n * `auth` {Credentials}\n* `cb` {Function}\n\nRemove a version of a package (or all versions) from the registry. When the\nlast version us unpublished, the entire document is removed from the database.\n\n### client.whoami(uri, params, cb)\n\n* `uri` {String} The base registry for the URI.\n* `params` {Object} Object containing per-request properties.\n * `auth` {Credentials}\n* `cb` {Function}\n\nSimple call to see who the registry thinks you are. Especially useful with\ntoken-based auth.\n\n\n## PLUMBING\n\nThe below are primarily intended for use by the rest of the API, or by the npm\ncaching logic directly.\n\n### client.request(uri, params, cb)\n\n* `uri` {String} URI pointing to the resource to request.\n* `params` {Object} Object containing per-request properties.\n * `method` {String} HTTP method. Optional (default: \"GET\").\n * `body` {Stream | Buffer | String | Object} The request body. Objects\n that are not Buffers or Streams are encoded as JSON. Optional – body\n only used for write operations.\n * `etag` {String} The cached ETag. Optional.\n * `lastModified` {String} The cached Last-Modified timestamp. 
Optional.\n * `follow` {Boolean} Follow 302/301 responses. Optional (default: true).\n * `auth` {Credentials} Optional.\n* `cb` {Function}\n * `error` {Error | null}\n * `data` {Object} the parsed data object\n * `raw` {String} the json\n * `res` {Response Object} response from couch\n\nMake a generic request to the registry. All the other methods are wrappers\naround `client.request`.\n\n### client.fetch(uri, params, cb)\n\n* `uri` {String} The complete registry URI to upload to\n* `params` {Object} Object containing per-request properties.\n * `headers` {Stream} HTTP headers to be included with the request. Optional.\n * `auth` {Credentials} Optional.\n* `cb` {Function}\n\nFetch a package from a URL, with auth set appropriately if included. Used to\ncache remote tarballs as well as request package tarballs from the registry.\n\n# Configuration\n\nThe client uses its own configuration, which is just passed in as a simple\nnested object. The following are the supported values (with their defaults, if\nany):\n\n* `proxy.http` {URL} The URL to proxy HTTP requests through.\n* `proxy.https` {URL} The URL to proxy HTTPS requests through. Defaults to be\n the same as `proxy.http` if unset.\n* `proxy.localAddress` {IP} The local address to use on multi-homed systems.\n* `ssl.ca` {String} Certificate signing authority certificates to trust.\n* `ssl.certificate` {String} Client certificate (PEM encoded). Enable access\n to servers that require client certificates.\n* `ssl.key` {String} Private key (PEM encoded) for client certificate.\n* `ssl.strict` {Boolean} Whether or not to be strict with SSL certificates.\n Default = `true`\n* `retry.count` {Number} Number of times to retry on GET failures. Default = 2.\n* `retry.factor` {Number} `factor` setting for `node-retry`. 
Default = 10.\n* `retry.minTimeout` {Number} `minTimeout` setting for `node-retry`.\n Default = 10000 (10 seconds)\n* `retry.maxTimeout` {Number} `maxTimeout` setting for `node-retry`.\n Default = 60000 (60 seconds)\n* `userAgent` {String} User agent header to send. Default =\n `\"node/{process.version}\"`\n* `log` {Object} The logger to use. Defaults to `require(\"npmlog\")` if\n that works, otherwise logs are disabled.\n* `defaultTag` {String} The default tag to use when publishing new packages.\n Default = `\"latest\"`\n* `couchToken` {Object} A token for use with\n [couch-login](https://npmjs.org/package/couch-login).\n* `sessionToken` {string} A random identifier for this set of client requests.\n Default = 8 random hexadecimal bytes.\n",
+ "readme": "# npm-registry-client\n\nThe code that npm uses to talk to the registry.\n\nIt handles all the caching and HTTP calls.\n\n## Usage\n\n```javascript\nvar RegClient = require('npm-registry-client')\nvar client = new RegClient(config)\nvar uri = \"https://registry.npmjs.org/npm\"\nvar params = {timeout: 1000}\n\nclient.get(uri, params, function (error, data, raw, res) {\n // error is an error if there was a problem.\n // data is the parsed data object\n // raw is the json string\n // res is the response from couch\n})\n```\n\n# Registry URLs\n\nThe registry calls take either a full URL pointing to a resource in the\nregistry, or a base URL for the registry as a whole (including the registry\npath – but be sure to terminate the path with `/`). `http` and `https` URLs are\nthe only ones supported.\n\n## Using the client\n\nEvery call to the client follows the same pattern:\n\n* `uri` {String} The *fully-qualified* URI of the registry API method being\n invoked.\n* `params` {Object} Per-request parameters.\n* `callback` {Function} Callback to be invoked when the call is complete.\n\n### Credentials\n\nMany requests to the registry can by authenticated, and require credentials\nfor authorization. These credentials always look the same:\n\n* `username` {String}\n* `password` {String}\n* `email` {String}\n* `alwaysAuth` {Boolean} Whether calls to the target registry are always\n authed.\n\n**or**\n\n* `token` {String}\n* `alwaysAuth` {Boolean} Whether calls to the target registry are always\n authed.\n\n## API\n\n### client.access(uri, params, cb)\n\n* `uri` {String} Registry URL for the package's access API endpoint.\n Looks like `/-/package//access`.\n* `params` {Object} Object containing per-request properties.\n * `access` {String} New access level for the package. Can be either\n `public` or `restricted`. 
Registry will raise an error if trying\n to change the access level of an unscoped package.\n * `auth` {Credentials}\n\nSet the access level for scoped packages. For now, there are only two\naccess levels: \"public\" and \"restricted\".\n\n### client.adduser(uri, params, cb)\n\n* `uri` {String} Base registry URL.\n* `params` {Object} Object containing per-request properties.\n * `auth` {Credentials}\n* `cb` {Function}\n * `error` {Error | null}\n * `data` {Object} the parsed data object\n * `raw` {String} the json\n * `res` {Response Object} response from couch\n\nAdd a user account to the registry, or verify the credentials.\n\n### client.deprecate(uri, params, cb)\n\n* `uri` {String} Full registry URI for the deprecated package.\n* `params` {Object} Object containing per-request properties.\n * `version` {String} Semver version range.\n * `message` {String} The message to use as a deprecation warning.\n * `auth` {Credentials}\n* `cb` {Function}\n\nDeprecate a version of a package in the registry.\n\n### client.distTags.fetch(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `auth` {Credentials}\n* `cb` {Function}\n\nFetch all of the `dist-tags` for the named package.\n\n### client.distTags.add(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `distTag` {String} Name of the new `dist-tag`.\n * `version` {String} Exact version to be mapped to the `dist-tag`.\n * `auth` {Credentials}\n* `cb` {Function}\n\nAdd (or replace) a single dist-tag onto the named package.\n\n### client.distTags.set(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `distTags` {Object} Object containing a map from tag names 
to package\n versions.\n * `auth` {Credentials}\n* `cb` {Function}\n\nSet all of the `dist-tags` for the named package at once, creating any\n`dist-tags` that do not already exit. Any `dist-tags` not included in the\n`distTags` map will be removed.\n\n### client.distTags.update(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `distTags` {Object} Object containing a map from tag names to package\n versions.\n * `auth` {Credentials}\n* `cb` {Function}\n\nUpdate the values of multiple `dist-tags`, creating any `dist-tags` that do\nnot already exist. Any pre-existing `dist-tags` not included in the `distTags`\nmap will be left alone.\n\n### client.distTags.rm(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `distTag` {String} Name of the new `dist-tag`.\n * `auth` {Credentials}\n* `cb` {Function}\n\nRemove a single `dist-tag` from the named package.\n\n### client.get(uri, params, cb)\n\n* `uri` {String} The complete registry URI to fetch\n* `params` {Object} Object containing per-request properties.\n * `timeout` {Number} Duration before the request times out. Optional\n (default: never).\n * `follow` {Boolean} Follow 302/301 responses. Optional (default: true).\n * `staleOk` {Boolean} If there's cached data available, then return that to\n the callback quickly, and update the cache the background. 
Optional\n (default: false).\n * `auth` {Credentials} Optional.\n* `cb` {Function}\n\nFetches data from the registry via a GET request, saving it in the cache folder\nwith the ETag or the \"Last Modified\" timestamp.\n\n### client.publish(uri, params, cb)\n\n* `uri` {String} The registry URI for the package to publish.\n* `params` {Object} Object containing per-request properties.\n * `metadata` {Object} Package metadata.\n * `access` {String} Access for the package. Can be `public` or `restricted` (no default).\n * `body` {Stream} Stream of the package body / tarball.\n * `auth` {Credentials}\n* `cb` {Function}\n\nPublish a package to the registry.\n\nNote that this does not create the tarball from a folder.\n\n### client.star(uri, params, cb)\n\n* `uri` {String} The complete registry URI for the package to star.\n* `params` {Object} Object containing per-request properties.\n * `starred` {Boolean} True to star the package, false to unstar it. Optional\n (default: false).\n * `auth` {Credentials}\n* `cb` {Function}\n\nStar or unstar a package.\n\nNote that the user does not have to be the package owner to star or unstar a\npackage, though other writes do require that the user be the package owner.\n\n### client.stars(uri, params, cb)\n\n* `uri` {String} The base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `username` {String} Name of user to fetch starred packages for. 
Optional\n (default: user in `auth`).\n * `auth` {Credentials} Optional (required if `username` is omitted).\n* `cb` {Function}\n\nView your own or another user's starred packages.\n\n### client.tag(uri, params, cb)\n\n* `uri` {String} The complete registry URI to tag\n* `params` {Object} Object containing per-request properties.\n * `version` {String} Version to tag.\n * `tag` {String} Tag name to apply.\n * `auth` {Credentials}\n* `cb` {Function}\n\nMark a version in the `dist-tags` hash, so that `pkg@tag` will fetch the\nspecified version.\n\n### client.unpublish(uri, params, cb)\n\n* `uri` {String} The complete registry URI of the package to unpublish.\n* `params` {Object} Object containing per-request properties.\n * `version` {String} version to unpublish. Optional – omit to unpublish all\n versions.\n * `auth` {Credentials}\n* `cb` {Function}\n\nRemove a version of a package (or all versions) from the registry. When the\nlast version us unpublished, the entire document is removed from the database.\n\n### client.whoami(uri, params, cb)\n\n* `uri` {String} The base registry for the URI.\n* `params` {Object} Object containing per-request properties.\n * `auth` {Credentials}\n* `cb` {Function}\n\nSimple call to see who the registry thinks you are. Especially useful with\ntoken-based auth.\n\n\n## PLUMBING\n\nThe below are primarily intended for use by the rest of the API, or by the npm\ncaching logic directly.\n\n### client.request(uri, params, cb)\n\n* `uri` {String} URI pointing to the resource to request.\n* `params` {Object} Object containing per-request properties.\n * `method` {String} HTTP method. Optional (default: \"GET\").\n * `body` {Stream | Buffer | String | Object} The request body. Objects\n that are not Buffers or Streams are encoded as JSON. Optional – body\n only used for write operations.\n * `etag` {String} The cached ETag. Optional.\n * `lastModified` {String} The cached Last-Modified timestamp. 
Optional.\n * `follow` {Boolean} Follow 302/301 responses. Optional (default: true).\n * `auth` {Credentials} Optional.\n* `cb` {Function}\n * `error` {Error | null}\n * `data` {Object} the parsed data object\n * `raw` {String} the json\n * `res` {Response Object} response from couch\n\nMake a generic request to the registry. All the other methods are wrappers\naround `client.request`.\n\n### client.fetch(uri, params, cb)\n\n* `uri` {String} The complete registry URI to upload to\n* `params` {Object} Object containing per-request properties.\n * `headers` {Stream} HTTP headers to be included with the request. Optional.\n * `auth` {Credentials} Optional.\n* `cb` {Function}\n\nFetch a package from a URL, with auth set appropriately if included. Used to\ncache remote tarballs as well as request package tarballs from the registry.\n\n# Configuration\n\nThe client uses its own configuration, which is just passed in as a simple\nnested object. The following are the supported values (with their defaults, if\nany):\n\n* `proxy.http` {URL} The URL to proxy HTTP requests through.\n* `proxy.https` {URL} The URL to proxy HTTPS requests through. Defaults to be\n the same as `proxy.http` if unset.\n* `proxy.localAddress` {IP} The local address to use on multi-homed systems.\n* `ssl.ca` {String} Certificate signing authority certificates to trust.\n* `ssl.certificate` {String} Client certificate (PEM encoded). Enable access\n to servers that require client certificates.\n* `ssl.key` {String} Private key (PEM encoded) for client certificate.\n* `ssl.strict` {Boolean} Whether or not to be strict with SSL certificates.\n Default = `true`\n* `retry.count` {Number} Number of times to retry on GET failures. Default = 2.\n* `retry.factor` {Number} `factor` setting for `node-retry`. 
Default = 10.\n* `retry.minTimeout` {Number} `minTimeout` setting for `node-retry`.\n Default = 10000 (10 seconds)\n* `retry.maxTimeout` {Number} `maxTimeout` setting for `node-retry`.\n Default = 60000 (60 seconds)\n* `userAgent` {String} User agent header to send. Default =\n `\"node/{process.version}\"`\n* `log` {Object} The logger to use. Defaults to `require(\"npmlog\")` if\n that works, otherwise logs are disabled.\n* `defaultTag` {String} The default tag to use when publishing new packages.\n Default = `\"latest\"`\n* `couchToken` {Object} A token for use with\n [couch-login](https://npmjs.org/package/couch-login).\n* `sessionToken` {string} A random identifier for this set of client requests.\n Default = 8 random hexadecimal bytes.\n",
"readmeFilename": "README.md",
- "gitHead": "dd4029904febd6e0c30371b2addad5715526cd68",
+ "gitHead": "bd0ab6fb27f614fca299bc9426dcfbea2fc83adc",
"bugs": {
"url": "https://github.com/isaacs/npm-registry-client/issues"
},
"homepage": "https://github.com/isaacs/npm-registry-client#readme",
- "_id": "npm-registry-client@6.3.2",
- "_shasum": "a662a36c6eda56c184099631cf429ba69b73d65b",
- "_from": "npm-registry-client@>=6.3.2 <6.4.0"
+ "_id": "npm-registry-client@6.3.3",
+ "_shasum": "fe9cc45b3b8404dfa888b99d7aff3964f3470fb0",
+ "_from": "npm-registry-client@6.3.3"
}
diff --git a/deps/npm/node_modules/npm-registry-client/test/request.js b/deps/npm/node_modules/npm-registry-client/test/request.js
index 0024c9b215cd8f..b3086b4729f6b9 100644
--- a/deps/npm/node_modules/npm-registry-client/test/request.js
+++ b/deps/npm/node_modules/npm-registry-client/test/request.js
@@ -81,7 +81,7 @@ test('request call contract', function (t) {
})
test('run request through its paces', function (t) {
- t.plan(27)
+ t.plan(28)
server.expect('/request-defaults', function (req, res) {
t.equal(req.method, 'GET', 'uses GET by default')
@@ -166,6 +166,13 @@ test('run request through its paces', function (t) {
}))
})
+ server.expect('GET', '/@scoped%2Fpackage-failing', function (req, res) {
+ req.pipe(concat(function () {
+ res.statusCode = 402
+ res.json({ error: 'payment required' })
+ }))
+ })
+
var defaults = {}
client.request(
common.registry + '/request-defaults',
@@ -249,4 +256,8 @@ test('run request through its paces', function (t) {
client.request(common.registry + '/body-error-object', defaults, function (er) {
t.ifError(er, 'call worked')
})
+
+ client.request(common.registry + '/@scoped%2Fpackage-failing', defaults, function (er) {
+ t.equals(er.message, 'payment required : @scoped/package-failing')
+ })
})
diff --git a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/README.md b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/README.md
index 3491c5956cc236..52f9f9ae1ed4a2 100644
--- a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/README.md
+++ b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/README.md
@@ -25,7 +25,7 @@ single.completeWork(20)
console.log(top.completed()) // 0.2
fs.stat("file", function(er, stat) {
- if (er) throw er
+ if (er) throw er
var stream = top.newStream("file", stat.size)
console.log(top.completed()) // now 0.1 as single is 50% of the job and is 20% complete
// and 50% * 20% == 10%
diff --git a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/node_modules/readable-stream/README.md b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/node_modules/readable-stream/README.md
index 9e9b6eee9f349f..e46b823903d2c6 100644
--- a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/node_modules/readable-stream/README.md
+++ b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/node_modules/readable-stream/README.md
@@ -12,3 +12,4 @@ If you want to guarantee a stable streams base, regardless of what version of No
**readable-stream** comes in two major versions, v1.0.x and v1.1.x. The former tracks the Streams2 implementation in Node 0.10, including bug-fixes and minor improvements as they are added. The latter tracks Streams3 as it develops in Node 0.11; we will likely see a v1.2.x branch for Node 0.12.
**readable-stream** uses proper patch-level versioning so if you pin to `"~1.0.0"` you’ll get the latest Node 0.10 Streams2 implementation, including any fixes and minor non-breaking improvements. The patch-level versions of 1.0.x and 1.1.x should mirror the patch-level versions of Node-core releases. You should prefer the **1.0.x** releases for now and when you’re ready to start using Streams3, pin to `"~1.1.0"`
+
diff --git a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/node_modules/readable-stream/float.patch b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/node_modules/readable-stream/float.patch
index 7abb6dc30b21bf..b984607a41cc1f 100644
--- a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/node_modules/readable-stream/float.patch
+++ b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/node_modules/readable-stream/float.patch
@@ -3,36 +3,36 @@ index c5a741c..a2e0d8e 100644
--- a/lib/_stream_duplex.js
+++ b/lib/_stream_duplex.js
@@ -26,8 +26,8 @@
-
+
module.exports = Duplex;
var util = require('util');
-var Readable = require('_stream_readable');
-var Writable = require('_stream_writable');
+var Readable = require('./_stream_readable');
+var Writable = require('./_stream_writable');
-
+
util.inherits(Duplex, Readable);
-
+
diff --git a/lib/_stream_passthrough.js b/lib/_stream_passthrough.js
index a5e9864..330c247 100644
--- a/lib/_stream_passthrough.js
+++ b/lib/_stream_passthrough.js
@@ -25,7 +25,7 @@
-
+
module.exports = PassThrough;
-
+
-var Transform = require('_stream_transform');
+var Transform = require('./_stream_transform');
var util = require('util');
util.inherits(PassThrough, Transform);
-
+
diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js
index 0c3fe3e..90a8298 100644
--- a/lib/_stream_readable.js
+++ b/lib/_stream_readable.js
@@ -23,10 +23,34 @@ module.exports = Readable;
Readable.ReadableState = ReadableState;
-
+
var EE = require('events').EventEmitter;
+if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
+ return emitter.listeners(type).length;
@@ -63,12 +63,12 @@ index 0c3fe3e..90a8298 100644
+} catch (er) {
+ debug = function() {};
+}
-
+
util.inherits(Readable, Stream);
-
+
@@ -380,7 +404,7 @@ function chunkInvalid(state, chunk) {
-
-
+
+
function onEofChunk(stream, state) {
- if (state.decoder && !state.ended) {
+ if (state.decoder && !state.ended && state.decoder.end) {
@@ -80,9 +80,9 @@ index b1f9fcc..b0caf57 100644
--- a/lib/_stream_transform.js
+++ b/lib/_stream_transform.js
@@ -64,8 +64,14 @@
-
+
module.exports = Transform;
-
+
-var Duplex = require('_stream_duplex');
+var Duplex = require('./_stream_duplex');
var util = require('util');
@@ -93,15 +93,15 @@ index b1f9fcc..b0caf57 100644
+ }
+}
util.inherits(Transform, Duplex);
-
-
+
+
diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js
index ba2e920..f49288b 100644
--- a/lib/_stream_writable.js
+++ b/lib/_stream_writable.js
@@ -27,6 +27,12 @@ module.exports = Writable;
Writable.WritableState = WritableState;
-
+
var util = require('util');
+if (!util.isUndefined) {
+ var utilIs = require('core-util-is');
@@ -110,7 +110,7 @@ index ba2e920..f49288b 100644
+ }
+}
var Stream = require('stream');
-
+
util.inherits(Writable, Stream);
@@ -119,7 +125,7 @@ function WritableState(options, stream) {
function Writable(options) {
@@ -119,29 +119,29 @@ index ba2e920..f49288b 100644
- if (!(this instanceof Writable) && !(this instanceof Stream.Duplex))
+ if (!(this instanceof Writable) && !(this instanceof require('./_stream_duplex')))
return new Writable(options);
-
+
this._writableState = new WritableState(options, this);
diff --git a/test/simple/test-stream-big-push.js b/test/simple/test-stream-big-push.js
index e3787e4..8cd2127 100644
--- a/test/simple/test-stream-big-push.js
+++ b/test/simple/test-stream-big-push.js
@@ -21,7 +21,7 @@
-
+
var common = require('../common');
var assert = require('assert');
-var stream = require('stream');
+var stream = require('../../');
var str = 'asdfasdfasdfasdfasdf';
-
+
var r = new stream.Readable({
diff --git a/test/simple/test-stream-end-paused.js b/test/simple/test-stream-end-paused.js
index bb73777..d40efc7 100644
--- a/test/simple/test-stream-end-paused.js
+++ b/test/simple/test-stream-end-paused.js
@@ -25,7 +25,7 @@ var gotEnd = false;
-
+
// Make sure we don't miss the end event for paused 0-length streams
-
+
-var Readable = require('stream').Readable;
+var Readable = require('../../').Readable;
var stream = new Readable();
@@ -154,13 +154,13 @@ index b46ee90..0be8366 100644
@@ -22,8 +22,8 @@
var common = require('../common');
var assert = require('assert');
-
+
-var Readable = require('_stream_readable');
-var Writable = require('_stream_writable');
+var Readable = require('../../lib/_stream_readable');
+var Writable = require('../../lib/_stream_writable');
var util = require('util');
-
+
util.inherits(TestReadable, Readable);
diff --git a/test/simple/test-stream-pipe-cleanup.js b/test/simple/test-stream-pipe-cleanup.js
deleted file mode 100644
@@ -295,12 +295,12 @@ index c5d724b..c7d6b7d 100644
--- a/test/simple/test-stream-pipe-error-handling.js
+++ b/test/simple/test-stream-pipe-error-handling.js
@@ -21,7 +21,7 @@
-
+
var common = require('../common');
var assert = require('assert');
-var Stream = require('stream').Stream;
+var Stream = require('../../').Stream;
-
+
(function testErrorListenerCatches() {
var source = new Stream();
diff --git a/test/simple/test-stream-pipe-event.js b/test/simple/test-stream-pipe-event.js
@@ -309,25 +309,25 @@ index cb9d5fe..56f8d61 100644
+++ b/test/simple/test-stream-pipe-event.js
@@ -20,7 +20,7 @@
// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
+
var common = require('../common');
-var stream = require('stream');
+var stream = require('../../');
var assert = require('assert');
var util = require('util');
-
+
diff --git a/test/simple/test-stream-push-order.js b/test/simple/test-stream-push-order.js
index f2e6ec2..a5c9bf9 100644
--- a/test/simple/test-stream-push-order.js
+++ b/test/simple/test-stream-push-order.js
@@ -20,7 +20,7 @@
// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
+
var common = require('../common.js');
-var Readable = require('stream').Readable;
+var Readable = require('../../').Readable;
var assert = require('assert');
-
+
var s = new Readable({
diff --git a/test/simple/test-stream-push-strings.js b/test/simple/test-stream-push-strings.js
index 06f43dc..1701a9a 100644
@@ -336,11 +336,11 @@ index 06f43dc..1701a9a 100644
@@ -22,7 +22,7 @@
var common = require('../common');
var assert = require('assert');
-
+
-var Readable = require('stream').Readable;
+var Readable = require('../../').Readable;
var util = require('util');
-
+
util.inherits(MyStream, Readable);
diff --git a/test/simple/test-stream-readable-event.js b/test/simple/test-stream-readable-event.js
index ba6a577..a8e6f7b 100644
@@ -349,10 +349,10 @@ index ba6a577..a8e6f7b 100644
@@ -22,7 +22,7 @@
var common = require('../common');
var assert = require('assert');
-
+
-var Readable = require('stream').Readable;
+var Readable = require('../../').Readable;
-
+
(function first() {
// First test, not reading when the readable is added.
diff --git a/test/simple/test-stream-readable-flow-recursion.js b/test/simple/test-stream-readable-flow-recursion.js
@@ -362,10 +362,10 @@ index 2891ad6..11689ba 100644
@@ -27,7 +27,7 @@ var assert = require('assert');
// more data continuously, but without triggering a nextTick
// warning or RangeError.
-
+
-var Readable = require('stream').Readable;
+var Readable = require('../../').Readable;
-
+
// throw an error if we trigger a nextTick warning.
process.throwDeprecation = true;
diff --git a/test/simple/test-stream-unshift-empty-chunk.js b/test/simple/test-stream-unshift-empty-chunk.js
@@ -373,12 +373,12 @@ index 0c96476..7827538 100644
--- a/test/simple/test-stream-unshift-empty-chunk.js
+++ b/test/simple/test-stream-unshift-empty-chunk.js
@@ -24,7 +24,7 @@ var assert = require('assert');
-
- // This test verifies that stream.unshift(Buffer(0)) or
+
+ // This test verifies that stream.unshift(Buffer(0)) or
// stream.unshift('') does not set state.reading=false.
-var Readable = require('stream').Readable;
+var Readable = require('../../').Readable;
-
+
var r = new Readable();
var nChunks = 10;
diff --git a/test/simple/test-stream-unshift-read-race.js b/test/simple/test-stream-unshift-read-race.js
@@ -388,14 +388,14 @@ index 83fd9fa..17c18aa 100644
@@ -29,7 +29,7 @@ var assert = require('assert');
// 3. push() after the EOF signaling null is an error.
// 4. _read() is not called after pushing the EOF null chunk.
-
+
-var stream = require('stream');
+var stream = require('../../');
var hwm = 10;
var r = stream.Readable({ highWaterMark: hwm });
var chunks = 10;
@@ -51,7 +51,14 @@ r._read = function(n) {
-
+
function push(fast) {
assert(!pushedNull, 'push() after null push');
- var c = pos >= data.length ? null : data.slice(pos, pos + n);
@@ -417,10 +417,10 @@ index 5b49e6e..b5321f3 100644
@@ -22,7 +22,7 @@
var common = require('../common');
var assert = require('assert');
-
+
-var stream = require('stream');
+var stream = require('../../');
-
+
var queue = [];
for (var decode = 0; decode < 2; decode++) {
diff --git a/test/simple/test-stream2-basic.js b/test/simple/test-stream2-basic.js
@@ -428,26 +428,26 @@ index 3814bf0..248c1be 100644
--- a/test/simple/test-stream2-basic.js
+++ b/test/simple/test-stream2-basic.js
@@ -21,7 +21,7 @@
-
-
+
+
var common = require('../common.js');
-var R = require('_stream_readable');
+var R = require('../../lib/_stream_readable');
var assert = require('assert');
-
+
var util = require('util');
diff --git a/test/simple/test-stream2-compatibility.js b/test/simple/test-stream2-compatibility.js
index 6cdd4e9..f0fa84b 100644
--- a/test/simple/test-stream2-compatibility.js
+++ b/test/simple/test-stream2-compatibility.js
@@ -21,7 +21,7 @@
-
-
+
+
var common = require('../common.js');
-var R = require('_stream_readable');
+var R = require('../../lib/_stream_readable');
var assert = require('assert');
-
+
var util = require('util');
diff --git a/test/simple/test-stream2-finish-pipe.js b/test/simple/test-stream2-finish-pipe.js
index 39b274f..006a19b 100644
@@ -455,12 +455,12 @@ index 39b274f..006a19b 100644
+++ b/test/simple/test-stream2-finish-pipe.js
@@ -20,7 +20,7 @@
// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
+
var common = require('../common.js');
-var stream = require('stream');
+var stream = require('../../');
var Buffer = require('buffer').Buffer;
-
+
var r = new stream.Readable();
diff --git a/test/simple/test-stream2-fs.js b/test/simple/test-stream2-fs.js
deleted file mode 100644
@@ -605,7 +605,7 @@ index 2fbfbca..667985b 100644
@@ -30,7 +30,7 @@ var PUSHSIZE = 20;
var PUSHCOUNT = 1000;
var HWM = 50;
-
+
-var Readable = require('stream').Readable;
+var Readable = require('../../').Readable;
var r = new Readable({
@@ -613,7 +613,7 @@ index 2fbfbca..667985b 100644
});
@@ -39,23 +39,23 @@ var rs = r._readableState;
r._read = push;
-
+
r.on('readable', function() {
- console.error('>> readable');
+ //console.error('>> readable');
@@ -624,7 +624,7 @@ index 2fbfbca..667985b 100644
- console.error(' < %j (%d remain)', ret && ret.length, rs.length);
+ //console.error(' < %j (%d remain)', ret && ret.length, rs.length);
} while (ret && ret.length === READSIZE);
-
+
- console.error('<< after read()',
- ret && ret.length,
- rs.needReadable,
@@ -634,24 +634,24 @@ index 2fbfbca..667985b 100644
+ // rs.needReadable,
+ // rs.length);
});
-
+
var endEmitted = false;
r.on('end', function() {
endEmitted = true;
- console.error('end');
+ //console.error('end');
});
-
+
var pushes = 0;
@@ -64,11 +64,11 @@ function push() {
return;
-
+
if (pushes++ === PUSHCOUNT) {
- console.error(' push(EOF)');
+ //console.error(' push(EOF)');
return r.push(null);
}
-
+
- console.error(' push #%d', pushes);
+ //console.error(' push #%d', pushes);
if (r.push(new Buffer(PUSHSIZE)))
@@ -662,27 +662,27 @@ index 3e6931d..ff47d89 100644
--- a/test/simple/test-stream2-objects.js
+++ b/test/simple/test-stream2-objects.js
@@ -21,8 +21,8 @@
-
-
+
+
var common = require('../common.js');
-var Readable = require('_stream_readable');
-var Writable = require('_stream_writable');
+var Readable = require('../../lib/_stream_readable');
+var Writable = require('../../lib/_stream_writable');
var assert = require('assert');
-
+
// tiny node-tap lookalike.
diff --git a/test/simple/test-stream2-pipe-error-handling.js b/test/simple/test-stream2-pipe-error-handling.js
index cf7531c..e3f3e4e 100644
--- a/test/simple/test-stream2-pipe-error-handling.js
+++ b/test/simple/test-stream2-pipe-error-handling.js
@@ -21,7 +21,7 @@
-
+
var common = require('../common');
var assert = require('assert');
-var stream = require('stream');
+var stream = require('../../');
-
+
(function testErrorListenerCatches() {
var count = 1000;
diff --git a/test/simple/test-stream2-pipe-error-once-listener.js b/test/simple/test-stream2-pipe-error-once-listener.js
@@ -691,12 +691,12 @@ index 5e8e3cb..53b2616 100755
+++ b/test/simple/test-stream2-pipe-error-once-listener.js
@@ -24,7 +24,7 @@ var common = require('../common.js');
var assert = require('assert');
-
+
var util = require('util');
-var stream = require('stream');
+var stream = require('../../');
-
-
+
+
var Read = function() {
diff --git a/test/simple/test-stream2-push.js b/test/simple/test-stream2-push.js
index b63edc3..eb2b0e9 100644
@@ -704,7 +704,7 @@ index b63edc3..eb2b0e9 100644
+++ b/test/simple/test-stream2-push.js
@@ -20,7 +20,7 @@
// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
+
var common = require('../common.js');
-var stream = require('stream');
+var stream = require('../../');
@@ -716,14 +716,14 @@ index e8a7305..9740a47 100644
--- a/test/simple/test-stream2-read-sync-stack.js
+++ b/test/simple/test-stream2-read-sync-stack.js
@@ -21,7 +21,7 @@
-
+
var common = require('../common');
var assert = require('assert');
-var Readable = require('stream').Readable;
+var Readable = require('../../').Readable;
var r = new Readable();
var N = 256 * 1024;
-
+
diff --git a/test/simple/test-stream2-readable-empty-buffer-no-eof.js b/test/simple/test-stream2-readable-empty-buffer-no-eof.js
index cd30178..4b1659d 100644
--- a/test/simple/test-stream2-readable-empty-buffer-no-eof.js
@@ -731,13 +731,13 @@ index cd30178..4b1659d 100644
@@ -22,10 +22,9 @@
var common = require('../common');
var assert = require('assert');
-
+
-var Readable = require('stream').Readable;
+var Readable = require('../../').Readable;
-
+
test1();
-test2();
-
+
function test1() {
var r = new Readable();
@@ -88,31 +87,3 @@ function test1() {
@@ -777,12 +777,12 @@ index 7c96ffe..04a96f5 100644
--- a/test/simple/test-stream2-readable-from-list.js
+++ b/test/simple/test-stream2-readable-from-list.js
@@ -21,7 +21,7 @@
-
+
var assert = require('assert');
var common = require('../common.js');
-var fromList = require('_stream_readable')._fromList;
+var fromList = require('../../lib/_stream_readable')._fromList;
-
+
// tiny node-tap lookalike.
var tests = [];
diff --git a/test/simple/test-stream2-readable-legacy-drain.js b/test/simple/test-stream2-readable-legacy-drain.js
@@ -792,23 +792,23 @@ index 675da8e..51fd3d5 100644
@@ -22,7 +22,7 @@
var common = require('../common');
var assert = require('assert');
-
+
-var Stream = require('stream');
+var Stream = require('../../');
var Readable = Stream.Readable;
-
+
var r = new Readable();
diff --git a/test/simple/test-stream2-readable-non-empty-end.js b/test/simple/test-stream2-readable-non-empty-end.js
index 7314ae7..c971898 100644
--- a/test/simple/test-stream2-readable-non-empty-end.js
+++ b/test/simple/test-stream2-readable-non-empty-end.js
@@ -21,7 +21,7 @@
-
+
var assert = require('assert');
var common = require('../common.js');
-var Readable = require('_stream_readable');
+var Readable = require('../../lib/_stream_readable');
-
+
var len = 0;
var chunks = new Array(10);
diff --git a/test/simple/test-stream2-readable-wrap-empty.js b/test/simple/test-stream2-readable-wrap-empty.js
@@ -818,11 +818,11 @@ index 2e5cf25..fd8a3dc 100644
@@ -22,7 +22,7 @@
var common = require('../common');
var assert = require('assert');
-
+
-var Readable = require('_stream_readable');
+var Readable = require('../../lib/_stream_readable');
var EE = require('events').EventEmitter;
-
+
var oldStream = new EE();
diff --git a/test/simple/test-stream2-readable-wrap.js b/test/simple/test-stream2-readable-wrap.js
index 90eea01..6b177f7 100644
@@ -831,40 +831,40 @@ index 90eea01..6b177f7 100644
@@ -22,8 +22,8 @@
var common = require('../common');
var assert = require('assert');
-
+
-var Readable = require('_stream_readable');
-var Writable = require('_stream_writable');
+var Readable = require('../../lib/_stream_readable');
+var Writable = require('../../lib/_stream_writable');
var EE = require('events').EventEmitter;
-
+
var testRuns = 0, completedRuns = 0;
diff --git a/test/simple/test-stream2-set-encoding.js b/test/simple/test-stream2-set-encoding.js
index 5d2c32a..685531b 100644
--- a/test/simple/test-stream2-set-encoding.js
+++ b/test/simple/test-stream2-set-encoding.js
@@ -22,7 +22,7 @@
-
+
var common = require('../common.js');
var assert = require('assert');
-var R = require('_stream_readable');
+var R = require('../../lib/_stream_readable');
var util = require('util');
-
+
// tiny node-tap lookalike.
diff --git a/test/simple/test-stream2-transform.js b/test/simple/test-stream2-transform.js
index 9c9ddd8..a0cacc6 100644
--- a/test/simple/test-stream2-transform.js
+++ b/test/simple/test-stream2-transform.js
@@ -21,8 +21,8 @@
-
+
var assert = require('assert');
var common = require('../common.js');
-var PassThrough = require('_stream_passthrough');
-var Transform = require('_stream_transform');
+var PassThrough = require('../../').PassThrough;
+var Transform = require('../../').Transform;
-
+
// tiny node-tap lookalike.
var tests = [];
diff --git a/test/simple/test-stream2-unpipe-drain.js b/test/simple/test-stream2-unpipe-drain.js
@@ -872,41 +872,41 @@ index d66dc3c..365b327 100644
--- a/test/simple/test-stream2-unpipe-drain.js
+++ b/test/simple/test-stream2-unpipe-drain.js
@@ -22,7 +22,7 @@
-
+
var common = require('../common.js');
var assert = require('assert');
-var stream = require('stream');
+var stream = require('../../');
var crypto = require('crypto');
-
+
var util = require('util');
diff --git a/test/simple/test-stream2-unpipe-leak.js b/test/simple/test-stream2-unpipe-leak.js
index 99f8746..17c92ae 100644
--- a/test/simple/test-stream2-unpipe-leak.js
+++ b/test/simple/test-stream2-unpipe-leak.js
@@ -22,7 +22,7 @@
-
+
var common = require('../common.js');
var assert = require('assert');
-var stream = require('stream');
+var stream = require('../../');
-
+
var chunk = new Buffer('hallo');
-
+
diff --git a/test/simple/test-stream2-writable.js b/test/simple/test-stream2-writable.js
index 704100c..209c3a6 100644
--- a/test/simple/test-stream2-writable.js
+++ b/test/simple/test-stream2-writable.js
@@ -20,8 +20,8 @@
// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
+
var common = require('../common.js');
-var W = require('_stream_writable');
-var D = require('_stream_duplex');
+var W = require('../../').Writable;
+var D = require('../../').Duplex;
var assert = require('assert');
-
+
var util = require('util');
diff --git a/test/simple/test-stream3-pause-then-read.js b/test/simple/test-stream3-pause-then-read.js
index b91bde3..2f72c15 100644
@@ -915,8 +915,9 @@ index b91bde3..2f72c15 100644
@@ -22,7 +22,7 @@
var common = require('../common');
var assert = require('assert');
-
+
-var stream = require('stream');
+var stream = require('../../');
var Readable = stream.Readable;
var Writable = stream.Writable;
+
diff --git a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/node_modules/readable-stream/node_modules/isarray/build/build.js b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/node_modules/readable-stream/node_modules/isarray/build/build.js
index e1856ef0943728..ec58596aeebe4e 100644
--- a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/node_modules/readable-stream/node_modules/isarray/build/build.js
+++ b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/node_modules/readable-stream/node_modules/isarray/build/build.js
@@ -206,3 +206,4 @@ module.exports = Array.isArray || function (arr) {
});
require.alias("isarray/index.js", "isarray/index.js");
+
diff --git a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/test/trackergroup.js b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/test/trackergroup.js
index f97e1034ff9e07..a64e121c03a1f1 100644
--- a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/test/trackergroup.js
+++ b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/test/trackergroup.js
@@ -49,7 +49,7 @@ test("TrackerGroup", function (t) {
t.is(er, null, "finishAll: on change event fired")
t.is(onChangeName, name, "finishAll: on change emits the correct name")
t.is(track.completed(), 1, "Finishing everything ")
-
+
track = new TrackerGroup(name)
a = track.newItem("a", 10, 2)
b = track.newItem("b", 10, 1)
@@ -68,7 +68,7 @@ test("TrackerGroup", function (t) {
t.is(er, null, "weightedFinishAll: on change event fired")
t.is(onChangeName, name, "weightedFinishAll: on change emits the correct name")
t.is(track.completed(), 1, "weightedFinishaAll: Finishing everything ")
-
+
track = new TrackerGroup(name)
a = track.newGroup("a", 10)
b = track.newGroup("b", 10)
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/README.md b/deps/npm/node_modules/npmlog/node_modules/gauge/README.md
index 2486d3ce30120f..fb9eb0a7d1e125 100644
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/README.md
+++ b/deps/npm/node_modules/npmlog/node_modules/gauge/README.md
@@ -131,7 +131,7 @@ be be included verbatum in the output.
If the template element is an object, it can have the following keys:
* *type* can be:
- * `name` – The most recent name passed to `show`; if this is in response to a
+ * `name` – The most recent name passed to `show`; if this is in response to a
`pulse` then the name passed to `pulse` will be appended along with the
subsection property from the theme.
* `spinner` – If you've ever called `pulse` this will be one of the characters
@@ -148,7 +148,7 @@ If the template element is an object, it can have the following keys:
will be padded according to the *align* value.
* *align* – (Default: left) Possible values "left", "right" and "center". Works
as you'd expect from word processors.
-* *length* – Provides a single value for both *minLength* and *maxLength*. If both
+* *length* – Provides a single value for both *minLength* and *maxLength*. If both
*length* and *minLength or *maxLength* are specifed then the latter take precedence.
### Tracking Completion
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/README.md~ b/deps/npm/node_modules/npmlog/node_modules/gauge/README.md~
index cdab5bc27f4b09..eec841b6ca9e37 100644
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/README.md~
+++ b/deps/npm/node_modules/npmlog/node_modules/gauge/README.md~
@@ -123,7 +123,7 @@ be be included verbatum in the output.
If the template element is an object, it can have the following keys:
* *type* can be:
- * `name` – The most recent name passed to `show`; if this is in response to a
+ * `name` – The most recent name passed to `show`; if this is in response to a
`pulse` then the name passed to `pulse` will be appended along with the
subsection property from the theme.
* `spinner` – If you've ever called `pulse` this will be one of the characters
@@ -140,7 +140,7 @@ If the template element is an object, it can have the following keys:
will be padded according to the *align* value.
* *align* – (Default: left) Possible values "left", "right" and "center". Works
as you'd expect from word processors.
-* *length* – Provides a single value for both *minLength* and *maxLength*. If both
+* *length* – Provides a single value for both *minLength* and *maxLength*. If both
*length* and *minLength or *maxLength* are specifed then the latter take precedence.
### Tracking Completion
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/has-unicode/LICENSE b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/has-unicode/LICENSE
index e756052969b780..d42e25e95655bb 100644
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/has-unicode/LICENSE
+++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/has-unicode/LICENSE
@@ -11,3 +11,4 @@ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/has-unicode/README.md b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/has-unicode/README.md
index 4393106fda3a0a..e9d3cc326c144a 100644
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/has-unicode/README.md
+++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/has-unicode/README.md
@@ -33,7 +33,8 @@ As such, we report any Windows installation as unicode capable.
### Unix Like Operating Systems
We look at the environment variables `LC_ALL`, `LC_CTYPE`, and `LANG` in
-that order. For `LC_ALL` and `LANG`, it looks for `.UTF-8` in the value.
+that order. For `LC_ALL` and `LANG`, it looks for `.UTF-8` in the value.
For `LC_CTYPE` it looks to see if the value is `UTF-8`. This is sufficient
for most POSIX systems. While locale data can be put in `/etc/locale.conf`
as well, AFAIK it's always copied into the environment.
+
diff --git a/deps/npm/node_modules/npmlog/test/progress.js b/deps/npm/node_modules/npmlog/test/progress.js
index 14dfb32740fb34..97b13ded2c6660 100644
--- a/deps/npm/node_modules/npmlog/test/progress.js
+++ b/deps/npm/node_modules/npmlog/test/progress.js
@@ -31,7 +31,7 @@ function didActions(t, msg, output) {
}
t.is(actions.length, output.length, msg)
tests.forEach(function (test) {
- t.is(actions[test.cmd] ? actions[test.cmd][test.arg] : null,
+ t.is(actions[test.cmd] ? actions[test.cmd][test.arg] : null,
output[test.cmd][test.arg],
msg + ': ' + output[test.cmd] + (test.arg ? ' arg #'+test.arg : ''))
})
diff --git a/deps/npm/node_modules/once/LICENSE b/deps/npm/node_modules/once/LICENSE
index 0c44ae716db8f3..19129e315fe593 100644
--- a/deps/npm/node_modules/once/LICENSE
+++ b/deps/npm/node_modules/once/LICENSE
@@ -1,27 +1,15 @@
-Copyright (c) Isaac Z. Schlueter ("Author")
-All rights reserved.
+The ISC License
-The BSD License
+Copyright (c) Isaac Z. Schlueter and Contributors
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
-1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS
-BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
-BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
-OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
-IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/once/package.json b/deps/npm/node_modules/once/package.json
index eb8a4217a2734a..c85f12ebe1d4fa 100644
--- a/deps/npm/node_modules/once/package.json
+++ b/deps/npm/node_modules/once/package.json
@@ -1,6 +1,6 @@
{
"name": "once",
- "version": "1.3.1",
+ "version": "1.3.2",
"description": "Run a function exactly one time",
"main": "once.js",
"directories": {
@@ -17,7 +17,7 @@
},
"repository": {
"type": "git",
- "url": "git://github.com/isaacs/once"
+ "url": "git://github.com/isaacs/once.git"
},
"keywords": [
"once",
@@ -30,20 +30,24 @@
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
- "license": "BSD",
- "gitHead": "c90ac02a74f433ce47f6938869e68dd6196ffc2c",
+ "license": "ISC",
+ "gitHead": "e35eed5a7867574e2bf2260a1ba23970958b22f2",
"bugs": {
"url": "https://github.com/isaacs/once/issues"
},
- "homepage": "https://github.com/isaacs/once",
- "_id": "once@1.3.1",
- "_shasum": "f3f3e4da5b7d27b5c732969ee3e67e729457b31f",
- "_from": "once@>=1.3.1 <2.0.0",
- "_npmVersion": "2.0.0",
- "_nodeVersion": "0.10.31",
+ "homepage": "https://github.com/isaacs/once#readme",
+ "_id": "once@1.3.2",
+ "_shasum": "d8feeca93b039ec1dcdee7741c92bdac5e28081b",
+ "_from": "once@>=1.3.2 <1.4.0",
+ "_npmVersion": "2.9.1",
+ "_nodeVersion": "2.0.0",
"_npmUser": {
"name": "isaacs",
- "email": "i@izs.me"
+ "email": "isaacs@npmjs.com"
+ },
+ "dist": {
+ "shasum": "d8feeca93b039ec1dcdee7741c92bdac5e28081b",
+ "tarball": "http://registry.npmjs.org/once/-/once-1.3.2.tgz"
},
"maintainers": [
{
@@ -51,10 +55,6 @@
"email": "i@izs.me"
}
],
- "dist": {
- "shasum": "f3f3e4da5b7d27b5c732969ee3e67e729457b31f",
- "tarball": "http://registry.npmjs.org/once/-/once-1.3.1.tgz"
- },
- "_resolved": "https://registry.npmjs.org/once/-/once-1.3.1.tgz",
+ "_resolved": "https://registry.npmjs.org/once/-/once-1.3.2.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/LICENSE b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/LICENSE
index c3d2eb3550079b..e637724b3bc595 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/LICENSE
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/LICENSE
@@ -19,3 +19,4 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/README.md b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/README.md
index 3d61083fb04dd0..85d52a2dcea030 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/README.md
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/README.md
@@ -240,3 +240,4 @@ JSON.stringify will split this into 15 lines, and it's hard to read.
Yet again, this feature comes with a performance hit, so if user experience matters to you more than performance, use this module. If your JSON will be consumed by machines, use JSON.stringify instead.
As a rule of thumb, if you use "space" argument to indent your JSON, you'd better use this module instead.
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/docs/Grammar.md b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/docs/Grammar.md
index 61ae49b3800d21..eb7c8bc667fd54 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/docs/Grammar.md
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/docs/Grammar.md
@@ -1,5 +1,5 @@
-JSON5 grammar expressed in EBNF form.
+JSON5 grammar expressed in EBNF form.
PS: I don't know what is appropriate syntax highlighter for this, so I'm using "modula2" because why not. I also inserted after backslash to preserve syntax highlighting, this character has nothing to do with actual JSON5 syntax and should be ignored.
@@ -81,11 +81,11 @@ hex_digit = '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | 'a'
ascii_letter = ascii_letter_lowercase
| ascii_letter_uppercase
-ascii_letter_lowercase = 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i'
+ascii_letter_lowercase = 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i'
| 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r'
| 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z'
-ascii_letter_uppercase = 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I'
+ascii_letter_uppercase = 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I'
| 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R'
| 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z'
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/analyze.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/analyze.js
index 39303b0969081c..9b0f9af01cd9e8 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/analyze.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/analyze.js
@@ -89,3 +89,4 @@ module.exports.analyze = function analyzeJSON(input, options) {
return result
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/document.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/document.js
index af1a01a03d062b..cfab8691fc9aba 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/document.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/document.js
@@ -482,3 +482,4 @@ module.exports.Document = Document
module.exports.update = function updateJSON(source, new_value, options) {
return Document(source, options).update(new_value).toString()
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js
index 2b7894937862d5..5f9fe998610d0f 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js
@@ -749,3 +749,4 @@ module.exports.tokenize = function tokenizeJSON(input, options) {
tokens.data = module.exports.parse(input, options)
return tokens
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js
index 754019eac551c1..ce89d77ee1f433 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js
@@ -380,3 +380,4 @@ module.exports.stringify = function stringifyJSON(object, options, _space) {
return _stringify(object, options, 0, '')
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/utils.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/utils.js
index dd4752c73a4078..a8476b6c4630e1 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/utils.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/utils.js
@@ -43,3 +43,4 @@ module.exports.middleware = function() {
throw Error('this function is removed, use express-json5 instead')
}
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml
index c971ba28267804..cab7b5d0bcad28 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml
@@ -43,3 +43,4 @@ publishConfig:
license:
type: WTFPL
url: http://www.wtfpl.net/txt/copying/
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_analyze.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_analyze.js
index e353efcd1adb42..2a24e01eac2c2d 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_analyze.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_analyze.js
@@ -50,3 +50,4 @@ addTest(t.quote_keys, false)
var t = analyze("{foo:'bar', \"bar\":'baz', \"baz\":\"quux\"}")
addTest(t.quote, '"')
addTest(t.quote_keys, false)
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_errors.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_errors.js
index b88fcd38928087..8b2cdb7dcbdf67 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_errors.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_errors.js
@@ -53,3 +53,4 @@ try {
var x = err.stack.match(/parseObject/g)
assert(!x || x.length < 2, "shouldn't blow up the stack with internal calls")
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_parse.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_parse.js
index c539cc7858b13d..d33e61ee7e37be 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_parse.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_parse.js
@@ -168,3 +168,4 @@ for (var i=0; i<100; i++) {
//console.log(rnd, x, y, z)
if (x !== y && x !== z) throw 'ERROR'
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_portable.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_portable.js
index bf24aa68b3934b..0143e7d8e5bff6 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_portable.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_portable.js
@@ -57,3 +57,4 @@ for (var k in tests) {
})
})(k)
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_tokenize.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_tokenize.js
index 02726154aeb984..64fb7ec93d9fef 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_tokenize.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_tokenize.js
@@ -96,3 +96,4 @@ addTest('[1,2,[[],[1]],{},{1:2},{q:{q:{}}},]',
{ raw: '}', type: 'separator', stack: [5] },
{ raw: ',', type: 'separator', stack: [] },
{ raw: ']', type: 'separator', stack: [] } ])
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_updates.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_updates.js
index 560f33692a592d..b7482519a919a7 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_updates.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/test_updates.js
@@ -19,3 +19,4 @@ FS.readdirSync(__dirname + '/update').filter(function(file) {
assert.strictEqual(test.test(jju, test.input), test.output)
})
})
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/author.yaml b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/author.yaml
index b991dc9a0d3728..4b08bb61b49056 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/author.yaml
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/author.yaml
@@ -28,3 +28,4 @@ test: !!js/function |
}
return jju.update(input, obj)
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/deep-object.yaml b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/deep-object.yaml
index 1bc1681c886f01..e0795a37874e9e 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/deep-object.yaml
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/deep-object.yaml
@@ -33,3 +33,4 @@ test: !!js/function |
obj.foo.bar.qwe = {rty: {aaa: {bbb: 1}}}
return jju.update(input, obj, {mode:'json'})
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/delete.yaml b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/delete.yaml
index c388332138f0cc..b964715d37b0b4 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/delete.yaml
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/delete.yaml
@@ -33,3 +33,4 @@ test: !!js/function |
delete obj.dependencies.bar
return jju.update(input, obj, {mode:'json'})
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/norm-array.yaml b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/norm-array.yaml
index 1a67db77ae6df5..c5b9dd952d1255 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/norm-array.yaml
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/norm-array.yaml
@@ -29,3 +29,4 @@ test: !!js/function |
obj.bundleDependencies.push('quux')
return jju.update(input, obj, {mode:'json'})
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/norm-object.yaml b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/norm-object.yaml
index 1734f738e3e397..93878675b7abc7 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/norm-object.yaml
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/norm-object.yaml
@@ -29,3 +29,4 @@ test: !!js/function |
obj.dependencies.qwerty = '1'
return jju.update(input, obj, {mode:'json'})
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/npm-array-bin.yaml b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/npm-array-bin.yaml
index 9308007e48060a..35e1639bfda28b 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/npm-array-bin.yaml
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/npm-array-bin.yaml
@@ -26,3 +26,4 @@ test: !!js/function |
_from: 'npm-test-array-bin/' }
return jju.update(input, obj)
}
+
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/pkg-json5.yaml b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/pkg-json5.yaml
index 5721f41ac7348e..21a5c6eb141ed4 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/pkg-json5.yaml
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/test/update/pkg-json5.yaml
@@ -33,3 +33,4 @@ test: !!js/function |
}
return jju.update(input, upd)
}
+
diff --git a/deps/npm/node_modules/request/.eslintrc b/deps/npm/node_modules/request/.eslintrc
deleted file mode 100644
index 8538b419c1119f..00000000000000
--- a/deps/npm/node_modules/request/.eslintrc
+++ /dev/null
@@ -1,39 +0,0 @@
-{
- "env": {
- "node": true
- },
- "rules": {
- // 2-space indentation
- "indent": [2, 2],
- // Disallow semi-colons, unless needed to disambiguate statement
- "semi": [2, "never"],
- // Require strings to use single quotes
- "quotes": [2, "single"],
- // Require curly braces for all control statements
- "curly": 2,
- // Disallow using variables and functions before they've been defined
- "no-use-before-define": 2,
- // Allow any case for variable naming
- "camelcase": 0,
- // Disallow unused variables, except as function arguments
- "no-unused-vars": [2, {"args":"none"}],
- // Allow leading underscores for method names
- // REASON: we use underscores to denote private methods
- "no-underscore-dangle": 0,
- // Allow multi spaces around operators since they are
- // used for alignment. This is not consistent in the
- // code.
- "no-multi-spaces": 0,
- // Style rule is: most objects use { beforeColon: false, afterColon: true }, unless aligning which uses:
- //
- // {
- // beforeColon : true,
- // afterColon : true
- // }
- //
- // eslint can't handle this, so the check is disabled.
- "key-spacing": 0,
- // Allow shadowing vars in outer scope (needs discussion)
- "no-shadow": 0
- }
-}
diff --git a/deps/npm/node_modules/request/node_modules/bl/.jshintrc b/deps/npm/node_modules/request/node_modules/bl/.jshintrc
deleted file mode 100644
index c8ef3ca4097f82..00000000000000
--- a/deps/npm/node_modules/request/node_modules/bl/.jshintrc
+++ /dev/null
@@ -1,59 +0,0 @@
-{
- "predef": [ ]
- , "bitwise": false
- , "camelcase": false
- , "curly": false
- , "eqeqeq": false
- , "forin": false
- , "immed": false
- , "latedef": false
- , "noarg": true
- , "noempty": true
- , "nonew": true
- , "plusplus": false
- , "quotmark": true
- , "regexp": false
- , "undef": true
- , "unused": true
- , "strict": false
- , "trailing": true
- , "maxlen": 120
- , "asi": true
- , "boss": true
- , "debug": true
- , "eqnull": true
- , "esnext": true
- , "evil": true
- , "expr": true
- , "funcscope": false
- , "globalstrict": false
- , "iterator": false
- , "lastsemic": true
- , "laxbreak": true
- , "laxcomma": true
- , "loopfunc": true
- , "multistr": false
- , "onecase": false
- , "proto": false
- , "regexdash": false
- , "scripturl": true
- , "smarttabs": false
- , "shadow": false
- , "sub": true
- , "supernew": false
- , "validthis": true
- , "browser": true
- , "couch": false
- , "devel": false
- , "dojo": false
- , "mootools": false
- , "node": true
- , "nonstandard": true
- , "prototypejs": false
- , "rhino": false
- , "worker": true
- , "wsh": false
- , "nomen": false
- , "onevar": false
- , "passfail": false
-}
\ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/forever-agent/index.js b/deps/npm/node_modules/request/node_modules/forever-agent/index.js
index 9719ff735dfe79..416c7abd709233 100644
--- a/deps/npm/node_modules/request/node_modules/forever-agent/index.js
+++ b/deps/npm/node_modules/request/node_modules/forever-agent/index.js
@@ -6,8 +6,8 @@ var util = require('util')
, net = require('net')
, tls = require('tls')
, AgentSSL = require('https').Agent
-
-function getConnectionName(host, port) {
+
+function getConnectionName(host, port) {
var name = ''
if (typeof host === 'string') {
name = host + ':' + port
@@ -16,7 +16,7 @@ function getConnectionName(host, port) {
name = host.host + ':' + host.port + ':' + (host.localAddress ? (host.localAddress + ':') : ':')
}
return name
-}
+}
function ForeverAgent(options) {
var self = this
@@ -60,7 +60,7 @@ ForeverAgent.prototype.createConnection = net.createConnection
ForeverAgent.prototype.addRequestNoreuse = Agent.prototype.addRequest
ForeverAgent.prototype.addRequest = function(req, host, port) {
var name = getConnectionName(host, port)
-
+
if (typeof host !== 'string') {
var options = host
port = options.port
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/README.md b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/README.md
index 392c64100a5e0b..0bea5311a8b03c 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/README.md
+++ b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/README.md
@@ -53,7 +53,7 @@ a method of another library isn't working as an iterator, study this example:
// Here is a simple object with an (unnecessarily roundabout) squaring method
var AsyncSquaringLibrary = {
squareExponent: 2,
- square: function(number, callback){
+ square: function(number, callback){
var result = Math.pow(number, this.squareExponent);
setTimeout(function(){
callback(null, result);
@@ -71,7 +71,7 @@ async.map([1, 2, 3], AsyncSquaringLibrary.square, function(err, result){
async.map([1, 2, 3], AsyncSquaringLibrary.square.bind(AsyncSquaringLibrary), function(err, result){
// result is [1, 4, 9]
// With the help of bind we can attach a context to the iterator before
- // passing it to async. Now the square function will be executed in its
+ // passing it to async. Now the square function will be executed in its
// 'home' AsyncSquaringLibrary context and the value of `this.squareExponent`
// will be as expected.
});
@@ -89,7 +89,7 @@ __Development:__ [async.js](https://github.com/caolan/async/raw/master/lib/async
## In the Browser
-So far it's been tested in IE6, IE7, IE8, FF3.6 and Chrome 5.
+So far it's been tested in IE6, IE7, IE8, FF3.6 and Chrome 5.
Usage:
@@ -181,8 +181,8 @@ __Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A function to apply to each item in `arr`.
- The iterator is passed a `callback(err)` which must be called once it has
- completed. If no error has occured, the `callback` should be run without
+ The iterator is passed a `callback(err)` which must be called once it has
+ completed. If no error has occured, the `callback` should be run without
arguments or with an explicit `null` argument.
* `callback(err)` - A callback which is called when all `iterator` functions
have finished, or an error occurs.
@@ -200,13 +200,13 @@ async.each(openFiles, saveFile, function(err){
```
```js
-// assuming openFiles is an array of file names
+// assuming openFiles is an array of file names
async.each(openFiles, function( file, callback) {
-
+
// Perform operation on file here.
console.log('Processing file ' + file);
-
+
if( file.length > 32 ) {
console.log('This file name is too long');
callback('File name too long');
@@ -234,7 +234,7 @@ async.each(openFiles, function( file, callback) {
### eachSeries(arr, iterator, callback)
The same as [`each`](#each), only `iterator` is applied to each item in `arr` in
-series. The next `iterator` is only called once the current one has completed.
+series. The next `iterator` is only called once the current one has completed.
This means the `iterator` functions will complete in order.
@@ -244,10 +244,10 @@ This means the `iterator` functions will complete in order.
### eachLimit(arr, limit, iterator, callback)
-The same as [`each`](#each), only no more than `limit` `iterator`s will be simultaneously
+The same as [`each`](#each), only no more than `limit` `iterator`s will be simultaneously
running at any time.
-Note that the items in `arr` are not processed in batches, so there is no guarantee that
+Note that the items in `arr` are not processed in batches, so there is no guarantee that
the first `limit` `iterator` functions will complete before any others are started.
__Arguments__
@@ -255,8 +255,8 @@ __Arguments__
* `arr` - An array to iterate over.
* `limit` - The maximum number of `iterator`s to run at any time.
* `iterator(item, callback)` - A function to apply to each item in `arr`.
- The iterator is passed a `callback(err)` which must be called once it has
- completed. If no error has occured, the callback should be run without
+ The iterator is passed a `callback(err)` which must be called once it has
+ completed. If no error has occured, the callback should be run without
arguments or with an explicit `null` argument.
* `callback(err)` - A callback which is called when all `iterator` functions
have finished, or an error occurs.
@@ -279,19 +279,19 @@ async.eachLimit(documents, 20, requestApi, function(err){
Produces a new array of values by mapping each value in `arr` through
the `iterator` function. The `iterator` is called with an item from `arr` and a
-callback for when it has finished processing. Each of these callback takes 2 arguments:
-an `error`, and the transformed item from `arr`. If `iterator` passes an error to this
+callback for when it has finished processing. Each of these callback takes 2 arguments:
+an `error`, and the transformed item from `arr`. If `iterator` passes an error to this
callback, the main `callback` (for the `map` function) is immediately called with the error.
Note, that since this function applies the `iterator` to each item in parallel,
-there is no guarantee that the `iterator` functions will complete in order.
+there is no guarantee that the `iterator` functions will complete in order.
However, the results array will be in the same order as the original `arr`.
__Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A function to apply to each item in `arr`.
- The iterator is passed a `callback(err, transformed)` which must be called once
+ The iterator is passed a `callback(err, transformed)` which must be called once
it has completed with an error (which can be `null`) and a transformed item.
* `callback(err, results)` - A callback which is called when all `iterator`
functions have finished, or an error occurs. Results is an array of the
@@ -311,7 +311,7 @@ async.map(['file1','file2','file3'], fs.stat, function(err, results){
### mapSeries(arr, iterator, callback)
The same as [`map`](#map), only the `iterator` is applied to each item in `arr` in
-series. The next `iterator` is only called once the current one has completed.
+series. The next `iterator` is only called once the current one has completed.
The results array will be in the same order as the original.
@@ -320,10 +320,10 @@ The results array will be in the same order as the original.
### mapLimit(arr, limit, iterator, callback)
-The same as [`map`](#map), only no more than `limit` `iterator`s will be simultaneously
+The same as [`map`](#map), only no more than `limit` `iterator`s will be simultaneously
running at any time.
-Note that the items are not processed in batches, so there is no guarantee that
+Note that the items are not processed in batches, so there is no guarantee that
the first `limit` `iterator` functions will complete before any others are started.
__Arguments__
@@ -331,7 +331,7 @@ __Arguments__
* `arr` - An array to iterate over.
* `limit` - The maximum number of `iterator`s to run at any time.
* `iterator(item, callback)` - A function to apply to each item in `arr`.
- The iterator is passed a `callback(err, transformed)` which must be called once
+ The iterator is passed a `callback(err, transformed)` which must be called once
it has completed with an error (which can be `null`) and a transformed item.
* `callback(err, results)` - A callback which is called when all `iterator`
calls have finished, or an error occurs. The result is an array of the
@@ -364,7 +364,7 @@ __Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A truth test to apply to each item in `arr`.
- The `iterator` is passed a `callback(truthValue)`, which must be called with a
+ The `iterator` is passed a `callback(truthValue)`, which must be called with a
boolean argument once it has completed.
* `callback(results)` - A callback which is called after all the `iterator`
functions have finished.
@@ -386,7 +386,7 @@ async.filter(['file1','file2','file3'], fs.exists, function(results){
__Alias:__ `selectSeries`
The same as [`filter`](#filter) only the `iterator` is applied to each item in `arr` in
-series. The next `iterator` is only called once the current one has completed.
+series. The next `iterator` is only called once the current one has completed.
The results array will be in the same order as the original.
---------------------------------------
@@ -413,12 +413,12 @@ in series.
__Aliases:__ `inject`, `foldl`
Reduces `arr` into a single value using an async `iterator` to return
-each successive step. `memo` is the initial state of the reduction.
-This function only operates in series.
+each successive step. `memo` is the initial state of the reduction.
+This function only operates in series.
-For performance reasons, it may make sense to split a call to this function into
-a parallel map, and then use the normal `Array.prototype.reduce` on the results.
-This function is for situations where each step in the reduction needs to be async;
+For performance reasons, it may make sense to split a call to this function into
+a parallel map, and then use the normal `Array.prototype.reduce` on the results.
+This function is for situations where each step in the reduction needs to be async;
if you can get the data before reducing it, then it's probably a good idea to do so.
__Arguments__
@@ -427,9 +427,9 @@ __Arguments__
* `memo` - The initial state of the reduction.
* `iterator(memo, item, callback)` - A function applied to each item in the
array to produce the next step in the reduction. The `iterator` is passed a
- `callback(err, reduction)` which accepts an optional error as its first
- argument, and the state of the reduction as the second. If an error is
- passed to the callback, the reduction is stopped and the main `callback` is
+ `callback(err, reduction)` which accepts an optional error as its first
+ argument, and the state of the reduction as the second. If an error is
+ passed to the callback, the reduction is stopped and the main `callback` is
immediately called with the error.
* `callback(err, result)` - A callback which is called after all the `iterator`
functions have finished. Result is the reduced value.
@@ -473,7 +473,7 @@ __Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A truth test to apply to each item in `arr`.
- The iterator is passed a `callback(truthValue)` which must be called with a
+ The iterator is passed a `callback(truthValue)` which must be called with a
boolean argument once it has completed.
* `callback(result)` - A callback which is called as soon as any iterator returns
`true`, or after all the `iterator` functions have finished. Result will be
@@ -566,7 +566,7 @@ __Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A truth test to apply to each item in the array
- in parallel. The iterator is passed a callback(truthValue) which must be
+ in parallel. The iterator is passed a callback(truthValue) which must be
called with a boolean argument once it has completed.
* `callback(result)` - A callback which is called as soon as any iterator returns
`true`, or after all the iterator functions have finished. Result will be
@@ -596,7 +596,7 @@ __Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A truth test to apply to each item in the array
- in parallel. The iterator is passed a callback(truthValue) which must be
+ in parallel. The iterator is passed a callback(truthValue) which must be
called with a boolean argument once it has completed.
* `callback(result)` - A callback which is called after all the `iterator`
functions have finished. Result will be either `true` or `false` depending on
@@ -624,7 +624,7 @@ __Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A function to apply to each item in `arr`.
- The iterator is passed a `callback(err, results)` which must be called once it
+ The iterator is passed a `callback(err, results)` which must be called once it
has completed with an error (which can be `null`) and an array of results.
* `callback(err, results)` - A callback which is called after all the `iterator`
functions have finished, or an error occurs. Results is an array containing
@@ -653,7 +653,7 @@ Same as [`concat`](#concat), but executes in series instead of parallel.
Run the functions in the `tasks` array in series, each one running once the previous
function has completed. If any functions in the series pass an error to its
-callback, no more functions are run, and `callback` is immediately called with the value of the error.
+callback, no more functions are run, and `callback` is immediately called with the value of the error.
Otherwise, `callback` receives an array of results when `tasks` have completed.
It is also possible to use an object instead of an array. Each property will be
@@ -662,13 +662,13 @@ instead of an array. This can be a more readable way of handling results from
[`series`](#series).
**Note** that while many implementations preserve the order of object properties, the
-[ECMAScript Language Specifcation](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6)
+[ECMAScript Language Specifcation](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6)
explicitly states that
> The mechanics and order of enumerating the properties is not specified.
So if you rely on the order in which your series of functions are executed, and want
-this to work on all platforms, consider using an array.
+this to work on all platforms, consider using an array.
__Arguments__
@@ -676,7 +676,7 @@ __Arguments__
a `callback(err, result)` it must call on completion with an error `err` (which can
be `null`) and an optional `result` value.
* `callback(err, results)` - An optional callback to run once all the functions
- have completed. This function gets a results array (or object) containing all
+ have completed. This function gets a results array (or object) containing all
the result arguments passed to the `task` callbacks.
__Example__
@@ -735,11 +735,11 @@ instead of an array. This can be a more readable way of handling results from
__Arguments__
-* `tasks` - An array or object containing functions to run. Each function is passed
- a `callback(err, result)` which it must call on completion with an error `err`
+* `tasks` - An array or object containing functions to run. Each function is passed
+ a `callback(err, result)` which it must call on completion with an error `err`
(which can be `null`) and an optional `result` value.
* `callback(err, results)` - An optional callback to run once all the functions
- have completed. This function gets a results array (or object) containing all
+ have completed. This function gets a results array (or object) containing all
the result arguments passed to the task callbacks.
__Example__
@@ -787,20 +787,20 @@ function(err, results) {
### parallelLimit(tasks, limit, [callback])
-The same as [`parallel`](#parallel), only `tasks` are executed in parallel
+The same as [`parallel`](#parallel), only `tasks` are executed in parallel
with a maximum of `limit` tasks executing at any time.
-Note that the `tasks` are not executed in batches, so there is no guarantee that
+Note that the `tasks` are not executed in batches, so there is no guarantee that
the first `limit` tasks will complete before any others are started.
__Arguments__
-* `tasks` - An array or object containing functions to run, each function is passed
+* `tasks` - An array or object containing functions to run, each function is passed
a `callback(err, result)` it must call on completion with an error `err` (which can
be `null`) and an optional `result` value.
* `limit` - The maximum number of `tasks` to run at any time.
* `callback(err, results)` - An optional callback to run once all the functions
- have completed. This function gets a results array (or object) containing all
+ have completed. This function gets a results array (or object) containing all
the result arguments passed to the `task` callbacks.
---------------------------------------
@@ -815,7 +815,7 @@ __Arguments__
* `test()` - synchronous truth test to perform before each execution of `fn`.
* `fn(callback)` - A function which is called each time `test` passes. The function is
- passed a `callback(err)`, which must be called once it has completed with an
+ passed a `callback(err)`, which must be called once it has completed with an
optional `err` argument.
* `callback(err)` - A callback which is called after the test fails and repeated
execution of `fn` has stopped.
@@ -842,8 +842,8 @@ async.whilst(
### doWhilst(fn, test, callback)
-The post-check version of [`whilst`](#whilst). To reflect the difference in
-the order of operations, the arguments `test` and `fn` are switched.
+The post-check version of [`whilst`](#whilst). To reflect the difference in
+the order of operations, the arguments `test` and `fn` are switched.
`doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript.
@@ -900,9 +900,9 @@ the error.
__Arguments__
-* `tasks` - An array of functions to run, each function is passed a
+* `tasks` - An array of functions to run, each function is passed a
`callback(err, result1, result2, ...)` it must call on completion. The first
- argument is an error (which can be `null`) and any further arguments will be
+ argument is an error (which can be `null`) and any further arguments will be
passed as arguments in order to the next task.
* `callback(err, [results])` - An optional callback to run once all the functions
have completed. This will be passed the results of the last task's callback.
@@ -925,7 +925,7 @@ async.waterfall([
callback(null, 'done');
}
], function (err, result) {
- // result now equals 'done'
+ // result now equals 'done'
});
```
@@ -972,7 +972,7 @@ add1mul3(4, function (err, result) {
### seq(fn1, fn2...)
Version of the compose function that is more natural to read.
-Each following function consumes the return value of the latter function.
+Each following function consumes the return value of the latter function.
Each function is executed with the `this` binding of the composed function.
@@ -986,7 +986,7 @@ __Example__
```js
// Requires lodash (or underscore), express3 and dresende's orm2.
// Part of an app, that fetches cats of the logged user.
-// This example uses `seq` function to avoid overnesting and error
+// This example uses `seq` function to avoid overnesting and error
// handling clutter.
app.get('/cats', function(request, response) {
function handleError(err, data, callback) {
@@ -1018,7 +1018,7 @@ app.get('/cats', function(request, response) {
### applyEach(fns, args..., callback)
-Applies the provided arguments to each function in the array, calling
+Applies the provided arguments to each function in the array, calling
`callback` after all functions have completed. If you only provide the first
argument, then it will return a function which lets you pass in the
arguments as if it were a single function call.
@@ -1058,13 +1058,13 @@ The same as [`applyEach`](#applyEach) only the functions are applied in series.
Creates a `queue` object with the specified `concurrency`. Tasks added to the
`queue` are processed in parallel (up to the `concurrency` limit). If all
-`worker`s are in progress, the task is queued until one becomes available.
+`worker`s are in progress, the task is queued until one becomes available.
Once a `worker` completes a `task`, that `task`'s callback is called.
__Arguments__
* `worker(task, callback)` - An asynchronous function for processing a queued
- task, which must call its `callback(err)` argument when finished, with an
+ task, which must call its `callback(err)` argument when finished, with an
optional `error` as an argument.
* `concurrency` - An `integer` for determining how many `worker` functions should be
run in parallel.
@@ -1081,11 +1081,11 @@ methods:
* `concurrency` - an integer for determining how many `worker` functions should be
run in parallel. This property can be changed after a `queue` is created to
alter the concurrency on-the-fly.
-* `push(task, [callback])` - add a new task to the `queue`. Calls `callback` once
+* `push(task, [callback])` - add a new task to the `queue`. Calls `callback` once
the `worker` has finished processing the task. Instead of a single task, a `tasks` array
can be submitted. The respective callback is used for every task in the list.
* `unshift(task, [callback])` - add a new task to the front of the `queue`.
-* `saturated` - a callback that is called when the `queue` length hits the `concurrency` limit,
+* `saturated` - a callback that is called when the `queue` length hits the `concurrency` limit,
and further tasks will be queued.
* `empty` - a callback that is called when the last item from the `queue` is given to a `worker`.
* `drain` - a callback that is called when the last item from the `queue` has returned from the `worker`.
@@ -1162,7 +1162,7 @@ when the worker is finished.
__Arguments__
* `worker(tasks, callback)` - An asynchronous function for processing an array of
- queued tasks, which must call its `callback(err)` argument when finished, with
+ queued tasks, which must call its `callback(err)` argument when finished, with
an optional `err` argument.
* `payload` - An optional `integer` for determining how many tasks should be
processed per round; if omitted, the default is unlimited.
@@ -1177,7 +1177,7 @@ methods:
process per round. This property can be changed after a `cargo` is created to
alter the payload on-the-fly.
* `push(task, [callback])` - Adds `task` to the `queue`. The callback is called
- once the `worker` has finished processing the task. Instead of a single task, an array of `tasks`
+ once the `worker` has finished processing the task. Instead of a single task, an array of `tasks`
can be submitted. The respective callback is used for every task in the list.
* `saturated` - A callback that is called when the `queue.length()` hits the concurrency and further tasks will be queued.
* `empty` - A callback that is called when the last item from the `queue` is given to a `worker`.
@@ -1214,18 +1214,18 @@ cargo.push({name: 'baz'}, function (err) {
### auto(tasks, [callback])
-Determines the best order for running the functions in `tasks`, based on their
-requirements. Each function can optionally depend on other functions being completed
-first, and each function is run as soon as its requirements are satisfied.
+Determines the best order for running the functions in `tasks`, based on their
+requirements. Each function can optionally depend on other functions being completed
+first, and each function is run as soon as its requirements are satisfied.
-If any of the functions pass an error to their callback, it will not
-complete (so any other functions depending on it will not run), and the main
-`callback` is immediately called with the error. Functions also receive an
+If any of the functions pass an error to their callback, it will not
+complete (so any other functions depending on it will not run), and the main
+`callback` is immediately called with the error. Functions also receive an
object containing the results of functions which have completed so far.
-Note, all functions are called with a `results` object as a second argument,
+Note, all functions are called with a `results` object as a second argument,
so it is unsafe to pass functions in the `tasks` object which cannot handle the
-extra argument.
+extra argument.
For example, this snippet of code:
@@ -1242,7 +1242,7 @@ argument, which will fail:
fs.readFile('data.txt', 'utf-8', cb, {});
```
-Instead, wrap the call to `readFile` in a function which does not forward the
+Instead, wrap the call to `readFile` in a function which does not forward the
`results` object:
```js
@@ -1259,13 +1259,13 @@ __Arguments__
requirements, with the function itself the last item in the array. The object's key
of a property serves as the name of the task defined by that property,
i.e. can be used when specifying requirements for other tasks.
- The function receives two arguments: (1) a `callback(err, result)` which must be
- called when finished, passing an `error` (which can be `null`) and the result of
+ The function receives two arguments: (1) a `callback(err, result)` which must be
+ called when finished, passing an `error` (which can be `null`) and the result of
the function's execution, and (2) a `results` object, containing the results of
the previously executed functions.
* `callback(err, results)` - An optional callback which is called when all the
- tasks have been completed. It receives the `err` argument if any `tasks`
- pass an error to their callback. Results are always returned; however, if
+ tasks have been completed. It receives the `err` argument if any `tasks`
+ pass an error to their callback. Results are always returned; however, if
an error occurs, no further `tasks` will be performed, and the results
object will only contain partial results.
@@ -1356,7 +1356,7 @@ __Arguments__
* `times` - An integer indicating how many times to attempt the `task` before giving up. Defaults to 5.
* `task(callback, results)` - A function which receives two arguments: (1) a `callback(err, result)`
- which must be called when finished, passing `err` (which can be `null`) and the `result` of
+ which must be called when finished, passing `err` (which can be `null`) and the `result` of
the function's execution, and (2) a `results` object, containing the results of
the previously executed functions (if nested inside another control flow).
* `callback(err, results)` - An optional callback which is called when the
@@ -1425,7 +1425,7 @@ node> nextfn();
### apply(function, arguments..)
-Creates a continuation function with some arguments already applied.
+Creates a continuation function with some arguments already applied.
Useful as a shorthand when combined with other control flow functions. Any arguments
passed to the returned function are added to the arguments originally passed
@@ -1532,7 +1532,7 @@ async.times(5, function(n, next){
### timesSeries(n, callback)
The same as [`times`](#times), only the iterator is applied to each item in `arr` in
-series. The next `iterator` is only called once the current one has completed.
+series. The next `iterator` is only called once the current one has completed.
The results array will be in the same order as the original.
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/lib/async.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/lib/async.js
index 1077aafc4c6bec..01e8afcc4f182c 100755
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/lib/async.js
+++ b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/lib/async.js
@@ -831,13 +831,13 @@
};
return q;
};
-
+
async.priorityQueue = function (worker, concurrency) {
-
+
function _compareTasks(a, b){
return a.priority - b.priority;
};
-
+
function _binarySearch(sequence, item, compare) {
var beg = -1,
end = sequence.length - 1;
@@ -851,7 +851,7 @@
}
return beg;
}
-
+
function _insert(q, data, priority, callback) {
if (!q.started){
q.started = true;
@@ -873,7 +873,7 @@
priority: priority,
callback: typeof callback === 'function' ? callback : null
};
-
+
q.tasks.splice(_binarySearch(q.tasks, item, _compareTasks) + 1, 0, item);
if (q.saturated && q.tasks.length === q.concurrency) {
@@ -882,15 +882,15 @@
async.setImmediate(q.process);
});
}
-
+
// Start with a normal queue
var q = async.queue(worker, concurrency);
-
+
// Override push to accept second parameter representing priority
q.push = function (data, priority, callback) {
_insert(q, data, priority, callback);
};
-
+
// Remove unshift function
delete q.unshift;
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.js
index 9bfa8b21cf1499..c42fe299040c40 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.js
@@ -1,18 +1,18 @@
/* @preserve
* The MIT License (MIT)
- *
+ *
* Copyright (c) 2014 Petka Antonov
- *
+ *
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
- *
+ *
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
- *
+ *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
@@ -20,7 +20,7 @@
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
- *
+ *
*/
/**
* bluebird build version 2.9.24
@@ -2114,7 +2114,7 @@ function errorAdapter(reason, nodeback) {
}
}
-Promise.prototype.asCallback =
+Promise.prototype.asCallback =
Promise.prototype.nodeify = function (nodeback, options) {
if (typeof nodeback == "function") {
var adapter = successAdapter;
@@ -2886,30 +2886,30 @@ _dereq_('./any.js')(Promise);
_dereq_('./each.js')(Promise, INTERNAL);
_dereq_('./timers.js')(Promise, INTERNAL);
_dereq_('./filter.js')(Promise, INTERNAL);
-
- util.toFastProperties(Promise);
- util.toFastProperties(Promise.prototype);
- function fillTypes(value) {
- var p = new Promise(INTERNAL);
- p._fulfillmentHandler0 = value;
- p._rejectionHandler0 = value;
- p._progressHandler0 = value;
- p._promise0 = value;
- p._receiver0 = value;
- p._settledValue = value;
- }
- // Complete slack tracking, opt out of field-type tracking and
- // stabilize map
- fillTypes({a: 1});
- fillTypes({b: 2});
- fillTypes({c: 3});
- fillTypes(1);
- fillTypes(function(){});
- fillTypes(undefined);
- fillTypes(false);
- fillTypes(new Promise(INTERNAL));
- CapturedTrace.setBounds(async.firstLineError, util.lastLineError);
- return Promise;
+
+ util.toFastProperties(Promise);
+ util.toFastProperties(Promise.prototype);
+ function fillTypes(value) {
+ var p = new Promise(INTERNAL);
+ p._fulfillmentHandler0 = value;
+ p._rejectionHandler0 = value;
+ p._progressHandler0 = value;
+ p._promise0 = value;
+ p._receiver0 = value;
+ p._settledValue = value;
+ }
+ // Complete slack tracking, opt out of field-type tracking and
+ // stabilize map
+ fillTypes({a: 1});
+ fillTypes({b: 2});
+ fillTypes({c: 3});
+ fillTypes(1);
+ fillTypes(function(){});
+ fillTypes(undefined);
+ fillTypes(false);
+ fillTypes(new Promise(INTERNAL));
+ CapturedTrace.setBounds(async.firstLineError, util.lastLineError);
+ return Promise;
};
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.min.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.min.js
index 36a90773e0cd5e..bee550ccc55706 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.min.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.min.js
@@ -1,18 +1,18 @@
/* @preserve
* The MIT License (MIT)
- *
+ *
* Copyright (c) 2014 Petka Antonov
- *
+ *
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
- *
+ *
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
- *
+ *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
@@ -20,7 +20,7 @@
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
- *
+ *
*/
/**
* bluebird build version 2.9.24
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/nodeify.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/nodeify.js
index 1175f17641b09f..f305b936bf05f6 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/nodeify.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/nodeify.js
@@ -38,7 +38,7 @@ function errorAdapter(reason, nodeback) {
}
}
-Promise.prototype.asCallback =
+Promise.prototype.asCallback =
Promise.prototype.nodeify = function (nodeback, options) {
if (typeof nodeback == "function") {
var adapter = successAdapter;
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise.js
index 433d844f696e0c..f80d247b1402d0 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise.js
@@ -672,29 +672,29 @@ require('./any.js')(Promise);
require('./each.js')(Promise, INTERNAL);
require('./timers.js')(Promise, INTERNAL);
require('./filter.js')(Promise, INTERNAL);
-
- util.toFastProperties(Promise);
- util.toFastProperties(Promise.prototype);
- function fillTypes(value) {
- var p = new Promise(INTERNAL);
- p._fulfillmentHandler0 = value;
- p._rejectionHandler0 = value;
- p._progressHandler0 = value;
- p._promise0 = value;
- p._receiver0 = value;
- p._settledValue = value;
- }
- // Complete slack tracking, opt out of field-type tracking and
- // stabilize map
- fillTypes({a: 1});
- fillTypes({b: 2});
- fillTypes({c: 3});
- fillTypes(1);
- fillTypes(function(){});
- fillTypes(undefined);
- fillTypes(false);
- fillTypes(new Promise(INTERNAL));
- CapturedTrace.setBounds(async.firstLineError, util.lastLineError);
- return Promise;
+
+ util.toFastProperties(Promise);
+ util.toFastProperties(Promise.prototype);
+ function fillTypes(value) {
+ var p = new Promise(INTERNAL);
+ p._fulfillmentHandler0 = value;
+ p._rejectionHandler0 = value;
+ p._progressHandler0 = value;
+ p._promise0 = value;
+ p._receiver0 = value;
+ p._settledValue = value;
+ }
+ // Complete slack tracking, opt out of field-type tracking and
+ // stabilize map
+ fillTypes({a: 1});
+ fillTypes({b: 2});
+ fillTypes({c: 3});
+ fillTypes(1);
+ fillTypes(function(){});
+ fillTypes(undefined);
+ fillTypes(false);
+ fillTypes(new Promise(INTERNAL));
+ CapturedTrace.setBounds(async.firstLineError, util.lastLineError);
+ return Promise;
};
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promisify.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promisify.js
index b9bb6523607ec2..035534459bbf13 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promisify.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promisify.js
@@ -288,3 +288,4 @@ Promise.promisifyAll = function (target, options) {
return promisifyAll(target, suffix, filter, promisifier);
};
};
+
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/Readme.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/Readme.md
index f38e4df9cf8ea3..4e091d2ab4d4ed 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/Readme.md
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/Readme.md
@@ -6,7 +6,7 @@
[](https://www.npmjs.org/package/commander)
[](https://gitter.im/tj/commander.js?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
- The complete solution for [node.js](http://nodejs.org) command-line interfaces, inspired by Ruby's [commander](https://github.com/tj/commander).
+ The complete solution for [node.js](http://nodejs.org) command-line interfaces, inspired by Ruby's [commander](https://github.com/tj/commander).
[API documentation](http://tj.github.com/commander.js/)
@@ -95,7 +95,7 @@ program
.option('-s --size ', 'Pizza size', /^(large|medium|small)$/i, 'medium')
.option('-d --drink [drink]', 'Drink', /^(coke|pepsi|izze)$/i)
.parse(process.argv);
-
+
console.log(' size: %j', program.size);
console.log(' drink: %j', program.drink);
```
@@ -146,7 +146,7 @@ program
.parse(process.argv);
```
-When `.command()` is invoked with a description argument, no `.action(callback)` should be called to handle sub-commands, otherwise there will be an error. This tells commander that you're going to use separate executables for sub-commands, much like `git(1)` and other popular tools.
+When `.command()` is invoked with a description argument, no `.action(callback)` should be called to handle sub-commands, otherwise there will be an error. This tells commander that you're going to use separate executables for sub-commands, much like `git(1)` and other popular tools.
The commander will try to search the executables in the directory of the entry script (like `./examples/pm`) with the name `program-command`, like `pm-install`, `pm-search`.
If the program is designed to installed globally, make sure the executables have proper modes, like `755`.
@@ -155,7 +155,7 @@ If the program is designed to installed globally, make sure the executables have
The help information is auto-generated based on the information commander already knows about your program, so the following `--help` info is for free:
-```
+```
$ ./examples/pizza --help
Usage: pizza [options]
@@ -308,3 +308,4 @@ More Demos can be found in the [examples](https://github.com/tj/commander.js/tre
## License
MIT
+
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/LICENSE b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/LICENSE
index 50d1e5c666c05b..d1f842f0bb2722 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/LICENSE
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/LICENSE
@@ -19,3 +19,4 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/index.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/index.js
index c4a79d1afad211..7e9fc70f0ac251 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/index.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/index.js
@@ -8,3 +8,5 @@ exports.readlinkSync = function (p) {
return p;
}
};
+
+
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/index.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/index.js
index dfd68c6e8a729c..445f7db2d62a72 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/index.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/index.js
@@ -174,7 +174,7 @@ var compile = function(schema, cache, root, reporter, opts) {
validate('for (var %s = %d; %s < %s.length; %s++) {', i, node.items.length, i, name, i)
visit(name+'['+i+']', node.additionalItems, reporter, filter)
validate('}')
- }
+ }
}
if (node.format && fmts[node.format]) {
@@ -379,7 +379,7 @@ var compile = function(schema, cache, root, reporter, opts) {
node.anyOf.forEach(function(sch, i) {
if (i === 0) {
validate('var %s = errors', prev)
- } else {
+ } else {
validate('if (errors !== %s) {', prev)
('errors = %s', prev)
}
@@ -430,7 +430,7 @@ var compile = function(schema, cache, root, reporter, opts) {
if (node.maxProperties !== undefined) {
if (type !== 'object') validate('if (%s) {', types.object(name))
-
+
validate('if (Object.keys(%s).length > %d) {', name, node.maxProperties)
error('has more properties than allowed')
validate('}')
@@ -440,7 +440,7 @@ var compile = function(schema, cache, root, reporter, opts) {
if (node.minProperties !== undefined) {
if (type !== 'object') validate('if (%s) {', types.object(name))
-
+
validate('if (Object.keys(%s).length < %d) {', name, node.minProperties)
error('has less properties than allowed')
validate('}')
@@ -450,7 +450,7 @@ var compile = function(schema, cache, root, reporter, opts) {
if (node.maxItems !== undefined) {
if (type !== 'array') validate('if (%s) {', types.array(name))
-
+
validate('if (%s.length > %d) {', name, node.maxItems)
error('has more items than allowed')
validate('}')
@@ -460,7 +460,7 @@ var compile = function(schema, cache, root, reporter, opts) {
if (node.minItems !== undefined) {
if (type !== 'array') validate('if (%s) {', types.array(name))
-
+
validate('if (%s.length < %d) {', name, node.minItems)
error('has less items than allowed')
validate('}')
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-object-property/node_modules/is-property/README.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-object-property/node_modules/is-property/README.md
index 9846a8bb9fc51b..ef1d00b62f8022 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-object-property/node_modules/is-property/README.md
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-object-property/node_modules/is-property/README.md
@@ -16,7 +16,7 @@ Install
-------
npm install is-property
-
+
### `require("is-property")(str)`
Checks if str is a property
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/.jshintrc b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/.jshintrc
deleted file mode 100644
index 77887b5f0f2efc..00000000000000
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/.jshintrc
+++ /dev/null
@@ -1,30 +0,0 @@
-{
- "maxdepth": 4,
- "maxstatements": 200,
- "maxcomplexity": 12,
- "maxlen": 80,
- "maxparams": 5,
-
- "curly": true,
- "eqeqeq": true,
- "immed": true,
- "latedef": false,
- "noarg": true,
- "noempty": true,
- "nonew": true,
- "undef": true,
- "unused": "vars",
- "trailing": true,
-
- "quotmark": true,
- "expr": true,
- "asi": true,
-
- "browser": false,
- "esnext": true,
- "devel": false,
- "node": false,
- "nonstandard": false,
-
- "predef": ["require", "module", "__dirname", "__filename"]
-}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/test/json-schema-draft4/not.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/test/json-schema-draft4/not.json
index f66690fe1bbff5..cbb7f46bf8bc5b 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/test/json-schema-draft4/not.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/test/json-schema-draft4/not.json
@@ -74,7 +74,7 @@
"description": "forbidden property",
"schema": {
"properties": {
- "foo": {
+ "foo": {
"not": {}
}
}
diff --git a/deps/npm/node_modules/request/node_modules/isstream/.jshintrc b/deps/npm/node_modules/request/node_modules/isstream/.jshintrc
deleted file mode 100644
index c8ef3ca4097f82..00000000000000
--- a/deps/npm/node_modules/request/node_modules/isstream/.jshintrc
+++ /dev/null
@@ -1,59 +0,0 @@
-{
- "predef": [ ]
- , "bitwise": false
- , "camelcase": false
- , "curly": false
- , "eqeqeq": false
- , "forin": false
- , "immed": false
- , "latedef": false
- , "noarg": true
- , "noempty": true
- , "nonew": true
- , "plusplus": false
- , "quotmark": true
- , "regexp": false
- , "undef": true
- , "unused": true
- , "strict": false
- , "trailing": true
- , "maxlen": 120
- , "asi": true
- , "boss": true
- , "debug": true
- , "eqnull": true
- , "esnext": true
- , "evil": true
- , "expr": true
- , "funcscope": false
- , "globalstrict": false
- , "iterator": false
- , "lastsemic": true
- , "laxbreak": true
- , "laxcomma": true
- , "loopfunc": true
- , "multistr": false
- , "onecase": false
- , "proto": false
- , "regexdash": false
- , "scripturl": true
- , "smarttabs": false
- , "shadow": false
- , "sub": true
- , "supernew": false
- , "validthis": true
- , "browser": true
- , "couch": false
- , "devel": false
- , "dojo": false
- , "mootools": false
- , "node": true
- , "nonstandard": true
- , "prototypejs": false
- , "rhino": false
- , "worker": true
- , "wsh": false
- , "nomen": false
- , "onevar": false
- , "passfail": false
-}
\ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/node-uuid/uuid.js b/deps/npm/node_modules/request/node_modules/node-uuid/uuid.js
index 80ed720db3ed56..0a617697969af7 100644
--- a/deps/npm/node_modules/request/node_modules/node-uuid/uuid.js
+++ b/deps/npm/node_modules/request/node_modules/node-uuid/uuid.js
@@ -230,7 +230,7 @@
} else if (typeof define === 'function' && define.amd) {
// Publish as AMD module
define(function() {return uuid;});
-
+
} else {
// Publish as global (in browsers)
diff --git a/deps/npm/node_modules/request/node_modules/qs/.jshintrc b/deps/npm/node_modules/request/node_modules/qs/.jshintrc
deleted file mode 100644
index 997b3f7d45e31b..00000000000000
--- a/deps/npm/node_modules/request/node_modules/qs/.jshintrc
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "node": true,
-
- "curly": true,
- "latedef": true,
- "quotmark": true,
- "undef": true,
- "unused": true,
- "trailing": true
-}
diff --git a/deps/npm/node_modules/request/node_modules/qs/README.md b/deps/npm/node_modules/request/node_modules/qs/Readme.md
similarity index 100%
rename from deps/npm/node_modules/request/node_modules/qs/README.md
rename to deps/npm/node_modules/request/node_modules/qs/Readme.md
diff --git a/deps/npm/node_modules/request/node_modules/tunnel-agent/.jshintrc b/deps/npm/node_modules/request/node_modules/tunnel-agent/.jshintrc
deleted file mode 100644
index 4c1c8d49723519..00000000000000
--- a/deps/npm/node_modules/request/node_modules/tunnel-agent/.jshintrc
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "node": true,
- "asi": true,
- "laxcomma": true
-}
diff --git a/deps/npm/node_modules/rimraf/README.md b/deps/npm/node_modules/rimraf/README.md
index c178dedc349530..58e7ac30344534 100644
--- a/deps/npm/node_modules/rimraf/README.md
+++ b/deps/npm/node_modules/rimraf/README.md
@@ -1,4 +1,4 @@
-The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node.
+The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node.
Install with `npm install rimraf`, or just drop rimraf.js somewhere.
diff --git a/deps/npm/node_modules/rimraf/package.json b/deps/npm/node_modules/rimraf/package.json
index 9f836e93040ee9..7b28e7302ac026 100644
--- a/deps/npm/node_modules/rimraf/package.json
+++ b/deps/npm/node_modules/rimraf/package.json
@@ -1,6 +1,6 @@
{
"name": "rimraf",
- "version": "2.3.2",
+ "version": "2.3.3",
"main": "rimraf.js",
"description": "A deep deletion module for node (like `rm -rf`)",
"author": {
@@ -31,19 +31,23 @@
"LICENSE",
"README.md"
],
- "gitHead": "9d5ab4a8b6986ec909af04f6d91315e98f5893e8",
+ "gitHead": "ad4efe8102a72c77bf2b13165ecc2229a9a68955",
"bugs": {
"url": "https://github.com/isaacs/rimraf/issues"
},
"homepage": "https://github.com/isaacs/rimraf",
- "_id": "rimraf@2.3.2",
- "_shasum": "7304bd9275c401b89103b106b3531c1ef0c02fe9",
- "_from": "rimraf@>=2.3.2 <2.4.0",
- "_npmVersion": "2.7.0",
- "_nodeVersion": "1.4.2",
+ "_id": "rimraf@2.3.3",
+ "_shasum": "d0073d8b3010611e8f3ad377b08e9a3c18b98f06",
+ "_from": "rimraf@>=2.3.3 <2.4.0",
+ "_npmVersion": "2.7.6",
+ "_nodeVersion": "1.7.1",
"_npmUser": {
"name": "isaacs",
- "email": "i@izs.me"
+ "email": "isaacs@npmjs.com"
+ },
+ "dist": {
+ "shasum": "d0073d8b3010611e8f3ad377b08e9a3c18b98f06",
+ "tarball": "http://registry.npmjs.org/rimraf/-/rimraf-2.3.3.tgz"
},
"maintainers": [
{
@@ -51,11 +55,6 @@
"email": "i@izs.me"
}
],
- "dist": {
- "shasum": "7304bd9275c401b89103b106b3531c1ef0c02fe9",
- "tarball": "http://registry.npmjs.org/rimraf/-/rimraf-2.3.2.tgz"
- },
"directories": {},
- "_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.3.2.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.3.3.tgz"
}
diff --git a/deps/npm/node_modules/rimraf/rimraf.js b/deps/npm/node_modules/rimraf/rimraf.js
index c189d5444dc396..8d420d25165ca3 100644
--- a/deps/npm/node_modules/rimraf/rimraf.js
+++ b/deps/npm/node_modules/rimraf/rimraf.js
@@ -42,13 +42,14 @@ function rimraf (p, options, cb) {
cb = options
options = {}
}
- assert(p)
- assert(options)
- assert(typeof cb === 'function')
- defaults(options)
+ assert(p, 'rimraf: missing path')
+ assert.equal(typeof p, 'string', 'rimraf: path should be a string')
+ assert(options, 'rimraf: missing options')
+ assert.equal(typeof options, 'object', 'rimraf: options should be object')
+ assert.equal(typeof cb, 'function', 'rimraf: callback function required')
- if (!cb) throw new Error("No callback passed to rimraf()")
+ defaults(options)
var busyTries = 0
var errState = null
@@ -254,8 +255,9 @@ function rimrafSync (p, options) {
options = options || {}
defaults(options)
- assert(p)
- assert(options)
+ assert(p, 'rimraf: missing path')
+ assert.equal(typeof p, 'string', 'rimraf: path should be a string')
+ assert(options, 'rimraf: missing options')
var results
diff --git a/deps/npm/node_modules/semver/LICENSE b/deps/npm/node_modules/semver/LICENSE
index 0c44ae716db8f3..19129e315fe593 100644
--- a/deps/npm/node_modules/semver/LICENSE
+++ b/deps/npm/node_modules/semver/LICENSE
@@ -1,27 +1,15 @@
-Copyright (c) Isaac Z. Schlueter ("Author")
-All rights reserved.
+The ISC License
-The BSD License
+Copyright (c) Isaac Z. Schlueter and Contributors
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
-1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS
-BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
-BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
-OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
-IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/semver/package.json b/deps/npm/node_modules/semver/package.json
index 7617ef8b50afb3..fa73f2a9f69b50 100644
--- a/deps/npm/node_modules/semver/package.json
+++ b/deps/npm/node_modules/semver/package.json
@@ -1,6 +1,6 @@
{
"name": "semver",
- "version": "4.3.3",
+ "version": "4.3.4",
"description": "The semantic version parser used by npm.",
"main": "semver.js",
"browser": "semver.browser.js",
@@ -13,7 +13,7 @@
"tap": "0.x >=0.0.4",
"uglify-js": "~2.3.6"
},
- "license": "BSD",
+ "license": "ISC",
"repository": {
"type": "git",
"url": "git://github.com/npm/node-semver.git"
@@ -21,34 +21,35 @@
"bin": {
"semver": "./bin/semver"
},
- "gitHead": "bb32a43bdfa7223e4c450d181e5a2184b00f24d4",
+ "gitHead": "d7d791dc9d321cb5f3211e39ce8857f6476922f9",
"bugs": {
"url": "https://github.com/npm/node-semver/issues"
},
- "homepage": "https://github.com/npm/node-semver",
- "_id": "semver@4.3.3",
- "_shasum": "15466b61220bc371cd8f0e666a9f785329ea8228",
- "_from": "semver@>=4.3.3 <4.4.0",
- "_npmVersion": "2.7.4",
- "_nodeVersion": "1.4.2",
+ "homepage": "https://github.com/npm/node-semver#readme",
+ "_id": "semver@4.3.4",
+ "_shasum": "bf43a1aae304de040e12a13f84200ca7aeab7589",
+ "_from": "semver@>=4.3.4 <4.4.0",
+ "_npmVersion": "2.9.1",
+ "_nodeVersion": "2.0.0",
"_npmUser": {
"name": "isaacs",
"email": "isaacs@npmjs.com"
},
+ "dist": {
+ "shasum": "bf43a1aae304de040e12a13f84200ca7aeab7589",
+ "tarball": "http://registry.npmjs.org/semver/-/semver-4.3.4.tgz"
+ },
"maintainers": [
{
"name": "isaacs",
- "email": "i@izs.me"
+ "email": "isaacs@npmjs.com"
},
{
"name": "othiym23",
"email": "ogd@aoaioxxysz.net"
}
],
- "dist": {
- "shasum": "15466b61220bc371cd8f0e666a9f785329ea8228",
- "tarball": "http://registry.npmjs.org/semver/-/semver-4.3.3.tgz"
- },
"directories": {},
- "_resolved": "https://registry.npmjs.org/semver/-/semver-4.3.3.tgz"
+ "_resolved": "https://registry.npmjs.org/semver/-/semver-4.3.4.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/spdx/LICENSE.md b/deps/npm/node_modules/spdx/LICENSE.md
new file mode 100644
index 00000000000000..2180a8c1a3676e
--- /dev/null
+++ b/deps/npm/node_modules/spdx/LICENSE.md
@@ -0,0 +1,7 @@
+Copyright Kyle E. Mitchell
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
diff --git a/deps/npm/node_modules/spdx/README.md b/deps/npm/node_modules/spdx/README.md
new file mode 100644
index 00000000000000..4bf07fe1715fba
--- /dev/null
+++ b/deps/npm/node_modules/spdx/README.md
@@ -0,0 +1,141 @@
+spdx.js
+=======
+
+[](https://www.npmjs.com/package/spdx)
+[](http://spdx.org/SPDX-specifications/spdx-version-2.0)
+[](http://www.apache.org/licenses/LICENSE-2.0)
+[](http://travis-ci.org/kemitchell/spdx.js)
+
+SPDX License Expression Syntax parser
+
+
+
+Simple License Expressions
+--------------------------
+```js
+spdx.valid('Invalid-Identifier'); // => null
+spdx.valid('GPL-2.0'); // => true
+spdx.valid('GPL-2.0+'); // => true
+spdx.valid('LicenseRef-23'); // => true
+spdx.valid('LicenseRef-MIT-Style-1'); // => true
+spdx.valid('DocumentRef-spdx-tool-1.2:LicenseRef-MIT-Style-2'); // => true
+```
+
+Composite License Expressions
+-----------------------------
+
+### Disjunctive `OR` Operator
+```js
+spdx.valid('(LGPL-2.1 OR MIT)'); // => true
+spdx.valid('(LGPL-2.1 OR MIT OR BSD-3-Clause)'); // => true
+```
+
+### Conjunctive `AND` Operator
+```js
+spdx.valid('(LGPL-2.1 AND MIT)'); // => true
+spdx.valid('(LGPL-2.1 AND MIT AND BSD-2-Clause)'); // => true
+```
+
+### Exception `WITH` Operator
+```js
+spdx.valid('(GPL-2.0+ WITH Bison-exception-2.2)'); // => true
+```
+
+### Order of Precedence and Parentheses
+```js
+var firstAST = {
+ left: {license: 'LGPL-2.1'},
+ conjunction: 'or',
+ right: {
+ left: {license: 'BSD-3-Clause'},
+ conjunction: 'and',
+ right: {license: 'MIT'}
+ }
+};
+spdx.parse('(LGPL-2.1 OR BSD-3-Clause AND MIT)'); // => firstAST
+
+var secondAST = {
+ left: {license: 'MIT'},
+ conjunction: 'and',
+ right: {
+ left: {license: 'LGPL-2.1', plus: true},
+ conjunction: 'and',
+ right: {license: 'BSD-3-Clause'}
+ }
+};
+spdx.parse('(MIT AND (LGPL-2.1+ AND BSD-3-Clause))'); // => secondAST
+```
+
+Strict Whitespace Rules
+-----------------------
+```js
+spdx.valid('MIT '); // => false
+spdx.valid(' MIT'); // => false
+spdx.valid('MIT AND BSD-3-Clause'); // => false
+```
+
+Identifier Lists
+----------------
+```js
+Array.isArray(spdx.licenses); // => true
+spdx.licenses.indexOf('ISC') > -1; // => true
+spdx.licenses.indexOf('Apache-1.7') > -1; // => false
+spdx.licenses.every(function(element) {
+ return typeof element === 'string';
+}); // => true
+
+Array.isArray(spdx.exceptions); // => true
+spdx.exceptions.indexOf('GCC-exception-3.1') > -1; // => true
+spdx.exceptions.every(function(element) {
+ return typeof element === 'string';
+}); // => true
+```
+
+Comparison
+----------
+```js
+spdx.gt('GPL-3.0', 'GPL-2.0'); // => true
+spdx.lt('MPL-1.0', 'MPL-2.0'); // => true
+
+spdx.gt('LPPL-1.3a', 'LPPL-1.0'); // => true
+spdx.gt('LPPL-1.3a', 'LPPL-1.3a'); // => false
+spdx.gt('MIT', 'ISC'); // => false
+
+try {
+ spdx.gt('(MIT OR ISC)', 'GPL-3.0');
+} catch (error) {
+ error.message; // => '"(MIT OR ISC)" is not a simple license identifier'
+}
+
+spdx.satisfies('MIT', 'MIT'); // => true
+spdx.satisfies('MIT', '(ISC OR MIT)'); // => true
+spdx.satisfies('Zlib', '(ISC OR (MIT OR Zlib))'); // => true
+spdx.satisfies('GPL-3.0', '(ISC OR MIT)'); // => false
+spdx.satisfies('GPL-2.0', 'GPL-2.0+'); // => true
+spdx.satisfies('GPL-3.0', 'GPL-2.0+'); // => true
+spdx.satisfies('GPL-1.0', 'GPL-2.0+'); // => false
+
+spdx.satisfies('GPL-2.0', 'GPL-2.0+ WITH Bison-exception-2.2'); // => false
+spdx.satisfies('GPL-3.0 WITH Bison-exception-2.2', 'GPL-2.0+ WITH Bison-exception-2.2'); // => true
+
+spdx.satisfies('(MIT OR GPL-2.0)', '(ISC OR MIT)'); // => true
+spdx.satisfies('(MIT AND GPL-2.0)', '(MIT OR GPL-2.0)'); // => true
+spdx.satisfies('(MIT AND GPL-2.0)', '(ISC OR GPL-2.0)'); // => false
+```
+
+Version Metadata
+----------------
+```js
+spdx.specificationVersion; // => '2.0'
+spdx.implementationVersion; // => package.version
+```
+
+The Specification
+-----------------
+[The Software Package Data Exchange (SPDX) specification](http://spdx.org) is the work of the [Linux Foundation](http://www.linuxfoundation.org) and its contributors, and is licensed under the terms of [the Creative Commons Attribution License 3.0 Unported (SPDX: "CC-BY-3.0")](http://spdx.org/licenses/CC-BY-3.0). "SPDX" is a United States federally registered trademark of the Linux Foundation.
diff --git a/deps/npm/node_modules/spdx/node_modules/spdx-license-ids/LICENSE b/deps/npm/node_modules/spdx/node_modules/spdx-license-ids/LICENSE
new file mode 100644
index 00000000000000..68a49daad8ff7e
--- /dev/null
+++ b/deps/npm/node_modules/spdx/node_modules/spdx-license-ids/LICENSE
@@ -0,0 +1,24 @@
+This is free and unencumbered software released into the public domain.
+
+Anyone is free to copy, modify, publish, use, compile, sell, or
+distribute this software, either in source code form or as a compiled
+binary, for any purpose, commercial or non-commercial, and by any
+means.
+
+In jurisdictions that recognize copyright laws, the author or authors
+of this software dedicate any and all copyright interest in the
+software to the public domain. We make this dedication for the benefit
+of the public at large and to the detriment of our heirs and
+successors. We intend this dedication to be an overt act of
+relinquishment in perpetuity of all present and future rights to this
+software under copyright law.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+For more information, please refer to <http://unlicense.org/>
diff --git a/deps/npm/node_modules/spdx/node_modules/spdx-license-ids/README.md b/deps/npm/node_modules/spdx/node_modules/spdx-license-ids/README.md
new file mode 100755
index 00000000000000..c96e56b1e1d7d4
--- /dev/null
+++ b/deps/npm/node_modules/spdx/node_modules/spdx-license-ids/README.md
@@ -0,0 +1,55 @@
+# spdx-license-ids
+
+A list of [SPDX license](http://spdx.org/licenses/) identifiers
+
+[**Download JSON**](https://raw.githubusercontent.com/shinnn/spdx-license-ids/master/spdx-license-ids.json)
+
+## Use as a JavaScript Library
+
+[](https://www.npmjs.org/package/spdx-license-ids)
+[](https://github.com/shinnn/spdx-license-ids/releases)
+[](https://travis-ci.org/shinnn/spdx-license-ids)
+[](https://coveralls.io/r/shinnn/spdx-license-ids)
+[](https://david-dm.org/shinnn/spdx-license-ids#info=devDependencies)
+
+### Installation
+
+#### Package managers
+
+##### [npm](https://www.npmjs.com/)
+
+```sh
+npm install spdx-license-ids
+```
+
+##### [bower](http://bower.io/)
+
+```sh
+bower install spdx-license-ids
+```
+
+##### [Duo](http://duojs.org/)
+
+```javascript
+var spdxLicenseIds = require('shinnn/spdx-license-ids');
+```
+
+#### Standalone
+
+[Download the script file directly.](https://raw.githubusercontent.com/shinnn/spdx-license-ids/master/spdx-license-ids-browser.js)
+
+### API
+
+#### spdxLicenseIds
+
+Type: `Array` of `String`
+
+It returns an array of SPDX license identifiers.
+
+```javascript
+var spdxLicenseIds = require('spdx-license-ids'); //=> ['Glide', 'Abstyles', 'AFL-1.1', ... ]
+```
+
+## License
+
+[The Unlicense](./LICENSE).
diff --git a/deps/npm/node_modules/spdx/node_modules/spdx-license-ids/package.json b/deps/npm/node_modules/spdx/node_modules/spdx-license-ids/package.json
new file mode 100644
index 00000000000000..7ba26183b67a2f
--- /dev/null
+++ b/deps/npm/node_modules/spdx/node_modules/spdx-license-ids/package.json
@@ -0,0 +1,83 @@
+{
+ "name": "spdx-license-ids",
+ "version": "1.0.0",
+ "description": "A list of SPDX license identifiers",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/shinnn/spdx-license-ids.git"
+ },
+ "author": {
+ "name": "Shinnosuke Watanabe",
+ "url": "https://github.com/shinnn"
+ },
+ "scripts": {
+ "build": "node --harmony_arrow_functions build.js",
+ "lint": "eslint --config node_modules/@shinnn/eslintrc/rc.json --ignore-path .gitignore .",
+ "pretest": "${npm_package_scripts_build} && ${npm_package_scripts_lint}",
+ "test": "node --harmony_arrow_functions test.js",
+ "coverage": "node --harmony_arrow_functions node_modules/.bin/istanbul cover test.js",
+ "coveralls": "${npm_package_scripts_coverage} && istanbul-coveralls"
+ },
+ "licenses": [
+ {
+ "type": "Unlicense",
+ "url": "https://github.com/shinnn/spdx-license-ids/blob/master/LICENSE"
+ }
+ ],
+ "main": "spdx-license-ids.json",
+ "files": [
+ "spdx-license-ids.json"
+ ],
+ "keywords": [
+ "spdx",
+ "license",
+ "licenses",
+ "id",
+ "identifier",
+ "identifiers",
+ "json",
+ "array",
+ "oss",
+ "browser",
+ "client-side"
+ ],
+ "devDependencies": {
+ "@shinnn/eslintrc": "^1.0.0",
+ "each-async": "^1.1.1",
+ "eslint": "^0.20.0",
+ "istanbul": "^0.3.13",
+ "istanbul-coveralls": "^1.0.2",
+ "require-bower-files": "^2.0.0",
+ "rm-rf": "^0.1.0",
+ "spdx-license-list": "^2.0.0",
+ "stringify-object": "^1.0.1",
+ "tape": "^4.0.0"
+ },
+ "gitHead": "64eecd85af21ddfc325dc024906b281b1ebdc120",
+ "bugs": {
+ "url": "https://github.com/shinnn/spdx-license-ids/issues"
+ },
+ "homepage": "https://github.com/shinnn/spdx-license-ids#readme",
+ "_id": "spdx-license-ids@1.0.0",
+ "_shasum": "a966050150dec883ffce877431b361b36742a28d",
+ "_from": "spdx-license-ids@>=1.0.0 <2.0.0",
+ "_npmVersion": "2.8.4",
+ "_nodeVersion": "1.8.1",
+ "_npmUser": {
+ "name": "shinnn",
+ "email": "snnskwtnb@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "shinnn",
+ "email": "snnskwtnb@gmail.com"
+ }
+ ],
+ "dist": {
+ "shasum": "a966050150dec883ffce877431b361b36742a28d",
+ "tarball": "http://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-1.0.0.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-1.0.0.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/spdx/node_modules/spdx-license-ids/spdx-license-ids.json b/deps/npm/node_modules/spdx/node_modules/spdx-license-ids/spdx-license-ids.json
new file mode 100644
index 00000000000000..c04844d6dfb2c0
--- /dev/null
+++ b/deps/npm/node_modules/spdx/node_modules/spdx-license-ids/spdx-license-ids.json
@@ -0,0 +1,299 @@
+[
+ "Glide",
+ "Abstyles",
+ "AFL-1.1",
+ "AFL-1.2",
+ "AFL-2.0",
+ "AFL-2.1",
+ "AFL-3.0",
+ "AMPAS",
+ "APL-1.0",
+ "Adobe-Glyph",
+ "APAFML",
+ "Adobe-2006",
+ "AGPL-1.0",
+ "Afmparse",
+ "Aladdin",
+ "ADSL",
+ "AMDPLPA",
+ "ANTLR-PD",
+ "Apache-1.0",
+ "Apache-1.1",
+ "Apache-2.0",
+ "AML",
+ "APSL-1.0",
+ "APSL-1.1",
+ "APSL-1.2",
+ "APSL-2.0",
+ "Artistic-1.0",
+ "Artistic-1.0-Perl",
+ "Artistic-1.0-cl8",
+ "Artistic-2.0",
+ "AAL",
+ "Bahyph",
+ "Barr",
+ "Beerware",
+ "BitTorrent-1.0",
+ "BitTorrent-1.1",
+ "BSL-1.0",
+ "Borceux",
+ "BSD-2-Clause",
+ "BSD-2-Clause-FreeBSD",
+ "BSD-2-Clause-NetBSD",
+ "BSD-3-Clause",
+ "BSD-3-Clause-Clear",
+ "BSD-4-Clause",
+ "BSD-Protection",
+ "BSD-3-Clause-Attribution",
+ "BSD-4-Clause-UC",
+ "bzip2-1.0.5",
+ "bzip2-1.0.6",
+ "Caldera",
+ "CECILL-1.0",
+ "CECILL-1.1",
+ "CECILL-2.0",
+ "CECILL-B",
+ "CECILL-C",
+ "ClArtistic",
+ "MIT-CMU",
+ "CNRI-Jython",
+ "CNRI-Python",
+ "CNRI-Python-GPL-Compatible",
+ "CPOL-1.02",
+ "CDDL-1.0",
+ "CDDL-1.1",
+ "CPAL-1.0",
+ "CPL-1.0",
+ "CATOSL-1.1",
+ "Condor-1.1",
+ "CC-BY-1.0",
+ "CC-BY-2.0",
+ "CC-BY-2.5",
+ "CC-BY-3.0",
+ "CC-BY-4.0",
+ "CC-BY-ND-1.0",
+ "CC-BY-ND-2.0",
+ "CC-BY-ND-2.5",
+ "CC-BY-ND-3.0",
+ "CC-BY-ND-4.0",
+ "CC-BY-NC-1.0",
+ "CC-BY-NC-2.0",
+ "CC-BY-NC-2.5",
+ "CC-BY-NC-3.0",
+ "CC-BY-NC-4.0",
+ "CC-BY-NC-ND-1.0",
+ "CC-BY-NC-ND-2.0",
+ "CC-BY-NC-ND-2.5",
+ "CC-BY-NC-ND-3.0",
+ "CC-BY-NC-ND-4.0",
+ "CC-BY-NC-SA-1.0",
+ "CC-BY-NC-SA-2.0",
+ "CC-BY-NC-SA-2.5",
+ "CC-BY-NC-SA-3.0",
+ "CC-BY-NC-SA-4.0",
+ "CC-BY-SA-1.0",
+ "CC-BY-SA-2.0",
+ "CC-BY-SA-2.5",
+ "CC-BY-SA-3.0",
+ "CC-BY-SA-4.0",
+ "CC0-1.0",
+ "Crossword",
+ "CUA-OPL-1.0",
+ "Cube",
+ "D-FSL-1.0",
+ "diffmark",
+ "WTFPL",
+ "DOC",
+ "Dotseqn",
+ "DSDP",
+ "dvipdfm",
+ "EPL-1.0",
+ "ECL-1.0",
+ "ECL-2.0",
+ "eGenix",
+ "EFL-1.0",
+ "EFL-2.0",
+ "MIT-advertising",
+ "MIT-enna",
+ "Entessa",
+ "ErlPL-1.1",
+ "EUDatagrid",
+ "EUPL-1.0",
+ "EUPL-1.1",
+ "Eurosym",
+ "Fair",
+ "MIT-feh",
+ "Frameworx-1.0",
+ "FreeImage",
+ "FTL",
+ "FSFUL",
+ "FSFULLR",
+ "Giftware",
+ "GL2PS",
+ "Glulxe",
+ "AGPL-3.0",
+ "GFDL-1.1",
+ "GFDL-1.2",
+ "GFDL-1.3",
+ "GPL-1.0",
+ "GPL-2.0",
+ "GPL-3.0",
+ "LGPL-2.1",
+ "LGPL-3.0",
+ "LGPL-2.0",
+ "gnuplot",
+ "gSOAP-1.3b",
+ "HaskellReport",
+ "HPND",
+ "IBM-pibs",
+ "IPL-1.0",
+ "ICU",
+ "ImageMagick",
+ "iMatix",
+ "Imlib2",
+ "IJG",
+ "Intel-ACPI",
+ "Intel",
+ "IPA",
+ "ISC",
+ "JasPer-2.0",
+ "JSON",
+ "LPPL-1.3a",
+ "LPPL-1.0",
+ "LPPL-1.1",
+ "LPPL-1.2",
+ "LPPL-1.3c",
+ "Latex2e",
+ "BSD-3-Clause-LBNL",
+ "Leptonica",
+ "Libpng",
+ "libtiff",
+ "LPL-1.02",
+ "LPL-1.0",
+ "MakeIndex",
+ "MTLL",
+ "MS-PL",
+ "MS-RL",
+ "MirOS",
+ "MITNFA",
+ "MIT",
+ "Motosoto",
+ "MPL-1.0",
+ "MPL-1.1",
+ "MPL-2.0",
+ "MPL-2.0-no-copyleft-exception",
+ "mpich2",
+ "Multics",
+ "Mup",
+ "NASA-1.3",
+ "Naumen",
+ "NBPL-1.0",
+ "NetCDF",
+ "NGPL",
+ "NOSL",
+ "NPL-1.0",
+ "NPL-1.1",
+ "Newsletr",
+ "NLPL",
+ "Nokia",
+ "NPOSL-3.0",
+ "Noweb",
+ "NRL",
+ "NTP",
+ "Nunit",
+ "OCLC-2.0",
+ "ODbL-1.0",
+ "PDDL-1.0",
+ "OGTSL",
+ "OLDAP-2.2.2",
+ "OLDAP-1.1",
+ "OLDAP-1.2",
+ "OLDAP-1.3",
+ "OLDAP-1.4",
+ "OLDAP-2.0",
+ "OLDAP-2.0.1",
+ "OLDAP-2.1",
+ "OLDAP-2.2",
+ "OLDAP-2.2.1",
+ "OLDAP-2.3",
+ "OLDAP-2.4",
+ "OLDAP-2.5",
+ "OLDAP-2.6",
+ "OLDAP-2.7",
+ "OLDAP-2.8",
+ "OML",
+ "OPL-1.0",
+ "OSL-1.0",
+ "OSL-1.1",
+ "OSL-2.0",
+ "OSL-2.1",
+ "OSL-3.0",
+ "OpenSSL",
+ "PHP-3.0",
+ "PHP-3.01",
+ "Plexus",
+ "PostgreSQL",
+ "psfrag",
+ "psutils",
+ "Python-2.0",
+ "QPL-1.0",
+ "Qhull",
+ "Rdisc",
+ "RPSL-1.0",
+ "RPL-1.1",
+ "RPL-1.5",
+ "RHeCos-1.1",
+ "RSCPL",
+ "RSA-MD",
+ "Ruby",
+ "SAX-PD",
+ "Saxpath",
+ "SCEA",
+ "SWL",
+ "SGI-B-1.0",
+ "SGI-B-1.1",
+ "SGI-B-2.0",
+ "OFL-1.0",
+ "OFL-1.1",
+ "SimPL-2.0",
+ "Sleepycat",
+ "SNIA",
+ "SMLNJ",
+ "SugarCRM-1.1.3",
+ "SISSL",
+ "SISSL-1.2",
+ "SPL-1.0",
+ "Watcom-1.0",
+ "TCL",
+ "Unlicense",
+ "TMate",
+ "TORQUE-1.1",
+ "TOSL",
+ "Unicode-TOU",
+ "UPL-1.0",
+ "NCSA",
+ "Vim",
+ "VOSTROM",
+ "VSL-1.0",
+ "W3C-19980720",
+ "W3C",
+ "Wsuipa",
+ "Xnet",
+ "X11",
+ "Xerox",
+ "XFree86-1.1",
+ "xinetd",
+ "xpp",
+ "XSkat",
+ "YPL-1.0",
+ "YPL-1.1",
+ "Zed",
+ "Zend-2.0",
+ "Zimbra-1.3",
+ "Zimbra-1.4",
+ "Zlib",
+ "zlib-acknowledgement",
+ "ZPL-1.1",
+ "ZPL-2.0",
+ "ZPL-2.1"
+]
diff --git a/deps/npm/node_modules/spdx/package.json b/deps/npm/node_modules/spdx/package.json
new file mode 100644
index 00000000000000..9c31ddb5076dff
--- /dev/null
+++ b/deps/npm/node_modules/spdx/package.json
@@ -0,0 +1,72 @@
+{
+ "name": "spdx",
+ "description": "SPDX License Expression Syntax parser",
+ "version": "0.4.0",
+ "author": {
+ "name": "Kyle E. Mitchell",
+ "email": "kyle@kemitchell.com",
+ "url": "http://kemitchell.com"
+ },
+ "bugs": {
+ "url": "https://github.com/kemitchell/spdx.js/issues"
+ },
+ "dependencies": {
+ "spdx-license-ids": "^1.0.0"
+ },
+ "devDependencies": {
+ "docco": "^0.7.0",
+ "fixpack": "^2.2.0",
+ "jison": "^0.4.15",
+ "jscs": "^1.12.0",
+ "jshint": "^2.7.0",
+ "jsmd": "^0.3.0"
+ },
+ "homepage": "https://github.com/kemitchell/spdx.js",
+ "keywords": [
+ "SPDX",
+ "law",
+ "legal",
+ "license",
+ "metadata",
+ "package",
+ "package.json",
+ "standards"
+ ],
+ "license": "Apache-2.0",
+ "main": "source/spdx.js",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/kemitchell/spdx.js.git"
+ },
+ "scripts": {
+ "build": "node build/parser.js > source/parser.generated.js",
+ "doc": "docco --output documentation source/spdx.js",
+ "lint": "fixpack && jshint build source/spdx.js && jscs build source/spdx.js",
+ "precommit": "npm run lint && npm run test",
+ "prepublish": "npm run build",
+ "pretest": "npm run build",
+ "test": "jsmd README.md"
+ },
+ "gitHead": "7186b8f92f0aa3c8f087e73b5f98bd1c776a4d3f",
+ "_id": "spdx@0.4.0",
+ "_shasum": "5a5cbff1a457b57b15204cafd3d0ea9ad9652ef8",
+ "_from": "spdx@0.4.0",
+ "_npmVersion": "1.4.28",
+ "_npmUser": {
+ "name": "kemitchell",
+ "email": "kyle@kemitchell.com"
+ },
+ "maintainers": [
+ {
+ "name": "kemitchell",
+ "email": "kyle@kemitchell.com"
+ }
+ ],
+ "dist": {
+ "shasum": "5a5cbff1a457b57b15204cafd3d0ea9ad9652ef8",
+ "tarball": "http://registry.npmjs.org/spdx/-/spdx-0.4.0.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/spdx/-/spdx-0.4.0.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/spdx/source/exceptions.json b/deps/npm/node_modules/spdx/source/exceptions.json
new file mode 100644
index 00000000000000..d588a1af7e11f8
--- /dev/null
+++ b/deps/npm/node_modules/spdx/source/exceptions.json
@@ -0,0 +1,11 @@
+[
+ "Autoconf-exception-2.0",
+ "Autoconf-exception-3.0",
+ "Bison-exception-2.2",
+ "Classpath-exception-2.0",
+ "eCos-exception-2.0",
+ "Font-exception-2.0",
+ "GCC-exception-2.0",
+ "GCC-exception-3.1",
+ "WxWindows-exception-3.1"
+]
diff --git a/deps/npm/node_modules/spdx/source/parser.generated.js b/deps/npm/node_modules/spdx/source/parser.generated.js
new file mode 100644
index 00000000000000..380609339cd77a
--- /dev/null
+++ b/deps/npm/node_modules/spdx/source/parser.generated.js
@@ -0,0 +1,1255 @@
+/* parser generated by jison 0.4.15 */
+/*
+ Returns a Parser object of the following structure:
+
+ Parser: {
+ yy: {}
+ }
+
+ Parser.prototype: {
+ yy: {},
+ trace: function(),
+ symbols_: {associative list: name ==> number},
+ terminals_: {associative list: number ==> name},
+ productions_: [...],
+ performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$),
+ table: [...],
+ defaultActions: {...},
+ parseError: function(str, hash),
+ parse: function(input),
+
+ lexer: {
+ EOF: 1,
+ parseError: function(str, hash),
+ setInput: function(input),
+ input: function(),
+ unput: function(str),
+ more: function(),
+ less: function(n),
+ pastInput: function(),
+ upcomingInput: function(),
+ showPosition: function(),
+ test_match: function(regex_match_array, rule_index),
+ next: function(),
+ lex: function(),
+ begin: function(condition),
+ popState: function(),
+ _currentRules: function(),
+ topState: function(),
+ pushState: function(condition),
+
+ options: {
+ ranges: boolean (optional: true ==> token location info will include a .range[] member)
+ flex: boolean (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match)
+ backtrack_lexer: boolean (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code)
+ },
+
+ performAction: function(yy, yy_, $avoiding_name_collisions, YY_START),
+ rules: [...],
+ conditions: {associative list: name ==> set},
+ }
+ }
+
+
+ token location info (@$, _$, etc.): {
+ first_line: n,
+ last_line: n,
+ first_column: n,
+ last_column: n,
+ range: [start_number, end_number] (where the numbers are indexes into the input string, regular zero-based)
+ }
+
+
+ the parseError function receives a 'hash' object with these members for lexer and parser errors: {
+ text: (matched text)
+ token: (the produced terminal token, if any)
+ line: (yylineno)
+ }
+ while parser (grammar) errors will also provide these members, i.e. parser errors deliver a superset of attributes: {
+ loc: (yylloc)
+ expected: (string describing the set of expected tokens)
+ recoverable: (boolean: TRUE when the parser has a error recovery rule available for this particular error)
+ }
+*/
+var spdxparse = (function(){
+var o=function(k,v,o,l){for(o=o||{},l=k.length;l--;o[k[l]]=v);return o},$V0=[1,5],$V1=[1,6],$V2=[1,7],$V3=[1,4],$V4=[1,9],$V5=[1,10],$V6=[5,14,15,17],$V7=[5,12,14,15,17];
+var parser = {trace: function trace() { },
+yy: {},
+symbols_: {"error":2,"start":3,"expression":4,"EOS":5,"simpleExpression":6,"LICENSE":7,"PLUS":8,"LICENSEREF":9,"DOCUMENTREF":10,"COLON":11,"WITH":12,"EXCEPTION":13,"AND":14,"OR":15,"OPEN":16,"CLOSE":17,"$accept":0,"$end":1},
+terminals_: {2:"error",5:"EOS",7:"LICENSE",8:"PLUS",9:"LICENSEREF",10:"DOCUMENTREF",11:"COLON",12:"WITH",13:"EXCEPTION",14:"AND",15:"OR",16:"OPEN",17:"CLOSE"},
+productions_: [0,[3,2],[6,1],[6,2],[6,1],[6,3],[4,1],[4,3],[4,3],[4,3],[4,3]],
+performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) {
+/* this == yyval */
+
+var $0 = $$.length - 1;
+switch (yystate) {
+case 1:
+return this.$ = $$[$0-1];
+break;
+case 2: case 4: case 5:
+this.$ = { license: yytext };
+break;
+case 3:
+this.$ = { license: $$[$0-1], plus: true };
+break;
+case 6:
+this.$ = $$[$0];
+break;
+case 7:
+this.$ = { exception: $$[$0] };
+this.$.license = $$[$0-2].license;
+if ($$[$0-2].hasOwnProperty('plus')) {
+ this.$.plus = $$[$0-2].plus;
+}
+break;
+case 8:
+this.$ = { conjunction: 'and', left: $$[$0-2], right: $$[$0] };
+break;
+case 9:
+this.$ = { conjunction: 'or', left: $$[$0-2], right: $$[$0] };
+break;
+case 10:
+this.$ = $$[$0-1]
+break;
+}
+},
+table: [{3:1,4:2,6:3,7:$V0,9:$V1,10:$V2,16:$V3},{1:[3]},{5:[1,8],14:$V4,15:$V5},o($V6,[2,6],{12:[1,11]}),{4:12,6:3,7:$V0,9:$V1,10:$V2,16:$V3},o($V7,[2,2],{8:[1,13]}),o($V7,[2,4]),{11:[1,14]},{1:[2,1]},{4:15,6:3,7:$V0,9:$V1,10:$V2,16:$V3},{4:16,6:3,7:$V0,9:$V1,10:$V2,16:$V3},{13:[1,17]},{14:$V4,15:$V5,17:[1,18]},o($V7,[2,3]),{9:[1,19]},o($V6,[2,8]),o([5,15,17],[2,9],{14:$V4}),o($V6,[2,7]),o($V6,[2,10]),o($V7,[2,5])],
+defaultActions: {8:[2,1]},
+parseError: function parseError(str, hash) {
+ if (hash.recoverable) {
+ this.trace(str);
+ } else {
+ throw new Error(str);
+ }
+},
+parse: function parse(input) {
+ var self = this, stack = [0], tstack = [], vstack = [null], lstack = [], table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1;
+ var args = lstack.slice.call(arguments, 1);
+ var lexer = Object.create(this.lexer);
+ var sharedState = { yy: {} };
+ for (var k in this.yy) {
+ if (Object.prototype.hasOwnProperty.call(this.yy, k)) {
+ sharedState.yy[k] = this.yy[k];
+ }
+ }
+ lexer.setInput(input, sharedState.yy);
+ sharedState.yy.lexer = lexer;
+ sharedState.yy.parser = this;
+ if (typeof lexer.yylloc == 'undefined') {
+ lexer.yylloc = {};
+ }
+ var yyloc = lexer.yylloc;
+ lstack.push(yyloc);
+ var ranges = lexer.options && lexer.options.ranges;
+ if (typeof sharedState.yy.parseError === 'function') {
+ this.parseError = sharedState.yy.parseError;
+ } else {
+ this.parseError = Object.getPrototypeOf(this).parseError;
+ }
+ function popStack(n) {
+ stack.length = stack.length - 2 * n;
+ vstack.length = vstack.length - n;
+ lstack.length = lstack.length - n;
+ }
+ _token_stack:
+ function lex() {
+ var token;
+ token = lexer.lex() || EOF;
+ if (typeof token !== 'number') {
+ token = self.symbols_[token] || token;
+ }
+ return token;
+ }
+ var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected;
+ while (true) {
+ state = stack[stack.length - 1];
+ if (this.defaultActions[state]) {
+ action = this.defaultActions[state];
+ } else {
+ if (symbol === null || typeof symbol == 'undefined') {
+ symbol = lex();
+ }
+ action = table[state] && table[state][symbol];
+ }
+ if (typeof action === 'undefined' || !action.length || !action[0]) {
+ var errStr = '';
+ expected = [];
+ for (p in table[state]) {
+ if (this.terminals_[p] && p > TERROR) {
+ expected.push('\'' + this.terminals_[p] + '\'');
+ }
+ }
+ if (lexer.showPosition) {
+ errStr = 'Parse error on line ' + (yylineno + 1) + ':\n' + lexer.showPosition() + '\nExpecting ' + expected.join(', ') + ', got \'' + (this.terminals_[symbol] || symbol) + '\'';
+ } else {
+ errStr = 'Parse error on line ' + (yylineno + 1) + ': Unexpected ' + (symbol == EOF ? 'end of input' : '\'' + (this.terminals_[symbol] || symbol) + '\'');
+ }
+ this.parseError(errStr, {
+ text: lexer.match,
+ token: this.terminals_[symbol] || symbol,
+ line: lexer.yylineno,
+ loc: yyloc,
+ expected: expected
+ });
+ }
+ if (action[0] instanceof Array && action.length > 1) {
+ throw new Error('Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol);
+ }
+ switch (action[0]) {
+ case 1:
+ stack.push(symbol);
+ vstack.push(lexer.yytext);
+ lstack.push(lexer.yylloc);
+ stack.push(action[1]);
+ symbol = null;
+ if (!preErrorSymbol) {
+ yyleng = lexer.yyleng;
+ yytext = lexer.yytext;
+ yylineno = lexer.yylineno;
+ yyloc = lexer.yylloc;
+ if (recovering > 0) {
+ recovering--;
+ }
+ } else {
+ symbol = preErrorSymbol;
+ preErrorSymbol = null;
+ }
+ break;
+ case 2:
+ len = this.productions_[action[1]][1];
+ yyval.$ = vstack[vstack.length - len];
+ yyval._$ = {
+ first_line: lstack[lstack.length - (len || 1)].first_line,
+ last_line: lstack[lstack.length - 1].last_line,
+ first_column: lstack[lstack.length - (len || 1)].first_column,
+ last_column: lstack[lstack.length - 1].last_column
+ };
+ if (ranges) {
+ yyval._$.range = [
+ lstack[lstack.length - (len || 1)].range[0],
+ lstack[lstack.length - 1].range[1]
+ ];
+ }
+ r = this.performAction.apply(yyval, [
+ yytext,
+ yyleng,
+ yylineno,
+ sharedState.yy,
+ action[1],
+ vstack,
+ lstack
+ ].concat(args));
+ if (typeof r !== 'undefined') {
+ return r;
+ }
+ if (len) {
+ stack = stack.slice(0, -1 * len * 2);
+ vstack = vstack.slice(0, -1 * len);
+ lstack = lstack.slice(0, -1 * len);
+ }
+ stack.push(this.productions_[action[1]][0]);
+ vstack.push(yyval.$);
+ lstack.push(yyval._$);
+ newState = table[stack[stack.length - 2]][stack[stack.length - 1]];
+ stack.push(newState);
+ break;
+ case 3:
+ return true;
+ }
+ }
+ return true;
+}};
+/* generated by jison-lex 0.3.4 */
+var lexer = (function(){
+var lexer = ({
+
+EOF:1,
+
// Report a lexing error: delegate to the attached parser's parseError when
// one has been wired up (the parser assigns `yy.parser = this` before a
// parse), otherwise fail hard with a plain Error carrying the message.
parseError: function parseError(str, hash) {
    if (this.yy.parser) {
        this.yy.parser.parseError(str, hash);
    } else {
        throw new Error(str);
    }
},
+
// Reset the lexer and install a new input string.
// `input` is the text to tokenize; `yy` is the shared parser state object.
// Returns `this` for chaining.
setInput: function (input, yy) {
    this.yy = yy || this.yy || {};
    this._input = input;
    // _more: keep accumulating matched text; _backtrack: a rule called
    // reject(); done: input exhausted.
    this._more = this._backtrack = this.done = false;
    this.yylineno = this.yyleng = 0;
    this.yytext = this.matched = this.match = '';
    this.conditionStack = ['INITIAL'];
    // Location tracking: lines are 1-based, columns 0-based.
    this.yylloc = {
        first_line: 1,
        first_column: 0,
        last_line: 1,
        last_column: 0
    };
    if (this.options.ranges) {
        this.yylloc.range = [0, 0];
    }
    this.offset = 0;
    return this;
},
+
// Consume and return one character from the input, updating yytext, the
// match buffers, line/column accounting, and (when enabled) the range.
input: function () {
    var ch = this._input[0];
    // NOTE(review): when the input is empty `ch` is undefined and the
    // `ch.match(...)` below would throw — callers appear to check `done`
    // first; confirm before calling this on exhausted input.
    this.yytext += ch;
    this.yyleng++;
    this.offset++;
    this.match += ch;
    this.matched += ch;
    var lines = ch.match(/(?:\r\n?|\n).*/g);
    if (lines) {
        this.yylineno++;
        this.yylloc.last_line++;
    } else {
        this.yylloc.last_column++;
    }
    if (this.options.ranges) {
        this.yylloc.range[1]++;
    }

    this._input = this._input.slice(1);
    return ch;
},
+
// Push `ch` (one char or a string) back onto the front of the input,
// unwinding yytext, the match buffers, line numbers, and location info.
// Returns `this` for chaining.
unput: function (ch) {
    var len = ch.length;
    var lines = ch.split(/(?:\r\n?|\n)/g);

    this._input = ch + this._input;
    this.yytext = this.yytext.substr(0, this.yytext.length - len);
    //this.yyleng -= len;
    this.offset -= len;
    var oldLines = this.match.split(/(?:\r\n?|\n)/g);
    // NOTE(review): match/matched shrink by only ONE character here while
    // yytext shrinks by the full length of `ch` — kept exactly as the
    // generator emitted it.
    this.match = this.match.substr(0, this.match.length - 1);
    this.matched = this.matched.substr(0, this.matched.length - 1);

    // `lines.length - 1` is the number of line breaks being un-read.
    if (lines.length - 1) {
        this.yylineno -= lines.length - 1;
    }
    var r = this.yylloc.range;

    this.yylloc = {
        first_line: this.yylloc.first_line,
        last_line: this.yylineno + 1,
        first_column: this.yylloc.first_column,
        last_column: lines ?
            (lines.length === oldLines.length ? this.yylloc.first_column : 0)
            + oldLines[oldLines.length - lines.length].length - lines[0].length :
            this.yylloc.first_column - len
    };

    if (this.options.ranges) {
        this.yylloc.range = [r[0], r[0] + this.yyleng - len];
    }
    this.yyleng = this.yytext.length;
    return this;
},
+
// When called from a rule action: keep the matched text and prepend it to
// the next match instead of starting fresh. Returns `this` for chaining.
more: function () {
    this._more = true;
    return this;
},
+
// When called from a rule action: signal that this rule should be treated
// as a non-match so the next matching rule (regex) is tried instead.
// Only legal when the lexer was built with options.backtrack_lexer;
// otherwise raise a parse error explaining the restriction.
reject: function () {
    if (this.options.backtrack_lexer) {
        this._backtrack = true;
    } else {
        return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), {
            text: "",
            token: null,
            line: this.yylineno
        });

    }
    return this;
},
+
// Retain only the first `n` characters of the current match, pushing the
// remainder back onto the input via unput().
less: function (n) {
    this.unput(this.match.slice(n));
},
+
+// displays already matched input, i.e. for error messages
+pastInput:function () {
+ var past = this.matched.substr(0, this.matched.length - this.match.length);
+ return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, "");
+ },
+
+// displays upcoming input, i.e. for error messages
+upcomingInput:function () {
+ var next = this.match;
+ if (next.length < 20) {
+ next += this._input.substr(0, 20-next.length);
+ }
+ return (next.substr(0,20) + (next.length > 20 ? '...' : '')).replace(/\n/g, "");
+ },
+
+// displays the character position where the lexing error occurred, i.e. for error messages
+showPosition:function () {
+ var pre = this.pastInput();
+ var c = new Array(pre.length + 1).join("-");
+ return pre + this.upcomingInput() + "\n" + c + "^";
+ },
+
// Apply a successful regex match for one rule: advance all lexer state and
// run the rule's semantic action. Returns the action's token, or false when
// the rule consumes input without producing a token. When
// options.backtrack_lexer is set, the full lexer state is snapshotted first
// so reject() inside the action can roll everything back.
// `match` is a RegExp match result; `indexed_rule` is the rule's index.
test_match: function (match, indexed_rule) {
    var token,
        lines,
        backup;

    if (this.options.backtrack_lexer) {
        // save context
        backup = {
            yylineno: this.yylineno,
            yylloc: {
                first_line: this.yylloc.first_line,
                // NOTE(review): reads `this.last_line`, not
                // `this.yylloc.last_line` — looks like an upstream jison
                // typo; kept byte-identical as generated.
                last_line: this.last_line,
                first_column: this.yylloc.first_column,
                last_column: this.yylloc.last_column
            },
            yytext: this.yytext,
            match: this.match,
            matches: this.matches,
            matched: this.matched,
            yyleng: this.yyleng,
            offset: this.offset,
            _more: this._more,
            _input: this._input,
            yy: this.yy,
            conditionStack: this.conditionStack.slice(0),
            done: this.done
        };
        if (this.options.ranges) {
            backup.yylloc.range = this.yylloc.range.slice(0);
        }
    }

    // Count line breaks inside the matched text to advance yylineno.
    lines = match[0].match(/(?:\r\n?|\n).*/g);
    if (lines) {
        this.yylineno += lines.length;
    }
    this.yylloc = {
        first_line: this.yylloc.last_line,
        last_line: this.yylineno + 1,
        first_column: this.yylloc.last_column,
        last_column: lines ?
            lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length :
            this.yylloc.last_column + match[0].length
    };
    this.yytext += match[0];
    this.match += match[0];
    this.matches = match;
    this.yyleng = this.yytext.length;
    if (this.options.ranges) {
        this.yylloc.range = [this.offset, this.offset += this.yyleng];
    }
    this._more = false;
    this._backtrack = false;
    this._input = this._input.slice(match[0].length);
    this.matched += match[0];
    // Run the rule's semantic action; it may return a token number.
    token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1]);
    // More input may have been unput() by the action; clear `done` if so.
    if (this.done && this._input) {
        this.done = false;
    }
    if (token) {
        return token;
    } else if (this._backtrack) {
        // recover context
        for (var k in backup) {
            this[k] = backup[k];
        }
        return false; // rule action called reject() implying the next rule should be tested instead.
    }
    return false;
},
+
// Find and apply the matching rule at the current input position.
// Returns a token, false (a rule matched but produced no token, e.g.
// whitespace), or EOF; raises a parse error when nothing matches.
next: function () {
    if (this.done) {
        return this.EOF;
    }
    if (!this._input) {
        this.done = true;
    }

    var token,
        match,
        tempMatch,
        index;
    // Unless more() was called from the previous action, start fresh.
    if (!this._more) {
        this.yytext = '';
        this.match = '';
    }
    var rules = this._currentRules();
    for (var i = 0; i < rules.length; i++) {
        tempMatch = this._input.match(this.rules[rules[i]]);
        // Longest match wins; earlier rules win ties.
        if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
            match = tempMatch;
            index = i;
            if (this.options.backtrack_lexer) {
                token = this.test_match(tempMatch, rules[i]);
                if (token !== false) {
                    return token;
                } else if (this._backtrack) {
                    match = false;
                    continue; // rule action called reject() implying a rule MISmatch.
                } else {
                    // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
                    return false;
                }
            } else if (!this.options.flex) {
                // Without the flex option, stop at the first matching rule
                // rather than scanning all rules for a longer match.
                break;
            }
        }
    }
    if (match) {
        token = this.test_match(match, rules[index]);
        if (token !== false) {
            return token;
        }
        // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
        return false;
    }
    if (this._input === "") {
        return this.EOF;
    } else {
        return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), {
            text: "",
            token: null,
            line: this.yylineno
        });
    }
},
+
+// return next match that has a token
+lex:function lex() {
+ var r = this.next();
+ if (r) {
+ return r;
+ } else {
+ return this.lex();
+ }
+ },
+
// Activate a new lexer condition state by pushing it onto the stack.
begin: function begin(condition) {
    this.conditionStack.push(condition);
},
+
+// pop the previously active lexer condition state off the condition stack
+popState:function popState() {
+ var n = this.conditionStack.length - 1;
+ if (n > 0) {
+ return this.conditionStack.pop();
+ } else {
+ return this.conditionStack[0];
+ }
+ },
+
+// produce the lexer rule set which is active for the currently active lexer condition state
+_currentRules:function _currentRules() {
+ if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
+ return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules;
+ } else {
+ return this.conditions["INITIAL"].rules;
+ }
+ },
+
+// return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
+topState:function topState(n) {
+ n = this.conditionStack.length - 1 - Math.abs(n || 0);
+ if (n >= 0) {
+ return this.conditionStack[n];
+ } else {
+ return "INITIAL";
+ }
+ },
+
// Alias for begin(condition): push a new lexer condition state.
pushState: function pushState(condition) {
    this.begin(condition);
},
+
+// return the number of states currently on the stack
+stateStackSize:function stateStackSize() {
+ return this.conditionStack.length;
+ },
+options: {},
+performAction: function anonymous(yy,yy_,$avoiding_name_collisions,YY_START) {
+var YYSTATE=YY_START;
+switch($avoiding_name_collisions) {
+case 0:return 5;
+break;
+case 1:/* skip whitespace */
+break;
+case 2:return 8;
+break;
+case 3:return 16;
+break;
+case 4:return 17;
+break;
+case 5:return 11;
+break;
+case 6:return 10;
+break;
+case 7:return 9;
+break;
+case 8:return 14;
+break;
+case 9:return 15;
+break;
+case 10:return 12;
+break;
+case 11:return 7;
+break;
+case 12:return 7;
+break;
+case 13:return 7;
+break;
+case 14:return 7;
+break;
+case 15:return 7;
+break;
+case 16:return 7;
+break;
+case 17:return 7;
+break;
+case 18:return 7;
+break;
+case 19:return 7;
+break;
+case 20:return 7;
+break;
+case 21:return 7;
+break;
+case 22:return 7;
+break;
+case 23:return 7;
+break;
+case 24:return 7;
+break;
+case 25:return 7;
+break;
+case 26:return 7;
+break;
+case 27:return 7;
+break;
+case 28:return 7;
+break;
+case 29:return 7;
+break;
+case 30:return 7;
+break;
+case 31:return 7;
+break;
+case 32:return 7;
+break;
+case 33:return 7;
+break;
+case 34:return 7;
+break;
+case 35:return 7;
+break;
+case 36:return 7;
+break;
+case 37:return 7;
+break;
+case 38:return 7;
+break;
+case 39:return 7;
+break;
+case 40:return 7;
+break;
+case 41:return 7;
+break;
+case 42:return 7;
+break;
+case 43:return 7;
+break;
+case 44:return 7;
+break;
+case 45:return 7;
+break;
+case 46:return 7;
+break;
+case 47:return 7;
+break;
+case 48:return 7;
+break;
+case 49:return 7;
+break;
+case 50:return 7;
+break;
+case 51:return 7;
+break;
+case 52:return 7;
+break;
+case 53:return 7;
+break;
+case 54:return 7;
+break;
+case 55:return 7;
+break;
+case 56:return 7;
+break;
+case 57:return 7;
+break;
+case 58:return 7;
+break;
+case 59:return 7;
+break;
+case 60:return 7;
+break;
+case 61:return 7;
+break;
+case 62:return 7;
+break;
+case 63:return 7;
+break;
+case 64:return 7;
+break;
+case 65:return 7;
+break;
+case 66:return 7;
+break;
+case 67:return 7;
+break;
+case 68:return 7;
+break;
+case 69:return 7;
+break;
+case 70:return 7;
+break;
+case 71:return 7;
+break;
+case 72:return 7;
+break;
+case 73:return 7;
+break;
+case 74:return 7;
+break;
+case 75:return 7;
+break;
+case 76:return 7;
+break;
+case 77:return 7;
+break;
+case 78:return 7;
+break;
+case 79:return 7;
+break;
+case 80:return 7;
+break;
+case 81:return 7;
+break;
+case 82:return 7;
+break;
+case 83:return 7;
+break;
+case 84:return 7;
+break;
+case 85:return 7;
+break;
+case 86:return 7;
+break;
+case 87:return 7;
+break;
+case 88:return 7;
+break;
+case 89:return 7;
+break;
+case 90:return 7;
+break;
+case 91:return 7;
+break;
+case 92:return 7;
+break;
+case 93:return 7;
+break;
+case 94:return 7;
+break;
+case 95:return 7;
+break;
+case 96:return 7;
+break;
+case 97:return 7;
+break;
+case 98:return 7;
+break;
+case 99:return 7;
+break;
+case 100:return 7;
+break;
+case 101:return 7;
+break;
+case 102:return 7;
+break;
+case 103:return 7;
+break;
+case 104:return 7;
+break;
+case 105:return 7;
+break;
+case 106:return 7;
+break;
+case 107:return 7;
+break;
+case 108:return 7;
+break;
+case 109:return 7;
+break;
+case 110:return 7;
+break;
+case 111:return 7;
+break;
+case 112:return 7;
+break;
+case 113:return 7;
+break;
+case 114:return 7;
+break;
+case 115:return 7;
+break;
+case 116:return 7;
+break;
+case 117:return 7;
+break;
+case 118:return 7;
+break;
+case 119:return 7;
+break;
+case 120:return 7;
+break;
+case 121:return 7;
+break;
+case 122:return 7;
+break;
+case 123:return 7;
+break;
+case 124:return 7;
+break;
+case 125:return 7;
+break;
+case 126:return 7;
+break;
+case 127:return 7;
+break;
+case 128:return 7;
+break;
+case 129:return 7;
+break;
+case 130:return 7;
+break;
+case 131:return 7;
+break;
+case 132:return 7;
+break;
+case 133:return 7;
+break;
+case 134:return 7;
+break;
+case 135:return 7;
+break;
+case 136:return 7;
+break;
+case 137:return 7;
+break;
+case 138:return 7;
+break;
+case 139:return 7;
+break;
+case 140:return 7;
+break;
+case 141:return 7;
+break;
+case 142:return 7;
+break;
+case 143:return 7;
+break;
+case 144:return 7;
+break;
+case 145:return 7;
+break;
+case 146:return 7;
+break;
+case 147:return 7;
+break;
+case 148:return 7;
+break;
+case 149:return 7;
+break;
+case 150:return 7;
+break;
+case 151:return 7;
+break;
+case 152:return 7;
+break;
+case 153:return 7;
+break;
+case 154:return 7;
+break;
+case 155:return 7;
+break;
+case 156:return 7;
+break;
+case 157:return 7;
+break;
+case 158:return 7;
+break;
+case 159:return 7;
+break;
+case 160:return 7;
+break;
+case 161:return 7;
+break;
+case 162:return 7;
+break;
+case 163:return 7;
+break;
+case 164:return 7;
+break;
+case 165:return 7;
+break;
+case 166:return 7;
+break;
+case 167:return 7;
+break;
+case 168:return 7;
+break;
+case 169:return 7;
+break;
+case 170:return 7;
+break;
+case 171:return 7;
+break;
+case 172:return 7;
+break;
+case 173:return 7;
+break;
+case 174:return 7;
+break;
+case 175:return 7;
+break;
+case 176:return 7;
+break;
+case 177:return 7;
+break;
+case 178:return 7;
+break;
+case 179:return 7;
+break;
+case 180:return 7;
+break;
+case 181:return 7;
+break;
+case 182:return 7;
+break;
+case 183:return 7;
+break;
+case 184:return 7;
+break;
+case 185:return 7;
+break;
+case 186:return 7;
+break;
+case 187:return 7;
+break;
+case 188:return 7;
+break;
+case 189:return 7;
+break;
+case 190:return 7;
+break;
+case 191:return 7;
+break;
+case 192:return 7;
+break;
+case 193:return 7;
+break;
+case 194:return 7;
+break;
+case 195:return 7;
+break;
+case 196:return 7;
+break;
+case 197:return 7;
+break;
+case 198:return 7;
+break;
+case 199:return 7;
+break;
+case 200:return 7;
+break;
+case 201:return 7;
+break;
+case 202:return 7;
+break;
+case 203:return 7;
+break;
+case 204:return 7;
+break;
+case 205:return 7;
+break;
+case 206:return 7;
+break;
+case 207:return 7;
+break;
+case 208:return 7;
+break;
+case 209:return 7;
+break;
+case 210:return 7;
+break;
+case 211:return 7;
+break;
+case 212:return 7;
+break;
+case 213:return 7;
+break;
+case 214:return 7;
+break;
+case 215:return 7;
+break;
+case 216:return 7;
+break;
+case 217:return 7;
+break;
+case 218:return 7;
+break;
+case 219:return 7;
+break;
+case 220:return 7;
+break;
+case 221:return 7;
+break;
+case 222:return 7;
+break;
+case 223:return 7;
+break;
+case 224:return 7;
+break;
+case 225:return 7;
+break;
+case 226:return 7;
+break;
+case 227:return 7;
+break;
+case 228:return 7;
+break;
+case 229:return 7;
+break;
+case 230:return 7;
+break;
+case 231:return 7;
+break;
+case 232:return 7;
+break;
+case 233:return 7;
+break;
+case 234:return 7;
+break;
+case 235:return 7;
+break;
+case 236:return 7;
+break;
+case 237:return 7;
+break;
+case 238:return 7;
+break;
+case 239:return 7;
+break;
+case 240:return 7;
+break;
+case 241:return 7;
+break;
+case 242:return 7;
+break;
+case 243:return 7;
+break;
+case 244:return 7;
+break;
+case 245:return 7;
+break;
+case 246:return 7;
+break;
+case 247:return 7;
+break;
+case 248:return 7;
+break;
+case 249:return 7;
+break;
+case 250:return 7;
+break;
+case 251:return 7;
+break;
+case 252:return 7;
+break;
+case 253:return 7;
+break;
+case 254:return 7;
+break;
+case 255:return 7;
+break;
+case 256:return 7;
+break;
+case 257:return 7;
+break;
+case 258:return 7;
+break;
+case 259:return 7;
+break;
+case 260:return 7;
+break;
+case 261:return 7;
+break;
+case 262:return 7;
+break;
+case 263:return 7;
+break;
+case 264:return 7;
+break;
+case 265:return 7;
+break;
+case 266:return 7;
+break;
+case 267:return 7;
+break;
+case 268:return 7;
+break;
+case 269:return 7;
+break;
+case 270:return 7;
+break;
+case 271:return 7;
+break;
+case 272:return 7;
+break;
+case 273:return 7;
+break;
+case 274:return 7;
+break;
+case 275:return 7;
+break;
+case 276:return 7;
+break;
+case 277:return 7;
+break;
+case 278:return 7;
+break;
+case 279:return 7;
+break;
+case 280:return 7;
+break;
+case 281:return 7;
+break;
+case 282:return 7;
+break;
+case 283:return 7;
+break;
+case 284:return 7;
+break;
+case 285:return 7;
+break;
+case 286:return 7;
+break;
+case 287:return 7;
+break;
+case 288:return 7;
+break;
+case 289:return 7;
+break;
+case 290:return 7;
+break;
+case 291:return 7;
+break;
+case 292:return 7;
+break;
+case 293:return 7;
+break;
+case 294:return 7;
+break;
+case 295:return 7;
+break;
+case 296:return 7;
+break;
+case 297:return 7;
+break;
+case 298:return 7;
+break;
+case 299:return 7;
+break;
+case 300:return 7;
+break;
+case 301:return 7;
+break;
+case 302:return 7;
+break;
+case 303:return 7;
+break;
+case 304:return 7;
+break;
+case 305:return 7;
+break;
+case 306:return 7;
+break;
+case 307:return 7;
+break;
+case 308:return 13;
+break;
+case 309:return 13;
+break;
+case 310:return 13;
+break;
+case 311:return 13;
+break;
+case 312:return 13;
+break;
+case 313:return 13;
+break;
+case 314:return 13;
+break;
+case 315:return 13;
+break;
+case 316:return 13;
+break;
+}
+},
+rules: [/^(?:$)/,/^(?:\s+)/,/^(?:\+)/,/^(?:\()/,/^(?:\))/,/^(?::)/,/^(?:DocumentRef-([0-9A-Za-z-+.]+))/,/^(?:LicenseRef-([0-9A-Za-z-+.]+))/,/^(?:AND)/,/^(?:OR)/,/^(?:WITH)/,/^(?:Glide)/,/^(?:Abstyles)/,/^(?:AFL-1.1)/,/^(?:AFL-1.2)/,/^(?:AFL-2.0)/,/^(?:AFL-2.1)/,/^(?:AFL-3.0)/,/^(?:AMPAS)/,/^(?:APL-1.0)/,/^(?:Adobe-Glyph)/,/^(?:APAFML)/,/^(?:Adobe-2006)/,/^(?:AGPL-1.0)/,/^(?:Afmparse)/,/^(?:Aladdin)/,/^(?:ADSL)/,/^(?:AMDPLPA)/,/^(?:ANTLR-PD)/,/^(?:Apache-1.0)/,/^(?:Apache-1.1)/,/^(?:Apache-2.0)/,/^(?:AML)/,/^(?:APSL-1.0)/,/^(?:APSL-1.1)/,/^(?:APSL-1.2)/,/^(?:APSL-2.0)/,/^(?:Artistic-1.0)/,/^(?:Artistic-1.0-Perl)/,/^(?:Artistic-1.0-cl8)/,/^(?:Artistic-2.0)/,/^(?:AAL)/,/^(?:Bahyph)/,/^(?:Barr)/,/^(?:Beerware)/,/^(?:BitTorrent-1.0)/,/^(?:BitTorrent-1.1)/,/^(?:BSL-1.0)/,/^(?:Borceux)/,/^(?:BSD-2-Clause)/,/^(?:BSD-2-Clause-FreeBSD)/,/^(?:BSD-2-Clause-NetBSD)/,/^(?:BSD-3-Clause)/,/^(?:BSD-3-Clause-Clear)/,/^(?:BSD-4-Clause)/,/^(?:BSD-Protection)/,/^(?:BSD-3-Clause-Attribution)/,/^(?:BSD-4-Clause-UC)/,/^(?:bzip2-1.0.5)/,/^(?:bzip2-1.0.6)/,/^(?:Caldera)/,/^(?:CECILL-1.0)/,/^(?:CECILL-1.1)/,/^(?:CECILL-2.0)/,/^(?:CECILL-B)/,/^(?:CECILL-C)/,/^(?:ClArtistic)/,/^(?:MIT-CMU)/,/^(?:CNRI-Jython)/,/^(?:CNRI-Python)/,/^(?:CNRI-Python-GPL-Compatible)/,/^(?:CPOL-1.02)/,/^(?:CDDL-1.0)/,/^(?:CDDL-1.1)/,/^(?:CPAL-1.0)/,/^(?:CPL-1.0)/,/^(?:CATOSL-1.1)/,/^(?:Condor-1.1)/,/^(?:CC-BY-1.0)/,/^(?:CC-BY-2.0)/,/^(?:CC-BY-2.5)/,/^(?:CC-BY-3.0)/,/^(?:CC-BY-4.0)/,/^(?:CC-BY-ND-1.0)/,/^(?:CC-BY-ND-2.0)/,/^(?:CC-BY-ND-2.5)/,/^(?:CC-BY-ND-3.0)/,/^(?:CC-BY-ND-4.0)/,/^(?:CC-BY-NC-1.0)/,/^(?:CC-BY-NC-2.0)/,/^(?:CC-BY-NC-2.5)/,/^(?:CC-BY-NC-3.0)/,/^(?:CC-BY-NC-4.0)/,/^(?:CC-BY-NC-ND-1.0)/,/^(?:CC-BY-NC-ND-2.0)/,/^(?:CC-BY-NC-ND-2.5)/,/^(?:CC-BY-NC-ND-3.0)/,/^(?:CC-BY-NC-ND-4.0)/,/^(?:CC-BY-NC-SA-1.0)/,/^(?:CC-BY-NC-SA-2.0)/,/^(?:CC-BY-NC-SA-2.5)/,/^(?:CC-BY-NC-SA-3.0)/,/^(?:CC-BY-NC-SA-4.0)/,/^(?:CC-BY-SA-1.0)/,/^(?:CC-BY-SA-2.0)/,/^(?:CC-BY-SA-2.5)/,/^(?:CC-BY-SA-3.0)/,/^(?:CC-BY-SA-4.0)/,/^(?:CC0-1.0)/,
/^(?:Crossword)/,/^(?:CUA-OPL-1.0)/,/^(?:Cube)/,/^(?:D-FSL-1.0)/,/^(?:diffmark)/,/^(?:WTFPL)/,/^(?:DOC)/,/^(?:Dotseqn)/,/^(?:DSDP)/,/^(?:dvipdfm)/,/^(?:EPL-1.0)/,/^(?:ECL-1.0)/,/^(?:ECL-2.0)/,/^(?:eGenix)/,/^(?:EFL-1.0)/,/^(?:EFL-2.0)/,/^(?:MIT-advertising)/,/^(?:MIT-enna)/,/^(?:Entessa)/,/^(?:ErlPL-1.1)/,/^(?:EUDatagrid)/,/^(?:EUPL-1.0)/,/^(?:EUPL-1.1)/,/^(?:Eurosym)/,/^(?:Fair)/,/^(?:MIT-feh)/,/^(?:Frameworx-1.0)/,/^(?:FreeImage)/,/^(?:FTL)/,/^(?:FSFUL)/,/^(?:FSFULLR)/,/^(?:Giftware)/,/^(?:GL2PS)/,/^(?:Glulxe)/,/^(?:AGPL-3.0)/,/^(?:GFDL-1.1)/,/^(?:GFDL-1.2)/,/^(?:GFDL-1.3)/,/^(?:GPL-1.0)/,/^(?:GPL-2.0)/,/^(?:GPL-3.0)/,/^(?:LGPL-2.1)/,/^(?:LGPL-3.0)/,/^(?:LGPL-2.0)/,/^(?:gnuplot)/,/^(?:gSOAP-1.3b)/,/^(?:HaskellReport)/,/^(?:HPND)/,/^(?:IBM-pibs)/,/^(?:IPL-1.0)/,/^(?:ICU)/,/^(?:ImageMagick)/,/^(?:iMatix)/,/^(?:Imlib2)/,/^(?:IJG)/,/^(?:Intel-ACPI)/,/^(?:Intel)/,/^(?:IPA)/,/^(?:ISC)/,/^(?:JasPer-2.0)/,/^(?:JSON)/,/^(?:LPPL-1.3a)/,/^(?:LPPL-1.0)/,/^(?:LPPL-1.1)/,/^(?:LPPL-1.2)/,/^(?:LPPL-1.3c)/,/^(?:Latex2e)/,/^(?:BSD-3-Clause-LBNL)/,/^(?:Leptonica)/,/^(?:Libpng)/,/^(?:libtiff)/,/^(?:LPL-1.02)/,/^(?:LPL-1.0)/,/^(?:MakeIndex)/,/^(?:MTLL)/,/^(?:MS-PL)/,/^(?:MS-RL)/,/^(?:MirOS)/,/^(?:MITNFA)/,/^(?:MIT)/,/^(?:Motosoto)/,/^(?:MPL-1.0)/,/^(?:MPL-1.1)/,/^(?:MPL-2.0)/,/^(?:MPL-2.0-no-copyleft-exception)/,/^(?:mpich2)/,/^(?:Multics)/,/^(?:Mup)/,/^(?:NASA-1.3)/,/^(?:Naumen)/,/^(?:NBPL-1.0)/,/^(?:NetCDF)/,/^(?:NGPL)/,/^(?:NOSL)/,/^(?:NPL-1.0)/,/^(?:NPL-1.1)/,/^(?:Newsletr)/,/^(?:NLPL)/,/^(?:Nokia)/,/^(?:NPOSL-3.0)/,/^(?:Noweb)/,/^(?:NRL)/,/^(?:NTP)/,/^(?:Nunit)/,/^(?:OCLC-2.0)/,/^(?:ODbL-1.0)/,/^(?:PDDL-1.0)/,/^(?:OGTSL)/,/^(?:OLDAP-2.2.2)/,/^(?:OLDAP-1.1)/,/^(?:OLDAP-1.2)/,/^(?:OLDAP-1.3)/,/^(?:OLDAP-1.4)/,/^(?:OLDAP-2.0)/,/^(?:OLDAP-2.0.1)/,/^(?:OLDAP-2.1)/,/^(?:OLDAP-2.2)/,/^(?:OLDAP-2.2.1)/,/^(?:OLDAP-2.3)/,/^(?:OLDAP-2.4)/,/^(?:OLDAP-2.5)/,/^(?:OLDAP-2.6)/,/^(?:OLDAP-2.7)/,/^(?:OLDAP-2.8)/,/^(?:OML)/,/^(?:OPL-1.0)/,/^(?:OSL-1.0)/,/^(?:OSL-1.1)/,/^(?:OSL-2.0)/,/^(?:OSL-2.1)/,
/^(?:OSL-3.0)/,/^(?:OpenSSL)/,/^(?:PHP-3.0)/,/^(?:PHP-3.01)/,/^(?:Plexus)/,/^(?:PostgreSQL)/,/^(?:psfrag)/,/^(?:psutils)/,/^(?:Python-2.0)/,/^(?:QPL-1.0)/,/^(?:Qhull)/,/^(?:Rdisc)/,/^(?:RPSL-1.0)/,/^(?:RPL-1.1)/,/^(?:RPL-1.5)/,/^(?:RHeCos-1.1)/,/^(?:RSCPL)/,/^(?:RSA-MD)/,/^(?:Ruby)/,/^(?:SAX-PD)/,/^(?:Saxpath)/,/^(?:SCEA)/,/^(?:SWL)/,/^(?:SGI-B-1.0)/,/^(?:SGI-B-1.1)/,/^(?:SGI-B-2.0)/,/^(?:OFL-1.0)/,/^(?:OFL-1.1)/,/^(?:SimPL-2.0)/,/^(?:Sleepycat)/,/^(?:SNIA)/,/^(?:SMLNJ)/,/^(?:SugarCRM-1.1.3)/,/^(?:SISSL)/,/^(?:SISSL-1.2)/,/^(?:SPL-1.0)/,/^(?:Watcom-1.0)/,/^(?:TCL)/,/^(?:Unlicense)/,/^(?:TMate)/,/^(?:TORQUE-1.1)/,/^(?:TOSL)/,/^(?:Unicode-TOU)/,/^(?:UPL-1.0)/,/^(?:NCSA)/,/^(?:Vim)/,/^(?:VOSTROM)/,/^(?:VSL-1.0)/,/^(?:W3C-19980720)/,/^(?:W3C)/,/^(?:Wsuipa)/,/^(?:Xnet)/,/^(?:X11)/,/^(?:Xerox)/,/^(?:XFree86-1.1)/,/^(?:xinetd)/,/^(?:xpp)/,/^(?:XSkat)/,/^(?:YPL-1.0)/,/^(?:YPL-1.1)/,/^(?:Zed)/,/^(?:Zend-2.0)/,/^(?:Zimbra-1.3)/,/^(?:Zimbra-1.4)/,/^(?:Zlib)/,/^(?:zlib-acknowledgement)/,/^(?:ZPL-1.1)/,/^(?:ZPL-2.0)/,/^(?:ZPL-2.1)/,/^(?:Autoconf-exception-2.0)/,/^(?:Autoconf-exception-3.0)/,/^(?:Bison-exception-2.2)/,/^(?:Classpath-exception-2.0)/,/^(?:eCos-exception-2.0)/,/^(?:Font-exception-2.0)/,/^(?:GCC-exception-2.0)/,/^(?:GCC-exception-3.1)/,/^(?:WxWindows-exception-3.1)/],
+conditions: {"INITIAL":{"rules":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316],"inclusive":true}}
+});
+return lexer;
+})();
+parser.lexer = lexer;
// Constructor wrapper: each `new Parser` gets its own shared-state `yy`
// object while inheriting parse() and the parser tables via the prototype.
function Parser () {
    this.yy = {};
}
+Parser.prototype = parser;parser.Parser = Parser;
+return new Parser;
+})();
+
+
// CommonJS glue: when loaded as a module, expose the parser object, the
// Parser constructor, and a bound parse(); when executed directly, run a
// small CLI that parses the file named by the first argument.
if (typeof require !== 'undefined' && typeof exports !== 'undefined') {
    exports.parser = spdxparse;
    exports.Parser = spdxparse.Parser;
    exports.parse = function () { return spdxparse.parse.apply(spdxparse, arguments); };
    // CLI entry point: args is process.argv.slice(1), so args[0] is this
    // script's path and args[1] the file to parse; exits 1 when missing.
    exports.main = function commonjsMain(args) {
        if (!args[1]) {
            console.log('Usage: '+args[0]+' FILE');
            process.exit(1);
        }
        var source = require('fs').readFileSync(require('path').normalize(args[1]), "utf8");
        return exports.parser.parse(source);
    };
    if (typeof module !== 'undefined' && require.main === module) {
        exports.main(process.argv.slice(1));
    }
}
diff --git a/deps/npm/node_modules/spdx/source/ranges.json b/deps/npm/node_modules/spdx/source/ranges.json
new file mode 100644
index 00000000000000..a3fc260e65f06a
--- /dev/null
+++ b/deps/npm/node_modules/spdx/source/ranges.json
@@ -0,0 +1,196 @@
+[
+ [
+ "AFL-1.1",
+ "AFL-1.2",
+ "AFL-2.0",
+ "AFL-2.1",
+ "AFL-3.0"
+ ],
+ [
+ "Apache-1.0",
+ "Apache-1.1",
+ "Apache-2.0"
+ ],
+ [
+ "APSL-1.0",
+ "APSL-1.1",
+ "APSL-1.2",
+ "APSL-2.0"
+ ],
+ [
+ "Artistic-1.0",
+ "Artistic-2.0"
+ ],
+ [
+ "BitTorrent-1.0",
+ "BitTorrent-1.1"
+ ],
+ [
+ "CC-BY-1.0",
+ "CC-BY-2.0",
+ "CC-BY-2.5",
+ "CC-BY-3.0",
+ "CC-BY-4.0"
+ ],
+ [
+ "CC-BY-NC-1.0",
+ "CC-BY-NC-2.0",
+ "CC-BY-NC-2.5",
+ "CC-BY-NC-3.0",
+ "CC-BY-NC-4.0"
+ ],
+ [
+ "CC-BY-NC-ND-1.0",
+ "CC-BY-NC-ND-2.0",
+ "CC-BY-NC-ND-2.5",
+ "CC-BY-NC-ND-3.0",
+ "CC-BY-NC-ND-4.0"
+ ],
+ [
+ "CC-BY-NC-SA-1.0",
+ "CC-BY-NC-SA-2.0",
+ "CC-BY-NC-SA-2.5",
+ "CC-BY-NC-SA-3.0",
+ "CC-BY-NC-SA-4.0"
+ ],
+ [
+ "CC-BY-ND-1.0",
+ "CC-BY-ND-2.0",
+ "CC-BY-ND-2.5",
+ "CC-BY-ND-3.0",
+ "CC-BY-ND-4.0"
+ ],
+ [
+ "CC-BY-SA-1.0",
+ "CC-BY-SA-2.0",
+ "CC-BY-SA-2.5",
+ "CC-BY-SA-3.0",
+ "CC-BY-SA-4.0"
+ ],
+ [
+ "CDDL-1.0",
+ "CDDL-1.1"
+ ],
+ [
+ "CECILL-1.0",
+ "CECILL-1.1",
+ "CECILL-2.0"
+ ],
+ [
+ "ECL-1.0",
+ "ECL-2.0"
+ ],
+ [
+ "EFL-1.0",
+ "EFL-2.0"
+ ],
+ [
+ "EUPL-1.0",
+ "EUPL-1.1"
+ ],
+ [
+ "GFDL-1.1",
+ "GFDL-1.2",
+ "GFDL-1.3"
+ ],
+ [
+ "GPL-1.0",
+ "GPL-2.0",
+ "GPL-3.0"
+ ],
+ [
+ "LGPL-2.0",
+ "LGPL-2.1",
+ "LGPL-3.0"
+ ],
+ [
+ "LPL-1.0",
+ "LPL-1.02"
+ ],
+ [
+ "LPPL-1.0",
+ "LPPL-1.1",
+ "LPPL-1.2",
+ "LPPL-1.3a"
+ ],
+ [
+ "LPPL-1.0",
+ "LPPL-1.1",
+ "LPPL-1.2",
+ "LPPL-1.3c"
+ ],
+ [
+ "MPL-1.0",
+ "MPL-1.1",
+ "MPL-2.0"
+ ],
+ [
+ "MPL-1.0",
+ "MPL-1.1",
+ "MPL-2.0-no-copyleft-exception"
+ ],
+ [
+ "NPL-1.0",
+ "NPL-1.1"
+ ],
+ [
+ "OFL-1.0",
+ "OFL-1.1"
+ ],
+ [
+ "OLDAP-1.1",
+ "OLDAP-1.2",
+ "OLDAP-1.3",
+ "OLDAP-1.4",
+ "OLDAP-2.0",
+ "OLDAP-2.0.1",
+ "OLDAP-2.1",
+ "OLDAP-2.2",
+ "OLDAP-2.2.1",
+ "OLDAP-2.2.2",
+ "OLDAP-2.3",
+ "OLDAP-2.4",
+ "OLDAP-2.5",
+ "OLDAP-2.6",
+ "OLDAP-2.7",
+ "OLDAP-2.8"
+ ],
+ [
+ "OPL-1.0",
+ "OSL-1.0",
+ "OSL-1.1",
+ "OSL-2.0",
+ "OSL-2.1",
+ "OSL-3.0"
+ ],
+ [
+ "PHP-3.0",
+ "PHP-3.01"
+ ],
+ [
+ "RPL-1.1",
+ "RPL-1.5"
+ ],
+ [
+ "SGI-B-1.0",
+ "SGI-B-1.1",
+ "SGI-B-2.0"
+ ],
+ [
+ "YPL-1.0",
+ "YPL-1.1"
+ ],
+ [
+ "ZPL-1.1",
+ "ZPL-2.0",
+ "ZPL-2.1"
+ ],
+ [
+ "Zimbra-1.3",
+ "Zimbra-1.4"
+ ],
+ [
+ "bzip2-1.0.5",
+ "bzip2-1.0.6"
+ ]
+]
diff --git a/deps/npm/node_modules/spdx/source/spdx.js b/deps/npm/node_modules/spdx/source/spdx.js
new file mode 100644
index 00000000000000..ab0a4f6b1d5881
--- /dev/null
+++ b/deps/npm/node_modules/spdx/source/spdx.js
@@ -0,0 +1,161 @@
+// spdx.js
+// =======
+// SPDX License Expression Syntax parser
+
+// Validation
+// ----------
+
+// Require the generated parser.
+var parser = require('./parser.generated.js').parser;
+
// Parse an SPDX license expression string into its AST. Invalid input
// throws, since jison-generated parsers report errors by throwing.
exports.parse = function(argument) {
  return parser.parse(argument);
};
+
+var containsRepeatedSpace = /\s{2,}/;
+
+exports.valid = function(argument) {
+ if (
+ argument.trim() !== argument ||
+ containsRepeatedSpace.test(argument)
+ ) {
+ return false;
+ }
+ try {
+ parser.parse(argument);
+ return true;
+ } catch (e) {
+ // jison generates parsers that throw errors, while this function
+ // mimics `semver.valid` by returning null.
+ return null;
+ }
+};
+
+// Comparison
+// ----------
+
+var ranges = require('./ranges.json');
+
var notALicenseIdentifier = ' is not a simple license identifier';

// Build a comparator over the license orderings in ranges.json. The
// returned function parses both arguments — each must be a bare license
// identifier, otherwise it throws — then reports whether some range
// contains both and `comparison` holds for their positions in it.
var rangeComparison = function(comparison) {
  return function(first, second) {
    var parsedFirst = exports.parse(first);
    if (!parsedFirst.hasOwnProperty('license')) {
      throw new Error('"' + first + '"' + notALicenseIdentifier);
    }
    var parsedSecond = exports.parse(second);
    if (!parsedSecond.hasOwnProperty('license')) {
      throw new Error('"' + second + '"' + notALicenseIdentifier);
    }
    return ranges.some(function(range) {
      var positionOfFirst = range.indexOf(parsedFirst.license);
      var positionOfSecond = range.indexOf(parsedSecond.license);
      if (positionOfFirst < 0 || positionOfSecond < 0) {
        return false;
      }
      return comparison(positionOfFirst, positionOfSecond);
    });
  };
};
+
// True when `first` appears later than `second` in some license range.
exports.gt = rangeComparison(function(first, second) {
  return first > second;
});

// True when `first` appears earlier than `second` in some license range.
exports.lt = rangeComparison(function(first, second) {
  return first < second;
});
+
+exports.satisfies = (function() {
+ var rangesAreCompatible = function(first, second) {
+ return (
+ first.license === second.license ||
+ ranges.some(function(range) {
+ return (
+ range.indexOf(first.license) > -1 &&
+ range.indexOf(second.license)
+ );
+ })
+ );
+ };
+
+ var identifierInRange = function(identifier, range) {
+ return (
+ identifier.license === range.license ||
+ exports.gt(identifier.license, range.license)
+ );
+ };
+
+ var licensesAreCompatible = function(first, second) {
+ if (first.exception !== second.exception) {
+ return false;
+ } else if (second.hasOwnProperty('license')) {
+ if (second.hasOwnProperty('plus')) {
+ if (first.hasOwnProperty('plus')) {
+ // first+, second+
+ return rangesAreCompatible(first, second);
+ } else {
+ // first, second+
+ return identifierInRange(first, second);
+ }
+ } else {
+ if (first.hasOwnProperty('plus')) {
+ // first+, second
+ return identifierInRange(second, first);
+ } else {
+ // first, second
+ return first.license === second.license;
+ }
+ }
+ }
+ };
+
+ var recurseLeftAndRight = function(first, second) {
+ var firstConjunction = first.conjunction;
+ if (firstConjunction === 'and') {
+ return (
+ recurse(first.left, second) &&
+ recurse(first.right, second)
+ );
+ } else if (firstConjunction === 'or') {
+ return (
+ recurse(first.left, second) ||
+ recurse(first.right, second)
+ );
+ }
+ };
+
+ var recurse = function(first, second) {
+ if (first.hasOwnProperty('conjunction')) {
+ return recurseLeftAndRight(first, second);
+ } else if (second.hasOwnProperty('conjunction')) {
+ return recurseLeftAndRight(second, first);
+ } else {
+ return licensesAreCompatible(first, second);
+ }
+ };
+
+ return function(first, second) {
+ return recurse(parser.parse(first), parser.parse(second));
+ };
+})();
+
// Reference Data
// --------------

// Require the same license and exception data used by the parser, so
// consumers can enumerate valid identifiers.
exports.licenses = require('spdx-license-ids');
exports.exceptions = require('./exceptions.json');

// Version Metadata
// ----------------

// The SPDX License Expression Syntax specification version implemented
exports.specificationVersion = '2.0';

// This module's semantic version
exports.implementationVersion = '0.4.0';
diff --git a/deps/npm/node_modules/tar/LICENCE b/deps/npm/node_modules/tar/LICENCE
deleted file mode 100644
index 74489e2e2658e7..00000000000000
--- a/deps/npm/node_modules/tar/LICENCE
+++ /dev/null
@@ -1,25 +0,0 @@
-Copyright (c) Isaac Z. Schlueter
-All rights reserved.
-
-The BSD License
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
-``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
-BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
diff --git a/deps/npm/node_modules/tar/LICENSE b/deps/npm/node_modules/tar/LICENSE
new file mode 100644
index 00000000000000..019b7e40ea0568
--- /dev/null
+++ b/deps/npm/node_modules/tar/LICENSE
@@ -0,0 +1,12 @@
+The ISC License
+Copyright (c) Isaac Z. Schlueter and Contributors
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/tar/README.md b/deps/npm/node_modules/tar/README.md
index 424a2782bf7b59..cfda2ac180611c 100644
--- a/deps/npm/node_modules/tar/README.md
+++ b/deps/npm/node_modules/tar/README.md
@@ -22,7 +22,9 @@ stream.
This only works with directories, it does not work with individual files.
The optional `properties` object are used to set properties in the tar
-'Global Extended Header'.
+'Global Extended Header'. If the `fromBase` property is set to true,
+the tar will contain files relative to the path passed, and not with
+the path included.
### tar.Extract([options])
diff --git a/deps/npm/node_modules/tar/lib/extract.js b/deps/npm/node_modules/tar/lib/extract.js
index 5a4cb98c38f3cb..fe1bb976eb0ce2 100644
--- a/deps/npm/node_modules/tar/lib/extract.js
+++ b/deps/npm/node_modules/tar/lib/extract.js
@@ -88,7 +88,7 @@ inherits(Extract, tar.Parse)
Extract.prototype._streamEnd = function () {
var me = this
- if (!me._ended) me.error("unexpected eof")
+ if (!me._ended || me._entry) me.error("unexpected eof")
me._fst.end()
// my .end() is coming later.
}
diff --git a/deps/npm/node_modules/tar/lib/pack.js b/deps/npm/node_modules/tar/lib/pack.js
index 3ff14dd695100e..5a3bb95a121bdb 100644
--- a/deps/npm/node_modules/tar/lib/pack.js
+++ b/deps/npm/node_modules/tar/lib/pack.js
@@ -131,7 +131,12 @@ Pack.prototype._process = function () {
// in the tarball to use. That way we can skip a lot of extra
// work when resolving symlinks for bundled dependencies in npm.
- var root = path.dirname((entry.root || entry).path)
+ var root = path.dirname((entry.root || entry).path);
+ if (me._global && me._global.fromBase && entry.root && entry.root.path) {
+ // user set 'fromBase: true' indicating tar root should be directory itself
+ root = entry.root.path;
+ }
+
var wprops = {}
Object.keys(entry.props || {}).forEach(function (k) {
diff --git a/deps/npm/node_modules/tar/lib/parse.js b/deps/npm/node_modules/tar/lib/parse.js
index 8517c481bc6ef4..1c53d9d26d7f21 100644
--- a/deps/npm/node_modules/tar/lib/parse.js
+++ b/deps/npm/node_modules/tar/lib/parse.js
@@ -61,7 +61,7 @@ function Parse () {
// emitting "end"
Parse.prototype._streamEnd = function () {
var me = this
- if (!me._ended) me.error("unexpected eof")
+ if (!me._ended || me._entry) me.error("unexpected eof")
me.emit("end")
}
diff --git a/deps/npm/node_modules/tar/package.json b/deps/npm/node_modules/tar/package.json
index ec4d2d3798b190..f160d0bd0a8ff1 100644
--- a/deps/npm/node_modules/tar/package.json
+++ b/deps/npm/node_modules/tar/package.json
@@ -6,7 +6,7 @@
},
"name": "tar",
"description": "tar for node",
- "version": "2.0.1",
+ "version": "2.1.1",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-tar.git"
@@ -26,35 +26,15 @@
"tap": "0.x",
"mkdirp": "^0.5.0"
},
- "license": "BSD",
- "gitHead": "ce405d0b96f0fe186dd4cc68d666fabb0c59818d",
+ "license": "ISC",
+ "readme": "# node-tar\n\nTar for Node.js.\n\n[](https://nodei.co/npm/tar/)\n\n## API\n\nSee `examples/` for usage examples.\n\n### var tar = require('tar')\n\nReturns an object with `.Pack`, `.Extract` and `.Parse` methods.\n\n### tar.Pack([properties])\n\nReturns a through stream. Use\n[fstream](https://npmjs.org/package/fstream) to write files into the\npack stream and you will receive tar archive data from the pack\nstream.\n\nThis only works with directories, it does not work with individual files.\n\nThe optional `properties` object are used to set properties in the tar\n'Global Extended Header'. If the `fromBase` property is set to true,\nthe tar will contain files relative to the path passed, and not with\nthe path included.\n\n### tar.Extract([options])\n\nReturns a through stream. Write tar data to the stream and the files\nin the tarball will be extracted onto the filesystem.\n\n`options` can be:\n\n```js\n{\n path: '/path/to/extract/tar/into',\n strip: 0, // how many path segments to strip from the root when extracting\n}\n```\n\n`options` also get passed to the `fstream.Writer` instance that `tar`\nuses internally.\n\n### tar.Parse()\n\nReturns a writable stream. Write tar data to it and it will emit\n`entry` events for each entry parsed from the tarball. This is used by\n`tar.Extract`.\n",
+ "readmeFilename": "README.md",
+ "gitHead": "2cbe6c805fc5d87ce099183ed13c43faba962224",
"bugs": {
"url": "https://github.com/isaacs/node-tar/issues"
},
- "homepage": "https://github.com/isaacs/node-tar",
- "_id": "tar@2.0.1",
- "_shasum": "a1537ab0d1ce61462ce87b4eed1cd263fba5fc17",
- "_from": "tar@>=2.0.1 <2.1.0",
- "_npmVersion": "2.7.6",
- "_nodeVersion": "1.4.2",
- "_npmUser": {
- "name": "isaacs",
- "email": "i@izs.me"
- },
- "maintainers": [
- {
- "name": "isaacs",
- "email": "i@izs.me"
- },
- {
- "name": "othiym23",
- "email": "ogd@aoaioxxysz.net"
- }
- ],
- "dist": {
- "shasum": "a1537ab0d1ce61462ce87b4eed1cd263fba5fc17",
- "tarball": "http://registry.npmjs.org/tar/-/tar-2.0.1.tgz"
- },
- "directories": {},
- "_resolved": "https://registry.npmjs.org/tar/-/tar-2.0.1.tgz"
+ "homepage": "https://github.com/isaacs/node-tar#readme",
+ "_id": "tar@2.1.1",
+ "_shasum": "ac0649e135fa4546e430c7698514e1da2e8a7cc4",
+ "_from": "tar@>=2.1.1 <2.2.0"
}
diff --git a/deps/npm/node_modules/tar/test/cb-never-called-1.0.1.tgz b/deps/npm/node_modules/tar/test/cb-never-called-1.0.1.tgz
new file mode 100644
index 00000000000000..9e7014d85abe48
Binary files /dev/null and b/deps/npm/node_modules/tar/test/cb-never-called-1.0.1.tgz differ
diff --git a/deps/npm/node_modules/tar/test/error-on-broken.js b/deps/npm/node_modules/tar/test/error-on-broken.js
new file mode 100644
index 00000000000000..e484920fd9625a
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/error-on-broken.js
@@ -0,0 +1,33 @@
+var fs = require('fs')
+var path = require('path')
+var zlib = require('zlib')
+
+var tap = require('tap')
+
+var tar = require('../tar.js')
+
+var file = path.join(__dirname, 'cb-never-called-1.0.1.tgz')
+var target = path.join(__dirname, 'tmp/extract-test')
+
+tap.test('preclean', function (t) {
+ require('rimraf').sync(__dirname + '/tmp/extract-test')
+ t.pass('cleaned!')
+ t.end()
+})
+
+tap.test('extract test', function (t) {
+ var extract = tar.Extract(target)
+ var inp = fs.createReadStream(file)
+
+ inp.pipe(zlib.createGunzip()).pipe(extract)
+
+ extract.on('error', function (er) {
+ t.equal(er.message, 'unexpected eof', 'error noticed')
+ t.end()
+ })
+
+ extract.on('end', function () {
+ t.fail('shouldn\'t reach this point due to errors')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/tar/test/pack.js b/deps/npm/node_modules/tar/test/pack.js
index bf033c12987862..0f16c07bb0162e 100644
--- a/deps/npm/node_modules/tar/test/pack.js
+++ b/deps/npm/node_modules/tar/test/pack.js
@@ -830,6 +830,10 @@ tap.test("without global header", { timeout: 10000 }, function (t) {
runTest(t, false)
})
+tap.test("with from base", { timeout: 10000 }, function (t) {
+ runTest(t, true, true)
+})
+
function alphasort (a, b) {
return a === b ? 0
: a.toLowerCase() > b.toLowerCase() ? 1
@@ -839,7 +843,7 @@ function alphasort (a, b) {
}
-function runTest (t, doGH) {
+function runTest (t, doGH, doFromBase) {
var reader = Reader({ path: input
, filter: function () {
return !this.path.match(/\.(tar|hex)$/)
@@ -847,7 +851,10 @@ function runTest (t, doGH) {
, sort: alphasort
})
- var pack = Pack(doGH ? pkg : null)
+ var props = doGH ? pkg : {}
+ if(doFromBase) props.fromBase = true;
+
+ var pack = Pack(props)
var writer = Writer(target)
// skip the global header if we're not doing that.
@@ -901,6 +908,17 @@ function runTest (t, doGH) {
}
t.equal(ev, wanted[0], "event type should be "+wanted[0])
+ if(doFromBase) {
+ if(wanted[1].path.indexOf('fixtures/') && wanted[1].path.length == 100)
+ wanted[1].path = wanted[1].path.replace('fixtures/', '') + 'ccccccccc'
+
+ if(wanted[1]) wanted[1].path = wanted[1].path.replace('fixtures/', '').replace('//', '/')
+ if(wanted[1].path == '') wanted[1].path = '/'
+ if(wanted[2] && wanted[2].path) wanted[2].path = wanted[2].path.replace('fixtures', '').replace(/^\//, '')
+
+ wanted[1].linkpath = wanted[1].linkpath.replace('fixtures/', '')
+ }
+
if (ev !== wanted[0] || e.path !== wanted[1].path) {
console.error("wanted", wanted)
console.error([ev, e.props])
diff --git a/deps/npm/node_modules/which/README.md b/deps/npm/node_modules/which/README.md
index ff1eb531a77731..d5571528af303d 100644
--- a/deps/npm/node_modules/which/README.md
+++ b/deps/npm/node_modules/which/README.md
@@ -1,5 +1,34 @@
-The "which" util from npm's guts.
+# which
+
+Like the unix `which` utility.
Finds the first instance of a specified executable in the PATH
environment variable. Does not cache the results, so `hash -r` is not
needed when the PATH changes.
+
+## USAGE
+
+```javascript
+var which = require('which')
+
+// async usage
+which('node', function (er, resolvedPath) {
+ // er is returned if no "node" is found on the PATH
+ // if it is found, then the absolute path to the exec is returned
+})
+
+// sync usage
+// throws if not found
+var resolved = which.sync('node')
+
+// Pass options to override the PATH and PATHEXT environment vars.
+which('node', { path: someOtherPath }, function (er, resolved) {
+ if (er)
+ throw er
+ console.log('found at %j', resolved)
+})
+```
+
+## OPTIONS
+
+If you pass in options, then `path` and `pathExt` are relevant.
diff --git a/deps/npm/node_modules/which/node_modules/is-absolute/LICENSE b/deps/npm/node_modules/which/node_modules/is-absolute/LICENSE
new file mode 100644
index 00000000000000..904ab073b70946
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/is-absolute/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014-2015, Jon Schlinkert.Copyright (c) 2009-2015, TJ Holowaychuk.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/deps/npm/node_modules/which/node_modules/is-absolute/README.md b/deps/npm/node_modules/which/node_modules/is-absolute/README.md
new file mode 100644
index 00000000000000..2347828a3e73dd
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/is-absolute/README.md
@@ -0,0 +1,53 @@
+# is-absolute [](http://badge.fury.io/js/is-absolute) [](https://travis-ci.org/jonschlinkert/is-absolute)
+
+> Return true if a file path is absolute.
+
+Based on the `isAbsolute` utility method in [express](https://github.com/visionmedia/express).
+
+## Install with [npm](npmjs.org)
+
+```bash
+npm i is-absolute --save
+```
+
+## Usage
+
+```js
+var isAbsolute = require('is-absolute');
+console.log(isAbsolute('a/b/c.js'));
+//=> 'false';
+```
+
+## Running tests
+Install dev dependencies.
+
+```bash
+npm i -d && npm test
+```
+
+
+## Contributing
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/is-absolute/issues)
+
+
+## Other projects
+* [is-relative](https://github.com/jonschlinkert/is-relative): Returns `true` if the path appears to be relative.
+* [is-dotfile](https://github.com/regexps/is-dotfile): Return true if a file path is (or has) a dotfile.
+* [is-glob](https://github.com/jonschlinkert/is-glob): Returns `true` if the given string looks like a glob pattern.
+* [cwd](https://github.com/jonschlinkert/cwd): Node.js util for easily getting the current working directory of a project based on package.json or the given path.
+* [git-config-path](https://github.com/jonschlinkert/git-config-path): Resolve the path to the user's global .gitconfig.
+
+## Author
+
+**Jon Schlinkert**
+
++ [github/jonschlinkert](https://github.com/jonschlinkert)
++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert)
+
+## License
+Copyright (c) 2014-2015 Jon Schlinkert
+Released under the MIT license
+
+***
+
+_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on March 05, 2015._
diff --git a/deps/npm/node_modules/which/node_modules/is-absolute/index.js b/deps/npm/node_modules/which/node_modules/is-absolute/index.js
new file mode 100644
index 00000000000000..9df4d5c2406a97
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/is-absolute/index.js
@@ -0,0 +1,26 @@
+/*!
+ * is-absolute
+ *
+ * Copyright (c) 2014-2015, Jon Schlinkert.
+ * Licensed under the MIT License.
+ */
+
+'use strict';
+
+var isRelative = require('is-relative');
+
+module.exports = function isAbsolute(filepath) {
+ if ('/' === filepath[0]) {
+ return true;
+ }
+ if (':' === filepath[1] && '\\' === filepath[2]) {
+ return true;
+ }
+ // Microsoft Azure absolute filepath
+ if ('\\\\' == filepath.substring(0, 2)) {
+ return true;
+ }
+ if (!isRelative(filepath)) {
+ return true;
+ }
+};
diff --git a/deps/npm/node_modules/which/node_modules/is-absolute/node_modules/is-relative/LICENSE-MIT b/deps/npm/node_modules/which/node_modules/is-absolute/node_modules/is-relative/LICENSE-MIT
new file mode 100644
index 00000000000000..b576e8d484df60
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/is-absolute/node_modules/is-relative/LICENSE-MIT
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Jon Schlinkert
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/deps/npm/node_modules/which/node_modules/is-absolute/node_modules/is-relative/README.md b/deps/npm/node_modules/which/node_modules/is-absolute/node_modules/is-relative/README.md
new file mode 100644
index 00000000000000..5d7a2a2aabb928
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/is-absolute/node_modules/is-relative/README.md
@@ -0,0 +1,38 @@
+# is-relative [](http://badge.fury.io/js/is-relative)
+
+> Returns `true` if the path appears to be relative.
+
+## Install
+### Install with [npm](npmjs.org)
+
+```bash
+npm i is-relative --save
+```
+
+## Usage
+### [isRelative](index.js#L16)
+
+* `filepath` **{String}**: Path to test.
+* `returns`: {Boolean}
+
+```js
+var isRelative = require('is-relative');
+isRelative('README.md');
+//=> true
+```
+
+
+## Author
+
+**Jon Schlinkert**
+
++ [github/jonschlinkert](https://github.com/jonschlinkert)
++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert)
+
+## License
+Copyright (c) 2014 Jon Schlinkert
+Released under the MIT license
+
+***
+
+_This file was generated by [verb](https://github.com/assemble/verb) on November 17, 2014._
\ No newline at end of file
diff --git a/deps/npm/node_modules/which/node_modules/is-absolute/node_modules/is-relative/index.js b/deps/npm/node_modules/which/node_modules/is-absolute/node_modules/is-relative/index.js
new file mode 100644
index 00000000000000..ffc760a82a5dab
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/is-absolute/node_modules/is-relative/index.js
@@ -0,0 +1,21 @@
+'use strict';
+
+/**
+ * ```js
+ * var isRelative = require('is-relative');
+ * isRelative('README.md');
+ * //=> true
+ * ```
+ *
+ * @name isRelative
+ * @param {String} `filepath` Path to test.
+ * @return {Boolean}
+ * @api public
+ */
+
+module.exports = function isRelative(filepath) {
+ if (typeof filepath !== 'string') {
+ throw new Error('isRelative expects a string.');
+ }
+ return !/^([a-z]+:)?[\\\/]/i.test(filepath);
+};
\ No newline at end of file
diff --git a/deps/npm/node_modules/which/node_modules/is-absolute/node_modules/is-relative/package.json b/deps/npm/node_modules/which/node_modules/is-absolute/node_modules/is-relative/package.json
new file mode 100644
index 00000000000000..d582081dd157ca
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/is-absolute/node_modules/is-relative/package.json
@@ -0,0 +1,76 @@
+{
+ "name": "is-relative",
+ "description": "Returns `true` if the path appears to be relative.",
+ "version": "0.1.3",
+ "homepage": "https://github.com/jonschlinkert/is-relative",
+ "author": {
+ "name": "Jon Schlinkert",
+ "url": "https://github.com/jonschlinkert"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/jonschlinkert/is-relative.git"
+ },
+ "bugs": {
+ "url": "https://github.com/jonschlinkert/is-relative/issues"
+ },
+ "licenses": [
+ {
+ "type": "MIT",
+ "url": "https://github.com/jonschlinkert/is-relative/blob/master/LICENSE-MIT"
+ }
+ ],
+ "keywords": [
+ "absolute",
+ "check",
+ "file",
+ "filepath",
+ "is",
+ "normalize",
+ "path",
+ "path.relative",
+ "relative",
+ "resolve",
+ "slash",
+ "slashes",
+ "uri",
+ "url"
+ ],
+ "main": "index.js",
+ "files": [
+ "index.js",
+ "LICENSE-MIT"
+ ],
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "scripts": {
+ "test": "mocha -R spec"
+ },
+ "devDependencies": {
+ "mocha": "*",
+ "verb": ">= 0.2.6",
+ "verb-tag-jscomments": "^0.1.4"
+ },
+ "_id": "is-relative@0.1.3",
+ "_shasum": "905fee8ae86f45b3ec614bc3c15c869df0876e82",
+ "_from": "is-relative@>=0.1.0 <0.2.0",
+ "_npmVersion": "1.4.9",
+ "_npmUser": {
+ "name": "jonschlinkert",
+ "email": "github@sellside.com"
+ },
+ "maintainers": [
+ {
+ "name": "jonschlinkert",
+ "email": "github@sellside.com"
+ }
+ ],
+ "dist": {
+ "shasum": "905fee8ae86f45b3ec614bc3c15c869df0876e82",
+ "tarball": "http://registry.npmjs.org/is-relative/-/is-relative-0.1.3.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/is-relative/-/is-relative-0.1.3.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/which/node_modules/is-absolute/package.json b/deps/npm/node_modules/which/node_modules/is-absolute/package.json
new file mode 100644
index 00000000000000..4f954b855f11d4
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/is-absolute/package.json
@@ -0,0 +1,76 @@
+{
+ "name": "is-absolute",
+ "description": "Return true if a file path is absolute.",
+ "version": "0.1.7",
+ "homepage": "https://github.com/jonschlinkert/is-absolute",
+ "author": {
+ "name": "Jon Schlinkert",
+ "url": "https://github.com/jonschlinkert"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/jonschlinkert/is-absolute.git"
+ },
+ "bugs": {
+ "url": "https://github.com/jonschlinkert/is-absolute/issues"
+ },
+ "license": {
+ "type": "MIT",
+ "url": "https://github.com/jonschlinkert/is-absolute/blob/master/LICENSE"
+ },
+ "files": [
+ "index.js"
+ ],
+ "main": "index.js",
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "scripts": {
+ "test": "mocha"
+ },
+ "dependencies": {
+ "is-relative": "^0.1.0"
+ },
+ "devDependencies": {
+ "mocha": "*"
+ },
+ "keywords": [
+ "absolute",
+ "check",
+ "file",
+ "filepath",
+ "is",
+ "normalize",
+ "path",
+ "path.relative",
+ "relative",
+ "resolve",
+ "slash",
+ "slashes",
+ "uri",
+ "url"
+ ],
+ "gitHead": "90cca7b671620bf28b778a61fddc8a986a2e1095",
+ "_id": "is-absolute@0.1.7",
+ "_shasum": "847491119fccb5fb436217cc737f7faad50f603f",
+ "_from": "is-absolute@>=0.1.7 <0.2.0",
+ "_npmVersion": "2.5.1",
+ "_nodeVersion": "0.12.0",
+ "_npmUser": {
+ "name": "jonschlinkert",
+ "email": "github@sellside.com"
+ },
+ "maintainers": [
+ {
+ "name": "jonschlinkert",
+ "email": "github@sellside.com"
+ }
+ ],
+ "dist": {
+ "shasum": "847491119fccb5fb436217cc737f7faad50f603f",
+ "tarball": "http://registry.npmjs.org/is-absolute/-/is-absolute-0.1.7.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/is-absolute/-/is-absolute-0.1.7.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/which/package.json b/deps/npm/node_modules/which/package.json
index d2b7b166fc741a..8cffb7986e7ab6 100644
--- a/deps/npm/node_modules/which/package.json
+++ b/deps/npm/node_modules/which/package.json
@@ -6,7 +6,7 @@
},
"name": "which",
"description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
- "version": "1.0.9",
+ "version": "1.1.1",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-which.git"
@@ -16,20 +16,34 @@
"which": "./bin/which"
},
"license": "ISC",
- "gitHead": "df3d52a0ecd5f366d550e0f14d67ca4d5e621bad",
+ "dependencies": {
+ "is-absolute": "^0.1.7"
+ },
+ "devDependencies": {
+ "mkdirp": "^0.5.0",
+ "rimraf": "^2.3.3",
+ "tap": "^1.0.2"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "gitHead": "c80a08e9f8cf7a5c0f39c2e2f87f18f153b118a8",
"bugs": {
"url": "https://github.com/isaacs/node-which/issues"
},
- "homepage": "https://github.com/isaacs/node-which",
- "_id": "which@1.0.9",
- "scripts": {},
- "_shasum": "460c1da0f810103d0321a9b633af9e575e64486f",
- "_from": "which@>=1.0.9 <1.1.0",
- "_npmVersion": "2.6.0",
- "_nodeVersion": "1.1.0",
+ "homepage": "https://github.com/isaacs/node-which#readme",
+ "_id": "which@1.1.1",
+ "_shasum": "9ce512459946166e12c083f08ec073380fc8cbbb",
+ "_from": "which@>=1.1.1 <1.2.0",
+ "_npmVersion": "2.9.1",
+ "_nodeVersion": "2.0.1",
"_npmUser": {
"name": "isaacs",
- "email": "i@izs.me"
+ "email": "isaacs@npmjs.com"
+ },
+ "dist": {
+ "shasum": "9ce512459946166e12c083f08ec073380fc8cbbb",
+ "tarball": "http://registry.npmjs.org/which/-/which-1.1.1.tgz"
},
"maintainers": [
{
@@ -37,11 +51,6 @@
"email": "i@izs.me"
}
],
- "dist": {
- "shasum": "460c1da0f810103d0321a9b633af9e575e64486f",
- "tarball": "http://registry.npmjs.org/which/-/which-1.0.9.tgz"
- },
"directories": {},
- "_resolved": "https://registry.npmjs.org/which/-/which-1.0.9.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/which/-/which-1.1.1.tgz"
}
diff --git a/deps/npm/node_modules/which/test/basic.js b/deps/npm/node_modules/which/test/basic.js
new file mode 100644
index 00000000000000..189ca6d0ad5bb4
--- /dev/null
+++ b/deps/npm/node_modules/which/test/basic.js
@@ -0,0 +1,84 @@
+var t = require('tap')
+var fs = require('fs')
+var rimraf = require('rimraf')
+var mkdirp = require('mkdirp')
+var fixture = __dirname + '/fixture'
+var which = require('../which.js')
+var path = require('path')
+
+var isWindows = process.platform === 'win32' ||
+ process.env.OSTYPE === 'cygwin' ||
+ process.env.OSTYPE === 'msys'
+
+var skip = { skip: isWindows ? 'not relevant on windows' : false }
+
+t.test('setup', function (t) {
+ rimraf.sync(fixture)
+ mkdirp.sync(fixture)
+ fs.writeFileSync(fixture + '/foo.sh', 'echo foo\n')
+ t.end()
+})
+
+t.test('does not find non-executable', skip, function (t) {
+ t.plan(2)
+
+ t.test('absolute', function (t) {
+ t.plan(2)
+ which(fixture + '/foo.sh', function (er) {
+ t.isa(er, Error)
+ })
+
+ t.throws(function () {
+ which.sync(fixture + '/foo.sh')
+ })
+ })
+
+ t.test('with path', function (t) {
+ t.plan(2)
+ which('foo.sh', { path: fixture }, function (er) {
+ t.isa(er, Error)
+ })
+
+ t.throws(function () {
+ which.sync('foo.sh', { path: fixture })
+ })
+ })
+})
+
+t.test('make executable', function (t) {
+ fs.chmodSync(fixture + '/foo.sh', '0755')
+ t.end()
+})
+
+t.test('find when executable', function (t) {
+ t.plan(2)
+ var opt = { pathExt: '.sh' }
+ var expect = path.resolve(fixture, 'foo.sh').toLowerCase()
+
+ t.test('absolute', function (t) {
+ t.plan(2)
+ runTest(t)
+ })
+
+ function runTest(t) {
+ which(fixture + '/foo.sh', opt, function (er, found) {
+ if (er)
+ throw er
+ t.equal(found.toLowerCase(), expect)
+ })
+
+ var found = which.sync(fixture + '/foo.sh', opt).toLowerCase()
+ t.equal(found, expect)
+ }
+
+ t.test('with path', function (t) {
+ t.plan(2)
+ opt.path = fixture
+ runTest(t)
+ })
+})
+
+t.test('clean', function (t) {
+ rimraf.sync(fixture)
+ t.end()
+})
diff --git a/deps/npm/node_modules/which/which.js b/deps/npm/node_modules/which/which.js
index 2a45417daced20..13fc26dcfbb0ce 100644
--- a/deps/npm/node_modules/which/which.js
+++ b/deps/npm/node_modules/which/which.js
@@ -1,54 +1,83 @@
module.exports = which
which.sync = whichSync
-var path = require("path")
- , fs
- , COLON = process.platform === "win32" ? ";" : ":"
- , isExe
- , fs = require("fs")
+var isWindows = process.platform === 'win32' ||
+ process.env.OSTYPE === 'cygwin' ||
+ process.env.OSTYPE === 'msys'
-if (process.platform == "win32") {
+var path = require('path')
+var COLON = isWindows ? ';' : ':'
+var isExe
+var fs = require('fs')
+var isAbsolute = require('is-absolute')
+
+if (isWindows) {
// On windows, there is no good way to check that a file is executable
isExe = function isExe () { return true }
} else {
isExe = function isExe (mod, uid, gid) {
- //console.error(mod, uid, gid);
- //console.error("isExe?", (mod & 0111).toString(8))
var ret = (mod & 0001)
|| (mod & 0010) && process.getgid && gid === process.getgid()
- || (mod & 0010) && process.getuid && 0 === process.getuid()
|| (mod & 0100) && process.getuid && uid === process.getuid()
- || (mod & 0100) && process.getuid && 0 === process.getuid()
- //console.error("isExe?", ret)
+ || (mod & 0110) && process.getuid && 0 === process.getuid()
+
+ if (process.getgroups && (mod & 0010)) {
+ var groups = process.getgroups()
+ for (var g = 0; g < groups.length; g++) {
+ if (groups[g] === gid)
+ return true
+ }
+ }
+
return ret
}
}
+function which (cmd, opt, cb) {
+ if (typeof opt === 'function') {
+ cb = opt
+ opt = {}
+ }
+ var colon = opt.colon || COLON
+ var pathEnv = opt.path || process.env.PATH || ''
+ var pathExt = ['']
-function which (cmd, cb) {
- if (isAbsolute(cmd)) return cb(null, cmd)
- var pathEnv = (process.env.PATH || "").split(COLON)
- , pathExt = [""]
- if (process.platform === "win32") {
- pathEnv.push(process.cwd())
- pathExt = (process.env.PATHEXT || ".EXE").split(COLON)
- if (cmd.indexOf(".") !== -1) pathExt.unshift("")
+ // On windows, env.Path is common.
+ if (isWindows && !pathEnv) {
+ var k = Object.keys(process.env)
+ for (var p = 0; p < k.length; p++) {
+ if (p.toLowerCase() === 'path') {
+ pathEnv = process.env[p]
+ break
+ }
+ }
}
- //console.error("pathEnv", pathEnv)
+
+ pathEnv = pathEnv.split(colon)
+
+ if (isWindows) {
+ pathEnv.unshift(process.cwd())
+ pathExt = (opt.pathExt || process.env.PATHEXT || '.EXE').split(colon)
+ if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
+ pathExt.unshift('')
+ }
+
+ // If it's absolute, then we don't bother searching the pathenv.
+ // just check the file itself, and that's it.
+ if (isAbsolute(cmd))
+ pathEnv = ['']
+
;(function F (i, l) {
- if (i === l) return cb(new Error("not found: "+cmd))
+ if (i === l) return cb(new Error('not found: '+cmd))
var p = path.resolve(pathEnv[i], cmd)
;(function E (ii, ll) {
if (ii === ll) return F(i + 1, l)
var ext = pathExt[ii]
- //console.error(p + ext)
fs.stat(p + ext, function (er, stat) {
if (!er &&
- stat &&
stat.isFile() &&
isExe(stat.mode, stat.uid, stat.gid)) {
- //console.error("yes, exe!", p + ext)
return cb(null, p + ext)
}
return E(ii + 1, ll)
@@ -57,45 +86,52 @@ function which (cmd, cb) {
})(0, pathEnv.length)
}
-function whichSync (cmd) {
- if (isAbsolute(cmd)) return cmd
- var pathEnv = (process.env.PATH || "").split(COLON)
- , pathExt = [""]
- if (process.platform === "win32") {
- pathEnv.push(process.cwd())
- pathExt = (process.env.PATHEXT || ".EXE").split(COLON)
- if (cmd.indexOf(".") !== -1) pathExt.unshift("")
+function whichSync (cmd, opt) {
+ if (!opt)
+ opt = {}
+
+ var colon = opt.colon || COLON
+
+ var pathEnv = opt.path || process.env.PATH || ''
+ var pathExt = ['']
+
+ // On windows, env.Path is common.
+ if (isWindows && !pathEnv) {
+ var k = Object.keys(process.env)
+ for (var p = 0; p < k.length; p++) {
+ if (p.toLowerCase() === 'path') {
+ pathEnv = process.env[p]
+ break
+ }
+ }
+ }
+
+ pathEnv = pathEnv.split(colon)
+
+ if (isWindows) {
+ pathEnv.unshift(process.cwd())
+ pathExt = (opt.pathExt || process.env.PATHEXT || '.EXE').split(colon)
+ if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
+ pathExt.unshift('')
}
+
+ // If it's absolute, then we don't bother searching the pathenv.
+ // just check the file itself, and that's it.
+ if (isAbsolute(cmd))
+ pathEnv = ['']
+
for (var i = 0, l = pathEnv.length; i < l; i ++) {
var p = path.join(pathEnv[i], cmd)
for (var j = 0, ll = pathExt.length; j < ll; j ++) {
var cur = p + pathExt[j]
var stat
- try { stat = fs.statSync(cur) } catch (ex) {}
- if (stat &&
- stat.isFile() &&
- isExe(stat.mode, stat.uid, stat.gid)) return cur
+ try {
+ stat = fs.statSync(cur)
+ if (stat.isFile() && isExe(stat.mode, stat.uid, stat.gid))
+ return cur
+ } catch (ex) {}
}
}
- throw new Error("not found: "+cmd)
-}
-
-var isAbsolute = process.platform === "win32" ? absWin : absUnix
-
-function absWin (p) {
- if (absUnix(p)) return true
- // pull off the device/UNC bit from a windows path.
- // from node's lib/path.js
- var splitDeviceRe =
- /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?([\\\/])?/
- , result = splitDeviceRe.exec(p)
- , device = result[1] || ''
- , isUnc = device && device.charAt(1) !== ':'
- , isAbsolute = !!result[2] || isUnc // UNC paths are always absolute
-
- return isAbsolute
-}
-function absUnix (p) {
- return p.charAt(0) === "/" || p === ""
+ throw new Error('not found: '+cmd)
}
diff --git a/deps/npm/package.json b/deps/npm/package.json
index c3f053a7db72cf..2082ed8fbc7410 100644
--- a/deps/npm/package.json
+++ b/deps/npm/package.json
@@ -1,5 +1,5 @@
{
- "version": "2.8.3",
+ "version": "2.10.1",
"name": "npm",
"description": "a package manager for JavaScript",
"keywords": [
@@ -19,7 +19,6 @@
"url": "https://github.com/npm/npm"
},
"bugs": {
- "email": "npm-@googlegroups.com",
"url": "http://github.com/npm/npm/issues"
},
"directories": {
@@ -46,35 +45,35 @@
"columnify": "~1.5.1",
"config-chain": "~1.1.8",
"dezalgo": "~1.0.1",
- "editor": "~0.1.0",
+ "editor": "~1.0.0",
"fs-vacuum": "~1.2.5",
"fs-write-stream-atomic": "~1.0.2",
- "fstream": "~1.0.4",
+ "fstream": "~1.0.6",
"fstream-npm": "~1.0.2",
"github-url-from-git": "~1.4.0",
"github-url-from-username-repo": "~1.0.2",
- "glob": "~5.0.5",
+ "glob": "~5.0.6",
"graceful-fs": "~3.0.6",
"hosted-git-info": "~2.1.2",
"inflight": "~1.0.4",
"inherits": "~2.0.1",
"ini": "~1.3.3",
- "init-package-json": "~1.4.0",
+ "init-package-json": "~1.5.0",
"lockfile": "~1.0.0",
- "lru-cache": "~2.5.2",
- "minimatch": "~2.0.4",
- "mkdirp": "~0.5.0",
+ "lru-cache": "~2.6.3",
+ "minimatch": "~2.0.7",
+ "mkdirp": "~0.5.1",
"node-gyp": "~1.0.3",
"nopt": "~3.0.1",
"normalize-git-url": "~1.0.0",
- "normalize-package-data": "~2.0.0",
+ "normalize-package-data": "~2.1.0",
"npm-cache-filename": "~1.0.1",
"npm-install-checks": "~1.0.5",
"npm-package-arg": "~4.0.0",
- "npm-registry-client": "~6.3.2",
+ "npm-registry-client": "~6.3.3",
"npm-user-validate": "~0.1.1",
"npmlog": "~1.2.0",
- "once": "~1.3.1",
+ "once": "~1.3.2",
"opener": "~1.4.1",
"osenv": "~0.1.0",
"path-is-inside": "~1.0.0",
@@ -85,17 +84,18 @@
"realize-package-specifier": "~3.0.0",
"request": "~2.55.0",
"retry": "~0.6.1",
- "rimraf": "~2.3.2",
- "semver": "~4.3.3",
+ "rimraf": "~2.3.3",
+ "semver": "~4.3.4",
"sha": "~1.3.0",
"slide": "~1.1.6",
"sorted-object": "~1.0.0",
+ "spdx": "~0.4.0",
"strip-ansi": "~2.0.1",
- "tar": "~2.0.1",
+ "tar": "~2.1.1",
"text-table": "~0.2.0",
"uid-number": "0.0.6",
"umask": "~1.1.0",
- "which": "~1.0.9",
+ "which": "~1.1.1",
"wrappy": "~1.0.1",
"write-file-atomic": "~1.1.0"
},
@@ -159,6 +159,7 @@
"sha",
"slide",
"sorted-object",
+ "spdx",
"strip-ansi",
"tar",
"text-table",
@@ -169,14 +170,15 @@
"write-file-atomic"
],
"devDependencies": {
+ "deep-equal": "~1.0.0",
"marked": "~0.3.3",
"marked-man": "~0.1.4",
- "nock": "~1.6.0",
+ "nock": "~2.0.1",
"npm-registry-couchapp": "~2.6.7",
"npm-registry-mock": "~1.0.0",
"require-inject": "~1.2.0",
"sprintf-js": "~1.0.2",
- "tap": "~0.7.1"
+ "tap": "~1.0.4"
},
"scripts": {
"test-legacy": "node ./test/run.js",
diff --git a/deps/npm/test/fixtures/config/userconfig-with-gc b/deps/npm/test/fixtures/config/userconfig-with-gc
index 7268fcb3c611f9..824d492bce2730 100644
--- a/deps/npm/test/fixtures/config/userconfig-with-gc
+++ b/deps/npm/test/fixtures/config/userconfig-with-gc
@@ -1,4 +1,4 @@
-globalconfig=/Users/ogd/Documents/projects/npm/npm/test/fixtures/config/globalconfig
+globalconfig=/Users/rebecca/code/npm-release/test/fixtures/config/globalconfig
email=i@izs.me
env-thing=asdf
init.author.name=Isaac Z. Schlueter
diff --git a/deps/npm/test/fixtures/github-com-BryanDonovan-dummy-npm-bar.git.tar.gz b/deps/npm/test/fixtures/github-com-BryanDonovan-dummy-npm-bar.git.tar.gz
new file mode 100644
index 00000000000000..fb27e17f4812fa
Binary files /dev/null and b/deps/npm/test/fixtures/github-com-BryanDonovan-dummy-npm-bar.git.tar.gz differ
diff --git a/deps/npm/test/fixtures/github-com-BryanDonovan-dummy-npm-buzz.git.tar.gz b/deps/npm/test/fixtures/github-com-BryanDonovan-dummy-npm-buzz.git.tar.gz
new file mode 100644
index 00000000000000..0ea851fbec946a
Binary files /dev/null and b/deps/npm/test/fixtures/github-com-BryanDonovan-dummy-npm-buzz.git.tar.gz differ
diff --git a/deps/npm/test/fixtures/github-com-BryanDonovan-dummy-npm-foo.git.tar.gz b/deps/npm/test/fixtures/github-com-BryanDonovan-dummy-npm-foo.git.tar.gz
new file mode 100644
index 00000000000000..8e1abc6d05e704
Binary files /dev/null and b/deps/npm/test/fixtures/github-com-BryanDonovan-dummy-npm-foo.git.tar.gz differ
diff --git a/deps/npm/test/fixtures/github-com-BryanDonovan-npm-git-test.git.tar.gz b/deps/npm/test/fixtures/github-com-BryanDonovan-npm-git-test.git.tar.gz
new file mode 100644
index 00000000000000..7a4b9e81317312
Binary files /dev/null and b/deps/npm/test/fixtures/github-com-BryanDonovan-npm-git-test.git.tar.gz differ
diff --git a/deps/npm/test/tap/404-private-registry-scoped.js b/deps/npm/test/tap/404-private-registry-scoped.js
new file mode 100644
index 00000000000000..681fff05e8ec5f
--- /dev/null
+++ b/deps/npm/test/tap/404-private-registry-scoped.js
@@ -0,0 +1,22 @@
+var nock = require('nock')
+var test = require('tap').test
+var npm = require('../../')
+var addNamed = require('../../lib/cache/add-named')
+
+test('scoped package names not mangled on error with non-root registry', function test404 (t) {
+ nock('http://localhost:1337')
+ .get('/registry/@scope%2ffoo')
+ .reply(404, {
+ error: 'not_found',
+ reason: 'document not found'
+ })
+
+ npm.load({registry: 'http://localhost:1337/registry', global: true}, function () {
+ addNamed('@scope/foo', '*', null, function checkError (err) {
+ t.ok(err, 'should error')
+ t.equal(err.message, '404 Not Found: @scope/foo', 'should have package name in error')
+ t.equal(err.pkgid, '@scope/foo', 'err.pkgid should match package name')
+ t.end()
+ })
+ })
+})
diff --git a/deps/npm/test/tap/404-private-registry.js b/deps/npm/test/tap/404-private-registry.js
new file mode 100644
index 00000000000000..9e05f483ebe865
--- /dev/null
+++ b/deps/npm/test/tap/404-private-registry.js
@@ -0,0 +1,22 @@
+var nock = require('nock')
+var test = require('tap').test
+var npm = require('../../')
+var addNamed = require('../../lib/cache/add-named')
+
+test('package names not mangled on error with non-root registry', function test404 (t) {
+ nock('http://localhost:1337')
+ .get('/registry/foo')
+ .reply(404, {
+ error: 'not_found',
+ reason: 'document not found'
+ })
+
+ npm.load({registry: 'http://localhost:1337/registry', global: true}, function () {
+ addNamed('foo', '*', null, function checkError (err) {
+ t.ok(err, 'should error')
+ t.equal(err.message, '404 Not Found: foo', 'should have package name in error')
+ t.equal(err.pkgid, 'foo', 'err.pkgid should match package name')
+ t.end()
+ })
+ })
+})
diff --git a/deps/npm/test/tap/access.js b/deps/npm/test/tap/access.js
index aa97b1a203f833..5ac1d613841301 100644
--- a/deps/npm/test/tap/access.js
+++ b/deps/npm/test/tap/access.js
@@ -83,6 +83,102 @@ test("npm access on named package", function (t) {
)
})
+test("npm change access on unscoped package", function (t) {
+ common.npm(
+ [
+ "access",
+ "restricted", "yargs",
+ "--registry", common.registry
+ ],
+ { cwd : pkg },
+ function (er, code, stdout, stderr) {
+ t.ok(code, 'exited with Error')
+ t.ok(stderr.match(/you can't change the access level of unscoped packages/))
+ t.end()
+ }
+ )
+})
+
+test('npm access add', function (t) {
+ common.npm(
+ [
+ "access",
+ "add", "@scoped/another",
+ "--registry", common.registry
+ ],
+ { cwd : pkg },
+ function (er, code, stdout, stderr) {
+ t.ok(code, 'exited with Error')
+ t.ok(stderr.match(/npm access add isn't implemented yet!/))
+ t.end()
+ }
+ )
+})
+
+test('npm access rm', function (t) {
+ common.npm(
+ [
+ "access",
+ "rm", "@scoped/another",
+ "--registry", common.registry
+ ],
+ { cwd : pkg },
+ function (er, code, stdout, stderr) {
+ t.ok(code, 'exited with Error')
+ t.ok(stderr.match(/npm access rm isn't implemented yet!/))
+ t.end()
+ }
+ )
+})
+
+test('npm access ls', function (t) {
+ common.npm(
+ [
+ "access",
+ "ls", "@scoped/another",
+ "--registry", common.registry
+ ],
+ { cwd : pkg },
+ function (er, code, stdout, stderr) {
+ t.ok(code, 'exited with Error')
+ t.ok(stderr.match(/npm access ls isn't implemented yet!/))
+ t.end()
+ }
+ )
+})
+
+test('npm access edit', function (t) {
+ common.npm(
+ [
+ "access",
+ "edit", "@scoped/another",
+ "--registry", common.registry
+ ],
+ { cwd : pkg },
+ function (er, code, stdout, stderr) {
+ t.ok(code, 'exited with Error')
+ t.ok(stderr.match(/npm access edit isn't implemented yet!/))
+ t.end()
+ }
+ )
+})
+
+test('npm access blerg', function (t) {
+ common.npm(
+ [
+ "access",
+ "blerg", "@scoped/another",
+ "--registry", common.registry
+ ],
+ { cwd : pkg },
+ function (er, code, stdout, stderr) {
+ t.ok(code, 'exited with Error')
+ t.ok(stderr.match(/Usage:/))
+ t.end()
+ }
+ )
+})
+
test("cleanup", function (t) {
t.pass("cleaned up")
rimraf.sync(pkg)
diff --git a/deps/npm/test/tap/add-remote-git-fake-windows.js b/deps/npm/test/tap/add-remote-git-fake-windows.js
index 0e0d539fb0599c..c9c9dd446b9e1a 100644
--- a/deps/npm/test/tap/add-remote-git-fake-windows.js
+++ b/deps/npm/test/tap/add-remote-git-fake-windows.js
@@ -16,6 +16,20 @@ var daemon
var daemonPID
var git
+var pjParent = JSON.stringify({
+ name: 'parent',
+ version: '1.2.3',
+ dependencies: {
+ child: 'git://localhost:1233/child.git'
+ }
+}, null, 2) + '\n'
+
+var pjChild = JSON.stringify({
+ name: 'child',
+ version: '1.0.3'
+}, null, 2) + '\n'
+
+
test('setup', function (t) {
bootstrap()
setup(function (er, r) {
@@ -53,19 +67,6 @@ test('clean', function (t) {
process.kill(daemonPID)
})
-var pjParent = JSON.stringify({
- name: 'parent',
- version: '1.2.3',
- dependencies: {
- child: 'git://localhost:1233/child.git'
- }
-}, null, 2) + '\n'
-
-var pjChild = JSON.stringify({
- name: 'child',
- version: '1.0.3'
-}, null, 2) + '\n'
-
function bootstrap () {
rimraf.sync(pkg)
mkdirp.sync(pkg)
diff --git a/deps/npm/test/tap/add-remote-git-file.js b/deps/npm/test/tap/add-remote-git-file.js
index b2a7be9039b355..673be4c2854ef4 100644
--- a/deps/npm/test/tap/add-remote-git-file.js
+++ b/deps/npm/test/tap/add-remote-git-file.js
@@ -16,6 +16,11 @@ var repo = resolve(__dirname, 'add-remote-git-file-repo')
var git
var cloneURL = 'git+file://' + resolve(pkg, 'child.git')
+var pjChild = JSON.stringify({
+ name: 'child',
+ version: '1.0.3'
+}, null, 2) + '\n'
+
test('setup', function (t) {
bootstrap()
setup(function (er, r) {
@@ -45,11 +50,6 @@ test('clean', function (t) {
t.end()
})
-var pjChild = JSON.stringify({
- name: 'child',
- version: '1.0.3'
-}, null, 2) + '\n'
-
function bootstrap () {
cleanup()
mkdirp.sync(pkg)
diff --git a/deps/npm/test/tap/add-remote-git-shrinkwrap.js b/deps/npm/test/tap/add-remote-git-shrinkwrap.js
index 94951e9a9d8639..f2982355e14b12 100644
--- a/deps/npm/test/tap/add-remote-git-shrinkwrap.js
+++ b/deps/npm/test/tap/add-remote-git-shrinkwrap.js
@@ -16,6 +16,19 @@ var daemon
var daemonPID
var git
+var pjParent = JSON.stringify({
+ name: 'parent',
+ version: '1.2.3',
+ dependencies: {
+ 'child': 'git://localhost:1235/child.git#master'
+ }
+}, null, 2) + '\n'
+
+var pjChild = JSON.stringify({
+ name: 'child',
+ version: '1.0.3'
+}, null, 2) + '\n'
+
test('setup', function (t) {
bootstrap()
setup(function (er, r) {
@@ -88,19 +101,6 @@ test('clean', function (t) {
process.kill(daemonPID)
})
-var pjParent = JSON.stringify({
- name: 'parent',
- version: '1.2.3',
- dependencies: {
- 'child': 'git://localhost:1235/child.git#master'
- }
-}, null, 2) + '\n'
-
-var pjChild = JSON.stringify({
- name: 'child',
- version: '1.0.3'
-}, null, 2) + '\n'
-
function bootstrap () {
mkdirp.sync(pkg)
fs.writeFileSync(resolve(pkg, 'package.json'), pjParent)
diff --git a/deps/npm/test/tap/add-remote-git.js b/deps/npm/test/tap/add-remote-git.js
index 269d0cac7d8e2c..e33d09c1a13473 100644
--- a/deps/npm/test/tap/add-remote-git.js
+++ b/deps/npm/test/tap/add-remote-git.js
@@ -16,6 +16,19 @@ var daemon
var daemonPID
var git
+var pjParent = JSON.stringify({
+ name: 'parent',
+ version: '1.2.3',
+ dependencies: {
+ child: 'git://localhost:1234/child.git'
+ }
+}, null, 2) + '\n'
+
+var pjChild = JSON.stringify({
+ name: 'child',
+ version: '1.0.3'
+}, null, 2) + '\n'
+
test('setup', function (t) {
bootstrap()
setup(function (er, r) {
@@ -47,19 +60,6 @@ test('clean', function (t) {
process.kill(daemonPID)
})
-var pjParent = JSON.stringify({
- name: 'parent',
- version: '1.2.3',
- dependencies: {
- child: 'git://localhost:1234/child.git'
- }
-}, null, 2) + '\n'
-
-var pjChild = JSON.stringify({
- name: 'child',
- version: '1.0.3'
-}, null, 2) + '\n'
-
function bootstrap () {
mkdirp.sync(pkg)
fs.writeFileSync(resolve(pkg, 'package.json'), pjParent)
diff --git a/deps/npm/test/tap/bin.js b/deps/npm/test/tap/bin.js
new file mode 100644
index 00000000000000..ee4e1ff28c4827
--- /dev/null
+++ b/deps/npm/test/tap/bin.js
@@ -0,0 +1,17 @@
+var path = require("path")
+var test = require("tap").test
+var common = require("../common-tap.js")
+var opts = { cwd: __dirname }
+var binDir = "../../node_modules/.bin"
+var fixture = path.resolve(__dirname, binDir)
+
+test('npm bin', function (t) {
+ common.npm(["bin"], opts, function (err, code, stdout, stderr) {
+ t.ifError(err, "bin ran without issue")
+ t.notOk(stderr, "should have no stderr")
+ t.equal(code, 0, "exit ok")
+ var res = path.resolve(stdout)
+ t.equal(res, fixture + "\n")
+ t.end()
+ })
+})
diff --git a/deps/npm/test/tap/builtin-config.js b/deps/npm/test/tap/builtin-config.js
index 75acd2be276216..d92551ed6ea227 100644
--- a/deps/npm/test/tap/builtin-config.js
+++ b/deps/npm/test/tap/builtin-config.js
@@ -32,6 +32,7 @@ test("setup", function (t) {
test("install npm into first folder", function (t) {
var args = ["install", npm, "-g",
"--prefix=" + folder + "/first",
+ "--ignore-scripts",
"--cache=" + folder + "/cache",
"--no-spin",
"--loglevel=silent",
diff --git a/deps/npm/test/tap/bundled-dependencies-nonarray.js b/deps/npm/test/tap/bundled-dependencies-nonarray.js
index 938aa629a5b287..bff0522d03122e 100644
--- a/deps/npm/test/tap/bundled-dependencies-nonarray.js
+++ b/deps/npm/test/tap/bundled-dependencies-nonarray.js
@@ -13,6 +13,22 @@ var dir = path.resolve(__dirname, 'bundleddependencies')
var pkg = path.resolve(dir, 'pkg-with-bundled')
var dep = path.resolve(dir, 'a-bundled-dep')
+var pj = JSON.stringify({
+ name: 'pkg-with-bundled',
+ version: '1.0.0',
+ dependencies: {
+ 'a-bundled-dep': 'file:../a-bundled-dep'
+ },
+ bundledDependencies: {
+ 'a-bundled-dep': 'file:../a-bundled-dep'
+ }
+}, null, 2) + '\n'
+
+var pjDep = JSON.stringify({
+ name: 'a-bundled-dep',
+ version: '2.0.0'
+}, null, 2) + '\n'
+
test('setup', function (t) {
bootstrap()
t.end()
@@ -45,22 +61,6 @@ test('cleanup', function (t) {
t.end()
})
-var pj = JSON.stringify({
- name: 'pkg-with-bundled',
- version: '1.0.0',
- dependencies: {
- 'a-bundled-dep': 'file:../a-bundled-dep'
- },
- bundledDependencies: {
- 'a-bundled-dep': 'file:../a-bundled-dep'
- }
-}, null, 2) + '\n'
-
-var pjDep = JSON.stringify({
- name: 'a-bundled-dep',
- version: '2.0.0'
-}, null, 2) + '\n'
-
function bootstrap () {
mkdirp.sync(dir)
diff --git a/deps/npm/test/tap/cache-add-unpublished.js b/deps/npm/test/tap/cache-add-unpublished.js
index 46f0db232eb262..fe26929fce23ff 100644
--- a/deps/npm/test/tap/cache-add-unpublished.js
+++ b/deps/npm/test/tap/cache-add-unpublished.js
@@ -1,12 +1,34 @@
var common = require("../common-tap.js")
var test = require("tap").test
+var mr = require("npm-registry-mock")
test("cache add", function (t) {
- common.npm(["cache", "add", "superfoo"], {}, function (er, c, so, se) {
- if (er) throw er
- t.ok(c, "got non-zero exit code")
- t.equal(so, "", "nothing printed to stdout")
- t.similar(se, /404 Not Found: superfoo/, "got expected error")
- t.end()
+ setup(function (er, s) {
+ if (er) {
+ throw er
+ }
+ common.npm([
+ "cache",
+ "add",
+ "superfoo",
+ "--registry=http://localhost:1337/"
+ ], {}, function (er, c, so, se) {
+ if (er) throw er
+ t.ok(c, "got non-zero exit code")
+ t.equal(so, "", "nothing printed to stdout")
+ t.similar(se, /404 Not Found: superfoo/, "got expected error")
+ s.close()
+ t.end()
+ })
})
})
+
+function setup (cb) {
+ var s = require("http").createServer(function (req, res) {
+ res.statusCode = 404
+ res.end("{\"error\":\"not_found\"}\n")
+ })
+ s.listen(1337, function () {
+ cb(null, s)
+ })
+}
diff --git a/deps/npm/test/tap/config-meta.js b/deps/npm/test/tap/config-meta.js
index 5e0c1b7e7c5300..3da27a872b3183 100644
--- a/deps/npm/test/tap/config-meta.js
+++ b/deps/npm/test/tap/config-meta.js
@@ -37,7 +37,7 @@ test("get files", function (t) {
})
files.forEach(function (f) {
try {
- var s = fs.statSync(f)
+ var s = fs.lstatSync(f)
} catch (er) {
return
}
diff --git a/deps/npm/test/tap/dedupe-scoped.js b/deps/npm/test/tap/dedupe-scoped.js
index dacf405fcfef23..a093e1f8c064bf 100644
--- a/deps/npm/test/tap/dedupe-scoped.js
+++ b/deps/npm/test/tap/dedupe-scoped.js
@@ -11,40 +11,6 @@ var modules = join(pkg, 'node_modules')
var EXEC_OPTS = { cwd: pkg }
-test('setup', function (t) {
- setup()
- t.end()
-})
-
-// we like the cars
-function ltrimm (l) { return l.trim() }
-
-test('dedupe finds the common scoped modules and moves it up one level', function (t) {
- common.npm(
- [
- 'find-dupes' // I actually found a use for this command!
- ],
- EXEC_OPTS,
- function (err, code, stdout, stderr) {
- t.ifError(err, 'successful dry run against fake install')
- t.notOk(code, 'npm ran without issue')
- t.notOk(stderr, 'npm printed no errors')
- t.same(
- stdout.trim().split('\n').map(ltrimm),
- [prolog].concat(body).map(ltrimm),
- 'got expected output'
- )
-
- t.end()
- }
- )
-})
-
-test('cleanup', function (t) {
- cleanup()
- t.end()
-})
-
var prolog = 'dedupe@0.0.0 ' + pkg
var body = function () {/*
├─┬ first@1.0.0
@@ -95,6 +61,41 @@ var secondUnique = {
'version': '1.2.0'
}
+
+test('setup', function (t) {
+ setup()
+ t.end()
+})
+
+// we like the cars
+function ltrimm (l) { return l.trim() }
+
+test('dedupe finds the common scoped modules and moves it up one level', function (t) {
+ common.npm(
+ [
+ 'find-dupes' // I actually found a use for this command!
+ ],
+ EXEC_OPTS,
+ function (err, code, stdout, stderr) {
+ t.ifError(err, 'successful dry run against fake install')
+ t.notOk(code, 'npm ran without issue')
+ t.notOk(stderr, 'npm printed no errors')
+ t.same(
+ stdout.trim().split('\n').map(ltrimm),
+ [prolog].concat(body).map(ltrimm),
+ 'got expected output'
+ )
+
+ t.end()
+ }
+ )
+})
+
+test('cleanup', function (t) {
+ cleanup()
+ t.end()
+})
+
function setup (cb) {
cleanup()
diff --git a/deps/npm/test/tap/gently-rm-overeager.js b/deps/npm/test/tap/gently-rm-overeager.js
index 7284d3e3685fe0..35f46cc2195280 100644
--- a/deps/npm/test/tap/gently-rm-overeager.js
+++ b/deps/npm/test/tap/gently-rm-overeager.js
@@ -13,6 +13,14 @@ var EXEC_OPTS = {
cwd : pkg
}
+var fixture = {
+ name: "@test/whoops",
+ version: "1.0.0",
+ scripts: {
+ postinstall: "echo \"nope\" && exit 1"
+ }
+}
+
test("setup", function (t) {
cleanup()
setup()
@@ -40,14 +48,6 @@ test("cleanup", function (t) {
})
-var fixture = {
- name: "@test/whoops",
- version: "1.0.0",
- scripts: {
- postinstall: "echo \"nope\" && exit 1"
- }
-}
-
function cleanup () {
rimraf.sync(pkg)
rimraf.sync(dep)
diff --git a/deps/npm/test/tap/gently-rm-symlink.js b/deps/npm/test/tap/gently-rm-symlink.js
index ff1524b04ac3d5..d69b62e5b271bd 100644
--- a/deps/npm/test/tap/gently-rm-symlink.js
+++ b/deps/npm/test/tap/gently-rm-symlink.js
@@ -15,6 +15,17 @@ var EXEC_OPTS = {
cwd : pkg
}
+
+var index = "module.exports = function () { console.log('whoop whoop') }"
+
+var fixture = {
+ name: "@test/linked",
+ version: "1.0.0",
+ bin: {
+ linked: "./index.js"
+ }
+}
+
test("setup", function (t) {
cleanup()
setup()
@@ -72,17 +83,6 @@ test("cleanup", function (t) {
t.end()
})
-
-var index = "module.exports = function () { console.log('whoop whoop') }"
-
-var fixture = {
- name: "@test/linked",
- version: "1.0.0",
- bin: {
- linked: "./index.js"
- }
-}
-
function verify (t, stdout) {
var binPath = resolve(lnk, "bin", "linked")
var pkgPath = resolve(lnk, "lib", "node_modules", "@test", "linked")
diff --git a/deps/npm/test/tap/git-dependency-install-link.js b/deps/npm/test/tap/git-dependency-install-link.js
index 92938b3426fdd9..cbb256d983d0f0 100644
--- a/deps/npm/test/tap/git-dependency-install-link.js
+++ b/deps/npm/test/tap/git-dependency-install-link.js
@@ -26,6 +26,20 @@ var EXEC_OPTS = {
cache: cache
}
+var pjParent = JSON.stringify({
+ name: 'parent',
+ version: '1.2.3',
+ dependencies: {
+ 'child': 'git://localhost:1234/child.git'
+ }
+}, null, 2) + '\n'
+
+var pjChild = JSON.stringify({
+ name: 'child',
+ version: '1.0.3'
+}, null, 2) + '\n'
+
+
test('setup', function (t) {
bootstrap()
setup(function (er, r) {
@@ -93,19 +107,6 @@ test('clean', function (t) {
process.kill(daemonPID)
})
-var pjParent = JSON.stringify({
- name: 'parent',
- version: '1.2.3',
- dependencies: {
- 'child': 'git://localhost:1234/child.git'
- }
-}, null, 2) + '\n'
-
-var pjChild = JSON.stringify({
- name: 'child',
- version: '1.0.3'
-}, null, 2) + '\n'
-
function bootstrap () {
rimraf.sync(repo)
rimraf.sync(pkg)
diff --git a/deps/npm/test/tap/git-npmignore.js b/deps/npm/test/tap/git-npmignore.js
index 6a703f0cf6f8f1..5e915a706faea7 100644
--- a/deps/npm/test/tap/git-npmignore.js
+++ b/deps/npm/test/tap/git-npmignore.js
@@ -27,6 +27,19 @@ var EXEC_OPTS = {
cwd : pkg
}
+var gitignore = "node_modules/\n"
+var npmignore = "t.js\n"
+
+var a = "console.log('hi');"
+var t = "require('tap').test(function (t) { t.pass('I am a test!'); t.end(); });"
+var fixture = {
+ "name" : "gitch",
+ "version" : "1.0.0",
+ "private" : true,
+ "main" : "a.js"
+}
+
+
test("setup", function (t) {
setup(function (er) {
t.ifError(er, "setup ran OK")
@@ -85,18 +98,6 @@ function packInstallTest (spec, t) {
)
}
-var gitignore = "node_modules/\n"
-var npmignore = "t.js\n"
-
-var a = "console.log('hi');"
-var t = "require('tap').test(function (t) { t.pass('I am a test!'); t.end(); });"
-var fixture = {
- "name" : "gitch",
- "version" : "1.0.0",
- "private" : true,
- "main" : "a.js"
-}
-
function cleanup () {
process.chdir(tmpdir())
rimraf.sync(pkg)
diff --git a/deps/npm/test/tap/git-races.js b/deps/npm/test/tap/git-races.js
new file mode 100644
index 00000000000000..6bbfe78bd79ca9
--- /dev/null
+++ b/deps/npm/test/tap/git-races.js
@@ -0,0 +1,212 @@
+var execFile = require('child_process').execFile
+var path = require('path')
+var zlib = require('zlib')
+
+var asyncMap = require('slide').asyncMap
+var deepEqual = require('deep-equal')
+var fs = require('graceful-fs')
+var mkdirp = require('mkdirp')
+var once = require('once')
+var requireInject = require('require-inject')
+var rimraf = require('rimraf')
+var tar = require('tar')
+var test = require('tap').test
+var tmpdir = require('osenv').tmpdir
+var which = require('which')
+
+var wd = path.resolve(tmpdir(), 'git-races')
+var fixtures = path.resolve(__dirname, '../fixtures')
+var testcase = 'github-com-BryanDonovan-npm-git-test'
+var testcase_git = path.resolve(wd, testcase + '.git')
+var testcase_path = path.resolve(wd, testcase)
+var testcase_tgz = path.resolve(fixtures, testcase + '.git.tar.gz')
+
+var testtarballs = []
+var testrepos = {}
+var testurls = {}
+
+/*
+This test is specifically for #7202, where the bug was if you tried installing multiple git urls that
+pointed at the same repo but had different committishes, you'd sometimes get the wrong version.
+The test cases, provided by @BryanDonovan, have a dependency tree like this:
+
+ top
+ bar#4.0.0
+ buzz#3.0.0
+ foo#3.0.0
+ buzz#3.0.0
+ foo#4.0.0
+ buzz#2.0.0
+
+But what would happen is that buzz#2.0.0 would end up installed under bar#4.0.0.
+
+bar#4.0.0 shouldn't have gotten its own copy of buzz, and if it did, it should've been buzz#3.0.0
+*/
+
+;['bar', 'foo', 'buzz'].forEach(function (name) {
+ var mockurl = 'ssh://git@github.com/BryanDonovan/dummy-npm-' + name + '.git'
+ var realrepo = path.resolve(wd, 'github-com-BryanDonovan-dummy-npm-' + name + '.git')
+ var tgz = path.resolve(fixtures, 'github-com-BryanDonovan-dummy-npm-' + name + '.git.tar.gz')
+
+ testrepos[mockurl] = realrepo
+ testtarballs.push(tgz)
+})
+
+function cleanup () {
+ process.chdir(tmpdir())
+ rimraf.sync(wd)
+}
+
+var npm = requireInject.installGlobally('../../lib/npm.js', {
+ 'child_process': {
+ 'execFile': function (cmd, args, options, cb) {
+ // If it's a clone we swap any requests for any of the urls we're mocking
+ // with the path to the bare repo
+ if (args[0] === 'clone') {
+ var m2 = args.length - 2
+ var m1 = args.length - 1
+ if (testrepos[args[m2]]) {
+ testurls[args[m1]] = args[m2]
+ args[m2] = testrepos[args[m2]]
+ }
+ execFile(cmd, args, options, cb)
+ // here, we intercept npm validating the remote origin url on one of the
+ // clones we've done previously and return the original url that was requested
+ } else if (args[0] === 'config' && args[1] === '--get' && args[2] === 'remote.origin.url') {
+ process.nextTick(function () {
+ cb(null, testurls[options.cwd], '')
+ })
+ } else {
+ execFile(cmd, args, options, cb)
+ }
+ }
+ }
+})
+
+function extract (tarball, target, cb) {
+ cb = once(cb)
+ fs.createReadStream(tarball).on('error', function (er) { cb(er) })
+ .pipe(zlib.createGunzip()).on('error', function (er) { cb(er) })
+ .pipe(tar.Extract({path: target})).on('error', function (er) { cb(er) })
+ .on('end', function () {
+ cb()
+ })
+}
+
+// Copied from lib/utils/git, because we need to use
+// it before calling npm.load and lib/utils/git uses npm.js
+// which doesn't allow that. =( =(
+
+function prefixGitArgs () {
+ return process.platform === 'win32' ? ['-c', 'core.longpaths=true'] : []
+}
+
+var gitcmd
+
+function execGit (args, options, cb) {
+ var fullArgs = prefixGitArgs().concat(args || [])
+ return execFile(gitcmd, fullArgs, options, cb)
+}
+
+function gitWhichAndExec (args, options, cb) {
+ if (gitcmd) return execGit(args, options, cb)
+
+ which('git', function (err, pathtogit) {
+ if (err) {
+ err.code = 'ENOGIT'
+ return cb(err)
+ }
+ gitcmd = pathtogit
+
+ execGit(args, options, cb)
+ })
+}
+
+function andClone (gitdir, repodir, cb) {
+ return function (er) {
+ if (er) return cb(er)
+ gitWhichAndExec(['clone', gitdir, repodir], {}, cb)
+ }
+}
+
+function setup (cb) {
+ cleanup()
+ mkdirp.sync(wd)
+
+ extract(testcase_tgz, wd, andClone(testcase_git, testcase_path, andExtractPackages))
+
+ function andExtractPackages (er) {
+ if (er) return cb(er)
+ asyncMap(testtarballs, function (tgz, done) {
+ extract(tgz, wd, done)
+ }, andChdir)
+ }
+ function andChdir (er) {
+ if (er) return cb(er)
+ process.chdir(testcase_path)
+ andLoadNpm()
+ }
+ function andLoadNpm () {
+ var opts = {
+ cache: path.resolve(wd, 'cache')
+ }
+ npm.load(opts, cb)
+ }
+}
+
+// there are two (sic) valid trees that can result; we don't care which one we
+// get in npm@2
+var oneTree = [
+ 'npm-git-test@1.0.0', [
+ ['dummy-npm-bar@4.0.0', [
+ ['dummy-npm-foo@3.0.0', []]
+ ]],
+ ['dummy-npm-buzz@3.0.0', []],
+ ['dummy-npm-foo@4.0.0', [
+ ['dummy-npm-buzz@2.0.0', []]
+ ]]
+ ]
+]
+var otherTree = [
+ 'npm-git-test@1.0.0', [
+ ['dummy-npm-bar@4.0.0', [
+ ['dummy-npm-buzz@3.0.0', []],
+ ['dummy-npm-foo@3.0.0', []]
+ ]],
+ ['dummy-npm-buzz@3.0.0', []],
+ ['dummy-npm-foo@4.0.0', [
+ ['dummy-npm-buzz@2.0.0', []]
+ ]]
+ ]
+]
+
+function toSimple (tree) {
+ var deps = []
+ Object.keys(tree.dependencies || {}).forEach(function (dep) {
+ deps.push(toSimple(tree.dependencies[dep]))
+ })
+ return [ tree['name'] + '@' + tree['version'], deps ]
+}
+
+test('setup', function (t) {
+ setup(function (er) {
+ t.ifError(er, 'setup ran OK')
+ t.end()
+ })
+})
+
+test('correct versions are installed for git dependency', function (t) {
+ t.plan(3)
+ t.comment('test for https://github.com/npm/npm/issues/7202')
+ npm.commands.install([], function (er) {
+ t.ifError(er, 'installed OK')
+ npm.commands.ls([], true, function (er, result) {
+ t.ifError(er, 'ls OK')
+ var simplified = toSimple(result)
+ t.ok(
+ deepEqual(simplified, oneTree) || deepEqual(simplified, otherTree),
+ 'install tree is correct'
+ )
+ })
+ })
+})
diff --git a/deps/npm/test/tap/graceful-restart.js b/deps/npm/test/tap/graceful-restart.js
index bd1f3114418597..53264748056507 100644
--- a/deps/npm/test/tap/graceful-restart.js
+++ b/deps/npm/test/tap/graceful-restart.js
@@ -10,36 +10,6 @@ var common = require('../common-tap.js')
var pkg = resolve(__dirname, 'graceful-restart')
-test('setup', function (t) {
- bootstrap()
- t.end()
-})
-
-test('graceless restart', function (t) {
- fs.writeFileSync(resolve(pkg, 'package.json'), pjGraceless)
- createChild(['run-script', 'restart'], function (err, code, out) {
- t.ifError(err, 'restart finished successfully')
- t.equal(code, 0, 'npm run-script exited with code')
- t.equal(out, outGraceless, 'expected all scripts to run')
- t.end()
- })
-})
-
-test('graceful restart', function (t) {
- fs.writeFileSync(resolve(pkg, 'package.json'), pjGraceful)
- createChild(['run-script', 'restart'], function (err, code, out) {
- t.ifError(err, 'restart finished successfully')
- t.equal(code, 0, 'npm run-script exited with code')
- t.equal(out, outGraceful, 'expected only *restart scripts to run')
- t.end()
- })
-})
-
-test('clean', function (t) {
- cleanup()
- t.end()
-})
-
var outGraceless = [
'prerestart',
'prestop',
@@ -90,6 +60,36 @@ var pjGraceful = JSON.stringify({
}
}, null, 2) + '\n'
+test('setup', function (t) {
+ bootstrap()
+ t.end()
+})
+
+test('graceless restart', function (t) {
+ fs.writeFileSync(resolve(pkg, 'package.json'), pjGraceless)
+ createChild(['run-script', 'restart'], function (err, code, out) {
+ t.ifError(err, 'restart finished successfully')
+ t.equal(code, 0, 'npm run-script exited with code')
+ t.equal(out, outGraceless, 'expected all scripts to run')
+ t.end()
+ })
+})
+
+test('graceful restart', function (t) {
+ fs.writeFileSync(resolve(pkg, 'package.json'), pjGraceful)
+ createChild(['run-script', 'restart'], function (err, code, out) {
+ t.ifError(err, 'restart finished successfully')
+ t.equal(code, 0, 'npm run-script exited with code')
+ t.equal(out, outGraceful, 'expected only *restart scripts to run')
+ t.end()
+ })
+})
+
+test('clean', function (t) {
+ cleanup()
+ t.end()
+})
+
function bootstrap () {
mkdirp.sync(pkg)
}
diff --git a/deps/npm/test/tap/install-bad-man.js b/deps/npm/test/tap/install-bad-man.js
index 531509e9974dc5..9ec8a84734b755 100644
--- a/deps/npm/test/tap/install-bad-man.js
+++ b/deps/npm/test/tap/install-bad-man.js
@@ -15,6 +15,13 @@ var EXEC_OPTS = {
cwd: target
}
+var json = {
+ name : "install-bad-man",
+ version : "1.2.3",
+ man : [ "./install-bad-man.1.lol" ]
+}
+
+
test("setup", function (t) {
setup()
t.pass("setup ran")
@@ -54,12 +61,6 @@ test("clean", function (t) {
t.end()
})
-var json = {
- name : "install-bad-man",
- version : "1.2.3",
- man : [ "./install-bad-man.1.lol" ]
-}
-
function setup () {
cleanup()
mkdirp.sync(pkg)
diff --git a/deps/npm/test/tap/install-man.js b/deps/npm/test/tap/install-man.js
index 70879d81f12bf9..ebba5d87b01100 100644
--- a/deps/npm/test/tap/install-man.js
+++ b/deps/npm/test/tap/install-man.js
@@ -15,6 +15,12 @@ var EXEC_OPTS = {
cwd: target
}
+var json = {
+ name : "install-man",
+ version : "1.2.3",
+ man : [ "./install-man.1" ]
+}
+
test("setup", function (t) {
setup()
t.pass("setup ran")
@@ -50,12 +56,6 @@ test("clean", function (t) {
t.end()
})
-var json = {
- name : "install-man",
- version : "1.2.3",
- man : [ "./install-man.1" ]
-}
-
function setup () {
cleanup()
mkdirp.sync(pkg)
diff --git a/deps/npm/test/tap/install-noargs-dev.js b/deps/npm/test/tap/install-noargs-dev.js
new file mode 100644
index 00000000000000..f16a7498f597d3
--- /dev/null
+++ b/deps/npm/test/tap/install-noargs-dev.js
@@ -0,0 +1,109 @@
+var fs = require('fs')
+var path = require('path')
+
+var mkdirp = require('mkdirp')
+var mr = require('npm-registry-mock')
+var osenv = require('osenv')
+var rimraf = require('rimraf')
+var test = require('tap').test
+
+var common = require('../common-tap.js')
+var server
+
+var pkg = path.join(__dirname, 'install-noargs-dev')
+
+var EXEC_OPTS = { cwd: pkg }
+
+var PACKAGE_JSON1 = {
+ name: 'install-noargs-dev',
+ version: '0.0.1',
+ devDependencies: {
+ 'underscore': '1.3.1'
+ }
+}
+
+var PACKAGE_JSON2 = {
+ name: 'install-noargs-dev',
+ version: '0.0.2',
+ devDependencies: {
+ 'underscore': '1.5.1'
+ }
+}
+
+test('setup', function (t) {
+ setup()
+ mr({ port: common.port }, function (er, s) {
+ t.ifError(er, 'started mock registry')
+ server = s
+ t.end()
+ })
+})
+
+test('install noargs installs devDependencies', function (t) {
+ common.npm(
+ [
+ '--registry', common.registry,
+ '--loglevel', 'silent',
+ 'install'
+ ],
+ EXEC_OPTS,
+ function (err, code) {
+ t.ifError(err, 'npm install ran without issue')
+ t.notOk(code, 'npm install exited with code 0')
+
+ var p = path.join(pkg, 'node_modules', 'underscore', 'package.json')
+ var pkgJson = JSON.parse(fs.readFileSync(p))
+
+ t.equal(pkgJson.version, '1.3.1')
+ t.end()
+ }
+ )
+})
+
+test('install noargs installs updated devDependencies', function (t) {
+ fs.writeFileSync(
+ path.join(pkg, 'package.json'),
+ JSON.stringify(PACKAGE_JSON2, null, 2)
+ )
+
+ common.npm(
+ [
+ '--registry', common.registry,
+ '--loglevel', 'silent',
+ 'install'
+ ],
+ EXEC_OPTS,
+ function (err, code) {
+ t.ifError(err, 'npm install ran without issue')
+ t.notOk(code, 'npm install exited with code 0')
+
+ var p = path.join(pkg, 'node_modules', 'underscore', 'package.json')
+ var pkgJson = JSON.parse(fs.readFileSync(p))
+
+ t.equal(pkgJson.version, '1.5.1')
+ t.end()
+ }
+ )
+})
+
+test('cleanup', function (t) {
+ server.close()
+ cleanup()
+ t.end()
+})
+
+function cleanup () {
+ process.chdir(osenv.tmpdir())
+ rimraf.sync(pkg)
+}
+
+function setup () {
+ cleanup()
+ mkdirp.sync(path.resolve(pkg, 'node_modules'))
+ fs.writeFileSync(
+ path.join(pkg, 'package.json'),
+ JSON.stringify(PACKAGE_JSON1, null, 2)
+ )
+
+ process.chdir(pkg)
+}
diff --git a/deps/npm/test/tap/link.js b/deps/npm/test/tap/link.js
index 6562e35fd3a75e..ea47e8296a15c5 100644
--- a/deps/npm/test/tap/link.js
+++ b/deps/npm/test/tap/link.js
@@ -20,6 +20,31 @@ var OPTS = {
}
}
+var readJSON = {
+ name: 'foo',
+ version: '1.0.0',
+ description: '',
+ main: 'index.js',
+ scripts: {
+ test: 'echo \"Error: no test specified\" && exit 1'
+ },
+ author: '',
+ license: 'ISC'
+}
+
+var installJSON = {
+ name: 'bar',
+ version: '1.0.0',
+ description: '',
+ main: 'index.js',
+ scripts: {
+ test: 'echo \"Error: no test specified\" && exit 1'
+ },
+ author: '',
+ license: 'ISC'
+}
+
+
test('setup', function (t) {
setup()
common.npm(['ls', '-g', '--depth=0'], OPTS, function (err, c, out) {
@@ -72,30 +97,6 @@ test('cleanup', function (t) {
})
})
-var readJSON = {
- name: 'foo',
- version: '1.0.0',
- description: '',
- main: 'index.js',
- scripts: {
- test: 'echo \"Error: no test specified\" && exit 1'
- },
- author: '',
- license: 'ISC'
-}
-
-var installJSON = {
- name: 'bar',
- version: '1.0.0',
- description: '',
- main: 'index.js',
- scripts: {
- test: 'echo \"Error: no test specified\" && exit 1'
- },
- author: '',
- license: 'ISC'
-}
-
function cleanup () {
rimraf.sync(linkRoot)
rimraf.sync(link)
diff --git a/deps/npm/test/tap/ls-l-depth-0.js b/deps/npm/test/tap/ls-l-depth-0.js
index 5bbc1278c42a73..3b5ae4d20234cd 100644
--- a/deps/npm/test/tap/ls-l-depth-0.js
+++ b/deps/npm/test/tap/ls-l-depth-0.js
@@ -27,6 +27,18 @@ var server
var EXEC_OPTS = { cwd: pkg }
+var fixture = {
+ 'name': 'glock',
+ 'version': '1.8.7',
+ 'private': true,
+ 'description': 'an inexplicably hostile sample package',
+ 'homepage': 'https://glo.ck',
+ 'repository': 'https://github.com/npm/glo.ck',
+ 'dependencies': {
+ 'underscore': '1.5.1'
+ }
+}
+
test('setup', function (t) {
setup()
mr({ port: common.port }, function (er, s) {
@@ -85,18 +97,6 @@ test('cleanup', function (t) {
t.end()
})
-var fixture = {
- 'name': 'glock',
- 'version': '1.8.7',
- 'private': true,
- 'description': 'an inexplicably hostile sample package',
- 'homepage': 'https://glo.ck',
- 'repository': 'https://github.com/npm/glo.ck',
- 'dependencies': {
- 'underscore': '1.5.1'
- }
-}
-
function cleanup () {
process.chdir(tmpdir())
rimraf.sync(pkg)
diff --git a/deps/npm/test/tap/noargs-install-config-save.js b/deps/npm/test/tap/noargs-install-config-save.js
index 15613d7a727a9c..b6900b431740d2 100644
--- a/deps/npm/test/tap/noargs-install-config-save.js
+++ b/deps/npm/test/tap/noargs-install-config-save.js
@@ -56,9 +56,8 @@ test("does not update the package.json with empty arguments", function (t) {
var child = createChild([npm, "install"])
child.on("close", function () {
var text = JSON.stringify(fs.readFileSync(pkg + "/package.json", "utf8"))
- t.ok(text.indexOf("\"dependencies") === -1)
s.close()
- t.end()
+ t.ok(text.indexOf("\"dependencies") === -1)
})
})
})
@@ -70,10 +69,9 @@ test("updates the package.json (adds dependencies) with an argument", function (
mr({port : common.port}, function (er, s) {
var child = createChild([npm, "install", "underscore"])
child.on("close", function () {
+ s.close()
var text = JSON.stringify(fs.readFileSync(pkg + "/package.json", "utf8"))
t.ok(text.indexOf("\"dependencies") !== -1)
- s.close()
- t.end()
})
})
})
diff --git a/deps/npm/test/tap/optional-metadep-rollback-collision.js b/deps/npm/test/tap/optional-metadep-rollback-collision.js
index 929c4fc0fc4e21..4b21f965ed12fc 100644
--- a/deps/npm/test/tap/optional-metadep-rollback-collision.js
+++ b/deps/npm/test/tap/optional-metadep-rollback-collision.js
@@ -63,87 +63,6 @@ var opdep = {
}
}
-test('setup', function (t) {
- cleanup()
-
- mkdirp.sync(pkg)
- fs.writeFileSync(
- path.join(pkg, 'package.json'),
- JSON.stringify(json, null, 2)
- )
-
- mkdirp.sync(path.join(deps, 'd1'))
- fs.writeFileSync(
- path.join(deps, 'd1', 'package.json'),
- JSON.stringify(d1, null, 2)
- )
-
- mkdirp.sync(path.join(deps, 'd2'))
- fs.writeFileSync(
- path.join(deps, 'd2', 'package.json'),
- JSON.stringify(d2, null, 2)
- )
- fs.writeFileSync(path.join(deps, 'd2', 'blart.js'), blart)
-
- mkdirp.sync(path.join(deps, 'opdep'))
- fs.writeFileSync(
- path.join(deps, 'opdep', 'package.json'),
- JSON.stringify(opdep, null, 2)
- )
- fs.writeFileSync(path.join(deps, 'opdep', 'bad-server.js'), badServer)
-
- t.end()
-})
-
-test('go go test racer', function (t) {
- common.npm(
- [
- '--prefix', pkg,
- '--fetch-retries', '0',
- '--loglevel', 'silent',
- '--cache', cache,
- 'install'
- ],
- {
- cwd: pkg,
- env: {
- PATH: process.env.PATH,
- Path: process.env.Path
- },
- stdio: [0, 'pipe', 2]
- },
- function (er, code, stdout, stderr) {
- t.ifError(er, 'install ran to completion without error')
- t.notOk(code, 'npm install exited with code 0')
-
- t.equal(stdout, 'ok\nok\n')
- t.notOk(/not ok/.test(stdout), 'should not contain the string \'not ok\'')
- t.end()
- }
- )
-})
-
-test('verify results', function (t) {
- t.throws(function () {
- fs.statSync(nm)
- })
- t.end()
-})
-
-test('cleanup', function (t) {
- cleanup()
- t.end()
-})
-
-function cleanup () {
- process.chdir(osenv.tmpdir())
- try {
- var pid = +fs.readFileSync(pidfile)
- process.kill(pid, 'SIGKILL')
- } catch (er) {}
-
- rimraf.sync(pkg)
-}
var badServer = function () {/*
var createServer = require('http').createServer
@@ -235,3 +154,84 @@ mkdirp(BASEDIR, function go () {
}, 3 * 1000)
})
*/}.toString().split('\n').slice(1, -1).join('\n')
+test('setup', function (t) {
+ cleanup()
+
+ mkdirp.sync(pkg)
+ fs.writeFileSync(
+ path.join(pkg, 'package.json'),
+ JSON.stringify(json, null, 2)
+ )
+
+ mkdirp.sync(path.join(deps, 'd1'))
+ fs.writeFileSync(
+ path.join(deps, 'd1', 'package.json'),
+ JSON.stringify(d1, null, 2)
+ )
+
+ mkdirp.sync(path.join(deps, 'd2'))
+ fs.writeFileSync(
+ path.join(deps, 'd2', 'package.json'),
+ JSON.stringify(d2, null, 2)
+ )
+ fs.writeFileSync(path.join(deps, 'd2', 'blart.js'), blart)
+
+ mkdirp.sync(path.join(deps, 'opdep'))
+ fs.writeFileSync(
+ path.join(deps, 'opdep', 'package.json'),
+ JSON.stringify(opdep, null, 2)
+ )
+ fs.writeFileSync(path.join(deps, 'opdep', 'bad-server.js'), badServer)
+
+ t.end()
+})
+
+test('go go test racer', function (t) {
+ common.npm(
+ [
+ '--prefix', pkg,
+ '--fetch-retries', '0',
+ '--loglevel', 'silent',
+ '--cache', cache,
+ 'install'
+ ],
+ {
+ cwd: pkg,
+ env: {
+ PATH: process.env.PATH,
+ Path: process.env.Path
+ },
+ stdio: [0, 'pipe', 2]
+ },
+ function (er, code, stdout, stderr) {
+ t.ifError(er, 'install ran to completion without error')
+ t.notOk(code, 'npm install exited with code 0')
+
+ t.equal(stdout, 'ok\nok\n')
+ t.notOk(/not ok/.test(stdout), 'should not contain the string \'not ok\'')
+ t.end()
+ }
+ )
+})
+
+test('verify results', function (t) {
+ t.throws(function () {
+ fs.statSync(nm)
+ })
+ t.end()
+})
+
+test('cleanup', function (t) {
+ cleanup()
+ t.end()
+})
+
+function cleanup () {
+ process.chdir(osenv.tmpdir())
+ try {
+ var pid = +fs.readFileSync(pidfile)
+ process.kill(pid, 'SIGKILL')
+ } catch (er) {}
+
+ rimraf.sync(pkg)
+}
diff --git a/deps/npm/test/tap/outdated-local.js b/deps/npm/test/tap/outdated-local.js
new file mode 100644
index 00000000000000..f9b8af4420f861
--- /dev/null
+++ b/deps/npm/test/tap/outdated-local.js
@@ -0,0 +1,194 @@
+var common = require('../common-tap.js')
+var test = require('tap').test
+var npm = require('../../')
+var rimraf = require('rimraf')
+var path = require('path')
+var mr = require('npm-registry-mock')
+var osenv = require('osenv')
+var mkdirp = require('mkdirp')
+var fs = require('graceful-fs')
+
+var pkg = path.resolve(__dirname, 'outdated-local')
+var pkgLocal = path.resolve(pkg, 'local-module')
+var pkgScopedLocal = path.resolve(pkg, 'another-local-module')
+var pkgLocalUnderscore = path.resolve(pkg, 'underscore')
+var pkgLocalOptimist = path.resolve(pkg, 'optimist')
+
+var pjParent = JSON.stringify({
+ name: 'outdated-local',
+ version: '1.0.0',
+ dependencies: {
+ 'local-module': 'file:local-module', // updated locally, not on repo
+ '@scoped/another-local-module': 'file:another-local-module', // updated locally, scoped, not on repo
+ 'underscore': 'file:underscore', // updated locally, updated but lesser version on repo
+ 'optimist': 'file:optimist' // updated locally, updated and greater version on repo
+ }
+}, null, 2) + '\n'
+
+var pjLocal = JSON.stringify({
+ name: 'local-module',
+ version: '1.0.0'
+}, null, 2) + '\n'
+
+var pjLocalBumped = JSON.stringify({
+ name: 'local-module',
+ version: '1.1.0'
+}, null, 2) + '\n'
+
+var pjScopedLocal = JSON.stringify({
+ name: '@scoped/another-local-module',
+ version: '1.0.0'
+}, null, 2) + '\n'
+
+var pjScopedLocalBumped = JSON.stringify({
+ name: '@scoped/another-local-module',
+ version: '1.2.0'
+}, null, 2) + '\n'
+
+var pjLocalUnderscore = JSON.stringify({
+ name: 'underscore',
+ version: '1.3.1'
+}, null, 2) + '\n'
+
+var pjLocalUnderscoreBumped = JSON.stringify({
+ name: 'underscore',
+ version: '1.6.1'
+}, null, 2) + '\n'
+
+var pjLocalOptimist = JSON.stringify({
+ name: 'optimist',
+ version: '0.4.0'
+}, null, 2) + '\n'
+
+var pjLocalOptimistBumped = JSON.stringify({
+ name: 'optimist',
+ version: '0.5.0'
+}, null, 2) + '\n'
+
+
+function mocks (server) {
+ server.get('/local-module')
+ .reply(404)
+ server.get('/@scoped%2fanother-local-module')
+ .reply(404)
+}
+
+test('setup', function (t) {
+ bootstrap()
+ t.end()
+})
+
+test('outdated support local modules', function (t) {
+ t.plan(4)
+ process.chdir(pkg)
+ mr({ port: common.port, plugin: mocks }, function (err, s) {
+ t.ifError(err, 'mock registry started without problems')
+
+ function verify (actual, expected) {
+ for (var i = 0; i < expected.length; i++) {
+ var current = expected[i]
+
+ var found = false
+ for (var j = 0; j < actual.length; j++) {
+ var target = actual[j]
+
+ var k
+ for (k = 0; k < current.length; k++) {
+ if (current[k] !== target[k]) break
+ }
+ if (k === current.length) found = true
+ }
+
+ if (!found) return false
+ }
+
+ return true
+ }
+
+ npm.load(
+ {
+ loglevel: 'silent',
+ parseable: true,
+ registry: common.registry
+ },
+ function () {
+ npm.install('.', function (err) {
+ t.ifError(err, 'install success')
+ bumpLocalModules()
+ npm.outdated(function (er, d) {
+ t.ifError(er, 'outdated success')
+ t.ok(verify(d, [
+ [
+ path.resolve(__dirname, 'outdated-local'),
+ 'local-module',
+ '1.0.0',
+ '1.1.0',
+ '1.1.0',
+ 'file:local-module'
+ ],
+ [
+ path.resolve(__dirname, 'outdated-local'),
+ '@scoped/another-local-module',
+ '1.0.0',
+ '1.2.0',
+ '1.2.0',
+ 'file:another-local-module'
+ ],
+ [
+ path.resolve(__dirname, 'outdated-local'),
+ 'underscore',
+ '1.3.1',
+ '1.6.1',
+ '1.5.1',
+ 'file:underscore'
+ ],
+ [
+ path.resolve(__dirname, 'outdated-local'),
+ 'optimist',
+ '0.4.0',
+ '0.6.0',
+ '0.6.0',
+ 'optimist@0.6.0'
+ ]
+ ]), 'got expected outdated output')
+ s.close()
+ })
+ })
+ }
+ )
+ })
+})
+
+test('cleanup', function (t) {
+ cleanup()
+ t.end()
+})
+
+function bootstrap () {
+ mkdirp.sync(pkg)
+ fs.writeFileSync(path.resolve(pkg, 'package.json'), pjParent)
+
+ mkdirp.sync(pkgLocal)
+ fs.writeFileSync(path.resolve(pkgLocal, 'package.json'), pjLocal)
+
+ mkdirp.sync(pkgScopedLocal)
+ fs.writeFileSync(path.resolve(pkgScopedLocal, 'package.json'), pjScopedLocal)
+
+ mkdirp.sync(pkgLocalUnderscore)
+ fs.writeFileSync(path.resolve(pkgLocalUnderscore, 'package.json'), pjLocalUnderscore)
+
+ mkdirp.sync(pkgLocalOptimist)
+ fs.writeFileSync(path.resolve(pkgLocalOptimist, 'package.json'), pjLocalOptimist)
+}
+
+function bumpLocalModules () {
+ fs.writeFileSync(path.resolve(pkgLocal, 'package.json'), pjLocalBumped)
+ fs.writeFileSync(path.resolve(pkgScopedLocal, 'package.json'), pjScopedLocalBumped)
+ fs.writeFileSync(path.resolve(pkgLocalUnderscore, 'package.json'), pjLocalUnderscoreBumped)
+ fs.writeFileSync(path.resolve(pkgLocalOptimist, 'package.json'), pjLocalOptimistBumped)
+}
+
+function cleanup () {
+ process.chdir(osenv.tmpdir())
+ rimraf.sync(pkg)
+}
diff --git a/deps/npm/test/tap/outdated-private.js b/deps/npm/test/tap/outdated-private.js
index 7e43be7a54d47e..882d7d9479cf4c 100644
--- a/deps/npm/test/tap/outdated-private.js
+++ b/deps/npm/test/tap/outdated-private.js
@@ -13,6 +13,39 @@ var pkgLocalPrivate = path.resolve(pkg, "local-private")
var pkgScopedLocalPrivate = path.resolve(pkg, "another-local-private")
var pkgLocalUnderscore = path.resolve(pkg, "underscore")
+var pjParent = JSON.stringify({
+ name : "outdated-private",
+ version : "1.0.0",
+ dependencies : {
+ "local-private" : "file:local-private",
+ "@scoped/another-local-private" : "file:another-local-private",
+ "underscore" : "file:underscore"
+ }
+}, null, 2) + "\n"
+
+var pjLocalPrivate = JSON.stringify({
+ name : "local-private",
+ version : "1.0.0",
+ private : true
+}, null, 2) + "\n"
+
+var pjLocalPrivateBumped = JSON.stringify({
+ name : "local-private",
+ version : "1.1.0",
+ private : true
+}, null, 2) + "\n"
+
+var pjScopedLocalPrivate = JSON.stringify({
+ name : "@scoped/another-local-private",
+ version : "1.0.0",
+ private : true
+}, null, 2) + "\n"
+
+var pjLocalUnderscore = JSON.stringify({
+ name : "underscore",
+ version : "1.3.1"
+}, null, 2) + "\n"
+
test("setup", function (t) {
bootstrap()
t.end()
@@ -31,15 +64,16 @@ test("outdated ignores private modules", function (t) {
function () {
npm.install(".", function (err) {
t.ifError(err, "install success")
+ bumpLocalPrivate()
npm.outdated(function (er, d) {
t.ifError(er, "outdated success")
t.deepEqual(d, [[
path.resolve(__dirname, "outdated-private"),
"underscore",
"1.3.1",
- "1.3.1",
"1.5.1",
- "file:underscore"
+ "1.5.1",
+ "underscore@1.5.1"
]])
s.close()
})
@@ -54,33 +88,6 @@ test("cleanup", function (t) {
t.end()
})
-var pjParent = JSON.stringify({
- name : "outdated-private",
- version : "1.0.0",
- dependencies : {
- "local-private" : "file:local-private",
- "@scoped/another-local-private" : "file:another-local-private",
- "underscore" : "file:underscore"
- }
-}, null, 2) + "\n"
-
-var pjLocalPrivate = JSON.stringify({
- name : "local-private",
- version : "1.0.0",
- private : true
-}, null, 2) + "\n"
-
-var pjScopedLocalPrivate = JSON.stringify({
- name : "@scoped/another-local-private",
- version : "1.0.0",
- private : true
-}, null, 2) + "\n"
-
-var pjLocalUnderscore = JSON.stringify({
- name : "underscore",
- version : "1.3.1"
-}, null, 2) + "\n"
-
function bootstrap () {
mkdirp.sync(pkg)
fs.writeFileSync(path.resolve(pkg, "package.json"), pjParent)
@@ -95,6 +102,10 @@ function bootstrap () {
fs.writeFileSync(path.resolve(pkgLocalUnderscore, "package.json"), pjLocalUnderscore)
}
+function bumpLocalPrivate () {
+ fs.writeFileSync(path.resolve(pkgLocalPrivate, "package.json"), pjLocalPrivateBumped)
+}
+
function cleanup () {
process.chdir(osenv.tmpdir())
rimraf.sync(pkg)
diff --git a/deps/npm/test/tap/peer-deps-invalid.js b/deps/npm/test/tap/peer-deps-invalid.js
index 7d630f866256bb..b256b8e2e3d95d 100644
--- a/deps/npm/test/tap/peer-deps-invalid.js
+++ b/deps/npm/test/tap/peer-deps-invalid.js
@@ -23,6 +23,31 @@ var json = {
}
}
+var fileFail = function () {
+/**package
+* { "name": "npm-test-peer-deps-file-invalid"
+* , "main": "index.js"
+* , "version": "1.2.3"
+* , "description":"This one should conflict with the other one"
+* , "peerDependencies": { "underscore": "1.3.3" }
+* }
+**/
+ module.exports = 'I\'m just a lonely index, naked as the day I was born.'
+}.toString().split('\n').slice(1, -1).join('\n')
+
+var fileOK = function () {
+/**package
+* { "name": "npm-test-peer-deps-file"
+* , "main": "index.js"
+* , "version": "1.2.3"
+* , "description":"No package.json in sight!"
+* , "peerDependencies": { "underscore": "1.3.1" }
+* , "dependencies": { "mkdirp": "0.3.5" }
+* }
+**/
+ module.exports = 'I\'m just a lonely index, naked as the day I was born.'
+}.toString().split('\n').slice(1, -1).join('\n')
+
test('setup', function (t) {
cleanup()
mkdirp.sync(cache)
@@ -75,28 +100,3 @@ function cleanup () {
process.chdir(osenv.tmpdir())
rimraf.sync(pkg)
}
-
-var fileFail = function () {
-/**package
-* { "name": "npm-test-peer-deps-file-invalid"
-* , "main": "index.js"
-* , "version": "1.2.3"
-* , "description":"This one should conflict with the other one"
-* , "peerDependencies": { "underscore": "1.3.3" }
-* }
-**/
- module.exports = 'I\'m just a lonely index, naked as the day I was born.'
-}.toString().split('\n').slice(1, -1).join('\n')
-
-var fileOK = function () {
-/**package
-* { "name": "npm-test-peer-deps-file"
-* , "main": "index.js"
-* , "version": "1.2.3"
-* , "description":"No package.json in sight!"
-* , "peerDependencies": { "underscore": "1.3.1" }
-* , "dependencies": { "mkdirp": "0.3.5" }
-* }
-**/
- module.exports = 'I\'m just a lonely index, naked as the day I was born.'
-}.toString().split('\n').slice(1, -1).join('\n')
diff --git a/deps/npm/test/tap/peer-deps-without-package-json.js b/deps/npm/test/tap/peer-deps-without-package-json.js
index 16a3a114f62a51..08322eefaa5035 100644
--- a/deps/npm/test/tap/peer-deps-without-package-json.js
+++ b/deps/npm/test/tap/peer-deps-without-package-json.js
@@ -14,6 +14,20 @@ var pkg = path.resolve(__dirname, 'peer-deps-without-package-json')
var cache = path.resolve(pkg, 'cache')
var nodeModules = path.resolve(pkg, 'node_modules')
+var fileJS = function () {
+/**package
+* { "name": "npm-test-peer-deps-file"
+* , "main": "index.js"
+* , "version": "1.2.3"
+* , "description":"No package.json in sight!"
+* , "peerDependencies": { "underscore": "1.3.1" }
+* , "dependencies": { "mkdirp": "0.3.5" }
+* }
+**/
+
+ module.exports = 'I\'m just a lonely index, naked as the day I was born.'
+}.toString().split('\n').slice(1, -1).join('\n')
+
test('setup', function (t) {
t.comment('test for https://github.com/npm/npm/issues/3049')
cleanup()
@@ -65,17 +79,3 @@ function cleanup () {
process.chdir(osenv.tmpdir())
rimraf.sync(pkg)
}
-
-var fileJS = function () {
-/**package
-* { "name": "npm-test-peer-deps-file"
-* , "main": "index.js"
-* , "version": "1.2.3"
-* , "description":"No package.json in sight!"
-* , "peerDependencies": { "underscore": "1.3.1" }
-* , "dependencies": { "mkdirp": "0.3.5" }
-* }
-**/
-
- module.exports = 'I\'m just a lonely index, naked as the day I was born.'
-}.toString().split('\n').slice(1, -1).join('\n')
diff --git a/deps/npm/test/tap/publish-config.js b/deps/npm/test/tap/publish-config.js
index 9e537a920649f3..fd6dd4a2a4da11 100644
--- a/deps/npm/test/tap/publish-config.js
+++ b/deps/npm/test/tap/publish-config.js
@@ -21,9 +21,9 @@ fs.writeFileSync(pkg + "/fixture_npmrc",
test(function (t) {
var child
+ t.plan(4)
require("http").createServer(function (req, res) {
t.pass("got request on the fakey fake registry")
- t.end()
this.close()
res.statusCode = 500
res.end(JSON.stringify({
diff --git a/deps/npm/test/tap/registry.js b/deps/npm/test/tap/registry.js
index 20e7bbe811588a..060d9b67b67fcc 100644
--- a/deps/npm/test/tap/registry.js
+++ b/deps/npm/test/tap/registry.js
@@ -30,7 +30,7 @@ else {
function runTests () {
- var env = {}
+ var env = { TAP: 1 }
for (var i in process.env) env[i] = process.env[i]
env.npm = npmExec
@@ -52,7 +52,7 @@ function runTests () {
env: env,
stdio: "inherit"
}
- common.npm(["test"], opts, function (err, code) {
+ common.npm(["test", "--", "-Rtap"], opts, function (err, code) {
if (err) { throw err }
if (code) {
return test("need test to work", function (t) {
diff --git a/deps/npm/test/tap/run-script.js b/deps/npm/test/tap/run-script.js
index 60c9d3c4f9b8fb..8dfe574e139d69 100644
--- a/deps/npm/test/tap/run-script.js
+++ b/deps/npm/test/tap/run-script.js
@@ -13,43 +13,6 @@ var tmp = path.resolve(pkg, 'tmp')
var opts = { cwd: pkg }
-function testOutput (t, command, er, code, stdout, stderr) {
- var lines
-
- if (er)
- throw er
-
- if (stderr)
- throw new Error('npm ' + command + ' stderr: ' + stderr.toString())
-
- lines = stdout.trim().split('\n')
- stdout = lines.filter(function (line) {
- return line.trim() !== '' && line[0] !== '>'
- }).join(';')
-
- t.equal(stdout, command)
- t.end()
-}
-
-function writeMetadata (object) {
- fs.writeFileSync(
- path.resolve(pkg, 'package.json'),
- JSON.stringify(object, null, 2) + '\n'
- )
-}
-
-function cleanup () {
- rimraf.sync(pkg)
-}
-
-test('setup', function (t) {
- cleanup()
- mkdirp.sync(cache)
- mkdirp.sync(tmp)
- writeMetadata(fullyPopulated)
- t.end()
-})
-
var fullyPopulated = {
'name': 'runscript',
'version': '1.2.3',
@@ -91,6 +54,44 @@ var both = {
}
}
+
+function testOutput (t, command, er, code, stdout, stderr) {
+ var lines
+
+ if (er)
+ throw er
+
+ if (stderr)
+ throw new Error('npm ' + command + ' stderr: ' + stderr.toString())
+
+ lines = stdout.trim().split('\n')
+ stdout = lines.filter(function (line) {
+ return line.trim() !== '' && line[0] !== '>'
+ }).join(';')
+
+ t.equal(stdout, command)
+ t.end()
+}
+
+function writeMetadata (object) {
+ fs.writeFileSync(
+ path.resolve(pkg, 'package.json'),
+ JSON.stringify(object, null, 2) + '\n'
+ )
+}
+
+function cleanup () {
+ rimraf.sync(pkg)
+}
+
+test('setup', function (t) {
+ cleanup()
+ mkdirp.sync(cache)
+ mkdirp.sync(tmp)
+ writeMetadata(fullyPopulated)
+ t.end()
+})
+
test('npm run-script start', function (t) {
common.npm(['run-script', 'start'], opts, testOutput.bind(null, t, 'start'))
})
diff --git a/deps/npm/test/tap/scripts-whitespace-windows.js b/deps/npm/test/tap/scripts-whitespace-windows.js
index b4d1f3a34ecb96..d24a24d4d8a8da 100644
--- a/deps/npm/test/tap/scripts-whitespace-windows.js
+++ b/deps/npm/test/tap/scripts-whitespace-windows.js
@@ -25,7 +25,8 @@ var json = {
},
dependencies: {
'scripts-whitespace-windows-dep': '0.0.1'
- }
+ },
+ license: 'WTFPL'
}
var dependency = {
@@ -34,6 +35,13 @@ var dependency = {
bin: [ 'bin/foo' ]
}
+var foo = function () {/*
+#!/usr/bin/env node
+
+if (process.argv.length === 8)
+ console.log('npm-test-fine')
+*/}.toString().split('\n').slice(1, -1).join('\n')
+
test('setup', function (t) {
cleanup()
mkdirp.sync(tmp)
@@ -72,9 +80,12 @@ test('setup', function (t) {
test('test', function (t) {
common.npm(['run', 'foo'], EXEC_OPTS, function (err, code, stdout, stderr) {
+ stderr = stderr.trim()
+ if (stderr)
+ console.error(stderr)
t.ifErr(err, 'npm run finished without error')
t.equal(code, 0, 'npm run exited ok')
- t.notOk(stderr, 'no output stderr: ', stderr)
+ t.notOk(stderr, 'no output stderr: ' + stderr)
stdout = stdout.trim()
t.ok(/npm-test-fine/.test(stdout))
t.end()
@@ -91,9 +102,3 @@ function cleanup () {
rimraf.sync(pkg)
}
-var foo = function () {/*
-#!/usr/bin/env node
-
-if (process.argv.length === 8)
- console.log('npm-test-fine')
-*/}.toString().split('\n').slice(1, -1).join('\n')
diff --git a/deps/npm/test/tap/shrinkwrap-local-dependency.js b/deps/npm/test/tap/shrinkwrap-local-dependency.js
index d9514e42d3237e..ffbde6574ee86a 100644
--- a/deps/npm/test/tap/shrinkwrap-local-dependency.js
+++ b/deps/npm/test/tap/shrinkwrap-local-dependency.js
@@ -10,6 +10,34 @@ var PKG_DIR = path.resolve(__dirname, "shrinkwrap-local-dependency")
var CACHE_DIR = path.resolve(PKG_DIR, "cache")
var DEP_DIR = path.resolve(PKG_DIR, "dep")
+var desired = {
+ "name": "npm-test-shrinkwrap-local-dependency",
+ "version": "0.0.0",
+ "dependencies": {
+ "npm-test-shrinkwrap-local-dependency-dep": {
+ "version": "0.0.0",
+ "from": "dep",
+ "resolved": "file:dep"
+ }
+ }
+}
+
+var root = {
+ "author": "Thomas Torp",
+ "name": "npm-test-shrinkwrap-local-dependency",
+ "version": "0.0.0",
+ "dependencies": {
+ "npm-test-shrinkwrap-local-dependency-dep": "file:./dep"
+ }
+}
+
+var dependency = {
+ "author": "Thomas Torp",
+ "name": "npm-test-shrinkwrap-local-dependency-dep",
+ "version": "0.0.0"
+}
+
+
test("shrinkwrap uses resolved with file: on local deps", function(t) {
setup()
@@ -67,33 +95,6 @@ test("cleanup", function(t) {
t.end()
})
-var desired = {
- "name": "npm-test-shrinkwrap-local-dependency",
- "version": "0.0.0",
- "dependencies": {
- "npm-test-shrinkwrap-local-dependency-dep": {
- "version": "0.0.0",
- "from": "dep",
- "resolved": "file:dep"
- }
- }
-}
-
-var root = {
- "author": "Thomas Torp",
- "name": "npm-test-shrinkwrap-local-dependency",
- "version": "0.0.0",
- "dependencies": {
- "npm-test-shrinkwrap-local-dependency-dep": "file:./dep"
- }
-}
-
-var dependency = {
- "author": "Thomas Torp",
- "name": "npm-test-shrinkwrap-local-dependency-dep",
- "version": "0.0.0"
-}
-
function setup() {
cleanup()
mkdirp.sync(PKG_DIR)
diff --git a/deps/npm/test/tap/tag-version-prefix.js b/deps/npm/test/tap/tag-version-prefix.js
new file mode 100644
index 00000000000000..efd2d14d4f6da5
--- /dev/null
+++ b/deps/npm/test/tap/tag-version-prefix.js
@@ -0,0 +1,88 @@
+var common = require('../common-tap.js')
+var fs = require('fs')
+var path = require('path')
+
+var mkdirp = require('mkdirp')
+var osenv = require('osenv')
+var rimraf = require('rimraf')
+var test = require('tap').test
+
+var npm = require('../../lib/npm.js')
+
+var pkg = path.resolve(__dirname, 'version-message-config')
+var cache = path.resolve(pkg, 'cache')
+var npmrc = path.resolve(pkg, '.npmrc')
+var packagePath = path.resolve(pkg, 'package.json')
+
+var json = { name: 'blah', version: '0.1.2' }
+
+var configContents = 'sign-git-tag=false\nmessage=":bookmark: %s"\n'
+
+test('npm version with message config', function (t) {
+ setup()
+
+ npm.load({ prefix: pkg, userconfig: npmrc }, function () {
+ var git = require('../../lib/utils/git.js')
+
+ common.makeGitRepo({ path: pkg }, function (er) {
+ t.ifErr(er, 'git bootstrap ran without error')
+
+ common.npm([
+ 'config',
+ 'set',
+ 'tag-version-prefix',
+ 'q'
+ ], { cwd: pkg, env: { PATH: process.env.PATH } },
+ function (err, code, stdout, stderr) {
+ t.ifError(err, 'npm config ran without issue')
+ t.notOk(code, 'exited with a non-error code')
+ t.notOk(stderr, 'no error output')
+
+ common.npm(
+ [
+ 'version',
+ 'patch',
+ '--loglevel', 'silent'
+ // package config is picked up from env
+ ],
+ { cwd: pkg, env: { PATH: process.env.PATH } },
+ function (err, code, stdout, stderr) {
+ t.ifError(err, 'npm version ran without issue')
+ t.notOk(code, 'exited with a non-error code')
+ t.notOk(stderr, 'no error output')
+
+ git.whichAndExec(
+ ['tag'],
+ { cwd: pkg, env: process.env },
+ function (er, tags, stderr) {
+ t.ok(tags.match(/q0\.1\.3/g), 'tag was created by version' + tags)
+ t.end()
+ }
+ )
+ }
+ )
+ })
+ })
+ })
+})
+
+test('cleanup', function (t) {
+ cleanup()
+ t.end()
+})
+
+function cleanup () {
+ // windows fix for locked files
+ process.chdir(osenv.tmpdir())
+
+ rimraf.sync(pkg)
+}
+
+function setup () {
+ cleanup()
+ mkdirp.sync(cache)
+ process.chdir(pkg)
+
+ fs.writeFileSync(packagePath, JSON.stringify(json), 'utf8')
+ fs.writeFileSync(npmrc, configContents, 'ascii')
+}
diff --git a/deps/npm/test/tap/unpublish-config.js b/deps/npm/test/tap/unpublish-config.js
new file mode 100644
index 00000000000000..58550ec4e59a12
--- /dev/null
+++ b/deps/npm/test/tap/unpublish-config.js
@@ -0,0 +1,82 @@
+var fs = require('graceful-fs')
+var http = require('http')
+var path = require('path')
+
+var mkdirp = require('mkdirp')
+var osenv = require('osenv')
+var rimraf = require('rimraf')
+var test = require('tap').test
+
+var pkg = path.join(__dirname, 'npm-test-unpublish-config')
+var fixturePath = path.join(pkg, 'fixture_npmrc')
+
+var common = require('../common-tap.js')
+
+var json = {
+ name: 'npm-test-unpublish-config',
+ version: '1.2.3',
+ publishConfig: { registry: common.registry }
+}
+
+test('setup', function (t) {
+ mkdirp.sync(pkg)
+
+ fs.writeFileSync(
+ path.join(pkg, 'package.json'),
+ JSON.stringify(json), 'utf8'
+ )
+ fs.writeFileSync(
+ fixturePath,
+ '//localhost:1337/:_authToken = beeeeeeeeeeeeef\n' +
+ 'registry = http://lvh.me:4321/registry/path\n'
+ )
+
+ t.end()
+})
+
+test('cursory test of unpublishing with config', function (t) {
+ var child
+ t.plan(4)
+ http.createServer(function (req, res) {
+ t.pass('got request on the fakey fake registry')
+ this.close()
+ res.statusCode = 500
+ res.end(JSON.stringify({
+ error: 'shh no tears, only dreams now'
+ }))
+ child.kill()
+ }).listen(common.port, function () {
+ t.pass('server is listening')
+
+ child = common.npm(
+ [
+ '--userconfig', fixturePath,
+ '--loglevel', 'silent',
+ '--force',
+ 'unpublish'
+ ],
+ {
+ cwd: pkg,
+ stdio: 'inherit',
+ env: {
+ 'npm_config_cache_lock_stale': 1000,
+ 'npm_config_cache_lock_wait': 1000,
+ HOME: process.env.HOME,
+ Path: process.env.PATH,
+ PATH: process.env.PATH,
+ USERPROFILE: osenv.home()
+ }
+ },
+ function (err, code) {
+ t.ifError(err, 'publish command finished successfully')
+ t.notOk(code, 'npm install exited with code 0')
+ }
+ )
+ })
+})
+
+test('cleanup', function (t) {
+ process.chdir(osenv.tmpdir())
+ rimraf.sync(pkg)
+ t.end()
+})
diff --git a/deps/npm/test/tap/update-index.js b/deps/npm/test/tap/update-index.js
index 0586269722c8cd..fe4244c308b650 100644
--- a/deps/npm/test/tap/update-index.js
+++ b/deps/npm/test/tap/update-index.js
@@ -13,74 +13,6 @@ var CACHE_DIR = path.resolve(PKG_DIR, 'cache')
var server
-function setup (t, mock, extra) {
- mkdirp.sync(CACHE_DIR)
- mr({ port: common.port, plugin: mock }, function (er, s) {
- npm.load({ cache: CACHE_DIR, registry: common.registry }, function (err) {
- if (extra) {
- Object.keys(extra).forEach(function (k) {
- npm.config.set(k, extra[k], 'user')
- })
- }
- t.ifError(err, 'no error')
- server = s
- t.end()
- })
- })
-}
-
-function cleanup (t) {
- server.close(function () {
- rimraf.sync(PKG_DIR)
-
- t.end()
- })
-}
-
-test('setup basic', function (t) {
- setup(t, mocks.basic)
-})
-
-test('request basic', function (t) {
- updateIndex(0, function (er) {
- t.ifError(er, 'no error')
- t.end()
- })
-})
-
-test('cleanup basic', cleanup)
-
-test('setup auth', function (t) {
- setup(t, mocks.auth)
-})
-
-test('request auth failure', function (t) {
- updateIndex(0, function (er) {
- t.equals(er.code, 'E401', 'gotta get that auth')
- t.ok(/^unauthorized/.test(er.message), 'unauthorized message')
- t.end()
- })
-})
-
-test('cleanup auth failure', cleanup)
-
-test('setup auth', function (t) {
- // mimic as if alwaysAuth had been set
- setup(t, mocks.auth, {
- _auth: new Buffer('bobby:tables').toString('base64'),
- 'always-auth': true
- })
-})
-
-test('request auth success', function (t) {
- updateIndex(0, function (er) {
- t.ifError(er, 'no error')
- t.end()
- })
-})
-
-test('cleanup auth', cleanup)
-
var mocks = {
basic: function (mock) {
mock.get('/-/all').reply(200, allMock)
@@ -193,3 +125,71 @@ var allMock = {
}
}
}
+
+function setup (t, mock, extra) {
+ mkdirp.sync(CACHE_DIR)
+ mr({ port: common.port, plugin: mock }, function (er, s) {
+ npm.load({ cache: CACHE_DIR, registry: common.registry }, function (err) {
+ if (extra) {
+ Object.keys(extra).forEach(function (k) {
+ npm.config.set(k, extra[k], 'user')
+ })
+ }
+ t.ifError(err, 'no error')
+ server = s
+ t.end()
+ })
+ })
+}
+
+function cleanup (t) {
+ server.close(function () {
+ rimraf.sync(PKG_DIR)
+
+ t.end()
+ })
+}
+
+test('setup basic', function (t) {
+ setup(t, mocks.basic)
+})
+
+test('request basic', function (t) {
+ updateIndex(0, function (er) {
+ t.ifError(er, 'no error')
+ t.end()
+ })
+})
+
+test('cleanup basic', cleanup)
+
+test('setup auth', function (t) {
+ setup(t, mocks.auth)
+})
+
+test('request auth failure', function (t) {
+ updateIndex(0, function (er) {
+ t.equals(er.code, 'E401', 'gotta get that auth')
+ t.ok(/^unauthorized/.test(er.message), 'unauthorized message')
+ t.end()
+ })
+})
+
+test('cleanup auth failure', cleanup)
+
+test('setup auth', function (t) {
+ // mimic as if alwaysAuth had been set
+ setup(t, mocks.auth, {
+ _auth: new Buffer('bobby:tables').toString('base64'),
+ 'always-auth': true
+ })
+})
+
+test('request auth success', function (t) {
+ updateIndex(0, function (er) {
+ t.ifError(er, 'no error')
+ t.end()
+ })
+})
+
+test('cleanup auth', cleanup)
diff --git a/deps/uv/AUTHORS b/deps/uv/AUTHORS
index 6929fff222cd91..a0e3dd46f092f4 100644
--- a/deps/uv/AUTHORS
+++ b/deps/uv/AUTHORS
@@ -181,3 +181,19 @@ Johan Bergström
Alex Mo
Luis Martinez de Bartolome
Michael Penick
+Michael
+Massimiliano Torromeo
+TomCrypto
+Brett Vickers
+Ole André Vadla Ravnås
+Kazuho Oku
+Ryan Phillips
+Brian Green
+Devchandra Meetei Leishangthem
+Corey Farrell
+Per Nilsson
+Alan Rogers
+Daryl Haresign
+Rui Abreu Ferreira
+João Reis
+farblue68
diff --git a/deps/uv/ChangeLog b/deps/uv/ChangeLog
index 7bbe5027a7aa28..e5c79808d2e1cc 100644
--- a/deps/uv/ChangeLog
+++ b/deps/uv/ChangeLog
@@ -1,4 +1,108 @@
-2015.02.27, Version 1.4.2 (Stable)
+2015.05.07, Version 1.5.0 (Stable), 4e77f74c7b95b639b3397095db1bc5bcc016c203
+
+Changes since version 1.4.2:
+
+* doc: clarify that the thread pool primitives are not thread safe (Andrius
+  Bentkus)
+
+* aix: always deregister closing fds from epoll (Michael)
+
+* unix: fix glibc-2.20+ macro incompatibility (Massimiliano Torromeo)
+
+* doc: add Sphinx plugin for generating links to man pages (Saúl Ibarra
+ Corretgé)
+
+* doc: link system and library calls to man pages (Saúl Ibarra Corretgé)
+
+* doc: document uv_getnameinfo_t.{host|service} (Saúl Ibarra Corretgé)
+
+* build: update the location of gyp (Stephen von Takach)
+
+* win: name all anonymous structs and unions (TomCrypto)
+
+* linux: work around epoll bug in kernels 3.10-3.19 (Ben Noordhuis)
+
+* darwin: fix size calculation in select() fallback (Ole André Vadla Ravnås)
+
+* solaris: fix setsockopt for multicast options (Julien Gilli)
+
+* test: fix race condition in multithreaded test (Ben Noordhuis)
+
+* doc: fix long lines in tty.rst (Ben Noordhuis)
+
+* test: use UV_TTY_MODE_* values in tty test (Ben Noordhuis)
+
+* unix: don't clobber errno in uv_tty_reset_mode() (Ben Noordhuis)
+
+* unix: reject non-tty fds in uv_tty_init() (Ben Noordhuis)
+
+* win: fix pipe blocking writes (Alexis Campailla)
+
+* build: fix cross-compiling for iOS (Steven Kabbes)
+
+* win: remove unnecessary malloc.h
+
+* include: use `extern "c++"` for defining C++ code (Kazuho Oku)
+
+* unix: reap child on execvp() failure (Ryan Phillips)
+
+* windows: fix handle leak on EMFILE (Brian Green)
+
+* test: fix tty_file, close handle if initialized (Saúl Ibarra Corretgé)
+
+* doc: clarify what uv_*_open accepts (Saúl Ibarra Corretgé)
+
+* doc: clarify that we don't maintain external doc resources (Saúl Ibarra
+ Corretgé)
+
+* build: add documentation for ninja support (Devchandra Meetei Leishangthem)
+
+* doc: document uv_buf_t members (Corey Farrell)
+
+* linux: fix epoll_pwait() fallback on arm64 (Ben Noordhuis)
+
+* android: fix compilation warning (Saúl Ibarra Corretgé)
+
+* unix: don't close the fds we just setup (Sam Roberts)
+
+* test: spawn child replacing std{out,err} to stderr (Saúl Ibarra Corretgé)
+
+* unix: fix swapping fds order in uv_spawn (Saúl Ibarra Corretgé)
+
+* unix: fix potential bug if dup2 fails in uv_spawn (Saúl Ibarra Corretgé)
+
+* test: remove LOG and LOGF variadic macros (Saúl Ibarra Corretgé)
+
+* win: fix uv_fs_access on directories (Saúl Ibarra Corretgé)
+
+* win: fix of double free in uv_uptime (Per Nilsson)
+
+* unix: open "/dev/null" instead of "/" for emfile_fd (Alan Rogers)
+
+* docs: add some missing words (Daryl Haresign)
+
+* unix: clean up uv_fs_open() O_CLOEXEC logic (Ben Noordhuis)
+
+* build: set SONAME for shared library in uv.gyp (Rui Abreu Ferreira)
+
+* windows: define snprintf replacement as inline instead of static (Rui Abreu
+ Ferreira)
+
+* win: fix unlink of readonly files (João Reis)
+
+* doc: fix uv_run(UV_RUN_DEFAULT) description (Ben Noordhuis)
+
+* linux: intercept syscall when running under memory sanitizer (Keno Fischer)
+
+* aix: fix uv_interface_addresses return value (farblue68)
+
+* windows: defer reporting TCP write failure until next tick (Saúl Ibarra
+ Corretgé)
+
+* test: add test for deferred TCP write failure (Saúl Ibarra Corretgé)
+
+
+2015.02.27, Version 1.4.2 (Stable), 1a7391348a11d5450c0f69c828d5302e2cb842eb
Changes since version 1.4.1:
diff --git a/deps/uv/Makefile.am b/deps/uv/Makefile.am
index 9c511db47a6c8f..b9fb80c6738b52 100644
--- a/deps/uv/Makefile.am
+++ b/deps/uv/Makefile.am
@@ -226,6 +226,7 @@ test_run_tests_SOURCES = test/blackhole-server.c \
test/test-tcp-write-to-half-open-connection.c \
test/test-tcp-write-after-connect.c \
test/test-tcp-writealot.c \
+ test/test-tcp-write-fail.c \
test/test-tcp-try-write.c \
test/test-tcp-write-queue-order.c \
test/test-thread-equal.c \
diff --git a/deps/uv/README.md b/deps/uv/README.md
index a267f0d5b527e5..a7da8b898c581d 100644
--- a/deps/uv/README.md
+++ b/deps/uv/README.md
@@ -72,19 +72,23 @@ NOTE: Windows users need to use make.bat instead of plain 'make'.
Documentation can be browsed online [here](http://docs.libuv.org).
+The [tests and benchmarks](https://github.com/libuv/libuv/tree/master/test)
+also serve as API specification and usage examples.
+
### Other resources
* [An Introduction to libuv](http://nikhilm.github.com/uvbook/)
— An overview of libuv with tutorials.
* [LXJS 2012 talk](http://www.youtube.com/watch?v=nGn60vDSxQ4)
— High-level introductory talk about libuv.
- * [Tests and benchmarks](https://github.com/libuv/libuv/tree/master/test)
- — API specification and usage examples.
* [libuv-dox](https://github.com/thlorenz/libuv-dox)
— Documenting types and methods of libuv, mostly by reading uv.h.
* [learnuv](https://github.com/thlorenz/learnuv)
— Learn uv for fun and profit, a self guided workshop to libuv.
+These resources are not handled by libuv maintainers and might be out of
+date. Please verify them before opening new issues.
+
## Build Instructions
For GCC there are two build methods: via autotools or via [GYP][].
@@ -113,8 +117,6 @@ To have GYP generate build script for another system, checkout GYP into the
project tree manually:
$ git clone https://chromium.googlesource.com/external/gyp.git build/gyp
- OR
- $ svn co http://gyp.googlecode.com/svn/trunk build/gyp
### Unix
@@ -153,6 +155,15 @@ Run:
Note for UNIX users: compile your project with `-D_LARGEFILE_SOURCE` and
`-D_FILE_OFFSET_BITS=64`. GYP builds take care of that automatically.
+### Using Ninja
+
+To use ninja for build on ninja supported platforms, run:
+
+ $ ./gyp_uv.py -f ninja
+ $ ninja -C out/Debug #for debug build OR
+ $ ninja -C out/Release
+
+
### Running tests
Run:
diff --git a/deps/uv/configure.ac b/deps/uv/configure.ac
index 9473d5ffcb09d2..71cb4704138c57 100644
--- a/deps/uv/configure.ac
+++ b/deps/uv/configure.ac
@@ -13,7 +13,7 @@
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
AC_PREREQ(2.57)
-AC_INIT([libuv], [1.4.2], [https://github.com/libuv/libuv/issues])
+AC_INIT([libuv], [1.5.0], [https://github.com/libuv/libuv/issues])
AC_CONFIG_MACRO_DIR([m4])
m4_include([m4/libuv-extra-automake-flags.m4])
m4_include([m4/as_case.m4])
diff --git a/deps/uv/docs/src/conf.py b/deps/uv/docs/src/conf.py
index f614fc5b434b24..b9eaa137432dea 100644
--- a/deps/uv/docs/src/conf.py
+++ b/deps/uv/docs/src/conf.py
@@ -38,7 +38,7 @@ def get_libuv_version():
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.insert(0, os.path.abspath('.'))
+sys.path.insert(0, os.path.abspath('sphinx-plugins'))
# -- General configuration ------------------------------------------------
@@ -48,7 +48,7 @@ def get_libuv_version():
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
-extensions = []
+extensions = ['manpage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
diff --git a/deps/uv/docs/src/design.rst b/deps/uv/docs/src/design.rst
index 63141bedf58438..34c3cff68e54ca 100644
--- a/deps/uv/docs/src/design.rst
+++ b/deps/uv/docs/src/design.rst
@@ -40,7 +40,7 @@ The I/O loop
The I/O (or event) loop is the central part of libuv. It establishes the content for all I/O
operations, and it's meant to be tied to a single thread. One can run multiple event loops
as long as each runs in a different thread. The libuv event loop (or any other API involving
-the loop or handles, for that matter) **is not thread-safe** except stated otherwise.
+the loop or handles, for that matter) **is not thread-safe** except where stated otherwise.
The event loop follows the rather usual single threaded asynchronous I/O approach: all (network)
I/O is performed on non-blocking sockets which are polled using the best mechanism available
@@ -113,7 +113,7 @@ stages of a loop iteration:
.. note::
While the polling mechanism is different, libuv makes the execution model consistent
- Unix systems and Windows.
+ across Unix systems and Windows.
File I/O
diff --git a/deps/uv/docs/src/dns.rst b/deps/uv/docs/src/dns.rst
index 3b15377f91e419..1d881580966315 100644
--- a/deps/uv/docs/src/dns.rst
+++ b/deps/uv/docs/src/dns.rst
@@ -51,6 +51,18 @@ Public members
Loop that started this getnameinfo request and where completion will be
reported. Readonly.
+.. c:member:: char[NI_MAXHOST] uv_getnameinfo_t.host
+
+ Char array containing the resulting host. It's null terminated.
+
+ .. versionchanged:: 1.3.0 the field is declared as public.
+
+.. c:member:: char[NI_MAXSERV] uv_getnameinfo_t.service
+
+ Char array containing the resulting service. It's null terminated.
+
+ .. versionchanged:: 1.3.0 the field is declared as public.
+
.. seealso:: The :c:type:`uv_req_t` members also apply.
@@ -59,7 +71,7 @@ API
.. c:function:: int uv_getaddrinfo(uv_loop_t* loop, uv_getaddrinfo_t* req, uv_getaddrinfo_cb getaddrinfo_cb, const char* node, const char* service, const struct addrinfo* hints)
- Asynchronous ``getaddrinfo(3)``.
+ Asynchronous :man:`getaddrinfo(3)`.
Either node or service may be NULL but not both.
@@ -84,7 +96,7 @@ API
.. c:function:: int uv_getnameinfo(uv_loop_t* loop, uv_getnameinfo_t* req, uv_getnameinfo_cb getnameinfo_cb, const struct sockaddr* addr, int flags)
- Asynchronous ``getnameinfo(3)``.
+ Asynchronous :man:`getnameinfo(3)`.
Returns 0 on success or an error code < 0 on failure. If successful, the
callback will get called sometime in the future with the lookup result.
diff --git a/deps/uv/docs/src/fs.rst b/deps/uv/docs/src/fs.rst
index cd535f756fc1c9..c2a3fc252a1ffe 100644
--- a/deps/uv/docs/src/fs.rst
+++ b/deps/uv/docs/src/fs.rst
@@ -162,46 +162,46 @@ API
.. c:function:: int uv_fs_close(uv_loop_t* loop, uv_fs_t* req, uv_file file, uv_fs_cb cb)
- Equivalent to ``close(2)``.
+ Equivalent to :man:`close(2)`.
.. c:function:: int uv_fs_open(uv_loop_t* loop, uv_fs_t* req, const char* path, int flags, int mode, uv_fs_cb cb)
- Equivalent to ``open(2)``.
+ Equivalent to :man:`open(2)`.
.. c:function:: int uv_fs_read(uv_loop_t* loop, uv_fs_t* req, uv_file file, const uv_buf_t bufs[], unsigned int nbufs, int64_t offset, uv_fs_cb cb)
- Equivalent to ``preadv(2)``.
+ Equivalent to :man:`preadv(2)`.
.. c:function:: int uv_fs_unlink(uv_loop_t* loop, uv_fs_t* req, const char* path, uv_fs_cb cb)
- Equivalent to ``unlink(2)``.
+ Equivalent to :man:`unlink(2)`.
.. c:function:: int uv_fs_write(uv_loop_t* loop, uv_fs_t* req, uv_file file, const uv_buf_t bufs[], unsigned int nbufs, int64_t offset, uv_fs_cb cb)
- Equivalent to ``pwritev(2)``.
+ Equivalent to :man:`pwritev(2)`.
.. c:function:: int uv_fs_mkdir(uv_loop_t* loop, uv_fs_t* req, const char* path, int mode, uv_fs_cb cb)
- Equivalent to ``mkdir(2)``.
+ Equivalent to :man:`mkdir(2)`.
.. note::
`mode` is currently not implemented on Windows.
.. c:function:: int uv_fs_mkdtemp(uv_loop_t* loop, uv_fs_t* req, const char* tpl, uv_fs_cb cb)
- Equivalent to ``mkdtemp(3)``.
+ Equivalent to :man:`mkdtemp(3)`.
.. note::
The result can be found as a null terminated string at `req->path`.
.. c:function:: int uv_fs_rmdir(uv_loop_t* loop, uv_fs_t* req, const char* path, uv_fs_cb cb)
- Equivalent to ``rmdir(2)``.
+ Equivalent to :man:`rmdir(2)`.
.. c:function:: int uv_fs_scandir(uv_loop_t* loop, uv_fs_t* req, const char* path, int flags, uv_fs_cb cb)
.. c:function:: int uv_fs_scandir_next(uv_fs_t* req, uv_dirent_t* ent)
- Equivalent to ``scandir(3)``, with a slightly different API. Once the callback
+ Equivalent to :man:`scandir(3)`, with a slightly different API. Once the callback
for the request is called, the user can use :c:func:`uv_fs_scandir_next` to
get `ent` populated with the next directory entry data. When there are no
more entries ``UV_EOF`` will be returned.
@@ -210,49 +210,49 @@ API
.. c:function:: int uv_fs_fstat(uv_loop_t* loop, uv_fs_t* req, uv_file file, uv_fs_cb cb)
.. c:function:: int uv_fs_lstat(uv_loop_t* loop, uv_fs_t* req, const char* path, uv_fs_cb cb)
- Equivalent to ``(f/l)stat(2)``.
+ Equivalent to :man:`stat(2)`, :man:`fstat(2)` and :man:`lstat(2)` respectively.
.. c:function:: int uv_fs_rename(uv_loop_t* loop, uv_fs_t* req, const char* path, const char* new_path, uv_fs_cb cb)
- Equivalent to ``rename(2)``.
+ Equivalent to :man:`rename(2)`.
.. c:function:: int uv_fs_fsync(uv_loop_t* loop, uv_fs_t* req, uv_file file, uv_fs_cb cb)
- Equivalent to ``fsync(2)``.
+ Equivalent to :man:`fsync(2)`.
.. c:function:: int uv_fs_fdatasync(uv_loop_t* loop, uv_fs_t* req, uv_file file, uv_fs_cb cb)
- Equivalent to ``fdatasync(2)``.
+ Equivalent to :man:`fdatasync(2)`.
.. c:function:: int uv_fs_ftruncate(uv_loop_t* loop, uv_fs_t* req, uv_file file, int64_t offset, uv_fs_cb cb)
- Equivalent to ``ftruncate(2)``.
+ Equivalent to :man:`ftruncate(2)`.
.. c:function:: int uv_fs_sendfile(uv_loop_t* loop, uv_fs_t* req, uv_file out_fd, uv_file in_fd, int64_t in_offset, size_t length, uv_fs_cb cb)
- Limited equivalent to ``sendfile(2)``.
+ Limited equivalent to :man:`sendfile(2)`.
.. c:function:: int uv_fs_access(uv_loop_t* loop, uv_fs_t* req, const char* path, int mode, uv_fs_cb cb)
- Equivalent to ``access(2)`` on Unix. Windows uses ``GetFileAttributesW()``.
+ Equivalent to :man:`access(2)` on Unix. Windows uses ``GetFileAttributesW()``.
.. c:function:: int uv_fs_chmod(uv_loop_t* loop, uv_fs_t* req, const char* path, int mode, uv_fs_cb cb)
.. c:function:: int uv_fs_fchmod(uv_loop_t* loop, uv_fs_t* req, uv_file file, int mode, uv_fs_cb cb)
- Equivalent to ``(f)chmod(2)``.
+ Equivalent to :man:`chmod(2)` and :man:`fchmod(2)` respectively.
.. c:function:: int uv_fs_utime(uv_loop_t* loop, uv_fs_t* req, const char* path, double atime, double mtime, uv_fs_cb cb)
.. c:function:: int uv_fs_futime(uv_loop_t* loop, uv_fs_t* req, uv_file file, double atime, double mtime, uv_fs_cb cb)
- Equivalent to ``(f)utime(s)(2)``.
+ Equivalent to :man:`utime(2)` and :man:`futime(2)` respectively.
.. c:function:: int uv_fs_link(uv_loop_t* loop, uv_fs_t* req, const char* path, const char* new_path, uv_fs_cb cb)
- Equivalent to ``link(2)``.
+ Equivalent to :man:`link(2)`.
.. c:function:: int uv_fs_symlink(uv_loop_t* loop, uv_fs_t* req, const char* path, const char* new_path, int flags, uv_fs_cb cb)
- Equivalent to ``symlink(2)``.
+ Equivalent to :man:`symlink(2)`.
.. note::
On Windows the `flags` parameter can be specified to control how the symlink will
@@ -265,12 +265,12 @@ API
.. c:function:: int uv_fs_readlink(uv_loop_t* loop, uv_fs_t* req, const char* path, uv_fs_cb cb)
- Equivalent to ``readlink(2)``.
+ Equivalent to :man:`readlink(2)`.
.. c:function:: int uv_fs_chown(uv_loop_t* loop, uv_fs_t* req, const char* path, uv_uid_t uid, uv_gid_t gid, uv_fs_cb cb)
.. c:function:: int uv_fs_fchown(uv_loop_t* loop, uv_fs_t* req, uv_file file, uv_uid_t uid, uv_gid_t gid, uv_fs_cb cb)
- Equivalent to ``(f)chown(2)``.
+ Equivalent to :man:`chown(2)` and :man:`fchown(2)` respectively.
.. note::
These functions are not implemented on Windows.
diff --git a/deps/uv/docs/src/loop.rst b/deps/uv/docs/src/loop.rst
index 203672bd34f00a..2a01d796375e8e 100644
--- a/deps/uv/docs/src/loop.rst
+++ b/deps/uv/docs/src/loop.rst
@@ -92,7 +92,9 @@ API
specified mode:
- UV_RUN_DEFAULT: Runs the event loop until there are no more active and
- referenced handles or requests. Always returns zero.
+ referenced handles or requests. Returns non-zero if :c:func:`uv_stop`
+ was called and there are still active handles or requests. Returns
+ zero in all other cases.
- UV_RUN_ONCE: Poll for i/o once. Note that this function blocks if
there are no pending callbacks. Returns zero when done (no active handles
or requests left), or non-zero if more callbacks are expected (meaning
diff --git a/deps/uv/docs/src/misc.rst b/deps/uv/docs/src/misc.rst
index 10c349e9b736f3..bb97a260057fc8 100644
--- a/deps/uv/docs/src/misc.rst
+++ b/deps/uv/docs/src/misc.rst
@@ -15,6 +15,17 @@ Data types
Buffer data type.
+ .. c:member:: char* uv_buf_t.base
+
+ Pointer to the base of the buffer. Readonly.
+
+ .. c:member:: size_t uv_buf_t.len
+
+ Total bytes in the buffer. Readonly.
+
+ .. note::
+ On Windows this field is ULONG.
+
.. c:type:: uv_file
Cross platform representation of a file handle.
@@ -26,7 +37,7 @@ Data types
.. c:type:: uv_os_fd_t
Abstract representation of a file descriptor. On Unix systems this is a
- `typedef` of `int` and on Windows fa `HANDLE`.
+ `typedef` of `int` and on Windows a `HANDLE`.
.. c:type:: uv_rusage_t
@@ -101,7 +112,8 @@ API
descriptor. Usually this will be used during initialization to guess the
type of the stdio streams.
- For ``isatty()`` functionality use this function and test for ``UV_TTY``.
+ For :man:`isatty(3)` equivalent functionality use this function and test
+ for ``UV_TTY``.
.. c:function:: unsigned int uv_version(void)
@@ -195,8 +207,8 @@ API
.. c:function:: int uv_inet_ntop(int af, const void* src, char* dst, size_t size)
.. c:function:: int uv_inet_pton(int af, const char* src, void* dst)
- Cross-platform IPv6-capable implementation of the 'standard' ``inet_ntop()``
- and ``inet_pton()`` functions. On success they return 0. In case of error
+ Cross-platform IPv6-capable implementation of :man:`inet_ntop(3)`
+ and :man:`inet_pton(3)`. On success they return 0. In case of error
the target `dst` pointer is unmodified.
.. c:function:: int uv_exepath(char* buffer, size_t* size)
diff --git a/deps/uv/docs/src/pipe.rst b/deps/uv/docs/src/pipe.rst
index 8f8402c29bbadd..df896a0583447f 100644
--- a/deps/uv/docs/src/pipe.rst
+++ b/deps/uv/docs/src/pipe.rst
@@ -40,6 +40,10 @@ API
.. versionchanged:: 1.2.1 the file descriptor is set to non-blocking mode.
+ .. note::
+ The passed file descriptor or HANDLE is not checked for its type, but
+ it's required that it represents a valid pipe.
+
.. c:function:: int uv_pipe_bind(uv_pipe_t* handle, const char* name)
Bind the pipe to a file path (Unix) or a name (Windows).
diff --git a/deps/uv/docs/src/poll.rst b/deps/uv/docs/src/poll.rst
index 907cb1a613dfbb..6dc41839ac1e92 100644
--- a/deps/uv/docs/src/poll.rst
+++ b/deps/uv/docs/src/poll.rst
@@ -5,7 +5,7 @@
===================================
Poll handles are used to watch file descriptors for readability and
-writability, similar to the purpose of poll(2).
+writability, similar to the purpose of :man:`poll(2)`.
The purpose of poll handles is to enable integrating external libraries that
rely on the event loop to signal it about the socket status changes, like
@@ -29,7 +29,7 @@ closed immediately after a call to :c:func:`uv_poll_stop` or :c:func:`uv_close`.
.. note::
On windows only sockets can be polled with poll handles. On Unix any file
- descriptor that would be accepted by poll(2) can be used.
+ descriptor that would be accepted by :man:`poll(2)` can be used.
Data types
diff --git a/deps/uv/docs/src/sphinx-plugins/manpage.py b/deps/uv/docs/src/sphinx-plugins/manpage.py
new file mode 100644
index 00000000000000..1d1dc379f410ee
--- /dev/null
+++ b/deps/uv/docs/src/sphinx-plugins/manpage.py
@@ -0,0 +1,46 @@
+# encoding: utf-8
+
+#
+# Copyright (c) 2013 Dariusz Dwornikowski. All rights reserved.
+#
+# Adapted from https://github.com/tdi/sphinxcontrib-manpage
+# License: Apache 2
+#
+
+
+import re
+
+from docutils import nodes, utils
+from docutils.parsers.rst.roles import set_classes
+from string import Template
+
+
+def make_link_node(rawtext, app, name, manpage_num, options):
+ ref = app.config.man_url_regex
+ if not ref:
+ ref = "http://linux.die.net/man/%s/%s" % (manpage_num, name)
+ else:
+ s = Template(ref)
+ ref = s.substitute(num=manpage_num, topic=name)
+ set_classes(options)
+ node = nodes.reference(rawtext, "%s(%s)" % (name, manpage_num), refuri=ref, **options)
+ return node
+
+
+def man_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
+ app = inliner.document.settings.env.app
+ p = re.compile("([a-zA-Z0-9_\.-_]+)\((\d)\)")
+ m = p.match(text)
+
+ manpage_num = m.group(2)
+ name = m.group(1)
+ node = make_link_node(rawtext, app, name, manpage_num, options)
+ return [node], []
+
+
+def setup(app):
+ app.info('Initializing manpage plugin')
+ app.add_role('man', man_role)
+ app.add_config_value('man_url_regex', None, 'env')
+ return
+
diff --git a/deps/uv/docs/src/stream.rst b/deps/uv/docs/src/stream.rst
index 1f6682adc1cfc2..880f0e2ebc75d3 100644
--- a/deps/uv/docs/src/stream.rst
+++ b/deps/uv/docs/src/stream.rst
@@ -104,7 +104,7 @@ API
.. c:function:: int uv_listen(uv_stream_t* stream, int backlog, uv_connection_cb cb)
Start listening for incoming connections. `backlog` indicates the number of
- connections the kernel might queue, same as ``listen(2)``. When a new
+ connections the kernel might queue, same as :man:`listen(2)`. When a new
incoming connection is received the :c:type:`uv_connection_cb` callback is
called.
diff --git a/deps/uv/docs/src/tcp.rst b/deps/uv/docs/src/tcp.rst
index 8baedde86c5c15..2b5d268ddd86e6 100644
--- a/deps/uv/docs/src/tcp.rst
+++ b/deps/uv/docs/src/tcp.rst
@@ -38,6 +38,10 @@ API
.. versionchanged:: 1.2.1 the file descriptor is set to non-blocking mode.
+ .. note::
+ The passed file descriptor or SOCKET is not checked for its type, but
+ it's required that it represents a valid stream socket.
+
.. c:function:: int uv_tcp_nodelay(uv_tcp_t* handle, int enable)
Enable / disable Nagle's algorithm.
diff --git a/deps/uv/docs/src/threadpool.rst b/deps/uv/docs/src/threadpool.rst
index 89f00844ef2c65..18949507e75004 100644
--- a/deps/uv/docs/src/threadpool.rst
+++ b/deps/uv/docs/src/threadpool.rst
@@ -18,6 +18,10 @@ libuv preallocates and initializes the maximum number of threads allowed by
``UV_THREADPOOL_SIZE``. This causes a relatively minor memory overhead
(~1MB for 128 threads) but increases the performance of threading at runtime.
+.. note::
+ Note that even though a global thread pool which is shared across all event
+ loops is used, the functions are not thread safe.
+
Data types
----------
diff --git a/deps/uv/docs/src/tty.rst b/deps/uv/docs/src/tty.rst
index 6c20c84bf75e58..18f34ef46d97b5 100644
--- a/deps/uv/docs/src/tty.rst
+++ b/deps/uv/docs/src/tty.rst
@@ -24,14 +24,14 @@ Data types
::
- typedef enum {
- /* Initial/normal terminal mode */
- UV_TTY_MODE_NORMAL,
- /* Raw input mode (On Windows, ENABLE_WINDOW_INPUT is also enabled) */
- UV_TTY_MODE_RAW,
- /* Binary-safe I/O mode for IPC (Unix-only) */
- UV_TTY_MODE_IO
- } uv_tty_mode_t;
+ typedef enum {
+ /* Initial/normal terminal mode */
+ UV_TTY_MODE_NORMAL,
+ /* Raw input mode (On Windows, ENABLE_WINDOW_INPUT is also enabled) */
+ UV_TTY_MODE_RAW,
+ /* Binary-safe I/O mode for IPC (Unix-only) */
+ UV_TTY_MODE_IO
+ } uv_tty_mode_t;
@@ -58,18 +58,22 @@ API
`readable`, specifies if you plan on calling :c:func:`uv_read_start` with
this stream. stdin is readable, stdout is not.
- On Unix this function will try to open ``/dev/tty`` and use it if the passed file
- descriptor refers to a TTY. This lets libuv put the tty in non-blocking mode
- without affecting other processes that share the tty.
+ On Unix this function will try to open ``/dev/tty`` and use it if the passed
+ file descriptor refers to a TTY. This lets libuv put the tty in non-blocking
+ mode without affecting other processes that share the tty.
.. note::
- If opening ``/dev/tty`` fails, libuv falls back to blocking writes for non-readable
- TTY streams.
+ If opening ``/dev/tty`` fails, libuv falls back to blocking writes for
+ non-readable TTY streams.
+
+ .. versionchanged:: 1.5.0: trying to initialize a TTY stream with a file
+ descriptor that refers to a file returns `UV_EINVAL`
+ on UNIX.
.. c:function:: int uv_tty_set_mode(uv_tty_t*, uv_tty_mode_t mode)
- .. versionchanged:: 1.2.0: the mode is specified as a :c:type:`uv_tty_mode_t`
- value.
+ .. versionchanged:: 1.2.0: the mode is specified as a
+ :c:type:`uv_tty_mode_t` value.
Set the TTY using the specified terminal mode.
diff --git a/deps/uv/docs/src/udp.rst b/deps/uv/docs/src/udp.rst
index 9c4aa2102bdcfc..ec7ce56d38f52c 100644
--- a/deps/uv/docs/src/udp.rst
+++ b/deps/uv/docs/src/udp.rst
@@ -122,6 +122,10 @@ API
.. versionchanged:: 1.2.1 the file descriptor is set to non-blocking mode.
+ .. note::
+ The passed file descriptor or SOCKET is not checked for its type, but
+ it's required that it represents a valid datagram socket.
+
.. c:function:: int uv_udp_bind(uv_udp_t* handle, const struct sockaddr* addr, unsigned int flags)
Bind the UDP handle to an IP address and port.
diff --git a/deps/uv/include/uv-version.h b/deps/uv/include/uv-version.h
index 836d399d774a32..0b4e6d781c35f6 100644
--- a/deps/uv/include/uv-version.h
+++ b/deps/uv/include/uv-version.h
@@ -31,8 +31,8 @@
*/
#define UV_VERSION_MAJOR 1
-#define UV_VERSION_MINOR 4
-#define UV_VERSION_PATCH 2
+#define UV_VERSION_MINOR 5
+#define UV_VERSION_PATCH 0
#define UV_VERSION_IS_RELEASE 1
#define UV_VERSION_SUFFIX ""
diff --git a/deps/uv/include/uv-win.h b/deps/uv/include/uv-win.h
index 24b22b31a9562b..fd844202b99a1a 100644
--- a/deps/uv/include/uv-win.h
+++ b/deps/uv/include/uv-win.h
@@ -43,16 +43,6 @@ typedef struct pollfd {
# define LOCALE_INVARIANT 0x007f
#endif
-#ifndef _malloca
-# if defined(_DEBUG)
-# define _malloca(size) malloc(size)
-# define _freea(ptr) free(ptr)
-# else
-# define _malloca(size) alloca(size)
-# define _freea(ptr)
-# endif
-#endif
-
#include
#include
#include
@@ -366,8 +356,8 @@ RB_HEAD(uv_timer_tree_s, uv_timer_s);
struct { \
OVERLAPPED overlapped; \
size_t queued_bytes; \
- }; \
- }; \
+ } io; \
+ } u; \
struct uv_req_s* next_req;
#define UV_WRITE_PRIVATE_FIELDS \
@@ -419,9 +409,9 @@ RB_HEAD(uv_timer_tree_s, uv_timer_s);
int activecnt; \
uv_read_t read_req; \
union { \
- struct { uv_stream_connection_fields }; \
- struct { uv_stream_server_fields }; \
- };
+ struct { uv_stream_connection_fields } conn; \
+ struct { uv_stream_server_fields } serv; \
+ } stream;
#define uv_tcp_server_fields \
uv_tcp_accept_t* accept_reqs; \
@@ -437,9 +427,9 @@ RB_HEAD(uv_timer_tree_s, uv_timer_s);
SOCKET socket; \
int delayed_error; \
union { \
- struct { uv_tcp_server_fields }; \
- struct { uv_tcp_connection_fields }; \
- };
+ struct { uv_tcp_server_fields } serv; \
+ struct { uv_tcp_connection_fields } conn; \
+ } tcp;
#define UV_UDP_PRIVATE_FIELDS \
SOCKET socket; \
@@ -476,9 +466,9 @@ RB_HEAD(uv_timer_tree_s, uv_timer_s);
HANDLE handle; \
WCHAR* name; \
union { \
- struct { uv_pipe_server_fields }; \
- struct { uv_pipe_connection_fields }; \
- };
+ struct { uv_pipe_server_fields } serv; \
+ struct { uv_pipe_connection_fields } conn; \
+ } pipe;
/* TODO: put the parser states in an union - TTY handles are always */
/* half-duplex so read-state can safely overlap write-state. */
@@ -496,7 +486,7 @@ RB_HEAD(uv_timer_tree_s, uv_timer_s);
unsigned char last_key_len; \
WCHAR last_utf16_high_surrogate; \
INPUT_RECORD last_input_record; \
- }; \
+ } rd; \
struct { \
/* Used for writable TTY handles */ \
/* utf8-to-utf16 conversion state */ \
@@ -510,8 +500,8 @@ RB_HEAD(uv_timer_tree_s, uv_timer_s);
unsigned short ansi_csi_argv[4]; \
COORD saved_position; \
WORD saved_attributes; \
- }; \
- };
+ } wr; \
+ } tty;
#define UV_POLL_PRIVATE_FIELDS \
SOCKET socket; \
@@ -600,7 +590,7 @@ RB_HEAD(uv_timer_tree_s, uv_timer_s);
/* TODO: remove me in 0.9. */ \
WCHAR* pathw; \
int fd; \
- }; \
+ } file; \
union { \
struct { \
int mode; \
@@ -611,12 +601,12 @@ RB_HEAD(uv_timer_tree_s, uv_timer_s);
uv_buf_t* bufs; \
int64_t offset; \
uv_buf_t bufsml[4]; \
- }; \
+ } info; \
struct { \
double atime; \
double mtime; \
- }; \
- };
+ } time; \
+ } fs;
#define UV_WORK_PRIVATE_FIELDS \
struct uv__work work_req;
diff --git a/deps/uv/include/uv.h b/deps/uv/include/uv.h
index 55f75218b58682..75b3a4a5d2f8bf 100644
--- a/deps/uv/include/uv.h
+++ b/deps/uv/include/uv.h
@@ -644,13 +644,13 @@ UV_EXTERN int uv_tty_reset_mode(void);
UV_EXTERN int uv_tty_get_winsize(uv_tty_t*, int* width, int* height);
#ifdef __cplusplus
-} /* extern "C" */
+extern "C++" {
inline int uv_tty_set_mode(uv_tty_t* handle, int mode) {
return uv_tty_set_mode(handle, static_cast(mode));
}
-extern "C" {
+}
#endif
UV_EXTERN uv_handle_type uv_guess_handle(uv_file file);
@@ -799,6 +799,7 @@ struct uv_getnameinfo_s {
UV_REQ_FIELDS
/* read-only */
uv_loop_t* loop;
+ /* host and service are marked as private, but they really aren't. */
UV_GETNAMEINFO_PRIVATE_FIELDS
};
diff --git a/deps/uv/src/unix/aix.c b/deps/uv/src/unix/aix.c
index ec800c7a323f4b..e21a9cc78b6979 100644
--- a/deps/uv/src/unix/aix.c
+++ b/deps/uv/src/unix/aix.c
@@ -1111,19 +1111,19 @@ int uv_interface_addresses(uv_interface_address_t** addresses,
*count = 0;
if (0 > (sockfd = socket(AF_INET, SOCK_DGRAM, IPPROTO_IP))) {
- return -ENOSYS;
+ return -errno;
}
if (ioctl(sockfd, SIOCGSIZIFCONF, &size) == -1) {
- uv__close(sockfd);
- return -ENOSYS;
+ SAVE_ERRNO(uv__close(sockfd));
+ return -errno;
}
ifc.ifc_req = (struct ifreq*)malloc(size);
ifc.ifc_len = size;
if (ioctl(sockfd, SIOCGIFCONF, &ifc) == -1) {
- uv__close(sockfd);
- return -ENOSYS;
+ SAVE_ERRNO(uv__close(sockfd));
+ return -errno;
}
#define ADDR_SIZE(p) MAX((p).sa_len, sizeof(p))
@@ -1141,8 +1141,8 @@ int uv_interface_addresses(uv_interface_address_t** addresses,
memcpy(flg.ifr_name, p->ifr_name, sizeof(flg.ifr_name));
if (ioctl(sockfd, SIOCGIFFLAGS, &flg) == -1) {
- uv__close(sockfd);
- return -ENOSYS;
+ SAVE_ERRNO(uv__close(sockfd));
+ return -errno;
}
if (!(flg.ifr_flags & IFF_UP && flg.ifr_flags & IFF_RUNNING))
@@ -1218,16 +1218,23 @@ void uv__platform_invalidate_fd(uv_loop_t* loop, int fd) {
struct pollfd* events;
uintptr_t i;
uintptr_t nfds;
+ struct poll_ctl pc;
assert(loop->watchers != NULL);
events = (struct pollfd*) loop->watchers[loop->nwatchers];
nfds = (uintptr_t) loop->watchers[loop->nwatchers + 1];
- if (events == NULL)
- return;
- /* Invalidate events with same file descriptor */
- for (i = 0; i < nfds; i++)
- if ((int) events[i].fd == fd)
- events[i].fd = -1;
+ if (events != NULL)
+ /* Invalidate events with same file descriptor */
+ for (i = 0; i < nfds; i++)
+ if ((int) events[i].fd == fd)
+ events[i].fd = -1;
+
+ /* Remove the file descriptor from the poll set */
+ pc.events = 0;
+ pc.cmd = PS_DELETE;
+ pc.fd = fd;
+ if(loop->backend_fd >= 0)
+ pollset_ctl(loop->backend_fd, &pc, 1);
}
diff --git a/deps/uv/src/unix/android-ifaddrs.c b/deps/uv/src/unix/android-ifaddrs.c
index 3cda578dd1a94c..a99b0191d54808 100644
--- a/deps/uv/src/unix/android-ifaddrs.c
+++ b/deps/uv/src/unix/android-ifaddrs.c
@@ -24,6 +24,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "android-ifaddrs.h"
+#include "uv-common.h"
#include
#include
diff --git a/deps/uv/src/unix/fs.c b/deps/uv/src/unix/fs.c
index e7eee2f9abc4ed..7792801e9b6fd6 100644
--- a/deps/uv/src/unix/fs.c
+++ b/deps/uv/src/unix/fs.c
@@ -202,6 +202,44 @@ static ssize_t uv__fs_mkdtemp(uv_fs_t* req) {
}
+static ssize_t uv__fs_open(uv_fs_t* req) {
+ static int no_cloexec_support;
+ int r;
+
+ /* Try O_CLOEXEC before entering locks */
+ if (no_cloexec_support == 0) {
+#ifdef O_CLOEXEC
+ r = open(req->path, req->flags | O_CLOEXEC, req->mode);
+ if (r >= 0)
+ return r;
+ if (errno != EINVAL)
+ return r;
+ no_cloexec_support = 1;
+#endif /* O_CLOEXEC */
+ }
+
+ if (req->cb != NULL)
+ uv_rwlock_rdlock(&req->loop->cloexec_lock);
+
+ r = open(req->path, req->flags, req->mode);
+
+ /* In case of failure `uv__cloexec` will leave error in `errno`,
+ * so it is enough to just set `r` to `-1`.
+ */
+ if (r >= 0 && uv__cloexec(r, 1) != 0) {
+ r = uv__close(r);
+ if (r != 0 && r != -EINPROGRESS)
+ abort();
+ r = -1;
+ }
+
+ if (req->cb != NULL)
+ uv_rwlock_rdunlock(&req->loop->cloexec_lock);
+
+ return r;
+}
+
+
static ssize_t uv__fs_read(uv_fs_t* req) {
#if defined(__linux__)
static int no_preadv;
@@ -661,8 +699,22 @@ static void uv__to_stat(struct stat* src, uv_stat_t* dst) {
dst->st_birthtim.tv_nsec = src->st_birthtimespec.tv_nsec;
dst->st_flags = src->st_flags;
dst->st_gen = src->st_gen;
-#elif !defined(_AIX) && \
- (defined(_BSD_SOURCE) || defined(_SVID_SOURCE) || defined(_XOPEN_SOURCE))
+#elif defined(__ANDROID__)
+ dst->st_atim.tv_sec = src->st_atime;
+ dst->st_atim.tv_nsec = src->st_atime_nsec;
+ dst->st_mtim.tv_sec = src->st_mtime;
+ dst->st_mtim.tv_nsec = src->st_mtime_nsec;
+ dst->st_ctim.tv_sec = src->st_ctime;
+ dst->st_ctim.tv_nsec = src->st_ctime_nsec;
+ dst->st_birthtim.tv_sec = src->st_ctime;
+ dst->st_birthtim.tv_nsec = src->st_ctime_nsec;
+ dst->st_flags = 0;
+ dst->st_gen = 0;
+#elif !defined(_AIX) && ( \
+ defined(_BSD_SOURCE) || \
+ defined(_SVID_SOURCE) || \
+ defined(_XOPEN_SOURCE) || \
+ defined(_DEFAULT_SOURCE))
dst->st_atim.tv_sec = src->st_atim.tv_sec;
dst->st_atim.tv_nsec = src->st_atim.tv_nsec;
dst->st_mtim.tv_sec = src->st_mtim.tv_sec;
@@ -729,9 +781,6 @@ static void uv__fs_work(struct uv__work* w) {
int retry_on_eintr;
uv_fs_t* req;
ssize_t r;
-#ifdef O_CLOEXEC
- static int no_cloexec_support;
-#endif /* O_CLOEXEC */
req = container_of(w, uv_fs_t, work_req);
retry_on_eintr = !(req->fs_type == UV_FS_CLOSE);
@@ -760,6 +809,7 @@ static void uv__fs_work(struct uv__work* w) {
X(LINK, link(req->path, req->new_path));
X(MKDIR, mkdir(req->path, req->mode));
X(MKDTEMP, uv__fs_mkdtemp(req));
+ X(OPEN, uv__fs_open(req));
X(READ, uv__fs_read(req));
X(SCANDIR, uv__fs_scandir(req));
X(READLINK, uv__fs_readlink(req));
@@ -771,41 +821,10 @@ static void uv__fs_work(struct uv__work* w) {
X(UNLINK, unlink(req->path));
X(UTIME, uv__fs_utime(req));
X(WRITE, uv__fs_write(req));
- case UV_FS_OPEN:
-#ifdef O_CLOEXEC
- /* Try O_CLOEXEC before entering locks */
- if (!no_cloexec_support) {
- r = open(req->path, req->flags | O_CLOEXEC, req->mode);
- if (r >= 0)
- break;
- if (errno != EINVAL)
- break;
- no_cloexec_support = 1;
- }
-#endif /* O_CLOEXEC */
- if (req->cb != NULL)
- uv_rwlock_rdlock(&req->loop->cloexec_lock);
- r = open(req->path, req->flags, req->mode);
-
- /*
- * In case of failure `uv__cloexec` will leave error in `errno`,
- * so it is enough to just set `r` to `-1`.
- */
- if (r >= 0 && uv__cloexec(r, 1) != 0) {
- r = uv__close(r);
- if (r != 0 && r != -EINPROGRESS)
- abort();
- r = -1;
- }
- if (req->cb != NULL)
- uv_rwlock_rdunlock(&req->loop->cloexec_lock);
- break;
default: abort();
}
-
#undef X
- }
- while (r == -1 && errno == EINTR && retry_on_eintr);
+ } while (r == -1 && errno == EINTR && retry_on_eintr);
if (r == -1)
req->result = -errno;
diff --git a/deps/uv/src/unix/internal.h b/deps/uv/src/unix/internal.h
index 101dc745499f8d..31db5e29ea68b8 100644
--- a/deps/uv/src/unix/internal.h
+++ b/deps/uv/src/unix/internal.h
@@ -55,6 +55,9 @@
#define ACCESS_ONCE(type, var) \
(*(volatile type*) &(var))
+#define ROUND_UP(a, b) \
+ ((a) % (b) ? ((a) + (b)) - ((a) % (b)) : (a))
+
#define UNREACHABLE() \
do { \
assert(0 && "unreachable code"); \
diff --git a/deps/uv/src/unix/linux-core.c b/deps/uv/src/unix/linux-core.c
index 33a735dc674f14..d07494dd654bb3 100644
--- a/deps/uv/src/unix/linux-core.c
+++ b/deps/uv/src/unix/linux-core.c
@@ -130,8 +130,13 @@ void uv__platform_invalidate_fd(uv_loop_t* loop, int fd) {
*
* We pass in a dummy epoll_event, to work around a bug in old kernels.
*/
- if (loop->backend_fd >= 0)
+ if (loop->backend_fd >= 0) {
+ /* Work around a bug in kernels 3.10 to 3.19 where passing a struct that
+ * has the EPOLLWAKEUP flag set generates spurious audit syslog warnings.
+ */
+ memset(&dummy, 0, sizeof(dummy));
uv__epoll_ctl(loop->backend_fd, UV__EPOLL_CTL_DEL, fd, &dummy);
+ }
}
diff --git a/deps/uv/src/unix/linux-syscalls.c b/deps/uv/src/unix/linux-syscalls.c
index 7bf2c0f87dbea4..566e1f37cfed12 100644
--- a/deps/uv/src/unix/linux-syscalls.c
+++ b/deps/uv/src/unix/linux-syscalls.c
@@ -26,6 +26,13 @@
#include
#include
+#if defined(__has_feature)
+# if __has_feature(memory_sanitizer)
+# define MSAN_ACTIVE 1
+# include
+# endif
+#endif
+
#if defined(__i386__)
# ifndef __NR_socketcall
# define __NR_socketcall 102
@@ -310,7 +317,13 @@ int uv__epoll_wait(int epfd,
int nevents,
int timeout) {
#if defined(__NR_epoll_wait)
- return syscall(__NR_epoll_wait, epfd, events, nevents, timeout);
+ int result;
+ result = syscall(__NR_epoll_wait, epfd, events, nevents, timeout);
+#if MSAN_ACTIVE
+ if (result > 0)
+ __msan_unpoison(events, sizeof(events[0]) * result);
+#endif
+ return result;
#else
return errno = ENOSYS, -1;
#endif
@@ -323,13 +336,19 @@ int uv__epoll_pwait(int epfd,
int timeout,
uint64_t sigmask) {
#if defined(__NR_epoll_pwait)
- return syscall(__NR_epoll_pwait,
- epfd,
- events,
- nevents,
- timeout,
- &sigmask,
- sizeof(sigmask));
+ int result;
+ result = syscall(__NR_epoll_pwait,
+ epfd,
+ events,
+ nevents,
+ timeout,
+ &sigmask,
+ sizeof(sigmask));
+#if MSAN_ACTIVE
+ if (result > 0)
+ __msan_unpoison(events, sizeof(events[0]) * result);
+#endif
+ return result;
#else
return errno = ENOSYS, -1;
#endif
@@ -374,7 +393,13 @@ int uv__inotify_rm_watch(int fd, int32_t wd) {
int uv__pipe2(int pipefd[2], int flags) {
#if defined(__NR_pipe2)
- return syscall(__NR_pipe2, pipefd, flags);
+ int result;
+ result = syscall(__NR_pipe2, pipefd, flags);
+#if MSAN_ACTIVE
+ if (!result)
+ __msan_unpoison(pipefd, sizeof(int[2]));
+#endif
+ return result;
#else
return errno = ENOSYS, -1;
#endif
diff --git a/deps/uv/src/unix/process.c b/deps/uv/src/unix/process.c
index be283b480d6a23..380f3db1dce7f4 100644
--- a/deps/uv/src/unix/process.c
+++ b/deps/uv/src/unix/process.c
@@ -280,6 +280,21 @@ static void uv__process_child_init(const uv_process_options_t* options,
if (options->flags & UV_PROCESS_DETACHED)
setsid();
+ /* First duplicate low numbered fds, since it's not safe to duplicate them,
+ * they could get replaced. Example: swapping stdout and stderr; without
+ * this fd 2 (stderr) would be duplicated into fd 1, thus making both
+ * stdout and stderr go to the same fd, which was not the intention. */
+ for (fd = 0; fd < stdio_count; fd++) {
+ use_fd = pipes[fd][1];
+ if (use_fd < 0 || use_fd >= fd)
+ continue;
+ pipes[fd][1] = fcntl(use_fd, F_DUPFD, stdio_count);
+ if (pipes[fd][1] == -1) {
+ uv__write_int(error_fd, -errno);
+ _exit(127);
+ }
+ }
+
for (fd = 0; fd < stdio_count; fd++) {
close_fd = pipes[fd][0];
use_fd = pipes[fd][1];
@@ -304,7 +319,12 @@ static void uv__process_child_init(const uv_process_options_t* options,
if (fd == use_fd)
uv__cloexec(use_fd, 0);
else
- dup2(use_fd, fd);
+ fd = dup2(use_fd, fd);
+
+ if (fd == -1) {
+ uv__write_int(error_fd, -errno);
+ _exit(127);
+ }
if (fd <= 2)
uv__nonblock(fd, 0);
@@ -316,8 +336,8 @@ static void uv__process_child_init(const uv_process_options_t* options,
for (fd = 0; fd < stdio_count; fd++) {
use_fd = pipes[fd][1];
- if (use_fd >= 0 && fd != use_fd)
- close(use_fd);
+ if (use_fd >= stdio_count)
+ uv__close(use_fd);
}
if (options->cwd != NULL && chdir(options->cwd)) {
@@ -367,6 +387,7 @@ int uv_spawn(uv_loop_t* loop,
int err;
int exec_errorno;
int i;
+ int status;
assert(options->file != NULL);
assert(!(options->flags & ~(UV_PROCESS_DETACHED |
@@ -453,11 +474,17 @@ int uv_spawn(uv_loop_t* loop,
if (r == 0)
; /* okay, EOF */
- else if (r == sizeof(exec_errorno))
- ; /* okay, read errorno */
- else if (r == -1 && errno == EPIPE)
- ; /* okay, got EPIPE */
- else
+ else if (r == sizeof(exec_errorno)) {
+ do
+ err = waitpid(pid, &status, 0); /* okay, read errorno */
+ while (err == -1 && errno == EINTR);
+ assert(err == pid);
+ } else if (r == -1 && errno == EPIPE) {
+ do
+ err = waitpid(pid, &status, 0); /* okay, got EPIPE */
+ while (err == -1 && errno == EINTR);
+ assert(err == pid);
+ } else
abort();
uv__close(signal_pipe[0]);
diff --git a/deps/uv/src/unix/stream.c b/deps/uv/src/unix/stream.c
index 518a2fce0f2e46..48827b65d36dee 100644
--- a/deps/uv/src/unix/stream.c
+++ b/deps/uv/src/unix/stream.c
@@ -88,7 +88,12 @@ void uv__stream_init(uv_loop_t* loop,
stream->write_queue_size = 0;
if (loop->emfile_fd == -1) {
- err = uv__open_cloexec("/", O_RDONLY);
+ err = uv__open_cloexec("/dev/null", O_RDONLY);
+ if (err < 0)
+ /* In the rare case that "/dev/null" isn't mounted open "/"
+ * instead.
+ */
+ err = uv__open_cloexec("/", O_RDONLY);
if (err >= 0)
loop->emfile_fd = err;
}
@@ -301,7 +306,7 @@ int uv__stream_try_select(uv_stream_t* stream, int* fd) {
if (fds[1] > max_fd)
max_fd = fds[1];
- sread_sz = (max_fd + NBBY) / NBBY;
+ sread_sz = ROUND_UP(max_fd + 1, sizeof(uint32_t) * NBBY) / NBBY;
swrite_sz = sread_sz;
s = malloc(sizeof(*s) + sread_sz + swrite_sz);
diff --git a/deps/uv/src/unix/tty.c b/deps/uv/src/unix/tty.c
index b1782df95b2010..7783548a6e987d 100644
--- a/deps/uv/src/unix/tty.c
+++ b/deps/uv/src/unix/tty.c
@@ -35,10 +35,19 @@ static uv_spinlock_t termios_spinlock = UV_SPINLOCK_INITIALIZER;
int uv_tty_init(uv_loop_t* loop, uv_tty_t* tty, int fd, int readable) {
+ uv_handle_type type;
int flags;
int newfd;
int r;
+ /* File descriptors that refer to files cannot be monitored with epoll.
+ * That restriction also applies to character devices like /dev/random
+ * (but obviously not /dev/tty.)
+ */
+ type = uv_guess_handle(fd);
+ if (type == UV_FILE || type == UV_UNKNOWN_HANDLE)
+ return -EINVAL;
+
flags = 0;
newfd = -1;
@@ -54,7 +63,7 @@ int uv_tty_init(uv_loop_t* loop, uv_tty_t* tty, int fd, int readable) {
* different struct file, hence changing its properties doesn't affect
* other processes.
*/
- if (isatty(fd)) {
+ if (type == UV_TTY) {
r = uv__open_cloexec("/dev/tty", O_RDWR);
if (r < 0) {
@@ -237,8 +246,10 @@ uv_handle_type uv_guess_handle(uv_file file) {
* critical section when the signal was raised.
*/
int uv_tty_reset_mode(void) {
+ int saved_errno;
int err;
+ saved_errno = errno;
if (!uv_spinlock_trylock(&termios_spinlock))
return -EBUSY; /* In uv_tty_set_mode(). */
@@ -248,5 +259,7 @@ int uv_tty_reset_mode(void) {
err = -errno;
uv_spinlock_unlock(&termios_spinlock);
+ errno = saved_errno;
+
return err;
}
diff --git a/deps/uv/src/unix/udp.c b/deps/uv/src/unix/udp.c
index 941c0aec6e2f0b..22c2e1388e14ac 100644
--- a/deps/uv/src/unix/udp.c
+++ b/deps/uv/src/unix/udp.c
@@ -601,40 +601,47 @@ int uv_udp_set_membership(uv_udp_t* handle,
}
}
-
-static int uv__setsockopt_maybe_char(uv_udp_t* handle,
- int option4,
- int option6,
- int val) {
+static int uv__setsockopt(uv_udp_t* handle,
+ int option4,
+ int option6,
+ const void* val,
+ size_t size) {
int r;
-#if defined(__sun) || defined(_AIX)
- char arg = val;
-#else
- int arg = val;
-#endif
-
- if (val < 0 || val > 255)
- return -EINVAL;
if (handle->flags & UV_HANDLE_IPV6)
r = setsockopt(handle->io_watcher.fd,
IPPROTO_IPV6,
option6,
- &arg,
- sizeof(arg));
+ val,
+ size);
else
r = setsockopt(handle->io_watcher.fd,
IPPROTO_IP,
option4,
- &arg,
- sizeof(arg));
-
+ val,
+ size);
if (r)
return -errno;
return 0;
}
+static int uv__setsockopt_maybe_char(uv_udp_t* handle,
+ int option4,
+ int option6,
+ int val) {
+#if defined(__sun) || defined(_AIX)
+ char arg = val;
+#else
+ int arg = val;
+#endif
+
+ if (val < 0 || val > 255)
+ return -EINVAL;
+
+ return uv__setsockopt(handle, option4, option6, &arg, sizeof(arg));
+}
+
int uv_udp_set_broadcast(uv_udp_t* handle, int on) {
if (setsockopt(handle->io_watcher.fd,
@@ -653,6 +660,20 @@ int uv_udp_set_ttl(uv_udp_t* handle, int ttl) {
if (ttl < 1 || ttl > 255)
return -EINVAL;
+/*
+ * On Solaris and derivatives such as SmartOS, the length of socket options
+ * is sizeof(int) for IP_TTL and IPV6_UNICAST_HOPS,
+ * so hardcode the size of these options on this platform,
+ * and use the general uv__setsockopt_maybe_char call on other platforms.
+ */
+#if defined(__sun)
+ return uv__setsockopt(handle,
+ IP_TTL,
+ IPV6_UNICAST_HOPS,
+ &ttl,
+ sizeof(ttl));
+#endif /* defined(__sun) */
+
return uv__setsockopt_maybe_char(handle,
IP_TTL,
IPV6_UNICAST_HOPS,
@@ -661,6 +682,21 @@ int uv_udp_set_ttl(uv_udp_t* handle, int ttl) {
int uv_udp_set_multicast_ttl(uv_udp_t* handle, int ttl) {
+/*
+ * On Solaris and derivatives such as SmartOS, the length of socket options
+ * is sizeof(int) for IPV6_MULTICAST_HOPS and sizeof(char) for
+ * IP_MULTICAST_TTL, so hardcode the size of the option in the IPv6 case,
+ * and use the general uv__setsockopt_maybe_char call otherwise.
+ */
+#if defined(__sun)
+ if (handle->flags & UV_HANDLE_IPV6)
+ return uv__setsockopt(handle,
+ IP_MULTICAST_TTL,
+ IPV6_MULTICAST_HOPS,
+ &ttl,
+ sizeof(ttl));
+#endif /* defined(__sun) */
+
return uv__setsockopt_maybe_char(handle,
IP_MULTICAST_TTL,
IPV6_MULTICAST_HOPS,
@@ -669,6 +705,21 @@ int uv_udp_set_multicast_ttl(uv_udp_t* handle, int ttl) {
int uv_udp_set_multicast_loop(uv_udp_t* handle, int on) {
+/*
+ * On Solaris and derivatives such as SmartOS, the length of socket options
+ * is sizeof(int) for IPV6_MULTICAST_LOOP and sizeof(char) for
+ * IP_MULTICAST_LOOP, so hardcode the size of the option in the IPv6 case,
+ * and use the general uv__setsockopt_maybe_char call otherwise.
+ */
+#if defined(__sun)
+ if (handle->flags & UV_HANDLE_IPV6)
+ return uv__setsockopt(handle,
+ IP_MULTICAST_LOOP,
+ IPV6_MULTICAST_LOOP,
+ &on,
+ sizeof(on));
+#endif /* defined(__sun) */
+
return uv__setsockopt_maybe_char(handle,
IP_MULTICAST_LOOP,
IPV6_MULTICAST_LOOP,
diff --git a/deps/uv/src/uv-common.c b/deps/uv/src/uv-common.c
index 791c09b4e28e77..02341f8b95d9c0 100644
--- a/deps/uv/src/uv-common.c
+++ b/deps/uv/src/uv-common.c
@@ -379,15 +379,28 @@ int uv_fs_event_getpath(uv_fs_event_t* handle, char* buffer, size_t* size) {
return 0;
}
+/* The windows implementation does not have the same structure layout as
+ * the unix implementation (nbufs is not directly inside req but is
+ * contained in a nested union/struct) so this function locates it.
+*/
+static unsigned int* uv__get_nbufs(uv_fs_t* req) {
+#ifdef _WIN32
+ return &req->fs.info.nbufs;
+#else
+ return &req->nbufs;
+#endif
+}
void uv__fs_scandir_cleanup(uv_fs_t* req) {
uv__dirent_t** dents;
+ unsigned int* nbufs = uv__get_nbufs(req);
+
dents = req->ptr;
- if (req->nbufs > 0 && req->nbufs != (unsigned int) req->result)
- req->nbufs--;
- for (; req->nbufs < (unsigned int) req->result; req->nbufs++)
- free(dents[req->nbufs]);
+ if (*nbufs > 0 && *nbufs != (unsigned int) req->result)
+ (*nbufs)--;
+ for (; *nbufs < (unsigned int) req->result; (*nbufs)++)
+ free(dents[*nbufs]);
}
@@ -395,20 +408,22 @@ int uv_fs_scandir_next(uv_fs_t* req, uv_dirent_t* ent) {
uv__dirent_t** dents;
uv__dirent_t* dent;
+ unsigned int* nbufs = uv__get_nbufs(req);
+
dents = req->ptr;
/* Free previous entity */
- if (req->nbufs > 0)
- free(dents[req->nbufs - 1]);
+ if (*nbufs > 0)
+ free(dents[*nbufs - 1]);
/* End was already reached */
- if (req->nbufs == (unsigned int) req->result) {
+ if (*nbufs == (unsigned int) req->result) {
free(dents);
req->ptr = NULL;
return UV_EOF;
}
- dent = dents[req->nbufs++];
+ dent = dents[(*nbufs)++];
ent->name = dent->d_name;
#ifdef HAVE_DIRENT_TYPES
@@ -522,6 +537,7 @@ void uv_loop_delete(uv_loop_t* loop) {
default_loop = default_loop_ptr;
err = uv_loop_close(loop);
+ (void) err; /* Squelch compiler warnings. */
assert(err == 0);
if (loop != default_loop)
free(loop);
diff --git a/deps/uv/src/win/core.c b/deps/uv/src/win/core.c
index a101159438341f..115449224f9651 100644
--- a/deps/uv/src/win/core.c
+++ b/deps/uv/src/win/core.c
@@ -22,7 +22,6 @@
#include
#include
#include
-#include
#include
#include
#include
diff --git a/deps/uv/src/win/error.c b/deps/uv/src/win/error.c
index 5c5514736e8304..a265a272dced19 100644
--- a/deps/uv/src/win/error.c
+++ b/deps/uv/src/win/error.c
@@ -21,7 +21,6 @@
#include
#include
-#include
#include
#include
#include
diff --git a/deps/uv/src/win/fs-event.c b/deps/uv/src/win/fs-event.c
index 7ad99a88b1ebef..640651b6c9634b 100644
--- a/deps/uv/src/win/fs-event.c
+++ b/deps/uv/src/win/fs-event.c
@@ -20,7 +20,6 @@
*/
#include
-#include
#include
#include
#include
@@ -39,7 +38,8 @@ static void uv_fs_event_queue_readdirchanges(uv_loop_t* loop,
assert(handle->dir_handle != INVALID_HANDLE_VALUE);
assert(!handle->req_pending);
- memset(&(handle->req.overlapped), 0, sizeof(handle->req.overlapped));
+ memset(&(handle->req.u.io.overlapped), 0,
+ sizeof(handle->req.u.io.overlapped));
if (!ReadDirectoryChangesW(handle->dir_handle,
handle->buffer,
uv_directory_watcher_buffer_size,
@@ -53,7 +53,7 @@ static void uv_fs_event_queue_readdirchanges(uv_loop_t* loop,
FILE_NOTIFY_CHANGE_CREATION |
FILE_NOTIFY_CHANGE_SECURITY,
NULL,
- &handle->req.overlapped,
+ &handle->req.u.io.overlapped,
NULL)) {
/* Make this req pending reporting an error. */
SET_REQ_ERROR(&handle->req, GetLastError());
@@ -232,7 +232,8 @@ int uv_fs_event_start(uv_fs_event_t* handle,
uv_fatal_error(ERROR_OUTOFMEMORY, "malloc");
}
- memset(&(handle->req.overlapped), 0, sizeof(handle->req.overlapped));
+ memset(&(handle->req.u.io.overlapped), 0,
+ sizeof(handle->req.u.io.overlapped));
if (!ReadDirectoryChangesW(handle->dir_handle,
handle->buffer,
@@ -247,7 +248,7 @@ int uv_fs_event_start(uv_fs_event_t* handle,
FILE_NOTIFY_CHANGE_CREATION |
FILE_NOTIFY_CHANGE_SECURITY,
NULL,
- &handle->req.overlapped,
+ &handle->req.u.io.overlapped,
NULL)) {
last_error = GetLastError();
goto error;
@@ -349,7 +350,7 @@ void uv_process_fs_event_req(uv_loop_t* loop, uv_req_t* req,
file_info = (FILE_NOTIFY_INFORMATION*)(handle->buffer + offset);
if (REQ_SUCCESS(req)) {
- if (req->overlapped.InternalHigh > 0) {
+ if (req->u.io.overlapped.InternalHigh > 0) {
do {
file_info = (FILE_NOTIFY_INFORMATION*)((char*)file_info + offset);
assert(!filename);
diff --git a/deps/uv/src/win/fs.c b/deps/uv/src/win/fs.c
index 33bc9da304054f..af7ec74276f995 100644
--- a/deps/uv/src/win/fs.c
+++ b/deps/uv/src/win/fs.c
@@ -21,7 +21,6 @@
#include
#include
-#include
#include
#include
#include
@@ -161,8 +160,8 @@ INLINE static int fs__capture_path(uv_loop_t* loop, uv_fs_t* req,
if (buf_sz == 0) {
- req->pathw = NULL;
- req->new_pathw = NULL;
+ req->file.pathw = NULL;
+ req->fs.info.new_pathw = NULL;
req->path = NULL;
return 0;
}
@@ -182,10 +181,10 @@ INLINE static int fs__capture_path(uv_loop_t* loop, uv_fs_t* req,
(WCHAR*) pos,
pathw_len);
assert(r == (DWORD) pathw_len);
- req->pathw = (WCHAR*) pos;
+ req->file.pathw = (WCHAR*) pos;
pos += r * sizeof(WCHAR);
} else {
- req->pathw = NULL;
+ req->file.pathw = NULL;
}
if (new_path != NULL) {
@@ -196,10 +195,10 @@ INLINE static int fs__capture_path(uv_loop_t* loop, uv_fs_t* req,
(WCHAR*) pos,
new_pathw_len);
assert(r == (DWORD) new_pathw_len);
- req->new_pathw = (WCHAR*) pos;
+ req->fs.info.new_pathw = (WCHAR*) pos;
pos += r * sizeof(WCHAR);
} else {
- req->new_pathw = NULL;
+ req->fs.info.new_pathw = NULL;
}
if (!copy_path) {
@@ -388,7 +387,7 @@ void fs__open(uv_fs_t* req) {
DWORD attributes = 0;
HANDLE file;
int fd, current_umask;
- int flags = req->file_flags;
+ int flags = req->fs.info.file_flags;
/* Obtain the active umask. umask() never fails and returns the previous */
/* umask. */
@@ -450,7 +449,7 @@ void fs__open(uv_fs_t* req) {
attributes |= FILE_ATTRIBUTE_NORMAL;
if (flags & _O_CREAT) {
- if (!((req->mode & ~current_umask) & _S_IWRITE)) {
+ if (!((req->fs.info.mode & ~current_umask) & _S_IWRITE)) {
attributes |= FILE_ATTRIBUTE_READONLY;
}
}
@@ -480,7 +479,7 @@ void fs__open(uv_fs_t* req) {
/* Setting this flag makes it possible to open a directory. */
attributes |= FILE_FLAG_BACKUP_SEMANTICS;
- file = CreateFileW(req->pathw,
+ file = CreateFileW(req->file.pathw,
access,
share,
NULL,
@@ -512,6 +511,7 @@ void fs__open(uv_fs_t* req) {
SET_REQ_WIN32_ERROR(req, GetLastError());
else
SET_REQ_WIN32_ERROR(req, UV_UNKNOWN);
+ CloseHandle(file);
return;
}
@@ -523,7 +523,7 @@ void fs__open(uv_fs_t* req) {
}
void fs__close(uv_fs_t* req) {
- int fd = req->fd;
+ int fd = req->file.fd;
int result;
VERIFY_FD(fd, req);
@@ -534,8 +534,8 @@ void fs__close(uv_fs_t* req) {
void fs__read(uv_fs_t* req) {
- int fd = req->fd;
- int64_t offset = req->offset;
+ int fd = req->file.fd;
+ int64_t offset = req->fs.info.offset;
HANDLE handle;
OVERLAPPED overlapped, *overlapped_ptr;
LARGE_INTEGER offset_;
@@ -572,13 +572,13 @@ void fs__read(uv_fs_t* req) {
}
result = ReadFile(handle,
- req->bufs[index].base,
- req->bufs[index].len,
+ req->fs.info.bufs[index].base,
+ req->fs.info.bufs[index].len,
&incremental_bytes,
overlapped_ptr);
bytes += incremental_bytes;
++index;
- } while (result && index < req->nbufs);
+ } while (result && index < req->fs.info.nbufs);
if (result || bytes > 0) {
SET_REQ_RESULT(req, bytes);
@@ -594,8 +594,8 @@ void fs__read(uv_fs_t* req) {
void fs__write(uv_fs_t* req) {
- int fd = req->fd;
- int64_t offset = req->offset;
+ int fd = req->file.fd;
+ int64_t offset = req->fs.info.offset;
HANDLE handle;
OVERLAPPED overlapped, *overlapped_ptr;
LARGE_INTEGER offset_;
@@ -630,13 +630,13 @@ void fs__write(uv_fs_t* req) {
}
result = WriteFile(handle,
- req->bufs[index].base,
- req->bufs[index].len,
+ req->fs.info.bufs[index].base,
+ req->fs.info.bufs[index].len,
&incremental_bytes,
overlapped_ptr);
bytes += incremental_bytes;
++index;
- } while (result && index < req->nbufs);
+ } while (result && index < req->fs.info.nbufs);
if (result || bytes > 0) {
SET_REQ_RESULT(req, bytes);
@@ -647,13 +647,13 @@ void fs__write(uv_fs_t* req) {
void fs__rmdir(uv_fs_t* req) {
- int result = _wrmdir(req->pathw);
+ int result = _wrmdir(req->file.pathw);
SET_REQ_RESULT(req, result);
}
void fs__unlink(uv_fs_t* req) {
- const WCHAR* pathw = req->pathw;
+ const WCHAR* pathw = req->file.pathw;
HANDLE handle;
BY_HANDLE_FILE_INFORMATION info;
FILE_DISPOSITION_INFORMATION disposition;
@@ -661,7 +661,7 @@ void fs__unlink(uv_fs_t* req) {
NTSTATUS status;
handle = CreateFileW(pathw,
- FILE_READ_ATTRIBUTES | DELETE,
+ FILE_READ_ATTRIBUTES | FILE_WRITE_ATTRIBUTES | DELETE,
FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
NULL,
OPEN_EXISTING,
@@ -703,6 +703,24 @@ void fs__unlink(uv_fs_t* req) {
}
}
+ if (info.dwFileAttributes & FILE_ATTRIBUTE_READONLY) {
+ /* Remove read-only attribute */
+ FILE_BASIC_INFORMATION basic = { 0 };
+
+ basic.FileAttributes = info.dwFileAttributes & ~(FILE_ATTRIBUTE_READONLY);
+
+ status = pNtSetInformationFile(handle,
+ &iosb,
+ &basic,
+ sizeof basic,
+ FileBasicInformation);
+ if (!NT_SUCCESS(status)) {
+ SET_REQ_WIN32_ERROR(req, pRtlNtStatusToDosError(status));
+ CloseHandle(handle);
+ return;
+ }
+ }
+
/* Try to set the delete flag. */
disposition.DeleteFile = TRUE;
status = pNtSetInformationFile(handle,
@@ -722,7 +740,7 @@ void fs__unlink(uv_fs_t* req) {
void fs__mkdir(uv_fs_t* req) {
/* TODO: use req->mode. */
- int result = _wmkdir(req->pathw);
+ int result = _wmkdir(req->file.pathw);
SET_REQ_RESULT(req, result);
}
@@ -740,8 +758,8 @@ void fs__mkdtemp(uv_fs_t* req) {
uint64_t v;
BOOL released;
- len = wcslen(req->pathw);
- ep = req->pathw + len;
+ len = wcslen(req->file.pathw);
+ ep = req->file.pathw + len;
if (len < num_x || wcsncmp(ep - num_x, L"XXXXXX", num_x)) {
SET_REQ_UV_ERROR(req, UV_EINVAL, ERROR_INVALID_PARAMETER);
return;
@@ -766,7 +784,7 @@ void fs__mkdtemp(uv_fs_t* req) {
v /= num_chars;
}
- if (_wmkdir(req->pathw) == 0) {
+ if (_wmkdir(req->file.pathw) == 0) {
len = strlen(req->path);
wcstombs((char*) req->path + len - num_x, ep - num_x, num_x);
SET_REQ_RESULT(req, 0);
@@ -810,7 +828,7 @@ void fs__scandir(uv_fs_t* req) {
/* Open the directory. */
dir_handle =
- CreateFileW(req->pathw,
+ CreateFileW(req->file.pathw,
FILE_LIST_DIRECTORY | SYNCHRONIZE,
FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
NULL,
@@ -956,7 +974,7 @@ void fs__scandir(uv_fs_t* req) {
SET_REQ_RESULT(req, dirents_used);
/* `nbufs` will be used as index by uv_fs_scandir_next. */
- req->nbufs = 0;
+ req->fs.info.nbufs = 0;
return;
@@ -1125,7 +1143,7 @@ INLINE static void fs__stat_impl(uv_fs_t* req, int do_lstat) {
flags |= FILE_FLAG_OPEN_REPARSE_POINT;
}
- handle = CreateFileW(req->pathw,
+ handle = CreateFileW(req->file.pathw,
FILE_READ_ATTRIBUTES,
FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
NULL,
@@ -1159,19 +1177,19 @@ INLINE static void fs__stat_impl(uv_fs_t* req, int do_lstat) {
static void fs__stat(uv_fs_t* req) {
- fs__stat_prepare_path(req->pathw);
+ fs__stat_prepare_path(req->file.pathw);
fs__stat_impl(req, 0);
}
static void fs__lstat(uv_fs_t* req) {
- fs__stat_prepare_path(req->pathw);
+ fs__stat_prepare_path(req->file.pathw);
fs__stat_impl(req, 1);
}
static void fs__fstat(uv_fs_t* req) {
- int fd = req->fd;
+ int fd = req->file.fd;
HANDLE handle;
VERIFY_FD(fd, req);
@@ -1194,7 +1212,7 @@ static void fs__fstat(uv_fs_t* req) {
static void fs__rename(uv_fs_t* req) {
- if (!MoveFileExW(req->pathw, req->new_pathw, MOVEFILE_REPLACE_EXISTING)) {
+ if (!MoveFileExW(req->file.pathw, req->fs.info.new_pathw, MOVEFILE_REPLACE_EXISTING)) {
SET_REQ_WIN32_ERROR(req, GetLastError());
return;
}
@@ -1204,7 +1222,7 @@ static void fs__rename(uv_fs_t* req) {
INLINE static void fs__sync_impl(uv_fs_t* req) {
- int fd = req->fd;
+ int fd = req->file.fd;
int result;
VERIFY_FD(fd, req);
@@ -1229,7 +1247,7 @@ static void fs__fdatasync(uv_fs_t* req) {
static void fs__ftruncate(uv_fs_t* req) {
- int fd = req->fd;
+ int fd = req->file.fd;
HANDLE handle;
NTSTATUS status;
IO_STATUS_BLOCK io_status;
@@ -1239,7 +1257,7 @@ static void fs__ftruncate(uv_fs_t* req) {
handle = uv__get_osfhandle(fd);
- eof_info.EndOfFile.QuadPart = req->offset;
+ eof_info.EndOfFile.QuadPart = req->fs.info.offset;
status = pNtSetInformationFile(handle,
&io_status,
@@ -1256,9 +1274,9 @@ static void fs__ftruncate(uv_fs_t* req) {
static void fs__sendfile(uv_fs_t* req) {
- int fd_in = req->fd, fd_out = req->fd_out;
- size_t length = req->bufsml[0].len;
- int64_t offset = req->offset;
+ int fd_in = req->file.fd, fd_out = req->fs.info.fd_out;
+ size_t length = req->fs.info.bufsml[0].len;
+ int64_t offset = req->fs.info.offset;
const size_t max_buf_size = 65536;
size_t buf_size = length < max_buf_size ? length : max_buf_size;
int n, result = 0;
@@ -1303,32 +1321,39 @@ static void fs__sendfile(uv_fs_t* req) {
static void fs__access(uv_fs_t* req) {
- DWORD attr = GetFileAttributesW(req->pathw);
+ DWORD attr = GetFileAttributesW(req->file.pathw);
if (attr == INVALID_FILE_ATTRIBUTES) {
SET_REQ_WIN32_ERROR(req, GetLastError());
return;
}
- if ((req->flags & W_OK) &&
- ((attr & FILE_ATTRIBUTE_READONLY) ||
- (attr & FILE_ATTRIBUTE_DIRECTORY))) {
+ /*
+ * Access is possible if
+ * - write access wasn't requested,
+ * - or the file isn't read-only,
+ * - or it's a directory.
+ * (Directories cannot be read-only on Windows.)
+ */
+ if (!(req->flags & W_OK) ||
+ !(attr & FILE_ATTRIBUTE_READONLY) ||
+ (attr & FILE_ATTRIBUTE_DIRECTORY)) {
+ SET_REQ_RESULT(req, 0);
+ } else {
SET_REQ_WIN32_ERROR(req, UV_EPERM);
- return;
}
- SET_REQ_RESULT(req, 0);
}
static void fs__chmod(uv_fs_t* req) {
- int result = _wchmod(req->pathw, req->mode);
+ int result = _wchmod(req->file.pathw, req->fs.info.mode);
SET_REQ_RESULT(req, result);
}
static void fs__fchmod(uv_fs_t* req) {
- int fd = req->fd;
+ int fd = req->file.fd;
HANDLE handle;
NTSTATUS nt_status;
IO_STATUS_BLOCK io_status;
@@ -1349,7 +1374,7 @@ static void fs__fchmod(uv_fs_t* req) {
return;
}
- if (req->mode & _S_IWRITE) {
+ if (req->fs.info.mode & _S_IWRITE) {
file_info.FileAttributes &= ~FILE_ATTRIBUTE_READONLY;
} else {
file_info.FileAttributes |= FILE_ATTRIBUTE_READONLY;
@@ -1387,7 +1412,7 @@ INLINE static int fs__utime_handle(HANDLE handle, double atime, double mtime) {
static void fs__utime(uv_fs_t* req) {
HANDLE handle;
- handle = CreateFileW(req->pathw,
+ handle = CreateFileW(req->file.pathw,
FILE_WRITE_ATTRIBUTES,
FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
NULL,
@@ -1400,7 +1425,7 @@ static void fs__utime(uv_fs_t* req) {
return;
}
- if (fs__utime_handle(handle, req->atime, req->mtime) != 0) {
+ if (fs__utime_handle(handle, req->fs.time.atime, req->fs.time.mtime) != 0) {
SET_REQ_WIN32_ERROR(req, GetLastError());
CloseHandle(handle);
return;
@@ -1413,7 +1438,7 @@ static void fs__utime(uv_fs_t* req) {
static void fs__futime(uv_fs_t* req) {
- int fd = req->fd;
+ int fd = req->file.fd;
HANDLE handle;
VERIFY_FD(fd, req);
@@ -1424,7 +1449,7 @@ static void fs__futime(uv_fs_t* req) {
return;
}
- if (fs__utime_handle(handle, req->atime, req->mtime) != 0) {
+ if (fs__utime_handle(handle, req->fs.time.atime, req->fs.time.mtime) != 0) {
SET_REQ_WIN32_ERROR(req, GetLastError());
return;
}
@@ -1434,7 +1459,7 @@ static void fs__futime(uv_fs_t* req) {
static void fs__link(uv_fs_t* req) {
- DWORD r = CreateHardLinkW(req->new_pathw, req->pathw, NULL);
+ DWORD r = CreateHardLinkW(req->fs.info.new_pathw, req->file.pathw, NULL);
if (r == 0) {
SET_REQ_WIN32_ERROR(req, GetLastError());
} else {
@@ -1614,9 +1639,9 @@ static void fs__create_junction(uv_fs_t* req, const WCHAR* path,
static void fs__symlink(uv_fs_t* req) {
- WCHAR* pathw = req->pathw;
- WCHAR* new_pathw = req->new_pathw;
- int flags = req->file_flags;
+ WCHAR* pathw = req->file.pathw;
+ WCHAR* new_pathw = req->fs.info.new_pathw;
+ int flags = req->fs.info.file_flags;
int result;
@@ -1640,7 +1665,7 @@ static void fs__symlink(uv_fs_t* req) {
static void fs__readlink(uv_fs_t* req) {
HANDLE handle;
- handle = CreateFileW(req->pathw,
+ handle = CreateFileW(req->file.pathw,
0,
0,
NULL,
@@ -1739,14 +1764,14 @@ void uv_fs_req_cleanup(uv_fs_t* req) {
return;
if (req->flags & UV_FS_FREE_PATHS)
- free(req->pathw);
+ free(req->file.pathw);
if (req->flags & UV_FS_FREE_PTR)
free(req->ptr);
req->path = NULL;
- req->pathw = NULL;
- req->new_pathw = NULL;
+ req->file.pathw = NULL;
+ req->fs.info.new_pathw = NULL;
req->ptr = NULL;
req->flags |= UV_FS_CLEANEDUP;
@@ -1764,8 +1789,8 @@ int uv_fs_open(uv_loop_t* loop, uv_fs_t* req, const char* path, int flags,
return uv_translate_sys_error(err);
}
- req->file_flags = flags;
- req->mode = mode;
+ req->fs.info.file_flags = flags;
+ req->fs.info.mode = mode;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -1779,7 +1804,7 @@ int uv_fs_open(uv_loop_t* loop, uv_fs_t* req, const char* path, int flags,
int uv_fs_close(uv_loop_t* loop, uv_fs_t* req, uv_file fd, uv_fs_cb cb) {
uv_fs_req_init(loop, req, UV_FS_CLOSE, cb);
- req->fd = fd;
+ req->file.fd = fd;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -1800,19 +1825,19 @@ int uv_fs_read(uv_loop_t* loop,
uv_fs_cb cb) {
uv_fs_req_init(loop, req, UV_FS_READ, cb);
- req->fd = fd;
+ req->file.fd = fd;
- req->nbufs = nbufs;
- req->bufs = req->bufsml;
- if (nbufs > ARRAY_SIZE(req->bufsml))
- req->bufs = malloc(nbufs * sizeof(*bufs));
+ req->fs.info.nbufs = nbufs;
+ req->fs.info.bufs = req->fs.info.bufsml;
+ if (nbufs > ARRAY_SIZE(req->fs.info.bufsml))
+ req->fs.info.bufs = malloc(nbufs * sizeof(*bufs));
- if (req->bufs == NULL)
+ if (req->fs.info.bufs == NULL)
return UV_ENOMEM;
- memcpy(req->bufs, bufs, nbufs * sizeof(*bufs));
+ memcpy(req->fs.info.bufs, bufs, nbufs * sizeof(*bufs));
- req->offset = offset;
+ req->fs.info.offset = offset;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -1833,19 +1858,19 @@ int uv_fs_write(uv_loop_t* loop,
uv_fs_cb cb) {
uv_fs_req_init(loop, req, UV_FS_WRITE, cb);
- req->fd = fd;
+ req->file.fd = fd;
- req->nbufs = nbufs;
- req->bufs = req->bufsml;
- if (nbufs > ARRAY_SIZE(req->bufsml))
- req->bufs = malloc(nbufs * sizeof(*bufs));
+ req->fs.info.nbufs = nbufs;
+ req->fs.info.bufs = req->fs.info.bufsml;
+ if (nbufs > ARRAY_SIZE(req->fs.info.bufsml))
+ req->fs.info.bufs = malloc(nbufs * sizeof(*bufs));
- if (req->bufs == NULL)
+ if (req->fs.info.bufs == NULL)
return UV_ENOMEM;
- memcpy(req->bufs, bufs, nbufs * sizeof(*bufs));
+ memcpy(req->fs.info.bufs, bufs, nbufs * sizeof(*bufs));
- req->offset = offset;
+ req->fs.info.offset = offset;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -1889,7 +1914,7 @@ int uv_fs_mkdir(uv_loop_t* loop, uv_fs_t* req, const char* path, int mode,
return uv_translate_sys_error(err);
}
- req->mode = mode;
+ req->fs.info.mode = mode;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -1952,7 +1977,7 @@ int uv_fs_scandir(uv_loop_t* loop, uv_fs_t* req, const char* path, int flags,
return uv_translate_sys_error(err);
}
- req->file_flags = flags;
+ req->fs.info.file_flags = flags;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -1996,7 +2021,7 @@ int uv_fs_symlink(uv_loop_t* loop, uv_fs_t* req, const char* path,
return uv_translate_sys_error(err);
}
- req->file_flags = flags;
+ req->fs.info.file_flags = flags;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -2106,7 +2131,7 @@ int uv_fs_lstat(uv_loop_t* loop, uv_fs_t* req, const char* path, uv_fs_cb cb) {
int uv_fs_fstat(uv_loop_t* loop, uv_fs_t* req, uv_file fd, uv_fs_cb cb) {
uv_fs_req_init(loop, req, UV_FS_FSTAT, cb);
- req->fd = fd;
+ req->file.fd = fd;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -2141,7 +2166,7 @@ int uv_fs_rename(uv_loop_t* loop, uv_fs_t* req, const char* path,
int uv_fs_fsync(uv_loop_t* loop, uv_fs_t* req, uv_file fd, uv_fs_cb cb) {
uv_fs_req_init(loop, req, UV_FS_FSYNC, cb);
- req->fd = fd;
+ req->file.fd = fd;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -2155,7 +2180,7 @@ int uv_fs_fsync(uv_loop_t* loop, uv_fs_t* req, uv_file fd, uv_fs_cb cb) {
int uv_fs_fdatasync(uv_loop_t* loop, uv_fs_t* req, uv_file fd, uv_fs_cb cb) {
uv_fs_req_init(loop, req, UV_FS_FDATASYNC, cb);
- req->fd = fd;
+ req->file.fd = fd;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -2171,8 +2196,8 @@ int uv_fs_ftruncate(uv_loop_t* loop, uv_fs_t* req, uv_file fd,
int64_t offset, uv_fs_cb cb) {
uv_fs_req_init(loop, req, UV_FS_FTRUNCATE, cb);
- req->fd = fd;
- req->offset = offset;
+ req->file.fd = fd;
+ req->fs.info.offset = offset;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -2189,10 +2214,10 @@ int uv_fs_sendfile(uv_loop_t* loop, uv_fs_t* req, uv_file fd_out,
uv_file fd_in, int64_t in_offset, size_t length, uv_fs_cb cb) {
uv_fs_req_init(loop, req, UV_FS_SENDFILE, cb);
- req->fd = fd_in;
- req->fd_out = fd_out;
- req->offset = in_offset;
- req->bufsml[0].len = length;
+ req->file.fd = fd_in;
+ req->fs.info.fd_out = fd_out;
+ req->fs.info.offset = in_offset;
+ req->fs.info.bufsml[0].len = length;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -2240,7 +2265,7 @@ int uv_fs_chmod(uv_loop_t* loop, uv_fs_t* req, const char* path, int mode,
return uv_translate_sys_error(err);
}
- req->mode = mode;
+ req->fs.info.mode = mode;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -2256,8 +2281,8 @@ int uv_fs_fchmod(uv_loop_t* loop, uv_fs_t* req, uv_file fd, int mode,
uv_fs_cb cb) {
uv_fs_req_init(loop, req, UV_FS_FCHMOD, cb);
- req->fd = fd;
- req->mode = mode;
+ req->file.fd = fd;
+ req->fs.info.mode = mode;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -2280,8 +2305,8 @@ int uv_fs_utime(uv_loop_t* loop, uv_fs_t* req, const char* path, double atime,
return uv_translate_sys_error(err);
}
- req->atime = atime;
- req->mtime = mtime;
+ req->fs.time.atime = atime;
+ req->fs.time.mtime = mtime;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
@@ -2297,9 +2322,9 @@ int uv_fs_futime(uv_loop_t* loop, uv_fs_t* req, uv_file fd, double atime,
double mtime, uv_fs_cb cb) {
uv_fs_req_init(loop, req, UV_FS_FUTIME, cb);
- req->fd = fd;
- req->atime = atime;
- req->mtime = mtime;
+ req->file.fd = fd;
+ req->fs.time.atime = atime;
+ req->fs.time.mtime = mtime;
if (cb) {
QUEUE_FS_TP_JOB(loop, req);
diff --git a/deps/uv/src/win/getaddrinfo.c b/deps/uv/src/win/getaddrinfo.c
index f103f5fbd3418a..f3802cd5829208 100644
--- a/deps/uv/src/win/getaddrinfo.c
+++ b/deps/uv/src/win/getaddrinfo.c
@@ -20,7 +20,6 @@
*/
#include <assert.h>
-#include <malloc.h>
#include "uv.h"
#include "internal.h"
diff --git a/deps/uv/src/win/getnameinfo.c b/deps/uv/src/win/getnameinfo.c
index b1d045c79bd9db..66b64b883248e3 100644
--- a/deps/uv/src/win/getnameinfo.c
+++ b/deps/uv/src/win/getnameinfo.c
@@ -20,7 +20,6 @@
*/
#include <assert.h>
-#include <malloc.h>
#include <stdio.h>
#include "uv.h"
diff --git a/deps/uv/src/win/pipe.c b/deps/uv/src/win/pipe.c
index 57fab065aa346b..5a0e5420847b12 100644
--- a/deps/uv/src/win/pipe.c
+++ b/deps/uv/src/win/pipe.c
@@ -95,15 +95,15 @@ int uv_pipe_init(uv_loop_t* loop, uv_pipe_t* handle, int ipc) {
handle->reqs_pending = 0;
handle->handle = INVALID_HANDLE_VALUE;
handle->name = NULL;
- handle->ipc_pid = 0;
- handle->remaining_ipc_rawdata_bytes = 0;
- QUEUE_INIT(&handle->pending_ipc_info.queue);
- handle->pending_ipc_info.queue_len = 0;
+ handle->pipe.conn.ipc_pid = 0;
+ handle->pipe.conn.remaining_ipc_rawdata_bytes = 0;
+ QUEUE_INIT(&handle->pipe.conn.pending_ipc_info.queue);
+ handle->pipe.conn.pending_ipc_info.queue_len = 0;
handle->ipc = ipc;
- handle->non_overlapped_writes_tail = NULL;
- handle->readfile_thread = NULL;
+ handle->pipe.conn.non_overlapped_writes_tail = NULL;
+ handle->pipe.conn.readfile_thread = NULL;
- uv_req_init(loop, (uv_req_t*) &handle->ipc_header_write_req);
+ uv_req_init(loop, (uv_req_t*) &handle->pipe.conn.ipc_header_write_req);
return 0;
}
@@ -112,11 +112,11 @@ int uv_pipe_init(uv_loop_t* loop, uv_pipe_t* handle, int ipc) {
static void uv_pipe_connection_init(uv_pipe_t* handle) {
uv_connection_init((uv_stream_t*) handle);
handle->read_req.data = handle;
- handle->eof_timer = NULL;
+ handle->pipe.conn.eof_timer = NULL;
assert(!(handle->flags & UV_HANDLE_PIPESERVER));
if (pCancelSynchronousIo &&
handle->flags & UV_HANDLE_NON_OVERLAPPED_PIPE) {
- uv_mutex_init(&handle->readfile_mutex);
+ uv_mutex_init(&handle->pipe.conn.readfile_mutex);
handle->flags |= UV_HANDLE_PIPE_READ_CANCELABLE;
}
}
@@ -330,16 +330,16 @@ void uv_pipe_endgame(uv_loop_t* loop, uv_pipe_t* handle) {
if (handle->flags & UV_HANDLE_PIPE_READ_CANCELABLE) {
handle->flags &= ~UV_HANDLE_PIPE_READ_CANCELABLE;
- uv_mutex_destroy(&handle->readfile_mutex);
+ uv_mutex_destroy(&handle->pipe.conn.readfile_mutex);
}
if ((handle->flags & UV_HANDLE_CONNECTION) &&
- handle->shutdown_req != NULL &&
- handle->write_reqs_pending == 0) {
- req = handle->shutdown_req;
+ handle->stream.conn.shutdown_req != NULL &&
+ handle->stream.conn.write_reqs_pending == 0) {
+ req = handle->stream.conn.shutdown_req;
/* Clear the shutdown_req field so we don't go here again. */
- handle->shutdown_req = NULL;
+ handle->stream.conn.shutdown_req = NULL;
if (handle->flags & UV__HANDLE_CLOSING) {
UNREGISTER_HANDLE_REQ(loop, handle, req);
@@ -408,11 +408,11 @@ void uv_pipe_endgame(uv_loop_t* loop, uv_pipe_t* handle) {
if (handle->flags & UV_HANDLE_CONNECTION) {
/* Free pending sockets */
- while (!QUEUE_EMPTY(&handle->pending_ipc_info.queue)) {
+ while (!QUEUE_EMPTY(&handle->pipe.conn.pending_ipc_info.queue)) {
QUEUE* q;
SOCKET socket;
- q = QUEUE_HEAD(&handle->pending_ipc_info.queue);
+ q = QUEUE_HEAD(&handle->pipe.conn.pending_ipc_info.queue);
QUEUE_REMOVE(q);
item = QUEUE_DATA(q, uv__ipc_queue_item_t, member);
@@ -428,7 +428,7 @@ void uv_pipe_endgame(uv_loop_t* loop, uv_pipe_t* handle) {
if (socket != INVALID_SOCKET)
closesocket(socket);
}
- handle->pending_ipc_info.queue_len = 0;
+ handle->pipe.conn.pending_ipc_info.queue_len = 0;
if (handle->flags & UV_HANDLE_EMULATE_IOCP) {
if (handle->read_req.wait_handle != INVALID_HANDLE_VALUE) {
@@ -443,9 +443,9 @@ void uv_pipe_endgame(uv_loop_t* loop, uv_pipe_t* handle) {
}
if (handle->flags & UV_HANDLE_PIPESERVER) {
- assert(handle->accept_reqs);
- free(handle->accept_reqs);
- handle->accept_reqs = NULL;
+ assert(handle->pipe.serv.accept_reqs);
+ free(handle->pipe.serv.accept_reqs);
+ handle->pipe.serv.accept_reqs = NULL;
}
uv__handle_close(handle);
@@ -454,7 +454,7 @@ void uv_pipe_endgame(uv_loop_t* loop, uv_pipe_t* handle) {
void uv_pipe_pending_instances(uv_pipe_t* handle, int count) {
- handle->pending_instances = count;
+ handle->pipe.serv.pending_instances = count;
handle->flags |= UV_HANDLE_PIPESERVER;
}
@@ -474,17 +474,17 @@ int uv_pipe_bind(uv_pipe_t* handle, const char* name) {
}
if (!(handle->flags & UV_HANDLE_PIPESERVER)) {
- handle->pending_instances = default_pending_pipe_instances;
+ handle->pipe.serv.pending_instances = default_pending_pipe_instances;
}
- handle->accept_reqs = (uv_pipe_accept_t*)
- malloc(sizeof(uv_pipe_accept_t) * handle->pending_instances);
- if (!handle->accept_reqs) {
+ handle->pipe.serv.accept_reqs = (uv_pipe_accept_t*)
+ malloc(sizeof(uv_pipe_accept_t) * handle->pipe.serv.pending_instances);
+ if (!handle->pipe.serv.accept_reqs) {
uv_fatal_error(ERROR_OUTOFMEMORY, "malloc");
}
- for (i = 0; i < handle->pending_instances; i++) {
- req = &handle->accept_reqs[i];
+ for (i = 0; i < handle->pipe.serv.pending_instances; i++) {
+ req = &handle->pipe.serv.accept_reqs[i];
uv_req_init(loop, (uv_req_t*) req);
req->type = UV_ACCEPT;
req->data = handle;
@@ -508,13 +508,13 @@ int uv_pipe_bind(uv_pipe_t* handle, const char* name) {
* Attempt to create the first pipe with FILE_FLAG_FIRST_PIPE_INSTANCE.
* If this fails then there's already a pipe server for the given pipe name.
*/
- handle->accept_reqs[0].pipeHandle = CreateNamedPipeW(handle->name,
+ handle->pipe.serv.accept_reqs[0].pipeHandle = CreateNamedPipeW(handle->name,
PIPE_ACCESS_DUPLEX | FILE_FLAG_OVERLAPPED |
FILE_FLAG_FIRST_PIPE_INSTANCE,
PIPE_TYPE_BYTE | PIPE_READMODE_BYTE | PIPE_WAIT,
PIPE_UNLIMITED_INSTANCES, 65536, 65536, 0, NULL);
- if (handle->accept_reqs[0].pipeHandle == INVALID_HANDLE_VALUE) {
+ if (handle->pipe.serv.accept_reqs[0].pipeHandle == INVALID_HANDLE_VALUE) {
err = GetLastError();
if (err == ERROR_ACCESS_DENIED) {
err = WSAEADDRINUSE; /* Translates to UV_EADDRINUSE. */
@@ -524,12 +524,15 @@ int uv_pipe_bind(uv_pipe_t* handle, const char* name) {
goto error;
}
- if (uv_set_pipe_handle(loop, handle, handle->accept_reqs[0].pipeHandle, 0)) {
+ if (uv_set_pipe_handle(loop,
+ handle,
+ handle->pipe.serv.accept_reqs[0].pipeHandle,
+ 0)) {
err = GetLastError();
goto error;
}
- handle->pending_accepts = NULL;
+ handle->pipe.serv.pending_accepts = NULL;
handle->flags |= UV_HANDLE_PIPESERVER;
handle->flags |= UV_HANDLE_BOUND;
@@ -541,9 +544,9 @@ int uv_pipe_bind(uv_pipe_t* handle, const char* name) {
handle->name = NULL;
}
- if (handle->accept_reqs[0].pipeHandle != INVALID_HANDLE_VALUE) {
- CloseHandle(handle->accept_reqs[0].pipeHandle);
- handle->accept_reqs[0].pipeHandle = INVALID_HANDLE_VALUE;
+ if (handle->pipe.serv.accept_reqs[0].pipeHandle != INVALID_HANDLE_VALUE) {
+ CloseHandle(handle->pipe.serv.accept_reqs[0].pipeHandle);
+ handle->pipe.serv.accept_reqs[0].pipeHandle = INVALID_HANDLE_VALUE;
}
return uv_translate_sys_error(err);
@@ -677,15 +680,15 @@ void uv__pipe_pause_read(uv_pipe_t* handle) {
any access to a NamedPipe to deadlock if
any process has called ReadFile */
HANDLE h;
- uv_mutex_lock(&handle->readfile_mutex);
- h = handle->readfile_thread;
+ uv_mutex_lock(&handle->pipe.conn.readfile_mutex);
+ h = handle->pipe.conn.readfile_thread;
while (h) {
/* spinlock: we expect this to finish quickly,
or we are probably about to deadlock anyways
(in the kernel), so it doesn't matter */
pCancelSynchronousIo(h);
SwitchToThread(); /* yield thread control briefly */
- h = handle->readfile_thread;
+ h = handle->pipe.conn.readfile_thread;
}
}
}
@@ -693,7 +696,7 @@ void uv__pipe_pause_read(uv_pipe_t* handle) {
void uv__pipe_unpause_read(uv_pipe_t* handle) {
if (handle->flags & UV_HANDLE_PIPE_READ_CANCELABLE) {
- uv_mutex_unlock(&handle->readfile_mutex);
+ uv_mutex_unlock(&handle->pipe.conn.readfile_mutex);
}
}
@@ -719,11 +722,11 @@ void uv_pipe_cleanup(uv_loop_t* loop, uv_pipe_t* handle) {
}
if (handle->flags & UV_HANDLE_PIPESERVER) {
- for (i = 0; i < handle->pending_instances; i++) {
- pipeHandle = handle->accept_reqs[i].pipeHandle;
+ for (i = 0; i < handle->pipe.serv.pending_instances; i++) {
+ pipeHandle = handle->pipe.serv.accept_reqs[i].pipeHandle;
if (pipeHandle != INVALID_HANDLE_VALUE) {
CloseHandle(pipeHandle);
- handle->accept_reqs[i].pipeHandle = INVALID_HANDLE_VALUE;
+ handle->pipe.serv.accept_reqs[i].pipeHandle = INVALID_HANDLE_VALUE;
}
}
}
@@ -796,9 +799,9 @@ static void uv_pipe_queue_accept(uv_loop_t* loop, uv_pipe_t* handle,
assert(req->pipeHandle != INVALID_HANDLE_VALUE);
/* Prepare the overlapped structure. */
- memset(&(req->overlapped), 0, sizeof(req->overlapped));
+ memset(&(req->u.io.overlapped), 0, sizeof(req->u.io.overlapped));
- if (!ConnectNamedPipe(req->pipeHandle, &req->overlapped) &&
+ if (!ConnectNamedPipe(req->pipeHandle, &req->u.io.overlapped) &&
GetLastError() != ERROR_IO_PENDING) {
if (GetLastError() == ERROR_PIPE_CONNECTED) {
SET_REQ_SUCCESS(req);
@@ -826,14 +829,14 @@ int uv_pipe_accept(uv_pipe_t* server, uv_stream_t* client) {
int err;
if (server->ipc) {
- if (QUEUE_EMPTY(&server->pending_ipc_info.queue)) {
+ if (QUEUE_EMPTY(&server->pipe.conn.pending_ipc_info.queue)) {
/* No valid pending sockets. */
return WSAEWOULDBLOCK;
}
- q = QUEUE_HEAD(&server->pending_ipc_info.queue);
+ q = QUEUE_HEAD(&server->pipe.conn.pending_ipc_info.queue);
QUEUE_REMOVE(q);
- server->pending_ipc_info.queue_len--;
+ server->pipe.conn.pending_ipc_info.queue_len--;
item = QUEUE_DATA(q, uv__ipc_queue_item_t, member);
err = uv_tcp_import((uv_tcp_t*)client,
@@ -849,7 +852,7 @@ int uv_pipe_accept(uv_pipe_t* server, uv_stream_t* client) {
/* Find a connection instance that has been connected, but not yet */
/* accepted. */
- req = server->pending_accepts;
+ req = server->pipe.serv.pending_accepts;
if (!req) {
/* No valid connections found, so we error out. */
@@ -862,7 +865,7 @@ int uv_pipe_accept(uv_pipe_t* server, uv_stream_t* client) {
pipe_client->flags |= UV_HANDLE_READABLE | UV_HANDLE_WRITABLE;
/* Prepare the req to pick up a new connection */
- server->pending_accepts = req->next_pending;
+ server->pipe.serv.pending_accepts = req->next_pending;
req->next_pending = NULL;
req->pipeHandle = INVALID_HANDLE_VALUE;
@@ -881,7 +884,7 @@ int uv_pipe_listen(uv_pipe_t* handle, int backlog, uv_connection_cb cb) {
int i;
if (handle->flags & UV_HANDLE_LISTENING) {
- handle->connection_cb = cb;
+ handle->stream.serv.connection_cb = cb;
}
if (!(handle->flags & UV_HANDLE_BOUND)) {
@@ -898,13 +901,13 @@ int uv_pipe_listen(uv_pipe_t* handle, int backlog, uv_connection_cb cb) {
handle->flags |= UV_HANDLE_LISTENING;
INCREASE_ACTIVE_COUNT(loop, handle);
- handle->connection_cb = cb;
+ handle->stream.serv.connection_cb = cb;
/* First pipe handle should have already been created in uv_pipe_bind */
- assert(handle->accept_reqs[0].pipeHandle != INVALID_HANDLE_VALUE);
+ assert(handle->pipe.serv.accept_reqs[0].pipeHandle != INVALID_HANDLE_VALUE);
- for (i = 0; i < handle->pending_instances; i++) {
- uv_pipe_queue_accept(loop, handle, &handle->accept_reqs[i], i == 0);
+ for (i = 0; i < handle->pipe.serv.pending_instances; i++) {
+ uv_pipe_queue_accept(loop, handle, &handle->pipe.serv.accept_reqs[i], i == 0);
}
return 0;
@@ -919,7 +922,7 @@ static DWORD WINAPI uv_pipe_zero_readfile_thread_proc(void* parameter) {
uv_loop_t* loop = handle->loop;
HANDLE hThread = NULL;
DWORD err;
- uv_mutex_t *m = &handle->readfile_mutex;
+ uv_mutex_t *m = &handle->pipe.conn.readfile_mutex;
assert(req != NULL);
assert(req->type == UV_READ);
@@ -930,7 +933,7 @@ static DWORD WINAPI uv_pipe_zero_readfile_thread_proc(void* parameter) {
if (DuplicateHandle(GetCurrentProcess(), GetCurrentThread(),
GetCurrentProcess(), &hThread,
0, TRUE, DUPLICATE_SAME_ACCESS)) {
- handle->readfile_thread = hThread;
+ handle->pipe.conn.readfile_thread = hThread;
} else {
hThread = NULL;
}
@@ -948,10 +951,10 @@ static DWORD WINAPI uv_pipe_zero_readfile_thread_proc(void* parameter) {
handle->flags & UV_HANDLE_PIPE_READ_CANCELABLE) {
if (handle->flags & UV_HANDLE_READING) {
/* just a brief break to do something else */
- handle->readfile_thread = NULL;
+ handle->pipe.conn.readfile_thread = NULL;
/* resume after it is finished */
uv_mutex_lock(m);
- handle->readfile_thread = hThread;
+ handle->pipe.conn.readfile_thread = hThread;
uv_mutex_unlock(m);
goto restart_readfile;
} else {
@@ -960,9 +963,9 @@ static DWORD WINAPI uv_pipe_zero_readfile_thread_proc(void* parameter) {
}
}
if (hThread) {
- assert(hThread == handle->readfile_thread);
+ assert(hThread == handle->pipe.conn.readfile_thread);
/* mutex does not control clearing readfile_thread */
- handle->readfile_thread = NULL;
+ handle->pipe.conn.readfile_thread = NULL;
uv_mutex_lock(m);
/* only when we hold the mutex lock is it safe to
open or close the handle */
@@ -1017,9 +1020,9 @@ static void CALLBACK post_completion_read_wait(void* context, BOOLEAN timed_out)
assert(!timed_out);
if (!PostQueuedCompletionStatus(handle->loop->iocp,
- req->overlapped.InternalHigh,
+ req->u.io.overlapped.InternalHigh,
0,
- &req->overlapped)) {
+ &req->u.io.overlapped)) {
uv_fatal_error(GetLastError(), "PostQueuedCompletionStatus");
}
}
@@ -1036,9 +1039,9 @@ static void CALLBACK post_completion_write_wait(void* context, BOOLEAN timed_out
assert(!timed_out);
if (!PostQueuedCompletionStatus(handle->loop->iocp,
- req->overlapped.InternalHigh,
+ req->u.io.overlapped.InternalHigh,
0,
- &req->overlapped)) {
+ &req->u.io.overlapped)) {
uv_fatal_error(GetLastError(), "PostQueuedCompletionStatus");
}
}
@@ -1064,9 +1067,9 @@ static void uv_pipe_queue_read(uv_loop_t* loop, uv_pipe_t* handle) {
goto error;
}
} else {
- memset(&req->overlapped, 0, sizeof(req->overlapped));
+ memset(&req->u.io.overlapped, 0, sizeof(req->u.io.overlapped));
if (handle->flags & UV_HANDLE_EMULATE_IOCP) {
- req->overlapped.hEvent = (HANDLE) ((uintptr_t) req->event_handle | 1);
+ req->u.io.overlapped.hEvent = (HANDLE) ((uintptr_t) req->event_handle | 1);
}
/* Do 0-read */
@@ -1074,7 +1077,7 @@ static void uv_pipe_queue_read(uv_loop_t* loop, uv_pipe_t* handle) {
&uv_zero_,
0,
NULL,
- &req->overlapped);
+ &req->u.io.overlapped);
if (!result && GetLastError() != ERROR_IO_PENDING) {
/* Make this req pending reporting an error. */
@@ -1091,7 +1094,7 @@ static void uv_pipe_queue_read(uv_loop_t* loop, uv_pipe_t* handle) {
}
if (req->wait_handle == INVALID_HANDLE_VALUE) {
if (!RegisterWaitForSingleObject(&req->wait_handle,
- req->overlapped.hEvent, post_completion_read_wait, (void*) req,
+ req->u.io.overlapped.hEvent, post_completion_read_wait, (void*) req,
INFINITE, WT_EXECUTEINWAITTHREAD)) {
SET_REQ_ERROR(req, GetLastError());
goto error;
@@ -1135,14 +1138,14 @@ int uv_pipe_read_start(uv_pipe_t* handle,
static void uv_insert_non_overlapped_write_req(uv_pipe_t* handle,
uv_write_t* req) {
req->next_req = NULL;
- if (handle->non_overlapped_writes_tail) {
+ if (handle->pipe.conn.non_overlapped_writes_tail) {
req->next_req =
- handle->non_overlapped_writes_tail->next_req;
- handle->non_overlapped_writes_tail->next_req = (uv_req_t*)req;
- handle->non_overlapped_writes_tail = req;
+ handle->pipe.conn.non_overlapped_writes_tail->next_req;
+ handle->pipe.conn.non_overlapped_writes_tail->next_req = (uv_req_t*)req;
+ handle->pipe.conn.non_overlapped_writes_tail = req;
} else {
req->next_req = (uv_req_t*)req;
- handle->non_overlapped_writes_tail = req;
+ handle->pipe.conn.non_overlapped_writes_tail = req;
}
}
@@ -1150,13 +1153,13 @@ static void uv_insert_non_overlapped_write_req(uv_pipe_t* handle,
static uv_write_t* uv_remove_non_overlapped_write_req(uv_pipe_t* handle) {
uv_write_t* req;
- if (handle->non_overlapped_writes_tail) {
- req = (uv_write_t*)handle->non_overlapped_writes_tail->next_req;
+ if (handle->pipe.conn.non_overlapped_writes_tail) {
+ req = (uv_write_t*)handle->pipe.conn.non_overlapped_writes_tail->next_req;
- if (req == handle->non_overlapped_writes_tail) {
- handle->non_overlapped_writes_tail = NULL;
+ if (req == handle->pipe.conn.non_overlapped_writes_tail) {
+ handle->pipe.conn.non_overlapped_writes_tail = NULL;
} else {
- handle->non_overlapped_writes_tail->next_req =
+ handle->pipe.conn.non_overlapped_writes_tail->next_req =
req->next_req;
}
@@ -1213,7 +1216,7 @@ static int uv_pipe_write_impl(uv_loop_t* loop,
req->ipc_header = 0;
req->event_handle = NULL;
req->wait_handle = INVALID_HANDLE_VALUE;
- memset(&req->overlapped, 0, sizeof(req->overlapped));
+ memset(&req->u.io.overlapped, 0, sizeof(req->u.io.overlapped));
if (handle->ipc) {
assert(!(handle->flags & UV_HANDLE_NON_OVERLAPPED_PIPE));
@@ -1223,7 +1226,7 @@ static int uv_pipe_write_impl(uv_loop_t* loop,
if (send_handle) {
tcp_send_handle = (uv_tcp_t*)send_handle;
- err = uv_tcp_duplicate_socket(tcp_send_handle, handle->ipc_pid,
+ err = uv_tcp_duplicate_socket(tcp_send_handle, handle->pipe.conn.ipc_pid,
&ipc_frame.socket_info_ex.socket_info);
if (err) {
return err;
@@ -1255,8 +1258,8 @@ static int uv_pipe_write_impl(uv_loop_t* loop,
* Try to use the preallocated write req if it's available.
* Otherwise allocate a new one.
*/
- if (handle->ipc_header_write_req.type != UV_WRITE) {
- ipc_header_req = (uv_write_t*)&handle->ipc_header_write_req;
+ if (handle->pipe.conn.ipc_header_write_req.type != UV_WRITE) {
+ ipc_header_req = (uv_write_t*)&handle->pipe.conn.ipc_header_write_req;
} else {
ipc_header_req = (uv_write_t*)malloc(sizeof(uv_write_t));
if (!ipc_header_req) {
@@ -1272,12 +1275,13 @@ static int uv_pipe_write_impl(uv_loop_t* loop,
}
/* Write the header or the whole frame. */
- memset(&ipc_header_req->overlapped, 0, sizeof(ipc_header_req->overlapped));
+ memset(&ipc_header_req->u.io.overlapped, 0,
+ sizeof(ipc_header_req->u.io.overlapped));
/* Using overlapped IO, but wait for completion before returning.
This write is blocking because ipc_frame is on stack. */
- ipc_header_req->overlapped.hEvent = CreateEvent(NULL, 1, 0, NULL);
- if (!ipc_header_req->overlapped.hEvent) {
+ ipc_header_req->u.io.overlapped.hEvent = CreateEvent(NULL, 1, 0, NULL);
+ if (!ipc_header_req->u.io.overlapped.hEvent) {
uv_fatal_error(GetLastError(), "CreateEvent");
}
@@ -1286,29 +1290,29 @@ static int uv_pipe_write_impl(uv_loop_t* loop,
ipc_frame.header.flags & UV_IPC_TCP_SERVER ?
sizeof(ipc_frame) : sizeof(ipc_frame.header),
NULL,
- &ipc_header_req->overlapped);
+ &ipc_header_req->u.io.overlapped);
if (!result && GetLastError() != ERROR_IO_PENDING) {
err = GetLastError();
- CloseHandle(ipc_header_req->overlapped.hEvent);
+ CloseHandle(ipc_header_req->u.io.overlapped.hEvent);
return err;
}
if (!result) {
/* Request not completed immediately. Wait for it.*/
- if (WaitForSingleObject(ipc_header_req->overlapped.hEvent, INFINITE) !=
+ if (WaitForSingleObject(ipc_header_req->u.io.overlapped.hEvent, INFINITE) !=
WAIT_OBJECT_0) {
err = GetLastError();
- CloseHandle(ipc_header_req->overlapped.hEvent);
+ CloseHandle(ipc_header_req->u.io.overlapped.hEvent);
return err;
}
}
- ipc_header_req->queued_bytes = 0;
- CloseHandle(ipc_header_req->overlapped.hEvent);
- ipc_header_req->overlapped.hEvent = NULL;
+ ipc_header_req->u.io.queued_bytes = 0;
+ CloseHandle(ipc_header_req->u.io.overlapped.hEvent);
+ ipc_header_req->u.io.overlapped.hEvent = NULL;
REGISTER_HANDLE_REQ(loop, handle, ipc_header_req);
handle->reqs_pending++;
- handle->write_reqs_pending++;
+ handle->stream.conn.write_reqs_pending++;
/* If we don't have any raw data to write - we're done. */
if (!(ipc_frame.header.flags & UV_IPC_RAW_DATA)) {
@@ -1331,28 +1335,28 @@ static int uv_pipe_write_impl(uv_loop_t* loop,
return err;
} else {
/* Request completed immediately. */
- req->queued_bytes = 0;
+ req->u.io.queued_bytes = 0;
}
REGISTER_HANDLE_REQ(loop, handle, req);
handle->reqs_pending++;
- handle->write_reqs_pending++;
+ handle->stream.conn.write_reqs_pending++;
POST_COMPLETION_FOR_REQ(loop, req);
return 0;
} else if (handle->flags & UV_HANDLE_NON_OVERLAPPED_PIPE) {
req->write_buffer = bufs[0];
uv_insert_non_overlapped_write_req(handle, req);
- if (handle->write_reqs_pending == 0) {
+ if (handle->stream.conn.write_reqs_pending == 0) {
uv_queue_non_overlapped_write(handle);
}
/* Request queued by the kernel. */
- req->queued_bytes = uv__count_bufs(bufs, nbufs);
- handle->write_queue_size += req->queued_bytes;
+ req->u.io.queued_bytes = bufs[0].len;
+ handle->write_queue_size += req->u.io.queued_bytes;
} else if (handle->flags & UV_HANDLE_BLOCKING_WRITES) {
/* Using overlapped IO, but wait for completion before returning */
- req->overlapped.hEvent = CreateEvent(NULL, 1, 0, NULL);
- if (!req->overlapped.hEvent) {
+ req->u.io.overlapped.hEvent = CreateEvent(NULL, 1, 0, NULL);
+ if (!req->u.io.overlapped.hEvent) {
uv_fatal_error(GetLastError(), "CreateEvent");
}
@@ -1360,40 +1364,40 @@ static int uv_pipe_write_impl(uv_loop_t* loop,
bufs[0].base,
bufs[0].len,
NULL,
- &req->overlapped);
+ &req->u.io.overlapped);
if (!result && GetLastError() != ERROR_IO_PENDING) {
err = GetLastError();
- CloseHandle(req->overlapped.hEvent);
+ CloseHandle(req->u.io.overlapped.hEvent);
return err;
}
if (result) {
/* Request completed immediately. */
- req->queued_bytes = 0;
+ req->u.io.queued_bytes = 0;
} else {
- assert(ipc_header_req != NULL);
/* Request queued by the kernel. */
- if (WaitForSingleObject(ipc_header_req->overlapped.hEvent, INFINITE) !=
+ req->u.io.queued_bytes = bufs[0].len;
+ handle->write_queue_size += req->u.io.queued_bytes;
+ if (WaitForSingleObject(req->u.io.overlapped.hEvent, INFINITE) !=
WAIT_OBJECT_0) {
err = GetLastError();
- CloseHandle(ipc_header_req->overlapped.hEvent);
+ CloseHandle(req->u.io.overlapped.hEvent);
return uv_translate_sys_error(err);
}
}
- CloseHandle(req->overlapped.hEvent);
+ CloseHandle(req->u.io.overlapped.hEvent);
REGISTER_HANDLE_REQ(loop, handle, req);
handle->reqs_pending++;
- handle->write_reqs_pending++;
- POST_COMPLETION_FOR_REQ(loop, req);
+ handle->stream.conn.write_reqs_pending++;
return 0;
} else {
result = WriteFile(handle->handle,
bufs[0].base,
bufs[0].len,
NULL,
- &req->overlapped);
+ &req->u.io.overlapped);
if (!result && GetLastError() != ERROR_IO_PENDING) {
return GetLastError();
@@ -1401,11 +1405,11 @@ static int uv_pipe_write_impl(uv_loop_t* loop,
if (result) {
/* Request completed immediately. */
- req->queued_bytes = 0;
+ req->u.io.queued_bytes = 0;
} else {
/* Request queued by the kernel. */
- req->queued_bytes = uv__count_bufs(bufs, nbufs);
- handle->write_queue_size += req->queued_bytes;
+ req->u.io.queued_bytes = bufs[0].len;
+ handle->write_queue_size += req->u.io.queued_bytes;
}
if (handle->flags & UV_HANDLE_EMULATE_IOCP) {
@@ -1414,7 +1418,7 @@ static int uv_pipe_write_impl(uv_loop_t* loop,
uv_fatal_error(GetLastError(), "CreateEvent");
}
if (!RegisterWaitForSingleObject(&req->wait_handle,
- req->overlapped.hEvent, post_completion_write_wait, (void*) req,
+ req->u.io.overlapped.hEvent, post_completion_write_wait, (void*) req,
INFINITE, WT_EXECUTEINWAITTHREAD)) {
return GetLastError();
}
@@ -1423,7 +1427,7 @@ static int uv_pipe_write_impl(uv_loop_t* loop,
REGISTER_HANDLE_REQ(loop, handle, req);
handle->reqs_pending++;
- handle->write_reqs_pending++;
+ handle->stream.conn.write_reqs_pending++;
return 0;
}
@@ -1500,8 +1504,8 @@ void uv__pipe_insert_pending_socket(uv_pipe_t* handle,
memcpy(&item->socket_info_ex, info, sizeof(item->socket_info_ex));
item->tcp_connection = tcp_connection;
- QUEUE_INSERT_TAIL(&handle->pending_ipc_info.queue, &item->member);
- handle->pending_ipc_info.queue_len++;
+ QUEUE_INSERT_TAIL(&handle->pipe.conn.pending_ipc_info.queue, &item->member);
+ handle->pipe.conn.pending_ipc_info.queue_len++;
}
@@ -1544,7 +1548,7 @@ void uv_process_pipe_read_req(uv_loop_t* loop, uv_pipe_t* handle,
if (handle->ipc) {
/* Use the IPC framing protocol to read the incoming data. */
- if (handle->remaining_ipc_rawdata_bytes == 0) {
+ if (handle->pipe.conn.remaining_ipc_rawdata_bytes == 0) {
/* We're reading a new frame. First, read the header. */
assert(avail >= sizeof(ipc_frame.header));
@@ -1587,12 +1591,12 @@ void uv_process_pipe_read_req(uv_loop_t* loop, uv_pipe_t* handle,
}
if (ipc_frame.header.flags & UV_IPC_RAW_DATA) {
- handle->remaining_ipc_rawdata_bytes =
+ handle->pipe.conn.remaining_ipc_rawdata_bytes =
ipc_frame.header.raw_data_length;
continue;
}
} else {
- avail = min(avail, (DWORD)handle->remaining_ipc_rawdata_bytes);
+ avail = min(avail, (DWORD)handle->pipe.conn.remaining_ipc_rawdata_bytes);
}
}
@@ -1610,9 +1614,9 @@ void uv_process_pipe_read_req(uv_loop_t* loop, uv_pipe_t* handle,
NULL)) {
/* Successful read */
if (handle->ipc) {
- assert(handle->remaining_ipc_rawdata_bytes >= bytes);
- handle->remaining_ipc_rawdata_bytes =
- handle->remaining_ipc_rawdata_bytes - bytes;
+ assert(handle->pipe.conn.remaining_ipc_rawdata_bytes >= bytes);
+ handle->pipe.conn.remaining_ipc_rawdata_bytes =
+ handle->pipe.conn.remaining_ipc_rawdata_bytes - bytes;
}
handle->read_cb((uv_stream_t*)handle, bytes, &buf);
@@ -1643,8 +1647,8 @@ void uv_process_pipe_write_req(uv_loop_t* loop, uv_pipe_t* handle,
assert(handle->type == UV_NAMED_PIPE);
- assert(handle->write_queue_size >= req->queued_bytes);
- handle->write_queue_size -= req->queued_bytes;
+ assert(handle->write_queue_size >= req->u.io.queued_bytes);
+ handle->write_queue_size -= req->u.io.queued_bytes;
UNREGISTER_HANDLE_REQ(loop, handle, req);
@@ -1660,7 +1664,7 @@ void uv_process_pipe_write_req(uv_loop_t* loop, uv_pipe_t* handle,
}
if (req->ipc_header) {
- if (req == &handle->ipc_header_write_req) {
+ if (req == &handle->pipe.conn.ipc_header_write_req) {
req->type = UV_UNKNOWN_REQ;
} else {
free(req);
@@ -1672,16 +1676,16 @@ void uv_process_pipe_write_req(uv_loop_t* loop, uv_pipe_t* handle,
}
}
- handle->write_reqs_pending--;
+ handle->stream.conn.write_reqs_pending--;
if (handle->flags & UV_HANDLE_NON_OVERLAPPED_PIPE &&
- handle->non_overlapped_writes_tail) {
- assert(handle->write_reqs_pending > 0);
+ handle->pipe.conn.non_overlapped_writes_tail) {
+ assert(handle->stream.conn.write_reqs_pending > 0);
uv_queue_non_overlapped_write(handle);
}
- if (handle->shutdown_req != NULL &&
- handle->write_reqs_pending == 0) {
+ if (handle->stream.conn.shutdown_req != NULL &&
+ handle->stream.conn.write_reqs_pending == 0) {
uv_want_endgame(loop, (uv_handle_t*)handle);
}
@@ -1704,11 +1708,11 @@ void uv_process_pipe_accept_req(uv_loop_t* loop, uv_pipe_t* handle,
if (REQ_SUCCESS(req)) {
assert(req->pipeHandle != INVALID_HANDLE_VALUE);
- req->next_pending = handle->pending_accepts;
- handle->pending_accepts = req;
+ req->next_pending = handle->pipe.serv.pending_accepts;
+ handle->pipe.serv.pending_accepts = req;
- if (handle->connection_cb) {
- handle->connection_cb((uv_stream_t*)handle, 0);
+ if (handle->stream.serv.connection_cb) {
+ handle->stream.serv.connection_cb((uv_stream_t*)handle, 0);
}
} else {
if (req->pipeHandle != INVALID_HANDLE_VALUE) {
@@ -1781,23 +1785,23 @@ void uv_process_pipe_shutdown_req(uv_loop_t* loop, uv_pipe_t* handle,
static void eof_timer_init(uv_pipe_t* pipe) {
int r;
- assert(pipe->eof_timer == NULL);
+ assert(pipe->pipe.conn.eof_timer == NULL);
assert(pipe->flags & UV_HANDLE_CONNECTION);
- pipe->eof_timer = (uv_timer_t*) malloc(sizeof *pipe->eof_timer);
+ pipe->pipe.conn.eof_timer = (uv_timer_t*) malloc(sizeof *pipe->pipe.conn.eof_timer);
- r = uv_timer_init(pipe->loop, pipe->eof_timer);
+ r = uv_timer_init(pipe->loop, pipe->pipe.conn.eof_timer);
assert(r == 0); /* timers can't fail */
- pipe->eof_timer->data = pipe;
- uv_unref((uv_handle_t*) pipe->eof_timer);
+ pipe->pipe.conn.eof_timer->data = pipe;
+ uv_unref((uv_handle_t*) pipe->pipe.conn.eof_timer);
}
static void eof_timer_start(uv_pipe_t* pipe) {
assert(pipe->flags & UV_HANDLE_CONNECTION);
- if (pipe->eof_timer != NULL) {
- uv_timer_start(pipe->eof_timer, eof_timer_cb, eof_timeout, 0);
+ if (pipe->pipe.conn.eof_timer != NULL) {
+ uv_timer_start(pipe->pipe.conn.eof_timer, eof_timer_cb, eof_timeout, 0);
}
}
@@ -1805,8 +1809,8 @@ static void eof_timer_start(uv_pipe_t* pipe) {
static void eof_timer_stop(uv_pipe_t* pipe) {
assert(pipe->flags & UV_HANDLE_CONNECTION);
- if (pipe->eof_timer != NULL) {
- uv_timer_stop(pipe->eof_timer);
+ if (pipe->pipe.conn.eof_timer != NULL) {
+ uv_timer_stop(pipe->pipe.conn.eof_timer);
}
}
@@ -1829,7 +1833,7 @@ static void eof_timer_cb(uv_timer_t* timer) {
/* Therefore we check here if the read request has completed but will */
/* be processed later. */
if ((pipe->flags & UV_HANDLE_READ_PENDING) &&
- HasOverlappedIoCompleted(&pipe->read_req.overlapped)) {
+ HasOverlappedIoCompleted(&pipe->read_req.u.io.overlapped)) {
return;
}
@@ -1850,9 +1854,9 @@ static void eof_timer_cb(uv_timer_t* timer) {
static void eof_timer_destroy(uv_pipe_t* pipe) {
assert(pipe->flags & UV_HANDLE_CONNECTION);
- if (pipe->eof_timer) {
- uv_close((uv_handle_t*) pipe->eof_timer, eof_timer_close_cb);
- pipe->eof_timer = NULL;
+ if (pipe->pipe.conn.eof_timer) {
+ uv_close((uv_handle_t*) pipe->pipe.conn.eof_timer, eof_timer_close_cb);
+ pipe->pipe.conn.eof_timer = NULL;
}
}
@@ -1903,8 +1907,8 @@ int uv_pipe_open(uv_pipe_t* pipe, uv_file file) {
if (pipe->ipc) {
assert(!(pipe->flags & UV_HANDLE_NON_OVERLAPPED_PIPE));
- pipe->ipc_pid = uv_parent_pid();
- assert(pipe->ipc_pid != -1);
+ pipe->pipe.conn.ipc_pid = uv_parent_pid();
+ assert(pipe->pipe.conn.ipc_pid != -1);
}
return 0;
}
@@ -2027,7 +2031,7 @@ static int uv__pipe_getname(const uv_pipe_t* handle, char* buffer, size_t* size)
int uv_pipe_pending_count(uv_pipe_t* handle) {
if (!handle->ipc)
return 0;
- return handle->pending_ipc_info.queue_len;
+ return handle->pipe.conn.pending_ipc_info.queue_len;
}
@@ -2060,7 +2064,7 @@ int uv_pipe_getpeername(const uv_pipe_t* handle, char* buffer, size_t* size) {
uv_handle_type uv_pipe_pending_type(uv_pipe_t* handle) {
if (!handle->ipc)
return UV_UNKNOWN_HANDLE;
- if (handle->pending_ipc_info.queue_len == 0)
+ if (handle->pipe.conn.pending_ipc_info.queue_len == 0)
return UV_UNKNOWN_HANDLE;
else
return UV_TCP;
diff --git a/deps/uv/src/win/poll.c b/deps/uv/src/win/poll.c
index 4d8e1f99f65cb5..ce861d6ffc41eb 100644
--- a/deps/uv/src/win/poll.c
+++ b/deps/uv/src/win/poll.c
@@ -112,12 +112,12 @@ static void uv__fast_poll_submit_poll_req(uv_loop_t* loop, uv_poll_t* handle) {
afd_poll_info->Handles[0].Events |= AFD_POLL_SEND | AFD_POLL_CONNECT_FAIL;
}
- memset(&req->overlapped, 0, sizeof req->overlapped);
+ memset(&req->u.io.overlapped, 0, sizeof req->u.io.overlapped);
result = uv_msafd_poll((SOCKET) handle->peer_socket,
afd_poll_info,
afd_poll_info,
- &req->overlapped);
+ &req->u.io.overlapped);
if (result != 0 && WSAGetLastError() != WSA_IO_PENDING) {
/* Queue this req, reporting an error. */
SET_REQ_ERROR(req, WSAGetLastError());
@@ -380,7 +380,7 @@ static DWORD WINAPI uv__slow_poll_thread_proc(void* arg) {
}
SET_REQ_SUCCESS(req);
- req->overlapped.InternalHigh = (DWORD) reported_events;
+ req->u.io.overlapped.InternalHigh = (DWORD) reported_events;
POST_COMPLETION_FOR_REQ(handle->loop, req);
return 0;
@@ -442,7 +442,7 @@ static void uv__slow_poll_process_poll_req(uv_loop_t* loop, uv_poll_t* handle,
}
} else {
/* Got some events. */
- int events = req->overlapped.InternalHigh & handle->events & ~mask_events;
+ int events = req->u.io.overlapped.InternalHigh & handle->events & ~mask_events;
if (events != 0) {
handle->poll_cb(handle, 0, events);
}
diff --git a/deps/uv/src/win/process.c b/deps/uv/src/win/process.c
index 3a0106f82d63e3..887595f89cc9b6 100644
--- a/deps/uv/src/win/process.c
+++ b/deps/uv/src/win/process.c
@@ -707,7 +707,7 @@ int make_program_env(char* env_block[], WCHAR** dst_ptr) {
}
/* second pass: copy to UTF-16 environment block */
- dst_copy = _malloca(env_len * sizeof(WCHAR));
+ dst_copy = malloc(env_len * sizeof(WCHAR));
if (!dst_copy) {
return ERROR_OUTOFMEMORY;
}
@@ -725,7 +725,7 @@ int make_program_env(char* env_block[], WCHAR** dst_ptr) {
(int) (env_len - (ptr - dst_copy)));
if (len <= 0) {
DWORD err = GetLastError();
- _freea(dst_copy);
+ free(dst_copy);
return err;
}
*ptr_copy++ = ptr;
@@ -767,7 +767,7 @@ int make_program_env(char* env_block[], WCHAR** dst_ptr) {
/* final pass: copy, in sort order, and inserting required variables */
dst = malloc((1+env_len) * sizeof(WCHAR));
if (!dst) {
- _freea(dst_copy);
+ free(dst_copy);
return ERROR_OUTOFMEMORY;
}
@@ -812,7 +812,7 @@ int make_program_env(char* env_block[], WCHAR** dst_ptr) {
assert(env_len == (ptr - dst));
*ptr = L'\0';
- _freea(dst_copy);
+ free(dst_copy);
*dst_ptr = dst;
return 0;
}
@@ -1124,7 +1124,7 @@ int uv_spawn(uv_loop_t* loop,
if (fdopt->flags & UV_CREATE_PIPE &&
fdopt->data.stream->type == UV_NAMED_PIPE &&
((uv_pipe_t*) fdopt->data.stream)->ipc) {
- ((uv_pipe_t*) fdopt->data.stream)->ipc_pid = info.dwProcessId;
+ ((uv_pipe_t*) fdopt->data.stream)->pipe.conn.ipc_pid = info.dwProcessId;
}
}
diff --git a/deps/uv/src/win/req-inl.h b/deps/uv/src/win/req-inl.h
index 46c7d9b106a869..b5e502eef5521a 100644
--- a/deps/uv/src/win/req-inl.h
+++ b/deps/uv/src/win/req-inl.h
@@ -29,7 +29,7 @@
#define SET_REQ_STATUS(req, status) \
- (req)->overlapped.Internal = (ULONG_PTR) (status)
+ (req)->u.io.overlapped.Internal = (ULONG_PTR) (status)
#define SET_REQ_ERROR(req, error) \
SET_REQ_STATUS((req), NTSTATUS_FROM_WIN32((error)))
@@ -38,7 +38,7 @@
SET_REQ_STATUS((req), STATUS_SUCCESS)
#define GET_REQ_STATUS(req) \
- ((NTSTATUS) (req)->overlapped.Internal)
+ ((NTSTATUS) (req)->u.io.overlapped.Internal)
#define REQ_SUCCESS(req) \
(NT_SUCCESS(GET_REQ_STATUS((req))))
@@ -74,7 +74,7 @@
if (!PostQueuedCompletionStatus((loop)->iocp, \
0, \
0, \
- &((req)->overlapped))) { \
+ &((req)->u.io.overlapped))) { \
uv_fatal_error(GetLastError(), "PostQueuedCompletionStatus"); \
}
@@ -86,13 +86,24 @@ INLINE static void uv_req_init(uv_loop_t* loop, uv_req_t* req) {
INLINE static uv_req_t* uv_overlapped_to_req(OVERLAPPED* overlapped) {
- return CONTAINING_RECORD(overlapped, uv_req_t, overlapped);
+ return CONTAINING_RECORD(overlapped, uv_req_t, u.io.overlapped);
}
INLINE static void uv_insert_pending_req(uv_loop_t* loop, uv_req_t* req) {
req->next_req = NULL;
if (loop->pending_reqs_tail) {
+#ifdef _DEBUG
+ /* Ensure the request is not already in the queue, or the queue
+ * will get corrupted.
+ */
+ uv_req_t* current = loop->pending_reqs_tail;
+ do {
+ assert(req != current);
+ current = current->next_req;
+ } while(current != loop->pending_reqs_tail);
+#endif
+
req->next_req = loop->pending_reqs_tail->next_req;
loop->pending_reqs_tail->next_req = req;
loop->pending_reqs_tail = req;
diff --git a/deps/uv/src/win/stream-inl.h b/deps/uv/src/win/stream-inl.h
index 97a6b90b50560a..b7a3c11958c274 100644
--- a/deps/uv/src/win/stream-inl.h
+++ b/deps/uv/src/win/stream-inl.h
@@ -41,7 +41,7 @@ INLINE static void uv_stream_init(uv_loop_t* loop,
INLINE static void uv_connection_init(uv_stream_t* handle) {
handle->flags |= UV_HANDLE_CONNECTION;
- handle->write_reqs_pending = 0;
+ handle->stream.conn.write_reqs_pending = 0;
uv_req_init(handle->loop, (uv_req_t*) &(handle->read_req));
handle->read_req.event_handle = NULL;
@@ -49,7 +49,7 @@ INLINE static void uv_connection_init(uv_stream_t* handle) {
handle->read_req.type = UV_READ;
handle->read_req.data = handle;
- handle->shutdown_req = NULL;
+ handle->stream.conn.shutdown_req = NULL;
}
diff --git a/deps/uv/src/win/stream.c b/deps/uv/src/win/stream.c
index 36d88d00bd9893..a2466e5e9db8ba 100644
--- a/deps/uv/src/win/stream.c
+++ b/deps/uv/src/win/stream.c
@@ -216,7 +216,7 @@ int uv_shutdown(uv_shutdown_t* req, uv_stream_t* handle, uv_shutdown_cb cb) {
req->cb = cb;
handle->flags &= ~UV_HANDLE_WRITABLE;
- handle->shutdown_req = req;
+ handle->stream.conn.shutdown_req = req;
handle->reqs_pending++;
REGISTER_HANDLE_REQ(loop, handle, req);
diff --git a/deps/uv/src/win/tcp.c b/deps/uv/src/win/tcp.c
index c5ddbed08f75ae..8b0e18c7cf3256 100644
--- a/deps/uv/src/win/tcp.c
+++ b/deps/uv/src/win/tcp.c
@@ -149,13 +149,13 @@ static int uv_tcp_set_socket(uv_loop_t* loop, uv_tcp_t* handle,
int uv_tcp_init(uv_loop_t* loop, uv_tcp_t* handle) {
uv_stream_init(loop, (uv_stream_t*) handle, UV_TCP);
- handle->accept_reqs = NULL;
- handle->pending_accepts = NULL;
+ handle->tcp.serv.accept_reqs = NULL;
+ handle->tcp.serv.pending_accepts = NULL;
handle->socket = INVALID_SOCKET;
handle->reqs_pending = 0;
- handle->func_acceptex = NULL;
- handle->func_connectex = NULL;
- handle->processed_accepts = 0;
+ handle->tcp.serv.func_acceptex = NULL;
+ handle->tcp.conn.func_connectex = NULL;
+ handle->tcp.serv.processed_accepts = 0;
handle->delayed_error = 0;
return 0;
@@ -168,10 +168,10 @@ void uv_tcp_endgame(uv_loop_t* loop, uv_tcp_t* handle) {
uv_tcp_accept_t* req;
if (handle->flags & UV_HANDLE_CONNECTION &&
- handle->shutdown_req != NULL &&
- handle->write_reqs_pending == 0) {
+ handle->stream.conn.shutdown_req != NULL &&
+ handle->stream.conn.write_reqs_pending == 0) {
- UNREGISTER_HANDLE_REQ(loop, handle, handle->shutdown_req);
+ UNREGISTER_HANDLE_REQ(loop, handle, handle->stream.conn.shutdown_req);
err = 0;
if (handle->flags & UV__HANDLE_CLOSING) {
@@ -180,12 +180,12 @@ void uv_tcp_endgame(uv_loop_t* loop, uv_tcp_t* handle) {
err = WSAGetLastError();
}
- if (handle->shutdown_req->cb) {
- handle->shutdown_req->cb(handle->shutdown_req,
+ if (handle->stream.conn.shutdown_req->cb) {
+ handle->stream.conn.shutdown_req->cb(handle->stream.conn.shutdown_req,
uv_translate_sys_error(err));
}
- handle->shutdown_req = NULL;
+ handle->stream.conn.shutdown_req = NULL;
DECREASE_PENDING_REQ_COUNT(handle);
return;
}
@@ -200,10 +200,10 @@ void uv_tcp_endgame(uv_loop_t* loop, uv_tcp_t* handle) {
handle->flags |= UV_HANDLE_TCP_SOCKET_CLOSED;
}
- if (!(handle->flags & UV_HANDLE_CONNECTION) && handle->accept_reqs) {
+ if (!(handle->flags & UV_HANDLE_CONNECTION) && handle->tcp.serv.accept_reqs) {
if (handle->flags & UV_HANDLE_EMULATE_IOCP) {
for (i = 0; i < uv_simultaneous_server_accepts; i++) {
- req = &handle->accept_reqs[i];
+ req = &handle->tcp.serv.accept_reqs[i];
if (req->wait_handle != INVALID_HANDLE_VALUE) {
UnregisterWait(req->wait_handle);
req->wait_handle = INVALID_HANDLE_VALUE;
@@ -215,8 +215,8 @@ void uv_tcp_endgame(uv_loop_t* loop, uv_tcp_t* handle) {
}
}
- free(handle->accept_reqs);
- handle->accept_reqs = NULL;
+ free(handle->tcp.serv.accept_reqs);
+ handle->tcp.serv.accept_reqs = NULL;
}
if (handle->flags & UV_HANDLE_CONNECTION &&
@@ -327,9 +327,9 @@ static void CALLBACK post_completion(void* context, BOOLEAN timed_out) {
assert(!timed_out);
if (!PostQueuedCompletionStatus(handle->loop->iocp,
- req->overlapped.InternalHigh,
+ req->u.io.overlapped.InternalHigh,
0,
- &req->overlapped)) {
+ &req->u.io.overlapped)) {
uv_fatal_error(GetLastError(), "PostQueuedCompletionStatus");
}
}
@@ -346,9 +346,9 @@ static void CALLBACK post_write_completion(void* context, BOOLEAN timed_out) {
assert(!timed_out);
if (!PostQueuedCompletionStatus(handle->loop->iocp,
- req->overlapped.InternalHigh,
+ req->u.io.overlapped.InternalHigh,
0,
- &req->overlapped)) {
+ &req->u.io.overlapped)) {
uv_fatal_error(GetLastError(), "PostQueuedCompletionStatus");
}
}
@@ -390,19 +390,19 @@ static void uv_tcp_queue_accept(uv_tcp_t* handle, uv_tcp_accept_t* req) {
}
/* Prepare the overlapped structure. */
- memset(&(req->overlapped), 0, sizeof(req->overlapped));
+ memset(&(req->u.io.overlapped), 0, sizeof(req->u.io.overlapped));
if (handle->flags & UV_HANDLE_EMULATE_IOCP) {
- req->overlapped.hEvent = (HANDLE) ((ULONG_PTR) req->event_handle | 1);
+ req->u.io.overlapped.hEvent = (HANDLE) ((ULONG_PTR) req->event_handle | 1);
}
- success = handle->func_acceptex(handle->socket,
- accept_socket,
- (void*)req->accept_buffer,
- 0,
- sizeof(struct sockaddr_storage),
- sizeof(struct sockaddr_storage),
- &bytes,
- &req->overlapped);
+ success = handle->tcp.serv.func_acceptex(handle->socket,
+ accept_socket,
+ (void*)req->accept_buffer,
+ 0,
+ sizeof(struct sockaddr_storage),
+ sizeof(struct sockaddr_storage),
+ &bytes,
+ &req->u.io.overlapped);
if (UV_SUCCEEDED_WITHOUT_IOCP(success)) {
/* Process the req without IOCP. */
@@ -432,7 +432,7 @@ static void uv_tcp_queue_accept(uv_tcp_t* handle, uv_tcp_accept_t* req) {
closesocket(accept_socket);
/* Destroy the event handle */
if (handle->flags & UV_HANDLE_EMULATE_IOCP) {
- CloseHandle(req->overlapped.hEvent);
+ CloseHandle(req->u.io.overlapped.hEvent);
req->event_handle = NULL;
}
}
@@ -449,7 +449,7 @@ static void uv_tcp_queue_read(uv_loop_t* loop, uv_tcp_t* handle) {
assert(!(handle->flags & UV_HANDLE_READ_PENDING));
req = &handle->read_req;
- memset(&req->overlapped, 0, sizeof(req->overlapped));
+ memset(&req->u.io.overlapped, 0, sizeof(req->u.io.overlapped));
/*
* Preallocate a read buffer if the number of active streams is below
@@ -457,13 +457,13 @@ static void uv_tcp_queue_read(uv_loop_t* loop, uv_tcp_t* handle) {
*/
if (loop->active_tcp_streams < uv_active_tcp_streams_threshold) {
handle->flags &= ~UV_HANDLE_ZERO_READ;
- handle->alloc_cb((uv_handle_t*) handle, 65536, &handle->read_buffer);
- if (handle->read_buffer.len == 0) {
- handle->read_cb((uv_stream_t*) handle, UV_ENOBUFS, &handle->read_buffer);
+ handle->alloc_cb((uv_handle_t*) handle, 65536, &handle->tcp.conn.read_buffer);
+ if (handle->tcp.conn.read_buffer.len == 0) {
+ handle->read_cb((uv_stream_t*) handle, UV_ENOBUFS, &handle->tcp.conn.read_buffer);
return;
}
- assert(handle->read_buffer.base != NULL);
- buf = handle->read_buffer;
+ assert(handle->tcp.conn.read_buffer.base != NULL);
+ buf = handle->tcp.conn.read_buffer;
} else {
handle->flags |= UV_HANDLE_ZERO_READ;
buf.base = (char*) &uv_zero_;
@@ -471,10 +471,10 @@ static void uv_tcp_queue_read(uv_loop_t* loop, uv_tcp_t* handle) {
}
/* Prepare the overlapped structure. */
- memset(&(req->overlapped), 0, sizeof(req->overlapped));
+ memset(&(req->u.io.overlapped), 0, sizeof(req->u.io.overlapped));
if (handle->flags & UV_HANDLE_EMULATE_IOCP) {
assert(req->event_handle);
- req->overlapped.hEvent = (HANDLE) ((ULONG_PTR) req->event_handle | 1);
+ req->u.io.overlapped.hEvent = (HANDLE) ((ULONG_PTR) req->event_handle | 1);
}
flags = 0;
@@ -483,13 +483,13 @@ static void uv_tcp_queue_read(uv_loop_t* loop, uv_tcp_t* handle) {
1,
&bytes,
&flags,
- &req->overlapped,
+ &req->u.io.overlapped,
NULL);
if (UV_SUCCEEDED_WITHOUT_IOCP(result == 0)) {
/* Process the req without IOCP. */
handle->flags |= UV_HANDLE_READ_PENDING;
- req->overlapped.InternalHigh = bytes;
+ req->u.io.overlapped.InternalHigh = bytes;
handle->reqs_pending++;
uv_insert_pending_req(loop, (uv_req_t*)req);
} else if (UV_SUCCEEDED_WITH_IOCP(result == 0)) {
@@ -522,7 +522,7 @@ int uv_tcp_listen(uv_tcp_t* handle, int backlog, uv_connection_cb cb) {
assert(backlog > 0);
if (handle->flags & UV_HANDLE_LISTENING) {
- handle->connection_cb = cb;
+ handle->stream.serv.connection_cb = cb;
}
if (handle->flags & UV_HANDLE_READING) {
@@ -544,8 +544,8 @@ int uv_tcp_listen(uv_tcp_t* handle, int backlog, uv_connection_cb cb) {
return handle->delayed_error;
}
- if (!handle->func_acceptex) {
- if (!uv_get_acceptex_function(handle->socket, &handle->func_acceptex)) {
+ if (!handle->tcp.serv.func_acceptex) {
+ if (!uv_get_acceptex_function(handle->socket, &handle->tcp.serv.func_acceptex)) {
return WSAEAFNOSUPPORT;
}
}
@@ -556,21 +556,21 @@ int uv_tcp_listen(uv_tcp_t* handle, int backlog, uv_connection_cb cb) {
}
handle->flags |= UV_HANDLE_LISTENING;
- handle->connection_cb = cb;
+ handle->stream.serv.connection_cb = cb;
INCREASE_ACTIVE_COUNT(loop, handle);
simultaneous_accepts = handle->flags & UV_HANDLE_TCP_SINGLE_ACCEPT ? 1
: uv_simultaneous_server_accepts;
- if(!handle->accept_reqs) {
- handle->accept_reqs = (uv_tcp_accept_t*)
+ if(!handle->tcp.serv.accept_reqs) {
+ handle->tcp.serv.accept_reqs = (uv_tcp_accept_t*)
malloc(uv_simultaneous_server_accepts * sizeof(uv_tcp_accept_t));
- if (!handle->accept_reqs) {
+ if (!handle->tcp.serv.accept_reqs) {
uv_fatal_error(ERROR_OUTOFMEMORY, "malloc");
}
for (i = 0; i < simultaneous_accepts; i++) {
- req = &handle->accept_reqs[i];
+ req = &handle->tcp.serv.accept_reqs[i];
uv_req_init(loop, (uv_req_t*)req);
req->type = UV_ACCEPT;
req->accept_socket = INVALID_SOCKET;
@@ -593,7 +593,7 @@ int uv_tcp_listen(uv_tcp_t* handle, int backlog, uv_connection_cb cb) {
/* doesn't know how how many requests were initialized, so it will */
/* try to clean up {uv_simultaneous_server_accepts} requests. */
for (i = simultaneous_accepts; i < uv_simultaneous_server_accepts; i++) {
- req = &handle->accept_reqs[i];
+ req = &handle->tcp.serv.accept_reqs[i];
uv_req_init(loop, (uv_req_t*) req);
req->type = UV_ACCEPT;
req->accept_socket = INVALID_SOCKET;
@@ -612,7 +612,7 @@ int uv_tcp_accept(uv_tcp_t* server, uv_tcp_t* client) {
int err = 0;
int family;
- uv_tcp_accept_t* req = server->pending_accepts;
+ uv_tcp_accept_t* req = server->tcp.serv.pending_accepts;
if (!req) {
/* No valid connections found, so we error out. */
@@ -643,7 +643,7 @@ int uv_tcp_accept(uv_tcp_t* server, uv_tcp_t* client) {
}
/* Prepare the req to pick up a new connection */
- server->pending_accepts = req->next_pending;
+ server->tcp.serv.pending_accepts = req->next_pending;
req->next_pending = NULL;
req->accept_socket = INVALID_SOCKET;
@@ -655,15 +655,15 @@ int uv_tcp_accept(uv_tcp_t* server, uv_tcp_t* client) {
/* We better be switching to a single pending accept. */
assert(server->flags & UV_HANDLE_TCP_SINGLE_ACCEPT);
- server->processed_accepts++;
+ server->tcp.serv.processed_accepts++;
- if (server->processed_accepts >= uv_simultaneous_server_accepts) {
- server->processed_accepts = 0;
+ if (server->tcp.serv.processed_accepts >= uv_simultaneous_server_accepts) {
+ server->tcp.serv.processed_accepts = 0;
/*
* All previously queued accept requests are now processed.
* We now switch to queueing just a single accept.
*/
- uv_tcp_queue_accept(server, &server->accept_reqs[0]);
+ uv_tcp_queue_accept(server, &server->tcp.serv.accept_reqs[0]);
server->flags &= ~UV_HANDLE_TCP_ACCEPT_STATE_CHANGING;
server->flags |= UV_HANDLE_TCP_SINGLE_ACCEPT;
}
@@ -732,8 +732,8 @@ static int uv_tcp_try_connect(uv_connect_t* req,
return handle->delayed_error;
}
- if (!handle->func_connectex) {
- if (!uv_get_connectex_function(handle->socket, &handle->func_connectex)) {
+ if (!handle->tcp.conn.func_connectex) {
+ if (!uv_get_connectex_function(handle->socket, &handle->tcp.conn.func_connectex)) {
return WSAEAFNOSUPPORT;
}
}
@@ -742,15 +742,15 @@ static int uv_tcp_try_connect(uv_connect_t* req,
req->type = UV_CONNECT;
req->handle = (uv_stream_t*) handle;
req->cb = cb;
- memset(&req->overlapped, 0, sizeof(req->overlapped));
+ memset(&req->u.io.overlapped, 0, sizeof(req->u.io.overlapped));
- success = handle->func_connectex(handle->socket,
- addr,
- addrlen,
- NULL,
- 0,
- &bytes,
- &req->overlapped);
+ success = handle->tcp.conn.func_connectex(handle->socket,
+ addr,
+ addrlen,
+ NULL,
+ 0,
+ &bytes,
+ &req->u.io.overlapped);
if (UV_SUCCEEDED_WITHOUT_IOCP(success)) {
/* Process the req without IOCP. */
@@ -828,13 +828,13 @@ int uv_tcp_write(uv_loop_t* loop,
req->cb = cb;
/* Prepare the overlapped structure. */
- memset(&(req->overlapped), 0, sizeof(req->overlapped));
+ memset(&(req->u.io.overlapped), 0, sizeof(req->u.io.overlapped));
if (handle->flags & UV_HANDLE_EMULATE_IOCP) {
req->event_handle = CreateEvent(NULL, 0, 0, NULL);
if (!req->event_handle) {
uv_fatal_error(GetLastError(), "CreateEvent");
}
- req->overlapped.hEvent = (HANDLE) ((ULONG_PTR) req->event_handle | 1);
+ req->u.io.overlapped.hEvent = (HANDLE) ((ULONG_PTR) req->event_handle | 1);
req->wait_handle = INVALID_HANDLE_VALUE;
}
@@ -843,23 +843,23 @@ int uv_tcp_write(uv_loop_t* loop,
nbufs,
&bytes,
0,
- &req->overlapped,
+ &req->u.io.overlapped,
NULL);
if (UV_SUCCEEDED_WITHOUT_IOCP(result == 0)) {
/* Request completed immediately. */
- req->queued_bytes = 0;
+ req->u.io.queued_bytes = 0;
handle->reqs_pending++;
- handle->write_reqs_pending++;
+ handle->stream.conn.write_reqs_pending++;
REGISTER_HANDLE_REQ(loop, handle, req);
uv_insert_pending_req(loop, (uv_req_t*) req);
} else if (UV_SUCCEEDED_WITH_IOCP(result == 0)) {
/* Request queued by the kernel. */
- req->queued_bytes = uv__count_bufs(bufs, nbufs);
+ req->u.io.queued_bytes = uv__count_bufs(bufs, nbufs);
handle->reqs_pending++;
- handle->write_reqs_pending++;
+ handle->stream.conn.write_reqs_pending++;
REGISTER_HANDLE_REQ(loop, handle, req);
- handle->write_queue_size += req->queued_bytes;
+ handle->write_queue_size += req->u.io.queued_bytes;
if (handle->flags & UV_HANDLE_EMULATE_IOCP &&
!RegisterWaitForSingleObject(&req->wait_handle,
req->event_handle, post_write_completion, (void*) req,
@@ -868,8 +868,13 @@ int uv_tcp_write(uv_loop_t* loop,
uv_insert_pending_req(loop, (uv_req_t*)req);
}
} else {
- /* Send failed due to an error. */
- return WSAGetLastError();
+ /* Send failed due to an error, report it later */
+ req->u.io.queued_bytes = 0;
+ handle->reqs_pending++;
+ handle->stream.conn.write_reqs_pending++;
+ REGISTER_HANDLE_REQ(loop, handle, req);
+ SET_REQ_ERROR(req, WSAGetLastError());
+ uv_insert_pending_req(loop, (uv_req_t*) req);
}
return 0;
@@ -882,7 +887,7 @@ int uv__tcp_try_write(uv_tcp_t* handle,
int result;
DWORD bytes;
- if (handle->write_reqs_pending > 0)
+ if (handle->stream.conn.write_reqs_pending > 0)
return UV_EAGAIN;
result = WSASend(handle->socket,
@@ -916,7 +921,7 @@ void uv_process_tcp_read_req(uv_loop_t* loop, uv_tcp_t* handle,
handle->flags &= ~UV_HANDLE_READING;
DECREASE_ACTIVE_COUNT(loop, handle);
buf = (handle->flags & UV_HANDLE_ZERO_READ) ?
- uv_buf_init(NULL, 0) : handle->read_buffer;
+ uv_buf_init(NULL, 0) : handle->tcp.conn.read_buffer;
err = GET_REQ_SOCK_ERROR(req);
@@ -934,13 +939,13 @@ void uv_process_tcp_read_req(uv_loop_t* loop, uv_tcp_t* handle,
} else {
if (!(handle->flags & UV_HANDLE_ZERO_READ)) {
/* The read was done with a non-zero buffer length. */
- if (req->overlapped.InternalHigh > 0) {
+ if (req->u.io.overlapped.InternalHigh > 0) {
/* Successful read */
handle->read_cb((uv_stream_t*)handle,
- req->overlapped.InternalHigh,
- &handle->read_buffer);
+ req->u.io.overlapped.InternalHigh,
+ &handle->tcp.conn.read_buffer);
/* Read again only if bytes == buf.len */
- if (req->overlapped.InternalHigh < handle->read_buffer.len) {
+ if (req->u.io.overlapped.InternalHigh < handle->tcp.conn.read_buffer.len) {
goto done;
}
} else {
@@ -953,7 +958,7 @@ void uv_process_tcp_read_req(uv_loop_t* loop, uv_tcp_t* handle,
buf.base = 0;
buf.len = 0;
- handle->read_cb((uv_stream_t*)handle, UV_EOF, &handle->read_buffer);
+ handle->read_cb((uv_stream_t*)handle, UV_EOF, &handle->tcp.conn.read_buffer);
goto done;
}
}
@@ -1032,8 +1037,8 @@ void uv_process_tcp_write_req(uv_loop_t* loop, uv_tcp_t* handle,
assert(handle->type == UV_TCP);
- assert(handle->write_queue_size >= req->queued_bytes);
- handle->write_queue_size -= req->queued_bytes;
+ assert(handle->write_queue_size >= req->u.io.queued_bytes);
+ handle->write_queue_size -= req->u.io.queued_bytes;
UNREGISTER_HANDLE_REQ(loop, handle, req);
@@ -1057,9 +1062,9 @@ void uv_process_tcp_write_req(uv_loop_t* loop, uv_tcp_t* handle,
req->cb(req, err);
}
- handle->write_reqs_pending--;
- if (handle->shutdown_req != NULL &&
- handle->write_reqs_pending == 0) {
+ handle->stream.conn.write_reqs_pending--;
+ if (handle->stream.conn.shutdown_req != NULL &&
+ handle->stream.conn.write_reqs_pending == 0) {
uv_want_endgame(loop, (uv_handle_t*)handle);
}
@@ -1082,10 +1087,10 @@ void uv_process_tcp_accept_req(uv_loop_t* loop, uv_tcp_t* handle,
if (handle->flags & UV_HANDLE_LISTENING) {
handle->flags &= ~UV_HANDLE_LISTENING;
DECREASE_ACTIVE_COUNT(loop, handle);
- if (handle->connection_cb) {
+ if (handle->stream.serv.connection_cb) {
err = GET_REQ_SOCK_ERROR(req);
- handle->connection_cb((uv_stream_t*)handle,
- uv_translate_sys_error(err));
+ handle->stream.serv.connection_cb((uv_stream_t*)handle,
+ uv_translate_sys_error(err));
}
}
} else if (REQ_SUCCESS(req) &&
@@ -1094,12 +1099,12 @@ void uv_process_tcp_accept_req(uv_loop_t* loop, uv_tcp_t* handle,
SO_UPDATE_ACCEPT_CONTEXT,
(char*)&handle->socket,
sizeof(handle->socket)) == 0) {
- req->next_pending = handle->pending_accepts;
- handle->pending_accepts = req;
+ req->next_pending = handle->tcp.serv.pending_accepts;
+ handle->tcp.serv.pending_accepts = req;
/* Accept and SO_UPDATE_ACCEPT_CONTEXT were successful. */
- if (handle->connection_cb) {
- handle->connection_cb((uv_stream_t*)handle, 0);
+ if (handle->stream.serv.connection_cb) {
+ handle->stream.serv.connection_cb((uv_stream_t*)handle, 0);
}
} else {
/* Error related to accepted socket is ignored because the server */
@@ -1357,7 +1362,7 @@ void uv_tcp_close(uv_loop_t* loop, uv_tcp_t* tcp) {
}
} else if ((tcp->flags & UV_HANDLE_SHARED_TCP_SOCKET) &&
- tcp->accept_reqs != NULL) {
+ tcp->tcp.serv.accept_reqs != NULL) {
/* Under normal circumstances closesocket() will ensure that all pending */
/* accept reqs are canceled. However, when the socket is shared the */
/* presence of another reference to the socket in another process will */
@@ -1371,9 +1376,9 @@ void uv_tcp_close(uv_loop_t* loop, uv_tcp_t* tcp) {
/* cause the connection to be aborted. */
unsigned int i;
for (i = 0; i < uv_simultaneous_server_accepts; i++) {
- uv_tcp_accept_t* req = &tcp->accept_reqs[i];
+ uv_tcp_accept_t* req = &tcp->tcp.serv.accept_reqs[i];
if (req->accept_socket != INVALID_SOCKET &&
- !HasOverlappedIoCompleted(&req->overlapped)) {
+ !HasOverlappedIoCompleted(&req->u.io.overlapped)) {
closesocket(req->accept_socket);
req->accept_socket = INVALID_SOCKET;
}
diff --git a/deps/uv/src/win/tty.c b/deps/uv/src/win/tty.c
index 603421045cac58..7b1e4ba0557fca 100644
--- a/deps/uv/src/win/tty.c
+++ b/deps/uv/src/win/tty.c
@@ -142,28 +142,28 @@ int uv_tty_init(uv_loop_t* loop, uv_tty_t* tty, uv_file fd, int readable) {
if (readable) {
/* Initialize TTY input specific fields. */
tty->flags |= UV_HANDLE_TTY_READABLE | UV_HANDLE_READABLE;
- tty->read_line_handle = NULL;
- tty->read_line_buffer = uv_null_buf_;
- tty->read_raw_wait = NULL;
+ tty->tty.rd.read_line_handle = NULL;
+ tty->tty.rd.read_line_buffer = uv_null_buf_;
+ tty->tty.rd.read_raw_wait = NULL;
/* Init keycode-to-vt100 mapper state. */
- tty->last_key_len = 0;
- tty->last_key_offset = 0;
- tty->last_utf16_high_surrogate = 0;
- memset(&tty->last_input_record, 0, sizeof tty->last_input_record);
+ tty->tty.rd.last_key_len = 0;
+ tty->tty.rd.last_key_offset = 0;
+ tty->tty.rd.last_utf16_high_surrogate = 0;
+ memset(&tty->tty.rd.last_input_record, 0, sizeof tty->tty.rd.last_input_record);
} else {
/* TTY output specific fields. */
tty->flags |= UV_HANDLE_WRITABLE;
/* Init utf8-to-utf16 conversion state. */
- tty->utf8_bytes_left = 0;
- tty->utf8_codepoint = 0;
+ tty->tty.wr.utf8_bytes_left = 0;
+ tty->tty.wr.utf8_codepoint = 0;
/* Initialize eol conversion state */
- tty->previous_eol = 0;
+ tty->tty.wr.previous_eol = 0;
/* Init ANSI parser state. */
- tty->ansi_parser_state = ANSI_NORMAL;
+ tty->tty.wr.ansi_parser_state = ANSI_NORMAL;
}
return 0;
@@ -268,8 +268,8 @@ static void CALLBACK uv_tty_post_raw_read(void* data, BOOLEAN didTimeout) {
handle = (uv_tty_t*) req->data;
loop = handle->loop;
- UnregisterWait(handle->read_raw_wait);
- handle->read_raw_wait = NULL;
+ UnregisterWait(handle->tty.rd.read_raw_wait);
+ handle->tty.rd.read_raw_wait = NULL;
SET_REQ_SUCCESS(req);
POST_COMPLETION_FOR_REQ(loop, req);
@@ -285,19 +285,19 @@ static void uv_tty_queue_read_raw(uv_loop_t* loop, uv_tty_t* handle) {
assert(handle->handle && handle->handle != INVALID_HANDLE_VALUE);
- handle->read_line_buffer = uv_null_buf_;
+ handle->tty.rd.read_line_buffer = uv_null_buf_;
req = &handle->read_req;
- memset(&req->overlapped, 0, sizeof(req->overlapped));
+ memset(&req->u.io.overlapped, 0, sizeof(req->u.io.overlapped));
- r = RegisterWaitForSingleObject(&handle->read_raw_wait,
+ r = RegisterWaitForSingleObject(&handle->tty.rd.read_raw_wait,
handle->handle,
uv_tty_post_raw_read,
(void*) req,
INFINITE,
WT_EXECUTEINWAITTHREAD | WT_EXECUTEONLYONCE);
if (!r) {
- handle->read_raw_wait = NULL;
+ handle->tty.rd.read_raw_wait = NULL;
SET_REQ_ERROR(req, GetLastError());
uv_insert_pending_req(loop, (uv_req_t*)req);
}
@@ -321,12 +321,12 @@ static DWORD CALLBACK uv_tty_line_read_thread(void* data) {
handle = (uv_tty_t*) req->data;
loop = handle->loop;
- assert(handle->read_line_buffer.base != NULL);
- assert(handle->read_line_buffer.len > 0);
+ assert(handle->tty.rd.read_line_buffer.base != NULL);
+ assert(handle->tty.rd.read_line_buffer.len > 0);
/* ReadConsole can't handle big buffers. */
- if (handle->read_line_buffer.len < MAX_INPUT_BUFFER_LENGTH) {
- bytes = handle->read_line_buffer.len;
+ if (handle->tty.rd.read_line_buffer.len < MAX_INPUT_BUFFER_LENGTH) {
+ bytes = handle->tty.rd.read_line_buffer.len;
} else {
bytes = MAX_INPUT_BUFFER_LENGTH;
}
@@ -335,7 +335,7 @@ static DWORD CALLBACK uv_tty_line_read_thread(void* data) {
/* One utf-16 codeunit never takes more than 3 utf-8 codeunits to encode */
chars = bytes / 3;
- if (ReadConsoleW(handle->read_line_handle,
+ if (ReadConsoleW(handle->tty.rd.read_line_handle,
(void*) utf16,
chars,
&read_chars,
@@ -344,12 +344,12 @@ static DWORD CALLBACK uv_tty_line_read_thread(void* data) {
0,
utf16,
read_chars,
- handle->read_line_buffer.base,
+ handle->tty.rd.read_line_buffer.base,
bytes,
NULL,
NULL);
SET_REQ_SUCCESS(req);
- req->overlapped.InternalHigh = read_bytes;
+ req->u.io.overlapped.InternalHigh = read_bytes;
} else {
SET_REQ_ERROR(req, GetLastError());
}
@@ -368,30 +368,30 @@ static void uv_tty_queue_read_line(uv_loop_t* loop, uv_tty_t* handle) {
assert(handle->handle && handle->handle != INVALID_HANDLE_VALUE);
req = &handle->read_req;
- memset(&req->overlapped, 0, sizeof(req->overlapped));
+ memset(&req->u.io.overlapped, 0, sizeof(req->u.io.overlapped));
- handle->alloc_cb((uv_handle_t*) handle, 8192, &handle->read_line_buffer);
- if (handle->read_line_buffer.len == 0) {
+ handle->alloc_cb((uv_handle_t*) handle, 8192, &handle->tty.rd.read_line_buffer);
+ if (handle->tty.rd.read_line_buffer.len == 0) {
handle->read_cb((uv_stream_t*) handle,
UV_ENOBUFS,
- &handle->read_line_buffer);
+ &handle->tty.rd.read_line_buffer);
return;
}
- assert(handle->read_line_buffer.base != NULL);
+ assert(handle->tty.rd.read_line_buffer.base != NULL);
/* Duplicate the console handle, so if we want to cancel the read, we can */
/* just close this handle duplicate. */
- if (handle->read_line_handle == NULL) {
+ if (handle->tty.rd.read_line_handle == NULL) {
HANDLE this_process = GetCurrentProcess();
r = DuplicateHandle(this_process,
handle->handle,
this_process,
- &handle->read_line_handle,
+ &handle->tty.rd.read_line_handle,
0,
0,
DUPLICATE_SAME_ACCESS);
if (!r) {
- handle->read_line_handle = NULL;
+ handle->tty.rd.read_line_handle = NULL;
SET_REQ_ERROR(req, GetLastError());
uv_insert_pending_req(loop, (uv_req_t*)req);
goto out;
@@ -489,8 +489,8 @@ static const char* get_vt100_fn_key(DWORD code, char shift, char ctrl,
void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
uv_req_t* req) {
- /* Shortcut for handle->last_input_record.Event.KeyEvent. */
-#define KEV handle->last_input_record.Event.KeyEvent
+ /* Shortcut for handle->tty.rd.last_input_record.Event.KeyEvent. */
+#define KEV handle->tty.rd.last_input_record.Event.KeyEvent
DWORD records_left, records_read;
uv_buf_t buf;
@@ -531,12 +531,12 @@ void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
buf = uv_null_buf_;
buf_used = 0;
- while ((records_left > 0 || handle->last_key_len > 0) &&
+ while ((records_left > 0 || handle->tty.rd.last_key_len > 0) &&
(handle->flags & UV_HANDLE_READING)) {
- if (handle->last_key_len == 0) {
+ if (handle->tty.rd.last_key_len == 0) {
/* Read the next input record */
if (!ReadConsoleInputW(handle->handle,
- &handle->last_input_record,
+ &handle->tty.rd.last_input_record,
1,
&records_read)) {
handle->flags &= ~UV_HANDLE_READING;
@@ -551,7 +551,7 @@ void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
/* If the window was resized, recompute the virtual window size. This */
/* will trigger a SIGWINCH signal if the window size changed in an */
/* way that matters to libuv. */
- if (handle->last_input_record.EventType == WINDOW_BUFFER_SIZE_EVENT) {
+ if (handle->tty.rd.last_input_record.EventType == WINDOW_BUFFER_SIZE_EVENT) {
CONSOLE_SCREEN_BUFFER_INFO info;
EnterCriticalSection(&uv_tty_output_lock);
@@ -567,7 +567,7 @@ void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
}
/* Ignore other events that are not key or resize events. */
- if (handle->last_input_record.EventType != KEY_EVENT) {
+ if (handle->tty.rd.last_input_record.EventType != KEY_EVENT) {
continue;
}
@@ -613,7 +613,7 @@ void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
if (KEV.uChar.UnicodeChar >= 0xD800 &&
KEV.uChar.UnicodeChar < 0xDC00) {
/* UTF-16 high surrogate */
- handle->last_utf16_high_surrogate = KEV.uChar.UnicodeChar;
+ handle->tty.rd.last_utf16_high_surrogate = KEV.uChar.UnicodeChar;
continue;
}
@@ -622,7 +622,7 @@ void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
if ((KEV.dwControlKeyState & (LEFT_ALT_PRESSED | RIGHT_ALT_PRESSED))
&& !(KEV.dwControlKeyState & (LEFT_CTRL_PRESSED |
RIGHT_CTRL_PRESSED)) && KEV.bKeyDown) {
- handle->last_key[0] = '\033';
+ handle->tty.rd.last_key[0] = '\033';
prefix_len = 1;
} else {
prefix_len = 0;
@@ -631,14 +631,14 @@ void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
if (KEV.uChar.UnicodeChar >= 0xDC00 &&
KEV.uChar.UnicodeChar < 0xE000) {
/* UTF-16 surrogate pair */
- WCHAR utf16_buffer[2] = { handle->last_utf16_high_surrogate,
+ WCHAR utf16_buffer[2] = { handle->tty.rd.last_utf16_high_surrogate,
KEV.uChar.UnicodeChar};
char_len = WideCharToMultiByte(CP_UTF8,
0,
utf16_buffer,
2,
- &handle->last_key[prefix_len],
- sizeof handle->last_key,
+ &handle->tty.rd.last_key[prefix_len],
+ sizeof handle->tty.rd.last_key,
NULL,
NULL);
} else {
@@ -647,14 +647,14 @@ void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
0,
&KEV.uChar.UnicodeChar,
1,
- &handle->last_key[prefix_len],
- sizeof handle->last_key,
+ &handle->tty.rd.last_key[prefix_len],
+ sizeof handle->tty.rd.last_key,
NULL,
NULL);
}
/* Whatever happened, the last character wasn't a high surrogate. */
- handle->last_utf16_high_surrogate = 0;
+ handle->tty.rd.last_utf16_high_surrogate = 0;
/* If the utf16 character(s) couldn't be converted something must */
/* be wrong. */
@@ -667,8 +667,8 @@ void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
goto out;
}
- handle->last_key_len = (unsigned char) (prefix_len + char_len);
- handle->last_key_offset = 0;
+ handle->tty.rd.last_key_len = (unsigned char) (prefix_len + char_len);
+ handle->tty.rd.last_key_offset = 0;
continue;
} else {
@@ -690,23 +690,23 @@ void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
/* Prefix with \x033 when the alt key was held. */
if (KEV.dwControlKeyState & (LEFT_ALT_PRESSED | RIGHT_ALT_PRESSED)) {
- handle->last_key[0] = '\033';
+ handle->tty.rd.last_key[0] = '\033';
prefix_len = 1;
} else {
prefix_len = 0;
}
/* Copy the vt100 sequence to the handle buffer. */
- assert(prefix_len + vt100_len < sizeof handle->last_key);
- memcpy(&handle->last_key[prefix_len], vt100, vt100_len);
+ assert(prefix_len + vt100_len < sizeof handle->tty.rd.last_key);
+ memcpy(&handle->tty.rd.last_key[prefix_len], vt100, vt100_len);
- handle->last_key_len = (unsigned char) (prefix_len + vt100_len);
- handle->last_key_offset = 0;
+ handle->tty.rd.last_key_len = (unsigned char) (prefix_len + vt100_len);
+ handle->tty.rd.last_key_offset = 0;
continue;
}
} else {
/* Copy any bytes left from the last keypress to the user buffer. */
- if (handle->last_key_offset < handle->last_key_len) {
+ if (handle->tty.rd.last_key_offset < handle->tty.rd.last_key_len) {
/* Allocate a buffer if needed */
if (buf_used == 0) {
handle->alloc_cb((uv_handle_t*) handle, 1024, &buf);
@@ -717,7 +717,7 @@ void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
assert(buf.base != NULL);
}
- buf.base[buf_used++] = handle->last_key[handle->last_key_offset++];
+ buf.base[buf_used++] = handle->tty.rd.last_key[handle->tty.rd.last_key_offset++];
/* If the buffer is full, emit it */
if ((size_t) buf_used == buf.len) {
@@ -731,11 +731,11 @@ void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
/* Apply dwRepeat from the last input record. */
if (--KEV.wRepeatCount > 0) {
- handle->last_key_offset = 0;
+ handle->tty.rd.last_key_offset = 0;
continue;
}
- handle->last_key_len = 0;
+ handle->tty.rd.last_key_len = 0;
continue;
}
}
@@ -766,15 +766,15 @@ void uv_process_tty_read_line_req(uv_loop_t* loop, uv_tty_t* handle,
assert(handle->type == UV_TTY);
assert(handle->flags & UV_HANDLE_TTY_READABLE);
- buf = handle->read_line_buffer;
+ buf = handle->tty.rd.read_line_buffer;
handle->flags &= ~UV_HANDLE_READ_PENDING;
- handle->read_line_buffer = uv_null_buf_;
+ handle->tty.rd.read_line_buffer = uv_null_buf_;
if (!REQ_SUCCESS(req)) {
/* Read was not successful */
if ((handle->flags & UV_HANDLE_READING) &&
- handle->read_line_handle != NULL) {
+ handle->tty.rd.read_line_handle != NULL) {
/* Real error */
handle->flags &= ~UV_HANDLE_READING;
DECREASE_ACTIVE_COUNT(loop, handle);
@@ -789,7 +789,7 @@ void uv_process_tty_read_line_req(uv_loop_t* loop, uv_tty_t* handle,
} else {
/* Read successful */
/* TODO: read unicode, convert to utf-8 */
- DWORD bytes = req->overlapped.InternalHigh;
+ DWORD bytes = req->u.io.overlapped.InternalHigh;
handle->read_cb((uv_stream_t*) handle, bytes, &buf);
}
@@ -811,7 +811,7 @@ void uv_process_tty_read_req(uv_loop_t* loop, uv_tty_t* handle,
/* If the read_line_buffer member is zero, it must have been an raw read. */
/* Otherwise it was a line-buffered read. */
/* FIXME: This is quite obscure. Use a flag or something. */
- if (handle->read_line_buffer.len == 0) {
+ if (handle->tty.rd.read_line_buffer.len == 0) {
uv_process_tty_read_raw_req(loop, handle, req);
} else {
uv_process_tty_read_line_req(loop, handle, req);
@@ -840,7 +840,7 @@ int uv_tty_read_start(uv_tty_t* handle, uv_alloc_cb alloc_cb,
/* Maybe the user stopped reading half-way while processing key events. */
/* Short-circuit if this could be the case. */
- if (handle->last_key_len > 0) {
+ if (handle->tty.rd.last_key_len > 0) {
SET_REQ_SUCCESS(&handle->read_req);
uv_insert_pending_req(handle->loop, (uv_req_t*) &handle->read_req);
return 0;
@@ -869,10 +869,10 @@ int uv_tty_read_stop(uv_tty_t* handle) {
}
/* Cancel line-buffered read */
- if (handle->read_line_handle != NULL) {
+ if (handle->tty.rd.read_line_handle != NULL) {
/* Closing this handle will cancel the ReadConsole operation */
- CloseHandle(handle->read_line_handle);
- handle->read_line_handle = NULL;
+ CloseHandle(handle->tty.rd.read_line_handle);
+ handle->tty.rd.read_line_handle = NULL;
}
@@ -1149,8 +1149,8 @@ static int uv_tty_clear(uv_tty_t* handle, int dir, char entire_screen,
} while (0)
static int uv_tty_set_style(uv_tty_t* handle, DWORD* error) {
- unsigned short argc = handle->ansi_csi_argc;
- unsigned short* argv = handle->ansi_csi_argv;
+ unsigned short argc = handle->tty.wr.ansi_csi_argc;
+ unsigned short* argv = handle->tty.wr.ansi_csi_argv;
int i;
CONSOLE_SCREEN_BUFFER_INFO info;
@@ -1319,12 +1319,12 @@ static int uv_tty_save_state(uv_tty_t* handle, unsigned char save_attributes,
uv_tty_update_virtual_window(&info);
- handle->saved_position.X = info.dwCursorPosition.X;
- handle->saved_position.Y = info.dwCursorPosition.Y - uv_tty_virtual_offset;
+ handle->tty.wr.saved_position.X = info.dwCursorPosition.X;
+ handle->tty.wr.saved_position.Y = info.dwCursorPosition.Y - uv_tty_virtual_offset;
handle->flags |= UV_HANDLE_TTY_SAVED_POSITION;
if (save_attributes) {
- handle->saved_attributes = info.wAttributes &
+ handle->tty.wr.saved_attributes = info.wAttributes &
(FOREGROUND_INTENSITY | BACKGROUND_INTENSITY);
handle->flags |= UV_HANDLE_TTY_SAVED_ATTRIBUTES;
}
@@ -1344,9 +1344,9 @@ static int uv_tty_restore_state(uv_tty_t* handle,
if (handle->flags & UV_HANDLE_TTY_SAVED_POSITION) {
if (uv_tty_move_caret(handle,
- handle->saved_position.X,
+ handle->tty.wr.saved_position.X,
0,
- handle->saved_position.Y,
+ handle->tty.wr.saved_position.Y,
0,
error) != 0) {
return -1;
@@ -1362,7 +1362,7 @@ static int uv_tty_restore_state(uv_tty_t* handle,
new_attributes = info.wAttributes;
new_attributes &= ~(FOREGROUND_INTENSITY | BACKGROUND_INTENSITY);
- new_attributes |= handle->saved_attributes;
+ new_attributes |= handle->tty.wr.saved_attributes;
if (!SetConsoleTextAttribute(handle->handle, new_attributes)) {
*error = GetLastError();
@@ -1412,10 +1412,10 @@ static int uv_tty_write_bufs(uv_tty_t* handle,
} while (0)
/* Cache for fast access */
- unsigned char utf8_bytes_left = handle->utf8_bytes_left;
- unsigned int utf8_codepoint = handle->utf8_codepoint;
- unsigned char previous_eol = handle->previous_eol;
- unsigned char ansi_parser_state = handle->ansi_parser_state;
+ unsigned char utf8_bytes_left = handle->tty.wr.utf8_bytes_left;
+ unsigned int utf8_codepoint = handle->tty.wr.utf8_codepoint;
+ unsigned char previous_eol = handle->tty.wr.previous_eol;
+ unsigned char ansi_parser_state = handle->tty.wr.ansi_parser_state;
/* Store the error here. If we encounter an error, stop trying to do i/o */
/* but keep parsing the buffer so we leave the parser in a consistent */
@@ -1492,7 +1492,7 @@ static int uv_tty_write_bufs(uv_tty_t* handle,
case 0233:
ansi_parser_state = ANSI_CSI;
- handle->ansi_csi_argc = 0;
+ handle->tty.wr.ansi_csi_argc = 0;
continue;
}
@@ -1500,7 +1500,7 @@ static int uv_tty_write_bufs(uv_tty_t* handle,
switch (utf8_codepoint) {
case '[':
ansi_parser_state = ANSI_CSI;
- handle->ansi_csi_argc = 0;
+ handle->tty.wr.ansi_csi_argc = 0;
continue;
case '^':
@@ -1557,20 +1557,20 @@ static int uv_tty_write_bufs(uv_tty_t* handle,
/* We were not currently parsing a number */
/* Check for too many arguments */
- if (handle->ansi_csi_argc >= ARRAY_SIZE(handle->ansi_csi_argv)) {
+ if (handle->tty.wr.ansi_csi_argc >= ARRAY_SIZE(handle->tty.wr.ansi_csi_argv)) {
ansi_parser_state |= ANSI_IGNORE;
continue;
}
ansi_parser_state |= ANSI_IN_ARG;
- handle->ansi_csi_argc++;
- handle->ansi_csi_argv[handle->ansi_csi_argc - 1] =
+ handle->tty.wr.ansi_csi_argc++;
+ handle->tty.wr.ansi_csi_argv[handle->tty.wr.ansi_csi_argc - 1] =
(unsigned short) utf8_codepoint - '0';
continue;
} else {
/* We were already parsing a number. Parse next digit. */
uint32_t value = 10 *
- handle->ansi_csi_argv[handle->ansi_csi_argc - 1];
+ handle->tty.wr.ansi_csi_argv[handle->tty.wr.ansi_csi_argc - 1];
/* Check for overflow. */
if (value > UINT16_MAX) {
@@ -1578,7 +1578,7 @@ static int uv_tty_write_bufs(uv_tty_t* handle,
continue;
}
- handle->ansi_csi_argv[handle->ansi_csi_argc - 1] =
+ handle->tty.wr.ansi_csi_argv[handle->tty.wr.ansi_csi_argc - 1] =
(unsigned short) value + (utf8_codepoint - '0');
continue;
}
@@ -1593,25 +1593,25 @@ static int uv_tty_write_bufs(uv_tty_t* handle,
/* If ANSI_IN_ARG is not set, add another argument and */
/* default it to 0. */
/* Check for too many arguments */
- if (handle->ansi_csi_argc >= ARRAY_SIZE(handle->ansi_csi_argv)) {
+ if (handle->tty.wr.ansi_csi_argc >= ARRAY_SIZE(handle->tty.wr.ansi_csi_argv)) {
ansi_parser_state |= ANSI_IGNORE;
continue;
}
- handle->ansi_csi_argc++;
- handle->ansi_csi_argv[handle->ansi_csi_argc - 1] = 0;
+ handle->tty.wr.ansi_csi_argc++;
+ handle->tty.wr.ansi_csi_argv[handle->tty.wr.ansi_csi_argc - 1] = 0;
continue;
}
} else if (utf8_codepoint == '?' && !(ansi_parser_state & ANSI_IN_ARG) &&
- handle->ansi_csi_argc == 0) {
+ handle->tty.wr.ansi_csi_argc == 0) {
/* Ignores '?' if it is the first character after CSI[ */
/* This is an extension character from the VT100 codeset */
/* that is supported and used by most ANSI terminals today. */
continue;
} else if (utf8_codepoint >= '@' && utf8_codepoint <= '~' &&
- (handle->ansi_csi_argc > 0 || utf8_codepoint != '[')) {
+ (handle->tty.wr.ansi_csi_argc > 0 || utf8_codepoint != '[')) {
int x, y, d;
/* Command byte */
@@ -1619,50 +1619,50 @@ static int uv_tty_write_bufs(uv_tty_t* handle,
case 'A':
/* cursor up */
FLUSH_TEXT();
- y = -(handle->ansi_csi_argc ? handle->ansi_csi_argv[0] : 1);
+ y = -(handle->tty.wr.ansi_csi_argc ? handle->tty.wr.ansi_csi_argv[0] : 1);
uv_tty_move_caret(handle, 0, 1, y, 1, error);
break;
case 'B':
/* cursor down */
FLUSH_TEXT();
- y = handle->ansi_csi_argc ? handle->ansi_csi_argv[0] : 1;
+ y = handle->tty.wr.ansi_csi_argc ? handle->tty.wr.ansi_csi_argv[0] : 1;
uv_tty_move_caret(handle, 0, 1, y, 1, error);
break;
case 'C':
/* cursor forward */
FLUSH_TEXT();
- x = handle->ansi_csi_argc ? handle->ansi_csi_argv[0] : 1;
+ x = handle->tty.wr.ansi_csi_argc ? handle->tty.wr.ansi_csi_argv[0] : 1;
uv_tty_move_caret(handle, x, 1, 0, 1, error);
break;
case 'D':
/* cursor back */
FLUSH_TEXT();
- x = -(handle->ansi_csi_argc ? handle->ansi_csi_argv[0] : 1);
+ x = -(handle->tty.wr.ansi_csi_argc ? handle->tty.wr.ansi_csi_argv[0] : 1);
uv_tty_move_caret(handle, x, 1, 0, 1, error);
break;
case 'E':
/* cursor next line */
FLUSH_TEXT();
- y = handle->ansi_csi_argc ? handle->ansi_csi_argv[0] : 1;
+ y = handle->tty.wr.ansi_csi_argc ? handle->tty.wr.ansi_csi_argv[0] : 1;
uv_tty_move_caret(handle, 0, 0, y, 1, error);
break;
case 'F':
/* cursor previous line */
FLUSH_TEXT();
- y = -(handle->ansi_csi_argc ? handle->ansi_csi_argv[0] : 1);
+ y = -(handle->tty.wr.ansi_csi_argc ? handle->tty.wr.ansi_csi_argv[0] : 1);
uv_tty_move_caret(handle, 0, 0, y, 1, error);
break;
case 'G':
/* cursor horizontal move absolute */
FLUSH_TEXT();
- x = (handle->ansi_csi_argc >= 1 && handle->ansi_csi_argv[0])
- ? handle->ansi_csi_argv[0] - 1 : 0;
+ x = (handle->tty.wr.ansi_csi_argc >= 1 && handle->tty.wr.ansi_csi_argv[0])
+ ? handle->tty.wr.ansi_csi_argv[0] - 1 : 0;
uv_tty_move_caret(handle, x, 0, 0, 1, error);
break;
@@ -1670,17 +1670,17 @@ static int uv_tty_write_bufs(uv_tty_t* handle,
case 'f':
/* cursor move absolute */
FLUSH_TEXT();
- y = (handle->ansi_csi_argc >= 1 && handle->ansi_csi_argv[0])
- ? handle->ansi_csi_argv[0] - 1 : 0;
- x = (handle->ansi_csi_argc >= 2 && handle->ansi_csi_argv[1])
- ? handle->ansi_csi_argv[1] - 1 : 0;
+ y = (handle->tty.wr.ansi_csi_argc >= 1 && handle->tty.wr.ansi_csi_argv[0])
+ ? handle->tty.wr.ansi_csi_argv[0] - 1 : 0;
+ x = (handle->tty.wr.ansi_csi_argc >= 2 && handle->tty.wr.ansi_csi_argv[1])
+ ? handle->tty.wr.ansi_csi_argv[1] - 1 : 0;
uv_tty_move_caret(handle, x, 0, y, 0, error);
break;
case 'J':
/* Erase screen */
FLUSH_TEXT();
- d = handle->ansi_csi_argc ? handle->ansi_csi_argv[0] : 0;
+ d = handle->tty.wr.ansi_csi_argc ? handle->tty.wr.ansi_csi_argv[0] : 0;
if (d >= 0 && d <= 2) {
uv_tty_clear(handle, d, 1, error);
}
@@ -1689,7 +1689,7 @@ static int uv_tty_write_bufs(uv_tty_t* handle,
case 'K':
/* Erase line */
FLUSH_TEXT();
- d = handle->ansi_csi_argc ? handle->ansi_csi_argv[0] : 0;
+ d = handle->tty.wr.ansi_csi_argc ? handle->tty.wr.ansi_csi_argv[0] : 0;
if (d >= 0 && d <= 2) {
uv_tty_clear(handle, d, 0, error);
}
@@ -1715,8 +1715,8 @@ static int uv_tty_write_bufs(uv_tty_t* handle,
case 'l':
/* Hide the cursor */
- if (handle->ansi_csi_argc == 1 &&
- handle->ansi_csi_argv[0] == 25) {
+ if (handle->tty.wr.ansi_csi_argc == 1 &&
+ handle->tty.wr.ansi_csi_argv[0] == 25) {
FLUSH_TEXT();
uv_tty_set_cursor_visibility(handle, 0, error);
}
@@ -1724,8 +1724,8 @@ static int uv_tty_write_bufs(uv_tty_t* handle,
case 'h':
/* Show the cursor */
- if (handle->ansi_csi_argc == 1 &&
- handle->ansi_csi_argv[0] == 25) {
+ if (handle->tty.wr.ansi_csi_argc == 1 &&
+ handle->tty.wr.ansi_csi_argv[0] == 25) {
FLUSH_TEXT();
uv_tty_set_cursor_visibility(handle, 1, error);
}
@@ -1830,10 +1830,10 @@ static int uv_tty_write_bufs(uv_tty_t* handle,
FLUSH_TEXT();
/* Copy cached values back to struct. */
- handle->utf8_bytes_left = utf8_bytes_left;
- handle->utf8_codepoint = utf8_codepoint;
- handle->previous_eol = previous_eol;
- handle->ansi_parser_state = ansi_parser_state;
+ handle->tty.wr.utf8_bytes_left = utf8_bytes_left;
+ handle->tty.wr.utf8_codepoint = utf8_codepoint;
+ handle->tty.wr.previous_eol = previous_eol;
+ handle->tty.wr.ansi_parser_state = ansi_parser_state;
LeaveCriticalSection(&uv_tty_output_lock);
@@ -1861,10 +1861,10 @@ int uv_tty_write(uv_loop_t* loop,
req->cb = cb;
handle->reqs_pending++;
- handle->write_reqs_pending++;
+ handle->stream.conn.write_reqs_pending++;
REGISTER_HANDLE_REQ(loop, handle, req);
- req->queued_bytes = 0;
+ req->u.io.queued_bytes = 0;
if (!uv_tty_write_bufs(handle, bufs, nbufs, &error)) {
SET_REQ_SUCCESS(req);
@@ -1883,7 +1883,7 @@ int uv__tty_try_write(uv_tty_t* handle,
unsigned int nbufs) {
DWORD error;
- if (handle->write_reqs_pending > 0)
+ if (handle->stream.conn.write_reqs_pending > 0)
return UV_EAGAIN;
if (uv_tty_write_bufs(handle, bufs, nbufs, &error))
@@ -1897,7 +1897,7 @@ void uv_process_tty_write_req(uv_loop_t* loop, uv_tty_t* handle,
uv_write_t* req) {
int err;
- handle->write_queue_size -= req->queued_bytes;
+ handle->write_queue_size -= req->u.io.queued_bytes;
UNREGISTER_HANDLE_REQ(loop, handle, req);
if (req->cb) {
@@ -1905,9 +1905,9 @@ void uv_process_tty_write_req(uv_loop_t* loop, uv_tty_t* handle,
req->cb(req, uv_translate_sys_error(err));
}
- handle->write_reqs_pending--;
- if (handle->shutdown_req != NULL &&
- handle->write_reqs_pending == 0) {
+ handle->stream.conn.write_reqs_pending--;
+ if (handle->stream.conn.shutdown_req != NULL &&
+ handle->stream.conn.write_reqs_pending == 0) {
uv_want_endgame(loop, (uv_handle_t*)handle);
}
@@ -1933,20 +1933,20 @@ void uv_tty_close(uv_tty_t* handle) {
void uv_tty_endgame(uv_loop_t* loop, uv_tty_t* handle) {
if (!(handle->flags & UV_HANDLE_TTY_READABLE) &&
- handle->shutdown_req != NULL &&
- handle->write_reqs_pending == 0) {
- UNREGISTER_HANDLE_REQ(loop, handle, handle->shutdown_req);
+ handle->stream.conn.shutdown_req != NULL &&
+ handle->stream.conn.write_reqs_pending == 0) {
+ UNREGISTER_HANDLE_REQ(loop, handle, handle->stream.conn.shutdown_req);
/* TTY shutdown is really just a no-op */
- if (handle->shutdown_req->cb) {
+ if (handle->stream.conn.shutdown_req->cb) {
if (handle->flags & UV__HANDLE_CLOSING) {
- handle->shutdown_req->cb(handle->shutdown_req, UV_ECANCELED);
+ handle->stream.conn.shutdown_req->cb(handle->stream.conn.shutdown_req, UV_ECANCELED);
} else {
- handle->shutdown_req->cb(handle->shutdown_req, 0);
+ handle->stream.conn.shutdown_req->cb(handle->stream.conn.shutdown_req, 0);
}
}
- handle->shutdown_req = NULL;
+ handle->stream.conn.shutdown_req = NULL;
DECREASE_PENDING_REQ_COUNT(handle);
return;
@@ -1957,12 +1957,12 @@ void uv_tty_endgame(uv_loop_t* loop, uv_tty_t* handle) {
/* The console handle duplicate used for line reading should be destroyed */
/* by uv_tty_read_stop. */
assert(!(handle->flags & UV_HANDLE_TTY_READABLE) ||
- handle->read_line_handle == NULL);
+ handle->tty.rd.read_line_handle == NULL);
/* The wait handle used for raw reading should be unregistered when the */
/* wait callback runs. */
assert(!(handle->flags & UV_HANDLE_TTY_READABLE) ||
- handle->read_raw_wait == NULL);
+ handle->tty.rd.read_raw_wait == NULL);
assert(!(handle->flags & UV_HANDLE_CLOSED));
uv__handle_close(handle);
diff --git a/deps/uv/src/win/udp.c b/deps/uv/src/win/udp.c
index 73b5bd5e467b40..197e5d828f2ad4 100644
--- a/deps/uv/src/win/udp.c
+++ b/deps/uv/src/win/udp.c
@@ -244,7 +244,7 @@ static void uv_udp_queue_recv(uv_loop_t* loop, uv_udp_t* handle) {
assert(!(handle->flags & UV_HANDLE_READ_PENDING));
req = &handle->recv_req;
- memset(&req->overlapped, 0, sizeof(req->overlapped));
+ memset(&req->u.io.overlapped, 0, sizeof(req->u.io.overlapped));
/*
* Preallocate a read buffer if the number of active streams is below
@@ -272,13 +272,13 @@ static void uv_udp_queue_recv(uv_loop_t* loop, uv_udp_t* handle) {
&flags,
(struct sockaddr*) &handle->recv_from,
&handle->recv_from_len,
- &req->overlapped,
+ &req->u.io.overlapped,
NULL);
if (UV_SUCCEEDED_WITHOUT_IOCP(result == 0)) {
/* Process the req without IOCP. */
handle->flags |= UV_HANDLE_READ_PENDING;
- req->overlapped.InternalHigh = bytes;
+ req->u.io.overlapped.InternalHigh = bytes;
handle->reqs_pending++;
uv_insert_pending_req(loop, req);
} else if (UV_SUCCEEDED_WITH_IOCP(result == 0)) {
@@ -304,13 +304,13 @@ static void uv_udp_queue_recv(uv_loop_t* loop, uv_udp_t* handle) {
1,
&bytes,
&flags,
- &req->overlapped,
+ &req->u.io.overlapped,
NULL);
if (UV_SUCCEEDED_WITHOUT_IOCP(result == 0)) {
/* Process the req without IOCP. */
handle->flags |= UV_HANDLE_READ_PENDING;
- req->overlapped.InternalHigh = bytes;
+ req->u.io.overlapped.InternalHigh = bytes;
handle->reqs_pending++;
uv_insert_pending_req(loop, req);
} else if (UV_SUCCEEDED_WITH_IOCP(result == 0)) {
@@ -384,7 +384,7 @@ static int uv__send(uv_udp_send_t* req,
req->type = UV_UDP_SEND;
req->handle = handle;
req->cb = cb;
- memset(&req->overlapped, 0, sizeof(req->overlapped));
+ memset(&req->u.io.overlapped, 0, sizeof(req->u.io.overlapped));
result = WSASendTo(handle->socket,
(WSABUF*)bufs,
@@ -393,22 +393,22 @@ static int uv__send(uv_udp_send_t* req,
0,
addr,
addrlen,
- &req->overlapped,
+ &req->u.io.overlapped,
NULL);
if (UV_SUCCEEDED_WITHOUT_IOCP(result == 0)) {
/* Request completed immediately. */
- req->queued_bytes = 0;
+ req->u.io.queued_bytes = 0;
handle->reqs_pending++;
- handle->send_queue_size += req->queued_bytes;
+ handle->send_queue_size += req->u.io.queued_bytes;
handle->send_queue_count++;
REGISTER_HANDLE_REQ(loop, handle, req);
uv_insert_pending_req(loop, (uv_req_t*)req);
} else if (UV_SUCCEEDED_WITH_IOCP(result == 0)) {
/* Request queued by the kernel. */
- req->queued_bytes = uv__count_bufs(bufs, nbufs);
+ req->u.io.queued_bytes = uv__count_bufs(bufs, nbufs);
handle->reqs_pending++;
- handle->send_queue_size += req->queued_bytes;
+ handle->send_queue_size += req->u.io.queued_bytes;
handle->send_queue_count++;
REGISTER_HANDLE_REQ(loop, handle, req);
} else {
@@ -459,7 +459,7 @@ void uv_process_udp_recv_req(uv_loop_t* loop, uv_udp_t* handle,
/* Successful read */
partial = !REQ_SUCCESS(req);
handle->recv_cb(handle,
- req->overlapped.InternalHigh,
+ req->u.io.overlapped.InternalHigh,
&handle->recv_buffer,
(const struct sockaddr*) &handle->recv_from,
partial ? UV_UDP_PARTIAL : 0);
@@ -536,9 +536,9 @@ void uv_process_udp_send_req(uv_loop_t* loop, uv_udp_t* handle,
assert(handle->type == UV_UDP);
- assert(handle->send_queue_size >= req->queued_bytes);
+ assert(handle->send_queue_size >= req->u.io.queued_bytes);
assert(handle->send_queue_count >= 1);
- handle->send_queue_size -= req->queued_bytes;
+ handle->send_queue_size -= req->u.io.queued_bytes;
handle->send_queue_count--;
UNREGISTER_HANDLE_REQ(loop, handle, req);
diff --git a/deps/uv/src/win/util.c b/deps/uv/src/win/util.c
index 43d843ff5c42b5..3697d5aa6aaf20 100644
--- a/deps/uv/src/win/util.c
+++ b/deps/uv/src/win/util.c
@@ -22,7 +22,6 @@
#include
#include
#include
-#include
#include
#include
#include
@@ -535,14 +534,14 @@ int uv_uptime(double* uptime) {
return uv_translate_sys_error(result);
}
- free(malloced_buffer);
-
buffer_size *= 2;
/* Don't let the buffer grow infinitely. */
if (buffer_size > 1 << 20) {
goto internalError;
}
+ free(malloced_buffer);
+
buffer = malloced_buffer = (BYTE*) malloc(buffer_size);
if (malloced_buffer == NULL) {
*uptime = 0;
diff --git a/deps/uv/test/benchmark-getaddrinfo.c b/deps/uv/test/benchmark-getaddrinfo.c
index c7f99a2fcb8720..1dbc23ddba009d 100644
--- a/deps/uv/test/benchmark-getaddrinfo.c
+++ b/deps/uv/test/benchmark-getaddrinfo.c
@@ -83,8 +83,9 @@ BENCHMARK_IMPL(getaddrinfo) {
ASSERT(calls_initiated == TOTAL_CALLS);
ASSERT(calls_completed == TOTAL_CALLS);
- LOGF("getaddrinfo: %.0f req/s\n",
- (double) calls_completed / (double) (end_time - start_time) * 1000.0);
+ fprintf(stderr, "getaddrinfo: %.0f req/s\n",
+ (double) calls_completed / (double) (end_time - start_time) * 1000.0);
+ fflush(stderr);
MAKE_VALGRIND_HAPPY();
return 0;
diff --git a/deps/uv/test/benchmark-loop-count.c b/deps/uv/test/benchmark-loop-count.c
index 5cb813238d4f75..970a94c2fecb5c 100644
--- a/deps/uv/test/benchmark-loop-count.c
+++ b/deps/uv/test/benchmark-loop-count.c
@@ -62,10 +62,11 @@ BENCHMARK_IMPL(loop_count) {
ASSERT(ticks == NUM_TICKS);
- LOGF("loop_count: %d ticks in %.2fs (%.0f/s)\n",
- NUM_TICKS,
- ns / 1e9,
- NUM_TICKS / (ns / 1e9));
+ fprintf(stderr, "loop_count: %d ticks in %.2fs (%.0f/s)\n",
+ NUM_TICKS,
+ ns / 1e9,
+ NUM_TICKS / (ns / 1e9));
+ fflush(stderr);
MAKE_VALGRIND_HAPPY();
return 0;
@@ -83,7 +84,8 @@ BENCHMARK_IMPL(loop_count_timed) {
uv_run(loop, UV_RUN_DEFAULT);
- LOGF("loop_count: %lu ticks (%.0f ticks/s)\n", ticks, ticks / 5.0);
+ fprintf(stderr, "loop_count: %lu ticks (%.0f ticks/s)\n", ticks, ticks / 5.0);
+ fflush(stderr);
MAKE_VALGRIND_HAPPY();
return 0;
diff --git a/deps/uv/test/benchmark-million-timers.c b/deps/uv/test/benchmark-million-timers.c
index 6027d6088aa789..60a308bef13d40 100644
--- a/deps/uv/test/benchmark-million-timers.c
+++ b/deps/uv/test/benchmark-million-timers.c
@@ -75,10 +75,11 @@ BENCHMARK_IMPL(million_timers) {
ASSERT(close_cb_called == NUM_TIMERS);
free(timers);
- LOGF("%.2f seconds total\n", (after_all - before_all) / 1e9);
- LOGF("%.2f seconds init\n", (before_run - before_all) / 1e9);
- LOGF("%.2f seconds dispatch\n", (after_run - before_run) / 1e9);
- LOGF("%.2f seconds cleanup\n", (after_all - after_run) / 1e9);
+ fprintf(stderr, "%.2f seconds total\n", (after_all - before_all) / 1e9);
+ fprintf(stderr, "%.2f seconds init\n", (before_run - before_all) / 1e9);
+ fprintf(stderr, "%.2f seconds dispatch\n", (after_run - before_run) / 1e9);
+ fprintf(stderr, "%.2f seconds cleanup\n", (after_all - after_run) / 1e9);
+ fflush(stderr);
MAKE_VALGRIND_HAPPY();
return 0;
diff --git a/deps/uv/test/benchmark-ping-pongs.c b/deps/uv/test/benchmark-ping-pongs.c
index bb560d7d21fd6b..646a7df9447036 100644
--- a/deps/uv/test/benchmark-ping-pongs.c
+++ b/deps/uv/test/benchmark-ping-pongs.c
@@ -80,7 +80,8 @@ static void pinger_close_cb(uv_handle_t* handle) {
pinger_t* pinger;
pinger = (pinger_t*)handle->data;
- LOGF("ping_pongs: %d roundtrips/s\n", (1000 * pinger->pongs) / TIME);
+ fprintf(stderr, "ping_pongs: %d roundtrips/s\n", (1000 * pinger->pongs) / TIME);
+ fflush(stderr);
free(pinger);
diff --git a/deps/uv/test/benchmark-pound.c b/deps/uv/test/benchmark-pound.c
index 587928549eac8a..79f36345037cd4 100644
--- a/deps/uv/test/benchmark-pound.c
+++ b/deps/uv/test/benchmark-pound.c
@@ -299,11 +299,12 @@ static int pound_it(int concurrency,
/* Number of fractional seconds it took to run the benchmark. */
secs = (double)(end_time - start_time) / NANOSEC;
- LOGF("%s-conn-pound-%d: %.0f accepts/s (%d failed)\n",
- type,
- concurrency,
- closed_streams / secs,
- conns_failed);
+ fprintf(stderr, "%s-conn-pound-%d: %.0f accepts/s (%d failed)\n",
+ type,
+ concurrency,
+ closed_streams / secs,
+ conns_failed);
+ fflush(stderr);
MAKE_VALGRIND_HAPPY();
return 0;
diff --git a/deps/uv/test/benchmark-pump.c b/deps/uv/test/benchmark-pump.c
index d58f46a384b899..88f2dc5c658e27 100644
--- a/deps/uv/test/benchmark-pump.c
+++ b/deps/uv/test/benchmark-pump.c
@@ -90,9 +90,10 @@ static void show_stats(uv_timer_t* handle) {
int i;
#if PRINT_STATS
- LOGF("connections: %d, write: %.1f gbit/s\n",
- write_sockets,
- gbit(nsent, STATS_INTERVAL));
+ fprintf(stderr, "connections: %d, write: %.1f gbit/s\n",
+ write_sockets,
+ gbit(nsent, STATS_INTERVAL));
+ fflush(stderr);
#endif
/* Exit if the show is over */
@@ -101,10 +102,11 @@ static void show_stats(uv_timer_t* handle) {
uv_update_time(loop);
diff = uv_now(loop) - start_time;
- LOGF("%s_pump%d_client: %.1f gbit/s\n",
- type == TCP ? "tcp" : "pipe",
- write_sockets,
- gbit(nsent_total, diff));
+ fprintf(stderr, "%s_pump%d_client: %.1f gbit/s\n",
+ type == TCP ? "tcp" : "pipe",
+ write_sockets,
+ gbit(nsent_total, diff));
+ fflush(stderr);
for (i = 0; i < write_sockets; i++) {
if (type == TCP)
@@ -128,10 +130,11 @@ static void read_show_stats(void) {
uv_update_time(loop);
diff = uv_now(loop) - start_time;
- LOGF("%s_pump%d_server: %.1f gbit/s\n",
- type == TCP ? "tcp" : "pipe",
- max_read_sockets,
- gbit(nrecv_total, diff));
+ fprintf(stderr, "%s_pump%d_server: %.1f gbit/s\n",
+ type == TCP ? "tcp" : "pipe",
+ max_read_sockets,
+ gbit(nrecv_total, diff));
+ fflush(stderr);
}
@@ -213,7 +216,10 @@ static void do_write(uv_stream_t* stream) {
static void connect_cb(uv_connect_t* req, int status) {
int i;
- if (status) LOG(uv_strerror(status));
+ if (status) {
+ fprintf(stderr, "%s", uv_strerror(status));
+ fflush(stderr);
+ }
ASSERT(status == 0);
write_sockets++;
diff --git a/deps/uv/test/benchmark-sizes.c b/deps/uv/test/benchmark-sizes.c
index 8ccf10ee475fb1..9bf42f91537d58 100644
--- a/deps/uv/test/benchmark-sizes.c
+++ b/deps/uv/test/benchmark-sizes.c
@@ -24,22 +24,23 @@
BENCHMARK_IMPL(sizes) {
- LOGF("uv_shutdown_t: %u bytes\n", (unsigned int) sizeof(uv_shutdown_t));
- LOGF("uv_write_t: %u bytes\n", (unsigned int) sizeof(uv_write_t));
- LOGF("uv_connect_t: %u bytes\n", (unsigned int) sizeof(uv_connect_t));
- LOGF("uv_udp_send_t: %u bytes\n", (unsigned int) sizeof(uv_udp_send_t));
- LOGF("uv_tcp_t: %u bytes\n", (unsigned int) sizeof(uv_tcp_t));
- LOGF("uv_pipe_t: %u bytes\n", (unsigned int) sizeof(uv_pipe_t));
- LOGF("uv_tty_t: %u bytes\n", (unsigned int) sizeof(uv_tty_t));
- LOGF("uv_prepare_t: %u bytes\n", (unsigned int) sizeof(uv_prepare_t));
- LOGF("uv_check_t: %u bytes\n", (unsigned int) sizeof(uv_check_t));
- LOGF("uv_idle_t: %u bytes\n", (unsigned int) sizeof(uv_idle_t));
- LOGF("uv_async_t: %u bytes\n", (unsigned int) sizeof(uv_async_t));
- LOGF("uv_timer_t: %u bytes\n", (unsigned int) sizeof(uv_timer_t));
- LOGF("uv_fs_poll_t: %u bytes\n", (unsigned int) sizeof(uv_fs_poll_t));
- LOGF("uv_fs_event_t: %u bytes\n", (unsigned int) sizeof(uv_fs_event_t));
- LOGF("uv_process_t: %u bytes\n", (unsigned int) sizeof(uv_process_t));
- LOGF("uv_poll_t: %u bytes\n", (unsigned int) sizeof(uv_poll_t));
- LOGF("uv_loop_t: %u bytes\n", (unsigned int) sizeof(uv_loop_t));
+ fprintf(stderr, "uv_shutdown_t: %u bytes\n", (unsigned int) sizeof(uv_shutdown_t));
+ fprintf(stderr, "uv_write_t: %u bytes\n", (unsigned int) sizeof(uv_write_t));
+ fprintf(stderr, "uv_connect_t: %u bytes\n", (unsigned int) sizeof(uv_connect_t));
+ fprintf(stderr, "uv_udp_send_t: %u bytes\n", (unsigned int) sizeof(uv_udp_send_t));
+ fprintf(stderr, "uv_tcp_t: %u bytes\n", (unsigned int) sizeof(uv_tcp_t));
+ fprintf(stderr, "uv_pipe_t: %u bytes\n", (unsigned int) sizeof(uv_pipe_t));
+ fprintf(stderr, "uv_tty_t: %u bytes\n", (unsigned int) sizeof(uv_tty_t));
+ fprintf(stderr, "uv_prepare_t: %u bytes\n", (unsigned int) sizeof(uv_prepare_t));
+ fprintf(stderr, "uv_check_t: %u bytes\n", (unsigned int) sizeof(uv_check_t));
+ fprintf(stderr, "uv_idle_t: %u bytes\n", (unsigned int) sizeof(uv_idle_t));
+ fprintf(stderr, "uv_async_t: %u bytes\n", (unsigned int) sizeof(uv_async_t));
+ fprintf(stderr, "uv_timer_t: %u bytes\n", (unsigned int) sizeof(uv_timer_t));
+ fprintf(stderr, "uv_fs_poll_t: %u bytes\n", (unsigned int) sizeof(uv_fs_poll_t));
+ fprintf(stderr, "uv_fs_event_t: %u bytes\n", (unsigned int) sizeof(uv_fs_event_t));
+ fprintf(stderr, "uv_process_t: %u bytes\n", (unsigned int) sizeof(uv_process_t));
+ fprintf(stderr, "uv_poll_t: %u bytes\n", (unsigned int) sizeof(uv_poll_t));
+ fprintf(stderr, "uv_loop_t: %u bytes\n", (unsigned int) sizeof(uv_loop_t));
+ fflush(stderr);
return 0;
}
diff --git a/deps/uv/test/benchmark-spawn.c b/deps/uv/test/benchmark-spawn.c
index 9cae41a83afd61..ed9ad608f3790e 100644
--- a/deps/uv/test/benchmark-spawn.c
+++ b/deps/uv/test/benchmark-spawn.c
@@ -155,8 +155,9 @@ BENCHMARK_IMPL(spawn) {
uv_update_time(loop);
end_time = uv_now(loop);
- LOGF("spawn: %.0f spawns/s\n",
- (double) N / (double) (end_time - start_time) * 1000.0);
+ fprintf(stderr, "spawn: %.0f spawns/s\n",
+ (double) N / (double) (end_time - start_time) * 1000.0);
+ fflush(stderr);
MAKE_VALGRIND_HAPPY();
return 0;
diff --git a/deps/uv/test/run-benchmarks.c b/deps/uv/test/run-benchmarks.c
index 8d4f549799e8b3..6e42623d54cdec 100644
--- a/deps/uv/test/run-benchmarks.c
+++ b/deps/uv/test/run-benchmarks.c
@@ -41,7 +41,8 @@ int main(int argc, char **argv) {
case 2: return maybe_run_test(argc, argv);
case 3: return run_test_part(argv[1], argv[2]);
default:
- LOGF("Too many arguments.\n");
+ fprintf(stderr, "Too many arguments.\n");
+ fflush(stderr);
return EXIT_FAILURE;
}
diff --git a/deps/uv/test/run-tests.c b/deps/uv/test/run-tests.c
index e92c93008e72ef..1f458745327398 100644
--- a/deps/uv/test/run-tests.c
+++ b/deps/uv/test/run-tests.c
@@ -56,7 +56,8 @@ int main(int argc, char **argv) {
case 2: return maybe_run_test(argc, argv);
case 3: return run_test_part(argv[1], argv[2]);
default:
- LOGF("Too many arguments.\n");
+ fprintf(stderr, "Too many arguments.\n");
+ fflush(stderr);
return EXIT_FAILURE;
}
diff --git a/deps/uv/test/runner.c b/deps/uv/test/runner.c
index e896d43b7627df..e094defc7e7de5 100644
--- a/deps/uv/test/runner.c
+++ b/deps/uv/test/runner.c
@@ -43,13 +43,14 @@ static void log_progress(int total,
total = 1;
progress = 100 * (passed + failed + skipped + todos) / total;
- LOGF("[%% %3d|+ %3d|- %3d|T %3d|S %3d]: %s",
- progress,
- passed,
- failed,
- todos,
- skipped,
- name);
+ fprintf(stderr, "[%% %3d|+ %3d|- %3d|T %3d|S %3d]: %s",
+ progress,
+ passed,
+ failed,
+ todos,
+ skipped,
+ name);
+ fflush(stderr);
}
@@ -109,7 +110,8 @@ int run_tests(int benchmark_output) {
}
if (tap_output) {
- LOGF("1..%d\n", total);
+ fprintf(stderr, "1..%d\n", total);
+ fflush(stderr);
}
/* Run all tests. */
@@ -184,7 +186,8 @@ void log_tap_result(int test_count,
reason[0] = '\0';
}
- LOGF("%s %d - %s%s%s\n", result, test_count, test, directive, reason);
+ fprintf(stderr, "%s %d - %s%s%s\n", result, test_count, test, directive, reason);
+ fflush(stderr);
}
@@ -320,49 +323,55 @@ int run_test(const char* test,
/* Show error and output from processes if the test failed. */
if (status != 0 || task->show_output) {
if (tap_output) {
- LOGF("#");
+ fprintf(stderr, "#");
} else if (status == TEST_TODO) {
- LOGF("\n`%s` todo\n", test);
+ fprintf(stderr, "\n`%s` todo\n", test);
} else if (status == TEST_SKIP) {
- LOGF("\n`%s` skipped\n", test);
+ fprintf(stderr, "\n`%s` skipped\n", test);
} else if (status != 0) {
- LOGF("\n`%s` failed: %s\n", test, errmsg);
+ fprintf(stderr, "\n`%s` failed: %s\n", test, errmsg);
} else {
- LOGF("\n");
+ fprintf(stderr, "\n");
}
+ fflush(stderr);
for (i = 0; i < process_count; i++) {
switch (process_output_size(&processes[i])) {
case -1:
- LOGF("Output from process `%s`: (unavailable)\n",
- process_get_name(&processes[i]));
+ fprintf(stderr, "Output from process `%s`: (unavailable)\n",
+ process_get_name(&processes[i]));
+ fflush(stderr);
break;
case 0:
- LOGF("Output from process `%s`: (no output)\n",
- process_get_name(&processes[i]));
+ fprintf(stderr, "Output from process `%s`: (no output)\n",
+ process_get_name(&processes[i]));
+ fflush(stderr);
break;
default:
- LOGF("Output from process `%s`:\n", process_get_name(&processes[i]));
+ fprintf(stderr, "Output from process `%s`:\n", process_get_name(&processes[i]));
+ fflush(stderr);
process_copy_output(&processes[i], fileno(stderr));
break;
}
}
if (!tap_output) {
- LOG("=============================================================\n");
+ fprintf(stderr, "=============================================================\n");
}
/* In benchmark mode show concise output from the main process. */
} else if (benchmark_output) {
switch (process_output_size(main_proc)) {
case -1:
- LOGF("%s: (unavailable)\n", test);
+ fprintf(stderr, "%s: (unavailable)\n", test);
+ fflush(stderr);
break;
case 0:
- LOGF("%s: (no output)\n", test);
+ fprintf(stderr, "%s: (no output)\n", test);
+ fflush(stderr);
break;
default:
@@ -397,7 +406,8 @@ int run_test_part(const char* test, const char* part) {
}
}
- LOGF("No test part with that name: %s:%s\n", test, part);
+ fprintf(stderr, "No test part with that name: %s:%s\n", test, part);
+ fflush(stderr);
return 255;
}
diff --git a/deps/uv/test/task.h b/deps/uv/test/task.h
index 07584c52996f8c..ea0503e8feefe4 100644
--- a/deps/uv/test/task.h
+++ b/deps/uv/test/task.h
@@ -76,19 +76,6 @@ typedef enum {
PIPE
} stream_type;
-/* Log to stderr. */
-#define LOG(...) \
- do { \
- fprintf(stderr, "%s", __VA_ARGS__); \
- fflush(stderr); \
- } while (0)
-
-#define LOGF(...) \
- do { \
- fprintf(stderr, __VA_ARGS__); \
- fflush(stderr); \
- } while (0)
-
/* Die with fatal error. */
#define FATAL(msg) \
do { \
@@ -158,13 +145,15 @@ enum test_status {
#define RETURN_TODO(explanation) \
do { \
- LOGF("%s\n", explanation); \
+ fprintf(stderr, "%s\n", explanation); \
+ fflush(stderr); \
return TEST_TODO; \
} while (0)
#define RETURN_SKIP(explanation) \
do { \
- LOGF("%s\n", explanation); \
+ fprintf(stderr, "%s\n", explanation); \
+ fflush(stderr); \
return TEST_SKIP; \
} while (0)
@@ -190,10 +179,15 @@ enum test_status {
 #include <stdarg.h>
+/* Define inline for MSVC */
+# ifdef _MSC_VER
+# define inline __inline
+# endif
+
/* Emulate snprintf() on Windows, _snprintf() doesn't zero-terminate the buffer
* on overflow...
*/
-static int snprintf(char* buf, size_t len, const char* fmt, ...) {
+inline int snprintf(char* buf, size_t len, const char* fmt, ...) {
va_list ap;
int n;
diff --git a/deps/uv/test/test-fs.c b/deps/uv/test/test-fs.c
index cc5dc744501ec4..a0600b30797dd9 100644
--- a/deps/uv/test/test-fs.c
+++ b/deps/uv/test/test-fs.c
@@ -1094,7 +1094,8 @@ TEST_IMPL(fs_fstat) {
#elif defined(__sun) || \
defined(_BSD_SOURCE) || \
defined(_SVID_SOURCE) || \
- defined(_XOPEN_SOURCE)
+ defined(_XOPEN_SOURCE) || \
+ defined(_DEFAULT_SOURCE)
ASSERT(s->st_atim.tv_sec == t.st_atim.tv_sec);
ASSERT(s->st_atim.tv_nsec == t.st_atim.tv_nsec);
ASSERT(s->st_mtim.tv_sec == t.st_mtim.tv_sec);
@@ -1155,6 +1156,7 @@ TEST_IMPL(fs_access) {
/* Setup. */
unlink("test_file");
+ rmdir("test_dir");
loop = uv_default_loop();
@@ -1198,6 +1200,16 @@ TEST_IMPL(fs_access) {
ASSERT(req.result == 0);
uv_fs_req_cleanup(&req);
+ /* Directory access */
+ r = uv_fs_mkdir(loop, &req, "test_dir", 0777, NULL);
+ ASSERT(r == 0);
+ uv_fs_req_cleanup(&req);
+
+ r = uv_fs_access(loop, &req, "test_dir", W_OK, NULL);
+ ASSERT(r == 0);
+ ASSERT(req.result == 0);
+ uv_fs_req_cleanup(&req);
+
/*
* Run the loop just to check we don't have make any extraneous uv_ref()
* calls. This should drop out immediately.
@@ -1206,6 +1218,7 @@ TEST_IMPL(fs_access) {
/* Cleanup. */
unlink("test_file");
+ rmdir("test_dir");
MAKE_VALGRIND_HAPPY();
return 0;
@@ -1310,6 +1323,65 @@ TEST_IMPL(fs_chmod) {
}
+TEST_IMPL(fs_unlink_readonly) {
+ int r;
+ uv_fs_t req;
+ uv_file file;
+
+ /* Setup. */
+ unlink("test_file");
+
+ loop = uv_default_loop();
+
+ r = uv_fs_open(loop,
+ &req,
+ "test_file",
+ O_RDWR | O_CREAT,
+ S_IWUSR | S_IRUSR,
+ NULL);
+ ASSERT(r >= 0);
+ ASSERT(req.result >= 0);
+ file = req.result;
+ uv_fs_req_cleanup(&req);
+
+ iov = uv_buf_init(test_buf, sizeof(test_buf));
+ r = uv_fs_write(loop, &req, file, &iov, 1, -1, NULL);
+ ASSERT(r == sizeof(test_buf));
+ ASSERT(req.result == sizeof(test_buf));
+ uv_fs_req_cleanup(&req);
+
+ close(file);
+
+ /* Make the file read-only */
+ r = uv_fs_chmod(loop, &req, "test_file", 0400, NULL);
+ ASSERT(r == 0);
+ ASSERT(req.result == 0);
+ uv_fs_req_cleanup(&req);
+
+ check_permission("test_file", 0400);
+
+ /* Try to unlink the file */
+ r = uv_fs_unlink(loop, &req, "test_file", NULL);
+ ASSERT(r == 0);
+ ASSERT(req.result == 0);
+ uv_fs_req_cleanup(&req);
+
+ /*
+ * Run the loop just to check we don't have make any extraneous uv_ref()
+ * calls. This should drop out immediately.
+ */
+ uv_run(loop, UV_RUN_DEFAULT);
+
+ /* Cleanup. */
+ uv_fs_chmod(loop, &req, "test_file", 0600, NULL);
+ uv_fs_req_cleanup(&req);
+ unlink("test_file");
+
+ MAKE_VALGRIND_HAPPY();
+ return 0;
+}
+
+
TEST_IMPL(fs_chown) {
int r;
uv_fs_t req;
diff --git a/deps/uv/test/test-handle-fileno.c b/deps/uv/test/test-handle-fileno.c
index df5e984ab74338..3fe933adebdd87 100644
--- a/deps/uv/test/test-handle-fileno.c
+++ b/deps/uv/test/test-handle-fileno.c
@@ -102,7 +102,8 @@ TEST_IMPL(handle_fileno) {
tty_fd = get_tty_fd();
if (tty_fd < 0) {
- LOGF("Cannot open a TTY fd");
+ fprintf(stderr, "Cannot open a TTY fd");
+ fflush(stderr);
} else {
r = uv_tty_init(loop, &tty, tty_fd, 0);
ASSERT(r == 0);
diff --git a/deps/uv/test/test-idle.c b/deps/uv/test/test-idle.c
index 0e991c368cfd81..f49d1964827278 100644
--- a/deps/uv/test/test-idle.c
+++ b/deps/uv/test/test-idle.c
@@ -46,7 +46,8 @@ static void timer_cb(uv_timer_t* handle) {
uv_close((uv_handle_t*) &timer_handle, close_cb);
timer_cb_called++;
- LOGF("timer_cb %d\n", timer_cb_called);
+ fprintf(stderr, "timer_cb %d\n", timer_cb_called);
+ fflush(stderr);
}
@@ -54,7 +55,8 @@ static void idle_cb(uv_idle_t* handle) {
ASSERT(handle == &idle_handle);
idle_cb_called++;
- LOGF("idle_cb %d\n", idle_cb_called);
+ fprintf(stderr, "idle_cb %d\n", idle_cb_called);
+ fflush(stderr);
}
@@ -62,7 +64,8 @@ static void check_cb(uv_check_t* handle) {
ASSERT(handle == &check_handle);
check_cb_called++;
- LOGF("check_cb %d\n", check_cb_called);
+ fprintf(stderr, "check_cb %d\n", check_cb_called);
+ fflush(stderr);
}
diff --git a/deps/uv/test/test-ip6-addr.c b/deps/uv/test/test-ip6-addr.c
index cf8491fb1b4d15..869b099e0fccaf 100644
--- a/deps/uv/test/test-ip6-addr.c
+++ b/deps/uv/test/test-ip6-addr.c
@@ -77,14 +77,16 @@ TEST_IMPL(ip6_addr_link_local) {
device_name);
#endif
- LOGF("Testing link-local address %s "
- "(iface_index: 0x%02x, device_name: %s)\n",
- scoped_addr,
- iface_index,
- device_name);
+ fprintf(stderr, "Testing link-local address %s "
+ "(iface_index: 0x%02x, device_name: %s)\n",
+ scoped_addr,
+ iface_index,
+ device_name);
+ fflush(stderr);
ASSERT(0 == uv_ip6_addr(scoped_addr, TEST_PORT, &addr));
- LOGF("Got scope_id 0x%02x\n", addr.sin6_scope_id);
+ fprintf(stderr, "Got scope_id 0x%02x\n", addr.sin6_scope_id);
+ fflush(stderr);
ASSERT(iface_index == addr.sin6_scope_id);
}
diff --git a/deps/uv/test/test-list.h b/deps/uv/test/test-list.h
index aac15e0d9df0b7..1e3c13d5e9267d 100644
--- a/deps/uv/test/test-list.h
+++ b/deps/uv/test/test-list.h
@@ -43,6 +43,7 @@ TEST_DECLARE (semaphore_1)
TEST_DECLARE (semaphore_2)
TEST_DECLARE (semaphore_3)
TEST_DECLARE (tty)
+TEST_DECLARE (tty_file)
TEST_DECLARE (stdio_over_pipes)
TEST_DECLARE (ip6_pton)
TEST_DECLARE (ipc_listen_before_write)
@@ -61,6 +62,7 @@ TEST_DECLARE (multiple_listen)
TEST_DECLARE (tcp_write_after_connect)
#endif
TEST_DECLARE (tcp_writealot)
+TEST_DECLARE (tcp_write_fail)
TEST_DECLARE (tcp_try_write)
TEST_DECLARE (tcp_write_queue_order)
TEST_DECLARE (tcp_open)
@@ -195,6 +197,9 @@ TEST_DECLARE (fail_always)
TEST_DECLARE (pass_always)
TEST_DECLARE (socket_buffer_size)
TEST_DECLARE (spawn_fails)
+#ifndef _WIN32
+TEST_DECLARE (spawn_fails_check_for_waitpid_cleanup)
+#endif
TEST_DECLARE (spawn_exit_code)
TEST_DECLARE (spawn_stdout)
TEST_DECLARE (spawn_stdin)
@@ -209,6 +214,8 @@ TEST_DECLARE (spawn_setuid_fails)
TEST_DECLARE (spawn_setgid_fails)
TEST_DECLARE (spawn_stdout_to_file)
TEST_DECLARE (spawn_stdout_and_stderr_to_file)
+TEST_DECLARE (spawn_stdout_and_stderr_to_file2)
+TEST_DECLARE (spawn_stdout_and_stderr_to_file_swap)
TEST_DECLARE (spawn_auto_unref)
TEST_DECLARE (spawn_closed_process_io)
TEST_DECLARE (spawn_reads_child_path)
@@ -227,6 +234,7 @@ TEST_DECLARE (fs_mkdtemp)
TEST_DECLARE (fs_fstat)
TEST_DECLARE (fs_access)
TEST_DECLARE (fs_chmod)
+TEST_DECLARE (fs_unlink_readonly)
TEST_DECLARE (fs_chown)
TEST_DECLARE (fs_link)
TEST_DECLARE (fs_readlink)
@@ -343,6 +351,7 @@ TASK_LIST_START
#endif
TEST_ENTRY (pipe_set_non_blocking)
TEST_ENTRY (tty)
+ TEST_ENTRY (tty_file)
TEST_ENTRY (stdio_over_pipes)
TEST_ENTRY (ip6_pton)
TEST_ENTRY (ipc_listen_before_write)
@@ -372,6 +381,9 @@ TASK_LIST_START
TEST_ENTRY (tcp_writealot)
TEST_HELPER (tcp_writealot, tcp4_echo_server)
+ TEST_ENTRY (tcp_write_fail)
+ TEST_HELPER (tcp_write_fail, tcp4_echo_server)
+
TEST_ENTRY (tcp_try_write)
TEST_ENTRY (tcp_write_queue_order)
@@ -551,6 +563,9 @@ TASK_LIST_START
TEST_ENTRY (socket_buffer_size)
TEST_ENTRY (spawn_fails)
+#ifndef _WIN32
+ TEST_ENTRY (spawn_fails_check_for_waitpid_cleanup)
+#endif
TEST_ENTRY (spawn_exit_code)
TEST_ENTRY (spawn_stdout)
TEST_ENTRY (spawn_stdin)
@@ -565,6 +580,8 @@ TASK_LIST_START
TEST_ENTRY (spawn_setgid_fails)
TEST_ENTRY (spawn_stdout_to_file)
TEST_ENTRY (spawn_stdout_and_stderr_to_file)
+ TEST_ENTRY (spawn_stdout_and_stderr_to_file2)
+ TEST_ENTRY (spawn_stdout_and_stderr_to_file_swap)
TEST_ENTRY (spawn_auto_unref)
TEST_ENTRY (spawn_closed_process_io)
TEST_ENTRY (spawn_reads_child_path)
@@ -611,6 +628,7 @@ TASK_LIST_START
TEST_ENTRY (fs_fstat)
TEST_ENTRY (fs_access)
TEST_ENTRY (fs_chmod)
+ TEST_ENTRY (fs_unlink_readonly)
TEST_ENTRY (fs_chown)
TEST_ENTRY (fs_utime)
TEST_ENTRY (fs_futime)
diff --git a/deps/uv/test/test-loop-handles.c b/deps/uv/test/test-loop-handles.c
index 0986de52981e1b..c3e8498ae90a6b 100644
--- a/deps/uv/test/test-loop-handles.c
+++ b/deps/uv/test/test-loop-handles.c
@@ -113,7 +113,8 @@ static void timer_cb(uv_timer_t* handle) {
static void idle_2_close_cb(uv_handle_t* handle) {
- LOG("IDLE_2_CLOSE_CB\n");
+ fprintf(stderr, "%s", "IDLE_2_CLOSE_CB\n");
+ fflush(stderr);
ASSERT(handle == (uv_handle_t*)&idle_2_handle);
@@ -125,7 +126,8 @@ static void idle_2_close_cb(uv_handle_t* handle) {
static void idle_2_cb(uv_idle_t* handle) {
- LOG("IDLE_2_CB\n");
+ fprintf(stderr, "%s", "IDLE_2_CB\n");
+ fflush(stderr);
ASSERT(handle == &idle_2_handle);
@@ -138,7 +140,8 @@ static void idle_2_cb(uv_idle_t* handle) {
static void idle_1_cb(uv_idle_t* handle) {
int r;
- LOG("IDLE_1_CB\n");
+ fprintf(stderr, "%s", "IDLE_1_CB\n");
+ fflush(stderr);
ASSERT(handle != NULL);
ASSERT(idles_1_active > 0);
@@ -164,7 +167,8 @@ static void idle_1_cb(uv_idle_t* handle) {
static void idle_1_close_cb(uv_handle_t* handle) {
- LOG("IDLE_1_CLOSE_CB\n");
+ fprintf(stderr, "%s", "IDLE_1_CLOSE_CB\n");
+ fflush(stderr);
ASSERT(handle != NULL);
@@ -173,7 +177,8 @@ static void idle_1_close_cb(uv_handle_t* handle) {
static void prepare_1_close_cb(uv_handle_t* handle) {
- LOG("PREPARE_1_CLOSE_CB");
+ fprintf(stderr, "%s", "PREPARE_1_CLOSE_CB");
+ fflush(stderr);
ASSERT(handle == (uv_handle_t*)&prepare_1_handle);
prepare_1_close_cb_called++;
@@ -181,7 +186,8 @@ static void prepare_1_close_cb(uv_handle_t* handle) {
static void check_close_cb(uv_handle_t* handle) {
- LOG("CHECK_CLOSE_CB\n");
+ fprintf(stderr, "%s", "CHECK_CLOSE_CB\n");
+ fflush(stderr);
ASSERT(handle == (uv_handle_t*)&check_handle);
check_close_cb_called++;
@@ -189,7 +195,8 @@ static void check_close_cb(uv_handle_t* handle) {
static void prepare_2_close_cb(uv_handle_t* handle) {
- LOG("PREPARE_2_CLOSE_CB\n");
+ fprintf(stderr, "%s", "PREPARE_2_CLOSE_CB\n");
+ fflush(stderr);
ASSERT(handle == (uv_handle_t*)&prepare_2_handle);
prepare_2_close_cb_called++;
@@ -199,8 +206,8 @@ static void prepare_2_close_cb(uv_handle_t* handle) {
static void check_cb(uv_check_t* handle) {
int i, r;
- LOG("CHECK_CB\n");
-
+ fprintf(stderr, "%s", "CHECK_CB\n");
+ fflush(stderr);
ASSERT(handle == &check_handle);
if (loop_iteration < ITERATIONS) {
@@ -235,8 +242,8 @@ static void check_cb(uv_check_t* handle) {
static void prepare_2_cb(uv_prepare_t* handle) {
int r;
- LOG("PREPARE_2_CB\n");
-
+ fprintf(stderr, "%s", "PREPARE_2_CB\n");
+ fflush(stderr);
ASSERT(handle == &prepare_2_handle);
/* prepare_2 gets started by prepare_1 when (loop_iteration % 2 == 0), */
@@ -255,8 +262,8 @@ static void prepare_2_cb(uv_prepare_t* handle) {
static void prepare_1_cb(uv_prepare_t* handle) {
int r;
- LOG("PREPARE_1_CB\n");
-
+ fprintf(stderr, "%s", "PREPARE_1_CB\n");
+ fflush(stderr);
ASSERT(handle == &prepare_1_handle);
if (loop_iteration % 2 == 0) {
diff --git a/deps/uv/test/test-osx-select.c b/deps/uv/test/test-osx-select.c
index 6ccf603483488a..a0afda9181ebd9 100644
--- a/deps/uv/test/test-osx-select.c
+++ b/deps/uv/test/test-osx-select.c
@@ -39,6 +39,7 @@ static void alloc_cb(uv_handle_t* handle, size_t size, uv_buf_t* buf) {
static void read_cb(uv_stream_t* stream, ssize_t nread, const uv_buf_t* buf) {
fprintf(stdout, "got data %d\n", ++read_count);
+ fflush(stdout);
if (read_count == 3)
uv_close((uv_handle_t*) stream, NULL);
@@ -55,7 +56,8 @@ TEST_IMPL(osx_select) {
fd = open("/dev/tty", O_RDONLY);
if (fd < 0) {
- LOGF("Cannot open /dev/tty as read-only: %s\n", strerror(errno));
+ fprintf(stderr, "Cannot open /dev/tty as read-only: %s\n", strerror(errno));
+ fflush(stderr);
return TEST_SKIP;
}
@@ -107,7 +109,8 @@ TEST_IMPL(osx_select_many_fds) {
fd = open("/dev/tty", O_RDONLY);
if (fd < 0) {
- LOGF("Cannot open /dev/tty as read-only: %s\n", strerror(errno));
+ fprintf(stderr, "Cannot open /dev/tty as read-only: %s\n", strerror(errno));
+ fflush(stderr);
return TEST_SKIP;
}
diff --git a/deps/uv/test/test-pipe-set-non-blocking.c b/deps/uv/test/test-pipe-set-non-blocking.c
index 5cf2c19e7fbd29..fcc9fc0da85e99 100644
--- a/deps/uv/test/test-pipe-set-non-blocking.c
+++ b/deps/uv/test/test-pipe-set-non-blocking.c
@@ -88,8 +88,8 @@ TEST_IMPL(pipe_set_non_blocking) {
uv_close((uv_handle_t*) &pipe_handle, NULL);
ASSERT(0 == uv_run(uv_default_loop(), UV_RUN_DEFAULT));
- ASSERT(0 == close(fd[1])); /* fd[0] is closed by uv_close(). */
ASSERT(0 == uv_thread_join(&thread));
+ ASSERT(0 == close(fd[1])); /* fd[0] is closed by uv_close(). */
uv_barrier_destroy(&ctx.barrier);
MAKE_VALGRIND_HAPPY();
diff --git a/deps/uv/test/test-spawn.c b/deps/uv/test/test-spawn.c
index 9b0030029c187c..d01862abe1d97a 100644
--- a/deps/uv/test/test-spawn.c
+++ b/deps/uv/test/test-spawn.c
@@ -21,6 +21,7 @@
#include "uv.h"
#include "task.h"
+#include <errno.h>
#include
#include
#include
@@ -34,6 +35,7 @@
# include
#else
# include
+# include <sys/wait.h>
#endif
@@ -180,6 +182,37 @@ TEST_IMPL(spawn_fails) {
}
+#ifndef _WIN32
+TEST_IMPL(spawn_fails_check_for_waitpid_cleanup) {
+ int r;
+ int status;
+ int err;
+
+ init_process_options("", fail_cb);
+ options.file = options.args[0] = "program-that-had-better-not-exist";
+
+ r = uv_spawn(uv_default_loop(), &process, &options);
+ ASSERT(r == UV_ENOENT || r == UV_EACCES);
+ ASSERT(0 == uv_is_active((uv_handle_t*) &process));
+ ASSERT(0 == uv_run(uv_default_loop(), UV_RUN_DEFAULT));
+
+ /* verify the child is successfully cleaned up within libuv */
+ do
+ err = waitpid(process.pid, &status, 0);
+ while (err == -1 && errno == EINTR);
+
+ ASSERT(err == -1);
+ ASSERT(errno == ECHILD);
+
+ uv_close((uv_handle_t*) &process, NULL);
+ ASSERT(0 == uv_run(uv_default_loop(), UV_RUN_DEFAULT));
+
+ MAKE_VALGRIND_HAPPY();
+ return 0;
+}
+#endif
+
+
TEST_IMPL(spawn_exit_code) {
int r;
@@ -342,6 +375,163 @@ TEST_IMPL(spawn_stdout_and_stderr_to_file) {
}
+TEST_IMPL(spawn_stdout_and_stderr_to_file2) {
+#ifndef _WIN32
+ int r;
+ uv_file file;
+ uv_fs_t fs_req;
+ uv_stdio_container_t stdio[3];
+ uv_buf_t buf;
+
+ /* Setup. */
+ unlink("stdout_file");
+
+ init_process_options("spawn_helper6", exit_cb);
+
+ /* Replace stderr with our file */
+ r = uv_fs_open(uv_default_loop(),
+ &fs_req,
+ "stdout_file",
+ O_CREAT | O_RDWR,
+ S_IRUSR | S_IWUSR,
+ NULL);
+ ASSERT(r != -1);
+ uv_fs_req_cleanup(&fs_req);
+ file = dup2(r, STDERR_FILENO);
+ ASSERT(file != -1);
+
+ options.stdio = stdio;
+ options.stdio[0].flags = UV_IGNORE;
+ options.stdio[1].flags = UV_INHERIT_FD;
+ options.stdio[1].data.fd = file;
+ options.stdio[2].flags = UV_INHERIT_FD;
+ options.stdio[2].data.fd = file;
+ options.stdio_count = 3;
+
+ r = uv_spawn(uv_default_loop(), &process, &options);
+ ASSERT(r == 0);
+
+ r = uv_run(uv_default_loop(), UV_RUN_DEFAULT);
+ ASSERT(r == 0);
+
+ ASSERT(exit_cb_called == 1);
+ ASSERT(close_cb_called == 1);
+
+ buf = uv_buf_init(output, sizeof(output));
+ r = uv_fs_read(uv_default_loop(), &fs_req, file, &buf, 1, 0, NULL);
+ ASSERT(r == 27);
+ uv_fs_req_cleanup(&fs_req);
+
+ r = uv_fs_close(uv_default_loop(), &fs_req, file, NULL);
+ ASSERT(r == 0);
+ uv_fs_req_cleanup(&fs_req);
+
+ printf("output is: %s", output);
+ ASSERT(strcmp("hello world\nhello errworld\n", output) == 0);
+
+ /* Cleanup. */
+ unlink("stdout_file");
+
+ MAKE_VALGRIND_HAPPY();
+ return 0;
+#else
+ RETURN_SKIP("Unix only test");
+#endif
+}
+
+
+TEST_IMPL(spawn_stdout_and_stderr_to_file_swap) {
+#ifndef _WIN32
+ int r;
+ uv_file stdout_file;
+ uv_file stderr_file;
+ uv_fs_t fs_req;
+ uv_stdio_container_t stdio[3];
+ uv_buf_t buf;
+
+ /* Setup. */
+ unlink("stdout_file");
+ unlink("stderr_file");
+
+ init_process_options("spawn_helper6", exit_cb);
+
+ /* open 'stdout_file' and replace STDOUT_FILENO with it */
+ r = uv_fs_open(uv_default_loop(),
+ &fs_req,
+ "stdout_file",
+ O_CREAT | O_RDWR,
+ S_IRUSR | S_IWUSR,
+ NULL);
+ ASSERT(r != -1);
+ uv_fs_req_cleanup(&fs_req);
+ stdout_file = dup2(r, STDOUT_FILENO);
+ ASSERT(stdout_file != -1);
+
+ /* open 'stderr_file' and replace STDERR_FILENO with it */
+ r = uv_fs_open(uv_default_loop(), &fs_req, "stderr_file", O_CREAT | O_RDWR,
+ S_IRUSR | S_IWUSR, NULL);
+ ASSERT(r != -1);
+ uv_fs_req_cleanup(&fs_req);
+ stderr_file = dup2(r, STDERR_FILENO);
+ ASSERT(stderr_file != -1);
+
+ /* now we're going to swap them: the child process' stdout will be our
+ * stderr_file and vice versa */
+ options.stdio = stdio;
+ options.stdio[0].flags = UV_IGNORE;
+ options.stdio[1].flags = UV_INHERIT_FD;
+ options.stdio[1].data.fd = stderr_file;
+ options.stdio[2].flags = UV_INHERIT_FD;
+ options.stdio[2].data.fd = stdout_file;
+ options.stdio_count = 3;
+
+ r = uv_spawn(uv_default_loop(), &process, &options);
+ ASSERT(r == 0);
+
+ r = uv_run(uv_default_loop(), UV_RUN_DEFAULT);
+ ASSERT(r == 0);
+
+ ASSERT(exit_cb_called == 1);
+ ASSERT(close_cb_called == 1);
+
+ buf = uv_buf_init(output, sizeof(output));
+
+ /* check the content of stdout_file */
+ r = uv_fs_read(uv_default_loop(), &fs_req, stdout_file, &buf, 1, 0, NULL);
+ ASSERT(r >= 15);
+ uv_fs_req_cleanup(&fs_req);
+
+ r = uv_fs_close(uv_default_loop(), &fs_req, stdout_file, NULL);
+ ASSERT(r == 0);
+ uv_fs_req_cleanup(&fs_req);
+
+ printf("output is: %s", output);
+ ASSERT(strncmp("hello errworld\n", output, 15) == 0);
+
+ /* check the content of stderr_file */
+ r = uv_fs_read(uv_default_loop(), &fs_req, stderr_file, &buf, 1, 0, NULL);
+ ASSERT(r >= 12);
+ uv_fs_req_cleanup(&fs_req);
+
+ r = uv_fs_close(uv_default_loop(), &fs_req, stderr_file, NULL);
+ ASSERT(r == 0);
+ uv_fs_req_cleanup(&fs_req);
+
+ printf("output is: %s", output);
+ ASSERT(strncmp("hello world\n", output, 12) == 0);
+
+ /* Cleanup. */
+ unlink("stdout_file");
+ unlink("stderr_file");
+
+ MAKE_VALGRIND_HAPPY();
+ return 0;
+#else
+ RETURN_SKIP("Unix only test");
+#endif
+}
+
+
TEST_IMPL(spawn_stdin) {
int r;
uv_pipe_t out;
@@ -1007,7 +1197,7 @@ TEST_IMPL(environment_creation) {
return 0;
}
-// Regression test for issue #909
+/* Regression test for issue #909 */
TEST_IMPL(spawn_with_an_odd_path) {
int r;
diff --git a/deps/uv/test/test-tcp-write-fail.c b/deps/uv/test/test-tcp-write-fail.c
new file mode 100644
index 00000000000000..2840d8161032be
--- /dev/null
+++ b/deps/uv/test/test-tcp-write-fail.c
@@ -0,0 +1,115 @@
+/* Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ * IN THE SOFTWARE.
+ */
+
+#include "uv.h"
+#include "task.h"
+#include <stdio.h>
+#include <stdlib.h>
+#ifndef _WIN32
+# include <unistd.h>
+#endif
+
+
+static int connect_cb_called = 0;
+static int write_cb_called = 0;
+static int close_cb_called = 0;
+
+static uv_connect_t connect_req;
+static uv_write_t write_req;
+
+
+static void close_socket(uv_tcp_t* sock) {
+ uv_os_fd_t fd;
+ int r;
+
+ r = uv_fileno((uv_handle_t*)sock, &fd);
+ ASSERT(r == 0);
+#ifdef _WIN32
+ r = closesocket(fd);
+#else
+ r = close(fd);
+#endif
+ ASSERT(r == 0);
+}
+
+
+static void close_cb(uv_handle_t* handle) {
+ ASSERT(handle != NULL);
+ close_cb_called++;
+}
+
+
+static void write_cb(uv_write_t* req, int status) {
+ ASSERT(req != NULL);
+
+ ASSERT(status != 0);
+ fprintf(stderr, "uv_write error: %s\n", uv_strerror(status));
+ write_cb_called++;
+
+ uv_close((uv_handle_t*)(req->handle), close_cb);
+}
+
+
+static void connect_cb(uv_connect_t* req, int status) {
+ uv_buf_t buf;
+ uv_stream_t* stream;
+ int r;
+
+ ASSERT(req == &connect_req);
+ ASSERT(status == 0);
+
+ stream = req->handle;
+ connect_cb_called++;
+
+ /* close the socket, the hard way */
+ close_socket((uv_tcp_t*)stream);
+
+ buf = uv_buf_init("hello\n", 6);
+ r = uv_write(&write_req, stream, &buf, 1, write_cb);
+ ASSERT(r == 0);
+}
+
+
+TEST_IMPL(tcp_write_fail) {
+ struct sockaddr_in addr;
+ uv_tcp_t client;
+ int r;
+
+ ASSERT(0 == uv_ip4_addr("127.0.0.1", TEST_PORT, &addr));
+
+ r = uv_tcp_init(uv_default_loop(), &client);
+ ASSERT(r == 0);
+
+ r = uv_tcp_connect(&connect_req,
+ &client,
+ (const struct sockaddr*) &addr,
+ connect_cb);
+ ASSERT(r == 0);
+
+ uv_run(uv_default_loop(), UV_RUN_DEFAULT);
+
+ ASSERT(connect_cb_called == 1);
+ ASSERT(write_cb_called == 1);
+ ASSERT(close_cb_called == 1);
+
+ MAKE_VALGRIND_HAPPY();
+ return 0;
+}
diff --git a/deps/uv/test/test-timer-again.c b/deps/uv/test/test-timer-again.c
index 095cd9e707baa8..f93c509be5dc0a 100644
--- a/deps/uv/test/test-timer-again.c
+++ b/deps/uv/test/test-timer-again.c
@@ -47,8 +47,9 @@ static void repeat_1_cb(uv_timer_t* handle) {
ASSERT(handle == &repeat_1);
ASSERT(uv_timer_get_repeat((uv_timer_t*)handle) == 50);
- LOGF("repeat_1_cb called after %ld ms\n",
- (long int)(uv_now(uv_default_loop()) - start_time));
+ fprintf(stderr, "repeat_1_cb called after %ld ms\n",
+ (long int)(uv_now(uv_default_loop()) - start_time));
+ fflush(stderr);
repeat_1_cb_called++;
@@ -69,8 +70,9 @@ static void repeat_2_cb(uv_timer_t* handle) {
ASSERT(handle == &repeat_2);
ASSERT(repeat_2_cb_allowed);
- LOGF("repeat_2_cb called after %ld ms\n",
- (long int)(uv_now(uv_default_loop()) - start_time));
+ fprintf(stderr, "repeat_2_cb called after %ld ms\n",
+ (long int)(uv_now(uv_default_loop()) - start_time));
+ fflush(stderr);
repeat_2_cb_called++;
@@ -80,8 +82,9 @@ static void repeat_2_cb(uv_timer_t* handle) {
return;
}
- LOGF("uv_timer_get_repeat %ld ms\n",
- (long int)uv_timer_get_repeat(&repeat_2));
+ fprintf(stderr, "uv_timer_get_repeat %ld ms\n",
+ (long int)uv_timer_get_repeat(&repeat_2));
+ fflush(stderr);
ASSERT(uv_timer_get_repeat(&repeat_2) == 100);
/* This shouldn't take effect immediately. */
@@ -129,8 +132,9 @@ TEST_IMPL(timer_again) {
ASSERT(repeat_2_cb_called == 2);
ASSERT(close_cb_called == 2);
- LOGF("Test took %ld ms (expected ~700 ms)\n",
- (long int)(uv_now(uv_default_loop()) - start_time));
+ fprintf(stderr, "Test took %ld ms (expected ~700 ms)\n",
+ (long int)(uv_now(uv_default_loop()) - start_time));
+ fflush(stderr);
MAKE_VALGRIND_HAPPY();
return 0;
diff --git a/deps/uv/test/test-tty.c b/deps/uv/test/test-tty.c
index 7e1ce2668899f8..81e612c1d6ae1c 100644
--- a/deps/uv/test/test-tty.c
+++ b/deps/uv/test/test-tty.c
@@ -66,13 +66,15 @@ TEST_IMPL(tty) {
#else /* unix */
ttyin_fd = open("/dev/tty", O_RDONLY, 0);
if (ttyin_fd < 0) {
- LOGF("Cannot open /dev/tty as read-only: %s\n", strerror(errno));
+ fprintf(stderr, "Cannot open /dev/tty as read-only: %s\n", strerror(errno));
+ fflush(stderr);
return TEST_SKIP;
}
ttyout_fd = open("/dev/tty", O_WRONLY, 0);
if (ttyout_fd < 0) {
- LOGF("Cannot open /dev/tty as write-only: %s\n", strerror(errno));
+ fprintf(stderr, "Cannot open /dev/tty as write-only: %s\n", strerror(errno));
+ fflush(stderr);
return TEST_SKIP;
}
#endif
@@ -111,13 +113,20 @@ TEST_IMPL(tty) {
ASSERT(height > 10);
/* Turn on raw mode. */
- r = uv_tty_set_mode(&tty_in, 1);
+ r = uv_tty_set_mode(&tty_in, UV_TTY_MODE_RAW);
ASSERT(r == 0);
/* Turn off raw mode. */
- r = uv_tty_set_mode(&tty_in, 0);
+ r = uv_tty_set_mode(&tty_in, UV_TTY_MODE_NORMAL);
ASSERT(r == 0);
+ /* Calling uv_tty_reset_mode() repeatedly should not clobber errno. */
+ errno = 0;
+ ASSERT(0 == uv_tty_reset_mode());
+ ASSERT(0 == uv_tty_reset_mode());
+ ASSERT(0 == uv_tty_reset_mode());
+ ASSERT(0 == errno);
+
/* TODO check the actual mode! */
uv_close((uv_handle_t*) &tty_in, NULL);
@@ -128,3 +137,45 @@ TEST_IMPL(tty) {
MAKE_VALGRIND_HAPPY();
return 0;
}
+
+
+TEST_IMPL(tty_file) {
+#ifndef _WIN32
+ uv_loop_t loop;
+ uv_tty_t tty;
+ int fd;
+
+ ASSERT(0 == uv_loop_init(&loop));
+
+ fd = open("test/fixtures/empty_file", O_RDONLY);
+ if (fd != -1) {
+ ASSERT(UV_EINVAL == uv_tty_init(&loop, &tty, fd, 1));
+ ASSERT(0 == close(fd));
+ }
+
+ fd = open("/dev/random", O_RDONLY);
+ if (fd != -1) {
+ ASSERT(UV_EINVAL == uv_tty_init(&loop, &tty, fd, 1));
+ ASSERT(0 == close(fd));
+ }
+
+ fd = open("/dev/zero", O_RDONLY);
+ if (fd != -1) {
+ ASSERT(UV_EINVAL == uv_tty_init(&loop, &tty, fd, 1));
+ ASSERT(0 == close(fd));
+ }
+
+ fd = open("/dev/tty", O_RDONLY);
+ if (fd != -1) {
+ ASSERT(0 == uv_tty_init(&loop, &tty, fd, 1));
+ ASSERT(0 == close(fd));
+ uv_close((uv_handle_t*) &tty, NULL);
+ }
+
+ ASSERT(0 == uv_run(&loop, UV_RUN_DEFAULT));
+ ASSERT(0 == uv_loop_close(&loop));
+
+ MAKE_VALGRIND_HAPPY();
+#endif
+ return 0;
+}
diff --git a/deps/uv/uv.gyp b/deps/uv/uv.gyp
index 1ef8c05adc6496..acaed862d7c66e 100644
--- a/deps/uv/uv.gyp
+++ b/deps/uv/uv.gyp
@@ -39,7 +39,7 @@
'_FILE_OFFSET_BITS=64',
],
}],
- ['OS == "mac"', {
+ ['OS in "mac ios"', {
'defines': [ '_DARWIN_USE_64_BIT_INODE=1' ],
}],
['OS == "linux"', {
@@ -167,18 +167,17 @@
'cflags': [ '-fPIC' ],
}],
['uv_library=="shared_library" and OS!="mac"', {
- 'link_settings': {
- # Must correspond with UV_VERSION_MAJOR and UV_VERSION_MINOR
- # in include/uv-version.h
- 'libraries': [ '-Wl,-soname,libuv.so.1.0' ],
- },
+ # This will cause gyp to set soname
+ # Must correspond with UV_VERSION_MAJOR
+ # in include/uv-version.h
+ 'product_extension': 'so.1',
}],
],
}],
- [ 'OS in "linux mac android"', {
+ [ 'OS in "linux mac ios android"', {
'sources': [ 'src/unix/proctitle.c' ],
}],
- [ 'OS=="mac"', {
+ [ 'OS in "mac ios"', {
'sources': [
'src/unix/darwin.c',
'src/unix/fsevents.c',
@@ -261,7 +260,7 @@
'libraries': [ '-lkvm' ],
},
}],
- [ 'OS in "mac freebsd dragonflybsd openbsd netbsd".split()', {
+ [ 'OS in "ios mac freebsd dragonflybsd openbsd netbsd".split()', {
'sources': [ 'src/unix/kqueue.c' ],
}],
['uv_library=="shared_library"', {
@@ -364,6 +363,7 @@
'test/test-tcp-write-to-half-open-connection.c',
'test/test-tcp-write-after-connect.c',
'test/test-tcp-writealot.c',
+ 'test/test-tcp-write-fail.c',
'test/test-tcp-try-write.c',
'test/test-tcp-unexpected-read.c',
'test/test-tcp-oob.c',
diff --git a/deps/uv/vcbuild.bat b/deps/uv/vcbuild.bat
index d3b7aa154eec6a..084ab9578fe10d 100644
--- a/deps/uv/vcbuild.bat
+++ b/deps/uv/vcbuild.bat
@@ -90,8 +90,8 @@ if defined noprojgen goto msbuild
@rem Generate the VS project.
if exist build\gyp goto have_gyp
-echo git clone https://git.chromium.org/external/gyp.git build/gyp
-git clone https://git.chromium.org/external/gyp.git build/gyp
+echo git clone https://chromium.googlesource.com/external/gyp build/gyp
+git clone https://chromium.googlesource.com/external/gyp build/gyp
if errorlevel 1 goto gyp_install_failed
goto have_gyp
diff --git a/deps/v8/.gitignore b/deps/v8/.gitignore
index f720bee948660a..cc424333d31334 100644
--- a/deps/v8/.gitignore
+++ b/deps/v8/.gitignore
@@ -24,11 +24,13 @@
.cproject
.d8_history
.gclient_entries
+.landmines
.project
.pydevproject
.settings
.*.sw?
bsuite
+compile_commands.json
d8
d8_g
gccauses
diff --git a/deps/v8/.ycm_extra_conf.py b/deps/v8/.ycm_extra_conf.py
new file mode 100644
index 00000000000000..e065a0896bdb82
--- /dev/null
+++ b/deps/v8/.ycm_extra_conf.py
@@ -0,0 +1,193 @@
+# Copyright 2015 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Autocompletion config for YouCompleteMe in V8.
+#
+# USAGE:
+#
+# 1. Install YCM [https://github.com/Valloric/YouCompleteMe]
+# (Googlers should check out [go/ycm])
+#
+# 2. Profit
+#
+#
+# Usage notes:
+#
+# * You must use ninja & clang to build V8.
+#
+# * You must have run gyp_v8 and built V8 recently.
+#
+#
+# Hacking notes:
+#
+# * The purpose of this script is to construct an accurate enough command line
+# for YCM to pass to clang so it can build and extract the symbols.
+#
+# * Right now, we only pull the -I and -D flags. That seems to be sufficient
+# for everything I've used it for.
+#
+# * That whole ninja & clang thing? We could support other configs if someone
+# were willing to write the correct commands and a parser.
+#
+# * This has only been tested on gTrusty.
+
+
+import os
+import os.path
+import subprocess
+import sys
+
+
+# Flags from YCM's default config.
+flags = [
+'-DUSE_CLANG_COMPLETER',
+'-std=gnu++0x',
+'-x',
+'c++',
+]
+
+
+def PathExists(*args):
+ return os.path.exists(os.path.join(*args))
+
+
+def FindV8SrcFromFilename(filename):
+ """Searches for the root of the V8 checkout.
+
+ Simply checks parent directories until it finds .gclient and v8/.
+
+ Args:
+ filename: (String) Path to source file being edited.
+
+ Returns:
+ (String) Path of 'v8/', or None if unable to find.
+ """
+ curdir = os.path.normpath(os.path.dirname(filename))
+ while not (PathExists(curdir, 'v8') and PathExists(curdir, 'v8', 'DEPS')
+ and (PathExists(curdir, '.gclient')
+ or PathExists(curdir, 'v8', '.git'))):
+ nextdir = os.path.normpath(os.path.join(curdir, '..'))
+ if nextdir == curdir:
+ return None
+ curdir = nextdir
+ return os.path.join(curdir, 'v8')
+
+
+def GetClangCommandFromNinjaForFilename(v8_root, filename):
+ """Returns the command line to build |filename|.
+
+ Asks ninja how it would build the source file. If the specified file is a
+ header, tries to find its companion source file first.
+
+ Args:
+ v8_root: (String) Path to v8/.
+ filename: (String) Path to source file being edited.
+
+ Returns:
+ (List of Strings) Command line arguments for clang.
+ """
+ if not v8_root:
+ return []
+
+ # Generally, everyone benefits from including V8's root, because all of
+ # V8's includes are relative to that.
+ v8_flags = ['-I' + os.path.join(v8_root)]
+
+ # Version of Clang used to compile V8 can be newer then version of
+ # libclang that YCM uses for completion. So it's possible that YCM's libclang
+ # doesn't know about some used warning options, which causes compilation
+ # warnings (and errors, because of '-Werror');
+ v8_flags.append('-Wno-unknown-warning-option')
+
+ # Header files can't be built. Instead, try to match a header file to its
+ # corresponding source file.
+ if filename.endswith('.h'):
+ alternates = ['.cc', '.cpp']
+ for alt_extension in alternates:
+ alt_name = filename[:-2] + alt_extension
+ if os.path.exists(alt_name):
+ filename = alt_name
+ break
+ else:
+ if filename.endswith('-inl.h'):
+ for alt_extension in alternates:
+ alt_name = filename[:-6] + alt_extension
+ if os.path.exists(alt_name):
+ filename = alt_name
+ break;
+ else:
+ # If this is a standalone -inl.h file with no source, the best we can
+ # do is try to use the default flags.
+ return v8_flags
+ else:
+ # If this is a standalone .h file with no source, the best we can do is
+ # try to use the default flags.
+ return v8_flags
+
+ sys.path.append(os.path.join(v8_root, 'tools', 'ninja'))
+ from ninja_output import GetNinjaOutputDirectory
+ out_dir = os.path.realpath(GetNinjaOutputDirectory(v8_root))
+
+ # Ninja needs the path to the source file relative to the output build
+ # directory.
+ rel_filename = os.path.relpath(os.path.realpath(filename), out_dir)
+
+ # Ask ninja how it would build our source file.
+ p = subprocess.Popen(['ninja', '-v', '-C', out_dir, '-t',
+ 'commands', rel_filename + '^'],
+ stdout=subprocess.PIPE)
+ stdout, stderr = p.communicate()
+ if p.returncode:
+ return v8_flags
+
+ # Ninja might execute several commands to build something. We want the last
+ # clang command.
+ clang_line = None
+ for line in reversed(stdout.split('\n')):
+ if 'clang' in line:
+ clang_line = line
+ break
+ else:
+ return v8_flags
+
+ # Parse flags that are important for YCM's purposes.
+ for flag in clang_line.split(' '):
+ if flag.startswith('-I'):
+ # Relative paths need to be resolved, because they're relative to the
+ # output dir, not the source.
+ if flag[2] == '/':
+ v8_flags.append(flag)
+ else:
+ abs_path = os.path.normpath(os.path.join(out_dir, flag[2:]))
+ v8_flags.append('-I' + abs_path)
+ elif flag.startswith('-std'):
+ v8_flags.append(flag)
+ elif flag.startswith('-') and flag[1] in 'DWFfmO':
+ if flag == '-Wno-deprecated-register' or flag == '-Wno-header-guard':
+ # These flags causes libclang (3.3) to crash. Remove it until things
+ # are fixed.
+ continue
+ v8_flags.append(flag)
+
+ return v8_flags
+
+
+def FlagsForFile(filename):
+ """This is the main entry point for YCM. Its interface is fixed.
+
+ Args:
+ filename: (String) Path to source file being edited.
+
+ Returns:
+ (Dictionary)
+ 'flags': (List of Strings) Command line flags.
+ 'do_cache': (Boolean) True if the result should be cached.
+ """
+ v8_root = FindV8SrcFromFilename(filename)
+ v8_flags = GetClangCommandFromNinjaForFilename(v8_root, filename)
+ final_flags = flags + v8_flags
+ return {
+ 'flags': final_flags,
+ 'do_cache': True
+ }
diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS
index 6cda4f239730b0..5b976b8b79f103 100644
--- a/deps/v8/AUTHORS
+++ b/deps/v8/AUTHORS
@@ -3,78 +3,97 @@
#
# Name/Organization
-Google Inc.
-Sigma Designs Inc.
-ARM Ltd.
-Hewlett-Packard Development Company, LP
-Igalia, S.L.
-Joyent, Inc.
-Bloomberg Finance L.P.
-NVIDIA Corporation
-BlackBerry Limited
-Opera Software ASA
-Intel Corporation
-MIPS Technologies, Inc.
-Imagination Technologies, LLC
-Loongson Technology Corporation Limited
+Google Inc. <*@google.com>
+The Chromium Authors <*@chromium.org>
+Sigma Designs Inc. <*@sdesigns.com>
+ARM Ltd. <*@arm.com>
+Hewlett-Packard Development Company, LP <*@palm.com>
+Igalia, S.L. <*@igalia.com>
+Joyent, Inc. <*@joyent.com>
+Bloomberg Finance L.P. <*@bloomberg.net>
+NVIDIA Corporation <*@nvidia.com>
+BlackBerry Limited <*@blackberry.com>
+Opera Software ASA <*@opera.com>
+Intel Corporation <*@intel.com>
+MIPS Technologies, Inc. <*@mips.com>
+Imagination Technologies, LLC <*@imgtec.com>
+Loongson Technology Corporation Limited <*@loongson.cn>
+Code Aurora Forum <*@codeaurora.org>
+Home Jinni Inc. <*@homejinni.com>
+IBM Inc. <*@*.ibm.com>
+Samsung <*@*.samsung.com>
+Joyent, Inc <*@joyent.com>
+RT-RK Computer Based System <*@rt-rk.com>
+Amazon, Inc <*@amazon.com>
+ST Microelectronics <*@st.com>
+Yandex LLC <*@yandex-team.ru>
+StrongLoop, Inc. <*@strongloop.com>
+Aaron Bieber
+Abdulla Kamar
Akinori MUSHA
Alexander Botero-Lowry
Alexander Karpinsky
-Alexandre Rames
Alexandre Vassalotti
+Alexis Campailla
Andreas Anyuru
-Baptiste Afsa
+Andrew Paprocki
+Andrei Kashcha
+Ben Noordhuis
Bert Belder
Burcu Dogan
Caitlin Potter
Craig Schlenter
-Chunyang Dai
+Christopher A. Taylor
Daniel Andersson
Daniel James
-Derek J Conrod
-Dineel D Sule
+Douglas Crosher
Erich Ocean
Fedor Indutny
+Felix Geisendörfer
Filipe David Manana
-Haitao Feng
+Geoffrey Garside
+Han Choongwoo
+Hirofumi Mako
Ioseb Dzmanashvili
Isiah Meadows
-Jacob Bramley
Jan de Mooij
Jay Freeman
James Pike
-Joel Stanley
+Jianghua Yang
+Joel Stanley
Johan Bergström
-John Jozwiak
Jonathan Liu
-Kun Zhang
+JunHo Seo
+Kang-Hao (Kenny) Lu
Luis Reis
-Martyn Capewell
+Luke Zarko
+Maciej Małecki
Mathias Bynens
Matt Hanselman
+Matthew Sporleder
Maxim Mossienko
Michael Lutz
Michael Smith
Mike Gilbert
+Nicolas Antonius Ernst Leopold Maria Kaiser
Paolo Giarrusso
Patrick Gansterer
Peter Varga
+Paul Lind
Rafal Krypa
-Rajeev R Krithivasan
Refael Ackermann
Rene Rebe
Robert Mustacchi
-Rodolph Perfetta
-Ryan Dahl
+Robert Nagy
+Ryan Dahl
Sandro Santilli
Sanjoy Das
-Subrato K De
+Seo Sanghyeon
Tobias Burnus
-Vincent Belliard
+Victor Costan
Vlad Burlik
-Weiliang Lin
-Xi Qian
-Yuqiang Xian
-Zaheer Ahmad
+Vladimir Shutoff
+Yu Yin
Zhongping Wang
+柳荣一
diff --git a/deps/v8/BUILD.gn b/deps/v8/BUILD.gn
index 6534eea8594810..fc0ea8eb6802bf 100644
--- a/deps/v8/BUILD.gn
+++ b/deps/v8/BUILD.gn
@@ -2,6 +2,10 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//build/config/android/config.gni")
+import("//build/config/arm.gni")
+import("//build/config/mips.gni")
+
# Because standalone V8 builds are not supported, assume this is part of a
# Chromium build.
import("//build/module_args/v8.gni")
@@ -18,10 +22,22 @@ v8_interpreted_regexp = false
v8_object_print = false
v8_postmortem_support = false
v8_use_snapshot = true
-v8_enable_extra_checks = is_debug
-v8_target_arch = cpu_arch
+v8_target_arch = target_cpu
v8_random_seed = "314159265"
+v8_toolset_for_d8 = "host"
+# The snapshot needs to be compiled for the host, but compiled with
+# a toolchain that matches the bit-width of the target.
+#
+# TODO(GYP): For now we only support 32-bit little-endian target builds from an
+# x64 Linux host. Eventually we need to support all of the host/target
+# configurations v8 runs on.
+if (host_cpu == "x64" && host_os == "linux" &&
+ (target_cpu == "arm" || target_cpu == "mipsel" || target_cpu == "x86")) {
+ snapshot_toolchain = "//build/toolchain/linux:clang_x86"
+} else {
+ snapshot_toolchain = default_toolchain
+}
###############################################################################
# Configurations
@@ -63,54 +79,31 @@ config("features") {
defines = []
if (v8_enable_disassembler == true) {
- defines += [
- "ENABLE_DISASSEMBLER",
- ]
+ defines += [ "ENABLE_DISASSEMBLER" ]
}
if (v8_enable_gdbjit == true) {
- defines += [
- "ENABLE_GDB_JIT_INTERFACE",
- ]
+ defines += [ "ENABLE_GDB_JIT_INTERFACE" ]
}
if (v8_object_print == true) {
- defines += [
- "OBJECT_PRINT",
- ]
+ defines += [ "OBJECT_PRINT" ]
}
if (v8_enable_verify_heap == true) {
- defines += [
- "VERIFY_HEAP",
- ]
+ defines += [ "VERIFY_HEAP" ]
}
if (v8_interpreted_regexp == true) {
- defines += [
- "V8_INTERPRETED_REGEXP",
- ]
+ defines += [ "V8_INTERPRETED_REGEXP" ]
}
if (v8_deprecation_warnings == true) {
- defines += [
- "V8_DEPRECATION_WARNINGS",
- ]
+ defines += [ "V8_DEPRECATION_WARNINGS" ]
}
if (v8_enable_i18n_support == true) {
- defines += [
- "V8_I18N_SUPPORT",
- ]
- }
- if (v8_enable_extra_checks == true) {
- defines += [
- "ENABLE_EXTRA_CHECKS",
- ]
+ defines += [ "V8_I18N_SUPPORT" ]
}
if (v8_enable_handle_zapping == true) {
- defines += [
- "ENABLE_HANDLE_ZAPPING",
- ]
+ defines += [ "ENABLE_HANDLE_ZAPPING" ]
}
if (v8_use_external_startup_data == true) {
- defines += [
- "V8_USE_EXTERNAL_STARTUP_DATA",
- ]
+ defines += [ "V8_USE_EXTERNAL_STARTUP_DATA" ]
}
}
@@ -120,27 +113,57 @@ config("toolchain") {
defines = []
cflags = []
- # TODO(jochen): Add support for arm, mips, mipsel.
+ # TODO(jochen): Add support for arm subarchs, mips, mipsel, mips64el.
+
+ if (v8_target_arch == "arm") {
+ defines += [ "V8_TARGET_ARCH_ARM" ]
+ if (current_cpu == "arm") {
+ if (arm_version == 7) {
+ defines += [ "CAN_USE_ARMV7_INSTRUCTIONS" ]
+ }
+ if (arm_fpu == "vfpv3-d16") {
+ defines += [ "CAN_USE_VFP3_INSTRUCTIONS" ]
+ } else if (arm_fpu == "vfpv3") {
+ defines += [
+ "CAN_USE_VFP3_INSTRUCTIONS",
+ "CAN_USE_VFP32DREGS",
+ ]
+ } else if (arm_fpu == "neon") {
+ defines += [
+ "CAN_USE_VFP3_INSTRUCTIONS",
+ "CAN_USE_VFP32DREGS",
+ "CAN_USE_NEON",
+ ]
+ }
+ } else {
+ # These defines ares used for the ARM simulator.
+ defines += [
+ "CAN_USE_ARMV7_INSTRUCTIONS",
+ "CAN_USE_VFP3_INSTRUCTIONS",
+ "CAN_USE_VFP32DREGS",
+ "USE_EABI_HARDFLOAT=0",
+ ]
+ }
+ # TODO(jochen): Add support for arm_test_noprobe.
+ }
if (v8_target_arch == "arm64") {
- defines += [
- "V8_TARGET_ARCH_ARM64",
- ]
+ defines += [ "V8_TARGET_ARCH_ARM64" ]
+ }
+ if (v8_target_arch == "mipsel") {
+ defines += [ "V8_TARGET_ARCH_MIPS" ]
+ }
+ if (v8_target_arch == "mips64el") {
+ defines += [ "V8_TARGET_ARCH_MIPS64" ]
}
if (v8_target_arch == "x86") {
- defines += [
- "V8_TARGET_ARCH_IA32",
- ]
+ defines += [ "V8_TARGET_ARCH_IA32" ]
}
if (v8_target_arch == "x64") {
- defines += [
- "V8_TARGET_ARCH_X64",
- ]
+ defines += [ "V8_TARGET_ARCH_X64" ]
}
if (is_win) {
- defines += [
- "WIN32",
- ]
+ defines += [ "WIN32" ]
# TODO(jochen): Support v8_enable_prof.
}
@@ -170,7 +193,7 @@ action("js2c") {
# The script depends on this other script, this rule causes a rebuild if it
# changes.
- source_prereqs = [ "tools/jsmin.py" ]
+ inputs = [ "tools/jsmin.py" ]
sources = [
"src/runtime.js",
@@ -179,9 +202,8 @@ action("js2c") {
"src/array.js",
"src/string.js",
"src/uri.js",
- "src/third_party/fdlibm/fdlibm.js",
"src/math.js",
- "src/apinatives.js",
+ "src/third_party/fdlibm/fdlibm.js",
"src/date.js",
"src/regexp.js",
"src/arraybuffer.js",
@@ -199,11 +221,12 @@ action("js2c") {
"src/debug-debugger.js",
"src/mirror-debugger.js",
"src/liveedit-debugger.js",
+ "src/templates.js",
"src/macros.py",
]
outputs = [
- "$target_gen_dir/libraries.cc"
+ "$target_gen_dir/libraries.cc",
]
if (v8_enable_i18n_support) {
@@ -211,15 +234,15 @@ action("js2c") {
}
args = [
- rebase_path("$target_gen_dir/libraries.cc", root_build_dir),
- "CORE",
- ] + rebase_path(sources, root_build_dir)
+ rebase_path("$target_gen_dir/libraries.cc", root_build_dir),
+ "CORE",
+ ] + rebase_path(sources, root_build_dir)
if (v8_use_external_startup_data) {
outputs += [ "$target_gen_dir/libraries.bin" ]
args += [
"--startup_blob",
- rebase_path("$target_gen_dir/libraries.bin", root_build_dir)
+ rebase_path("$target_gen_dir/libraries.bin", root_build_dir),
]
}
}
@@ -231,56 +254,73 @@ action("js2c_experimental") {
# The script depends on this other script, this rule causes a rebuild if it
# changes.
- source_prereqs = [ "tools/jsmin.py" ]
+ inputs = [ "tools/jsmin.py" ]
sources = [
"src/macros.py",
"src/proxy.js",
"src/generator.js",
- "src/harmony-string.js",
"src/harmony-array.js",
"src/harmony-array-includes.js",
"src/harmony-typedarray.js",
- "src/harmony-classes.js",
"src/harmony-tostring.js",
- "src/harmony-templates.js",
- "src/harmony-regexp.js"
+ "src/harmony-regexp.js",
+ "src/harmony-reflect.js"
]
outputs = [
- "$target_gen_dir/experimental-libraries.cc"
+ "$target_gen_dir/experimental-libraries.cc",
]
args = [
- rebase_path("$target_gen_dir/experimental-libraries.cc", root_build_dir),
- "EXPERIMENTAL",
- ] + rebase_path(sources, root_build_dir)
+ rebase_path("$target_gen_dir/experimental-libraries.cc",
+ root_build_dir),
+ "EXPERIMENTAL",
+ ] + rebase_path(sources, root_build_dir)
if (v8_use_external_startup_data) {
outputs += [ "$target_gen_dir/libraries_experimental.bin" ]
args += [
"--startup_blob",
- rebase_path("$target_gen_dir/libraries_experimental.bin", root_build_dir)
+ rebase_path("$target_gen_dir/libraries_experimental.bin", root_build_dir),
]
}
}
+action("d8_js2c") {
+ visibility = [ ":*" ] # Only targets in this file can depend on this.
+
+ script = "tools/js2c.py"
+
+ inputs = [
+ "src/d8.js",
+ "src/macros.py",
+ ]
+
+ outputs = [
+ "$target_gen_dir/d8-js.cc",
+ ]
+
+ args = rebase_path(outputs, root_build_dir) + [ "D8" ] +
+ rebase_path(inputs, root_build_dir)
+}
+
if (v8_use_external_startup_data) {
action("natives_blob") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
deps = [
":js2c",
- ":js2c_experimental"
+ ":js2c_experimental",
]
sources = [
"$target_gen_dir/libraries.bin",
- "$target_gen_dir/libraries_experimental.bin"
+ "$target_gen_dir/libraries_experimental.bin",
]
outputs = [
- "$root_out_dir/natives_blob.bin"
+ "$root_out_dir/natives_blob.bin",
]
script = "tools/concatenate-files.py"
@@ -300,48 +340,52 @@ action("postmortem-metadata") {
]
outputs = [
- "$target_gen_dir/debug-support.cc"
+ "$target_gen_dir/debug-support.cc",
]
- args =
- rebase_path(outputs, root_build_dir) +
- rebase_path(sources, root_build_dir)
+ args = rebase_path(outputs, root_build_dir) +
+ rebase_path(sources, root_build_dir)
}
action("run_mksnapshot") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
- deps = [ ":mksnapshot($host_toolchain)" ]
+ deps = [
+ ":mksnapshot($snapshot_toolchain)",
+ ]
script = "tools/run.py"
outputs = [
- "$target_gen_dir/snapshot.cc"
+ "$target_gen_dir/snapshot.cc",
]
args = [
- "./" + rebase_path(get_label_info(":mksnapshot($host_toolchain)",
+ "./" + rebase_path(get_label_info(":mksnapshot($snapshot_toolchain)",
"root_out_dir") + "/mksnapshot",
root_build_dir),
"--log-snapshot-positions",
- "--logfile", rebase_path("$target_gen_dir/snapshot.log", root_build_dir),
- rebase_path("$target_gen_dir/snapshot.cc", root_build_dir)
+ "--logfile",
+ rebase_path("$target_gen_dir/snapshot.log", root_build_dir),
+ rebase_path("$target_gen_dir/snapshot.cc", root_build_dir),
]
if (v8_random_seed != "0") {
- args += [ "--random-seed", v8_random_seed ]
+ args += [
+ "--random-seed",
+ v8_random_seed,
+ ]
}
if (v8_use_external_startup_data) {
outputs += [ "$root_out_dir/snapshot_blob.bin" ]
args += [
"--startup_blob",
- rebase_path("$root_out_dir/snapshot_blob.bin", root_build_dir)
+ rebase_path("$root_out_dir/snapshot_blob.bin", root_build_dir),
]
}
}
-
###############################################################################
# Source Sets (aka static libraries)
#
@@ -358,12 +402,16 @@ source_set("v8_nosnapshot") {
sources = [
"$target_gen_dir/libraries.cc",
"$target_gen_dir/experimental-libraries.cc",
- "src/snapshot-empty.cc",
+ "src/snapshot/snapshot-empty.cc",
]
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
- configs += [ ":internal_config", ":features", ":toolchain" ]
+ configs += [
+ ":internal_config",
+ ":features",
+ ":toolchain",
+ ]
}
source_set("v8_snapshot") {
@@ -384,7 +432,11 @@ source_set("v8_snapshot") {
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
- configs += [ ":internal_config", ":features", ":toolchain" ]
+ configs += [
+ ":internal_config",
+ ":features",
+ ":toolchain",
+ ]
}
if (v8_use_external_startup_data) {
@@ -400,13 +452,17 @@ if (v8_use_external_startup_data) {
]
sources = [
- "src/natives-external.cc",
- "src/snapshot-external.cc",
+ "src/snapshot/natives-external.cc",
+ "src/snapshot/snapshot-external.cc",
]
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
- configs += [ ":internal_config", ":features", ":toolchain" ]
+ configs += [
+ ":internal_config",
+ ":features",
+ ":toolchain",
+ ]
}
}
@@ -424,6 +480,8 @@ source_set("v8_base") {
"src/allocation-tracker.h",
"src/api.cc",
"src/api.h",
+ "src/api-natives.cc",
+ "src/api-natives.h",
"src/arguments.cc",
"src/arguments.h",
"src/assembler.cc",
@@ -432,8 +490,6 @@ source_set("v8_base") {
"src/assert-scope.cc",
"src/ast-numbering.cc",
"src/ast-numbering.h",
- "src/ast-this-access-visitor.cc",
- "src/ast-this-access-visitor.h",
"src/ast-value-factory.cc",
"src/ast-value-factory.h",
"src/ast.cc",
@@ -478,6 +534,8 @@ source_set("v8_base") {
"src/compilation-statistics.h",
"src/compiler/access-builder.cc",
"src/compiler/access-builder.h",
+ "src/compiler/all-nodes.cc",
+ "src/compiler/all-nodes.h",
"src/compiler/ast-graph-builder.cc",
"src/compiler/ast-graph-builder.h",
"src/compiler/ast-loop-assignment-analyzer.cc",
@@ -498,16 +556,15 @@ source_set("v8_base") {
"src/compiler/control-builders.cc",
"src/compiler/control-builders.h",
"src/compiler/control-equivalence.h",
+ "src/compiler/control-flow-optimizer.cc",
+ "src/compiler/control-flow-optimizer.h",
"src/compiler/control-reducer.cc",
"src/compiler/control-reducer.h",
"src/compiler/diamond.h",
"src/compiler/frame.h",
"src/compiler/gap-resolver.cc",
"src/compiler/gap-resolver.h",
- "src/compiler/generic-algorithm.h",
- "src/compiler/graph-builder.cc",
"src/compiler/graph-builder.h",
- "src/compiler/graph-inl.h",
"src/compiler/graph-reducer.cc",
"src/compiler/graph-reducer.h",
"src/compiler/graph-replay.cc",
@@ -532,10 +589,12 @@ source_set("v8_base") {
"src/compiler/js-graph.h",
"src/compiler/js-inlining.cc",
"src/compiler/js-inlining.h",
- "src/compiler/js-intrinsic-builder.cc",
- "src/compiler/js-intrinsic-builder.h",
+ "src/compiler/js-intrinsic-lowering.cc",
+ "src/compiler/js-intrinsic-lowering.h",
"src/compiler/js-operator.cc",
"src/compiler/js-operator.h",
+ "src/compiler/js-type-feedback.cc",
+ "src/compiler/js-type-feedback.h",
"src/compiler/js-typed-lowering.cc",
"src/compiler/js-typed-lowering.h",
"src/compiler/jump-threading.cc",
@@ -543,8 +602,11 @@ source_set("v8_base") {
"src/compiler/linkage-impl.h",
"src/compiler/linkage.cc",
"src/compiler/linkage.h",
+ "src/compiler/liveness-analyzer.cc",
+ "src/compiler/liveness-analyzer.h",
"src/compiler/load-elimination.cc",
"src/compiler/load-elimination.h",
+ "src/compiler/loop-peeling.cc",
"src/compiler/loop-analysis.cc",
"src/compiler/loop-analysis.h",
"src/compiler/machine-operator-reducer.cc",
@@ -555,12 +617,14 @@ source_set("v8_base") {
"src/compiler/machine-type.h",
"src/compiler/move-optimizer.cc",
"src/compiler/move-optimizer.h",
- "src/compiler/node-aux-data-inl.h",
"src/compiler/node-aux-data.h",
"src/compiler/node-cache.cc",
"src/compiler/node-cache.h",
+ "src/compiler/node-marker.cc",
+ "src/compiler/node-marker.h",
+ "src/compiler/node-matchers.cc",
"src/compiler/node-matchers.h",
- "src/compiler/node-properties-inl.h",
+ "src/compiler/node-properties.cc",
"src/compiler/node-properties.h",
"src/compiler/node.cc",
"src/compiler/node.h",
@@ -570,6 +634,8 @@ source_set("v8_base") {
"src/compiler/operator-properties.h",
"src/compiler/operator.cc",
"src/compiler/operator.h",
+ "src/compiler/osr.cc",
+ "src/compiler/osr.h",
"src/compiler/pipeline.cc",
"src/compiler/pipeline.h",
"src/compiler/pipeline-statistics.cc",
@@ -597,6 +663,8 @@ source_set("v8_base") {
"src/compiler/simplified-operator.h",
"src/compiler/source-position.cc",
"src/compiler/source-position.h",
+ "src/compiler/state-values-utils.cc",
+ "src/compiler/state-values-utils.h",
"src/compiler/typer.cc",
"src/compiler/typer.h",
"src/compiler/value-numbering-reducer.cc",
@@ -775,8 +843,6 @@ source_set("v8_base") {
"src/ic/ic-compiler.h",
"src/ic/stub-cache.cc",
"src/ic/stub-cache.h",
- "src/interface.cc",
- "src/interface.h",
"src/interface-descriptors.cc",
"src/interface-descriptors.h",
"src/interpreter-irregexp.cc",
@@ -813,8 +879,9 @@ source_set("v8_base") {
"src/macro-assembler.h",
"src/messages.cc",
"src/messages.h",
+ "src/modules.cc",
+ "src/modules.h",
"src/msan.h",
- "src/natives.h",
"src/objects-debug.cc",
"src/objects-inl.h",
"src/objects-printer.cc",
@@ -826,6 +893,8 @@ source_set("v8_base") {
"src/ostreams.h",
"src/parser.cc",
"src/parser.h",
+ "src/pending-compilation-error-handler.cc",
+ "src/pending-compilation-error-handler.h",
"src/perf-jit.cc",
"src/perf-jit.h",
"src/preparse-data-format.h",
@@ -855,7 +924,6 @@ source_set("v8_base") {
"src/rewriter.h",
"src/runtime-profiler.cc",
"src/runtime-profiler.h",
- "src/runtime/runtime-api.cc",
"src/runtime/runtime-array.cc",
"src/runtime/runtime-classes.cc",
"src/runtime/runtime-collections.cc",
@@ -896,20 +964,23 @@ source_set("v8_base") {
"src/scopeinfo.h",
"src/scopes.cc",
"src/scopes.h",
- "src/serialize.cc",
- "src/serialize.h",
"src/small-pointer-list.h",
"src/smart-pointers.h",
- "src/snapshot-common.cc",
- "src/snapshot-source-sink.cc",
- "src/snapshot-source-sink.h",
- "src/snapshot.h",
+ "src/snapshot/natives.h",
+ "src/snapshot/serialize.cc",
+ "src/snapshot/serialize.h",
+ "src/snapshot/snapshot-common.cc",
+ "src/snapshot/snapshot-source-sink.cc",
+ "src/snapshot/snapshot-source-sink.h",
+ "src/snapshot/snapshot.h",
"src/string-builder.cc",
"src/string-builder.h",
"src/string-search.cc",
"src/string-search.h",
"src/string-stream.cc",
"src/string-stream.h",
+ "src/strings-storage.cc",
+ "src/strings-storage.h",
"src/strtod.cc",
"src/strtod.h",
"src/token.cc",
@@ -949,7 +1020,6 @@ source_set("v8_base") {
"src/version.h",
"src/vm-state-inl.h",
"src/vm-state.h",
- "src/zone-inl.h",
"src/zone.cc",
"src/zone.h",
"src/third_party/fdlibm/fdlibm.cc",
@@ -1209,7 +1279,11 @@ source_set("v8_base") {
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
- configs += [ ":internal_config", ":features", ":toolchain" ]
+ configs += [
+ ":internal_config",
+ ":features",
+ ":toolchain",
+ ]
if (!is_debug) {
configs -= [ "//build/config/compiler:optimize" ]
@@ -1217,7 +1291,9 @@ source_set("v8_base") {
}
defines = []
- deps = [ ":v8_libbase" ]
+ deps = [
+ ":v8_libbase",
+ ]
if (is_win) {
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
@@ -1229,6 +1305,7 @@ source_set("v8_base") {
if (is_win) {
deps += [ "//third_party/icu:icudata" ]
}
+
# TODO(jochen): Add support for icu_use_data_file_flag
defines += [ "ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_FILE" ]
} else {
@@ -1297,7 +1374,11 @@ source_set("v8_libbase") {
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
- configs += [ ":internal_config_base", ":features", ":toolchain" ]
+ configs += [
+ ":internal_config_base",
+ ":features",
+ ":toolchain",
+ ]
if (!is_debug) {
configs -= [ "//build/config/compiler:optimize" ]
@@ -1307,21 +1388,17 @@ source_set("v8_libbase") {
defines = []
if (is_posix) {
- sources += [
- "src/base/platform/platform-posix.cc"
- ]
+ sources += [ "src/base/platform/platform-posix.cc" ]
}
if (is_linux) {
- sources += [
- "src/base/platform/platform-linux.cc"
- ]
+ sources += [ "src/base/platform/platform-linux.cc" ]
- libs = [ "rt" ]
+ libs = [ "dl", "rt" ]
} else if (is_android) {
defines += [ "CAN_USE_VFP_INSTRUCTIONS" ]
- if (build_os == "mac") {
+ if (host_os == "mac") {
if (current_toolchain == host_toolchain) {
sources += [ "src/base/platform/platform-macos.cc" ]
} else {
@@ -1344,7 +1421,10 @@ source_set("v8_libbase") {
defines += [ "_CRT_RAND_S" ] # for rand_s()
- libs = [ "winmm.lib", "ws2_32.lib" ]
+ libs = [
+ "winmm.lib",
+ "ws2_32.lib",
+ ]
}
# TODO(jochen): Add support for qnx, freebsd, openbsd, netbsd, and solaris.
@@ -1363,7 +1443,11 @@ source_set("v8_libplatform") {
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
- configs += [ ":internal_config_base", ":features", ":toolchain" ]
+ configs += [
+ ":internal_config_base",
+ ":features",
+ ":toolchain",
+ ]
if (!is_debug) {
configs -= [ "//build/config/compiler:optimize" ]
@@ -1379,17 +1463,21 @@ source_set("v8_libplatform") {
# Executables
#
-if (current_toolchain == host_toolchain) {
+if (current_toolchain == snapshot_toolchain) {
executable("mksnapshot") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
sources = [
- "src/mksnapshot.cc",
+ "src/snapshot/mksnapshot.cc",
]
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
- configs += [ ":internal_config", ":features", ":toolchain" ]
+ configs += [
+ ":internal_config",
+ ":features",
+ ":toolchain",
+ ]
deps = [
":v8_base",
@@ -1405,64 +1493,109 @@ if (current_toolchain == host_toolchain) {
#
if (component_mode == "shared_library") {
+ component("v8") {
+ sources = [
+ "src/v8dll-main.cc",
+ ]
-component("v8") {
- sources = [
- "src/v8dll-main.cc",
- ]
+ if (v8_use_snapshot && v8_use_external_startup_data) {
+ deps = [
+ ":v8_base",
+ ":v8_external_snapshot",
+ ]
+ } else if (v8_use_snapshot) {
+ deps = [
+ ":v8_base",
+ ":v8_snapshot",
+ ]
+ } else {
+ assert(!v8_use_external_startup_data)
+ deps = [
+ ":v8_base",
+ ":v8_nosnapshot",
+ ]
+ }
- if (v8_use_snapshot && v8_use_external_startup_data) {
- deps = [
- ":v8_base",
- ":v8_external_snapshot",
- ]
- } else if (v8_use_snapshot) {
- deps = [
- ":v8_base",
- ":v8_snapshot",
- ]
- } else {
- assert(!v8_use_external_startup_data)
- deps = [
- ":v8_base",
- ":v8_nosnapshot",
+ configs -= [ "//build/config/compiler:chromium_code" ]
+ configs += [ "//build/config/compiler:no_chromium_code" ]
+ configs += [
+ ":internal_config",
+ ":features",
+ ":toolchain",
]
- }
- configs -= [ "//build/config/compiler:chromium_code" ]
- configs += [ "//build/config/compiler:no_chromium_code" ]
- configs += [ ":internal_config", ":features", ":toolchain" ]
+ direct_dependent_configs = [ ":external_config" ]
- direct_dependent_configs = [ ":external_config" ]
+ libs = []
+ if (is_android && current_toolchain != host_toolchain) {
+ libs += [ "log" ]
+ }
+ }
+} else {
+ group("v8") {
+ if (v8_use_snapshot && v8_use_external_startup_data) {
+ deps = [
+ ":v8_base",
+ ":v8_external_snapshot",
+ ]
+ } else if (v8_use_snapshot) {
+ deps = [
+ ":v8_base",
+ ":v8_snapshot",
+ ]
+ } else {
+ assert(!v8_use_external_startup_data)
+ deps = [
+ ":v8_base",
+ ":v8_nosnapshot",
+ ]
+ }
- libs = []
- if (is_android && current_toolchain != host_toolchain) {
- libs += [ "log" ]
+ direct_dependent_configs = [ ":external_config" ]
}
}
-} else {
-
-group("v8") {
- if (v8_use_snapshot && v8_use_external_startup_data) {
- deps = [
- ":v8_base",
- ":v8_external_snapshot",
+if ((current_toolchain == host_toolchain && v8_toolset_for_d8 == "host") ||
+ (current_toolchain != host_toolchain && v8_toolset_for_d8 == "target")) {
+ executable("d8") {
+ sources = [
+ "src/d8.cc",
+ "src/d8.h",
+ "src/startup-data-util.h",
+ "src/startup-data-util.cc",
]
- } else if (v8_use_snapshot) {
- deps = [
- ":v8_base",
- ":v8_snapshot",
+
+ configs -= [ "//build/config/compiler:chromium_code" ]
+ configs += [ "//build/config/compiler:no_chromium_code" ]
+ configs += [
+ ":internal_config",
+ ":features",
+ ":toolchain",
]
- } else {
- assert(!v8_use_external_startup_data)
+
deps = [
- ":v8_base",
- ":v8_nosnapshot",
+ ":d8_js2c",
+ ":v8",
+ ":v8_libplatform",
+ "//build/config/sanitizers:deps",
]
- }
- direct_dependent_configs = [ ":external_config" ]
-}
+ # TODO(jochen): Add support for readline and vtunejit.
+
+ if (is_posix) {
+ sources += [ "src/d8-posix.cc" ]
+ } else if (is_win) {
+ sources += [ "src/d8-windows.cc" ]
+ }
+ if (component_mode != "shared_library") {
+ sources += [
+ "src/d8-debug.cc",
+ "$target_gen_dir/d8-js.cc",
+ ]
+ }
+ if (v8_enable_i18n_support) {
+ deps += [ "//third_party/icu" ]
+ }
+ }
}
diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog
index d42a2f1564de79..69ecd92ba61c8e 100644
--- a/deps/v8/ChangeLog
+++ b/deps/v8/ChangeLog
@@ -1,3 +1,979 @@
+2015-03-30: Version 4.3.61
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-28: Version 4.3.60
+
+ Reland^2 "Filter invalid slots out from the SlotsBuffer after marking."
+ (Chromium issues 454297, 470801).
+
+ This fixes missing incremental write barrier issue when double fields
+ unboxing is enabled (Chromium issue 469146).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-27: Version 4.3.59
+
+ Use a slot that is located on a heap page when removing invalid entries
+ from the SlotsBuffer (Chromium issue 470801).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-26: Version 4.3.58
+
+ Return timestamp of the last recorded interval to the caller of
+ HeapProfiler::GetHeapStats (Chromium issue 467222).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-26: Version 4.3.57
+
+ Reland [V8] Removed SourceLocationRestrict (Chromium issue 468781).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-25: Version 4.3.56
+
+ Remove v8::Isolate::ClearInterrupt.
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-25: Version 4.3.55
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-24: Version 4.3.54
+
+ Do not assign positions to parser-generated desugarings (Chromium issue
+ 468661).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-24: Version 4.3.53
+
+ Filter invalid slots out from the SlotsBuffer after marking (Chromium
+ issue 454297).
+
+ Fix OOM bug 3976 (issue 3976).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-24: Version 4.3.52
+
+ Remove calls to IdleNotification().
+
+ Save heap object tracking data in heap snapshot (Chromium issue 467222).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-24: Version 4.3.51
+
+ [V8] Removed SourceLocationRestrict (Chromium issue 468781).
+
+ [turbofan] Fix control reducer bug with walking non-control edges during
+ ConnectNTL phase (Chromium issue 469605).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-23: Version 4.3.50
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-23: Version 4.3.49
+
+ Ensure we don't overflow in BCE (Chromium issue 469148).
+
+ [turbofan] Fix lowering of Math.max for integral inputs (Chromium issue
+ 468162).
+
+ Use libdl to get symbols for backtraces.
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-19: Version 4.3.48
+
+ Clarify what APIs return Maybe and MaybeLocal values (issue 3929).
+
+ Introduce explicit constant for per Context debug data set by embedder
+ (Chromium issue 466631).
+
+ Adjust key behaviour for weak collections (issues 3970, 3971, Chromium
+ issue 460083).
+
+ Turn on overapproximation of the weak closure (issue 3862).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-18: Version 4.3.47
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-17: Version 4.3.46
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-17: Version 4.3.45
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-17: Version 4.3.44
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-16: Version 4.3.43
+
+ Bugfix in hydrogen GVN (Chromium issue 467481).
+
+ Remove obsolete TakeHeapSnapshot method from API (Chromium issue
+ 465651).
+
+ Beautify syntax error for unterminated argument list (Chromium issue
+ 339474).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-16: Version 4.3.42
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-15: Version 4.3.41
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-14: Version 4.3.40
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-14: Version 4.3.39
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-14: Version 4.3.38
+
+ Remove --harmony-scoping flag.
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-13: Version 4.3.37
+
+ Implement TDZ in StoreIC for top-level lexicals (issue 3941).
+
+ Turn on job-based optimizing compiler (issue 3608).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-13: Version 4.3.36
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-12: Version 4.3.35
+
+ Add Cast() for Int32 and Uint32 (Chromium issue 462402).
+
+ Incorrect handling of HTransitionElementsKind in hydrogen check
+ elimination phase fixed (Chromium issue 460917).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-12: Version 4.3.34
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-12: Version 4.3.33
+
+ Fix the toolchain used to build the snapshots in GN (Chromium issues
+ 395249, 465456).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-11: Version 4.3.32
+
+ Reland of Remove slots that point to unboxed doubles from the
+ StoreBuffer/SlotsBuffer (Chromium issues 454297, 465273).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-11: Version 4.3.31
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-11: Version 4.3.30
+
+ Remove uid and title from HeapSnapshot (Chromium issue 465651).
+
+ Remove deprecated CpuProfiler methods.
+
+ [turbofan] Fix --turbo-osr for OSRing into inner loop inside for-in
+ (Chromium issue 462775).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-10: Version 4.3.29
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-10: Version 4.3.28
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-10: Version 4.3.27
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-07: Version 4.3.26
+
+ Remove slots that point to unboxed doubles from the
+ StoreBuffer/SlotsBuffer (Chromium issue 454297).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-06: Version 4.3.25
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-06: Version 4.3.24
+
+ convert more things to maybe (issue 3929).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-05: Version 4.3.23
+
+ [V8] Use Function.name for stack frames in v8::StackTrace (Chromium
+ issue 17356).
+
+ Allow passing sourceMapUrl when compiling scripts (Chromium issue
+ 462572).
+
+ convert compile functions to use maybe (issue 3929).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-05: Version 4.3.22
+
+ give UniquePersistent full move semantics (issue 3669).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-05: Version 4.3.21
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-04: Version 4.3.20
+
+ convert remaining object functions to maybes (issue 3929).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-04: Version 4.3.19
+
+ ARM assembler: fix undefined behaviour in fits_shifter (Chromium issues
+ 444089, 463436).
+
+ Implement subclassing Arrays (issue 3930).
+
+ [es6] Fix for-const loops (issue 3983).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-04: Version 4.3.18
+
+ Implement subclassing Arrays (issue 3930).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-04: Version 4.3.17
+
+ Implement subclassing Arrays (issue 3930).
+
+ convert more object functions to return maybes (issue 3929).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-03: Version 4.3.16
+
+ check for null context on execution entry (issue 3929).
+
+ convert object::* to return maybe values (issue 3929).
+
+ Removed funky Maybe constructor and made fields private (issue 3929).
+
+ Polish Maybe API a bit, removing useless creativity and fixing some
+ signatures (issue 3929).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-02: Version 4.3.15
+
+ Performance and stability improvements on all platforms.
+
+
+2015-03-02: Version 4.3.14
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-28: Version 4.3.13
+
+ Disallow subclassing Arrays (issue 3930).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-28: Version 4.3.12
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-27: Version 4.3.11
+
+ Disallow subclassing Arrays (issue 3930).
+
+ convert Value::*Value() function to return Maybe results (issue 3929).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-27: Version 4.3.10
+
+ Convert v8::Value::To* to use MaybeLocal (issue 3929).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-26: Version 4.3.9
+
+ Add public version macros (issue 3075).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-26: Version 4.3.8
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-25: Version 4.3.7
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-25: Version 4.3.6
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-25: Version 4.3.5
+
+ Turn on job based recompilation (issue 3608).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-24: Version 4.3.4
+
+ Reland "Correctly propagate terminate exception in TryCall." (issue
+ 3892).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-24: Version 4.3.3
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-24: Version 4.3.2
+
+ Update GN build files with the cpu_arch -> current_cpu change.
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-23: Version 4.3.1
+
+ Limit size of first page based on serialized data (Chromium issue
+ 453111).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-19: Version 4.2.77
+
+ Make generator constructors configurable (issue 3902).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-19: Version 4.2.76
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-18: Version 4.2.75
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-18: Version 4.2.74
+
+ Correctly propagate terminate exception in TryCall (issue 3892).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-17: Version 4.2.73
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-17: Version 4.2.72
+
+ [turbofan] Fix control reducer with re-reducing branches (Chromium issue
+ 458876).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-16: Version 4.2.71
+
+ Implement ES6 rest parameters (issue 2159).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-13: Version 4.2.70
+
+ new classes: no longer experimental (issue 3834).
+
+ Make it possible to define arguments for CompileFunctionInContext.
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-12: Version 4.2.69
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-11: Version 4.2.68
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-11: Version 4.2.67
+
+ Throw on range error when creating a string via API (issue 3853).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-11: Version 4.2.66
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-10: Version 4.2.65
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-10: Version 4.2.64
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-10: Version 4.2.63
+
+ Introduce a compile method that takes context extensions (Chromium issue
+ 456192).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-09: Version 4.2.62
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-09: Version 4.2.61
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-07: Version 4.2.60
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-07: Version 4.2.59
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-07: Version 4.2.58
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-06: Version 4.2.57
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-06: Version 4.2.56
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-06: Version 4.2.55
+
+ Protect against uninitialized lexical variables at top-level (Chromium
+ issue 452510).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-05: Version 4.2.54
+
+ Fix HConstant(double, ...) constructor (issue 3865).
+
+ Add NativeWeakMap to v8.h (Chromium issue 437416).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-05: Version 4.2.53
+
+ Fix issue with multiple properties and emit store (issue 3856).
+
+ Class methods should be non enumerable (issue 3330).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-04: Version 4.2.52
+
+ Add WeakKeyMap to v8.h (Chromium issue 437416).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-04: Version 4.2.51
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-03: Version 4.2.50
+
+ Reset inlining limits due to overly long compilation times in
+ Speedometer, Dart2JS (Chromium issue 454625).
+
+ Add WeakMap to v8.h (Chromium issue 437416).
+
+ [V8] Added line, column and script symbols for SyntaxError (Chromium
+ issue 443140).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-03: Version 4.2.49
+
+ Compute the same hash for all NaN values (issue 3859).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-03: Version 4.2.48
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-02: Version 4.2.47
+
+ Check global object behind global proxy for extensibility (Chromium
+ issue 454091).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-02: Version 4.2.46
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-02: Version 4.2.45
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-01: Version 4.2.44
+
+ Performance and stability improvements on all platforms.
+
+
+2015-02-01: Version 4.2.43
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-31: Version 4.2.42
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-31: Version 4.2.41
+
+ Layout descriptor sharing issue fixed (issue 3832, Chromium issue
+ 437713).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-30: Version 4.2.40
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-30: Version 4.2.38
+
+ Move object literal checking into checker classes (issue 3819).
+
+ [turbofan] Fix OSR compilations of for-in.
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-30: Version 4.2.37
+
+ Do not create unresolved variables when parsing arrow functions lazily
+ (issue 3501).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-29: Version 4.2.36
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-29: Version 4.2.35
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-28: Version 4.2.34
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-28: Version 4.2.33
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-27: Version 4.2.32
+
+ Do not generalize field representations when making elements kind or
+ observed transition (Chromium issue 448711).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-27: Version 4.2.31
+
+ [x86] Disable AVX unless the operating system explicitly claims to
+ support it (issue 3846, Chromium issue 452033).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-27: Version 4.2.30
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-26: Version 4.2.29
+
+ MIPS: Fixed Hydrogen environment handling for mul-i ARM and ARM64 (issue
+ 451322).
+
+ [turbofan] Simplify reduction if IfTrue and IfFalse and fix bugs
+ (Chromium issue 451958).
+
+ Add HeapNumber fast path to v8::Value::{Uint,Int}32Value() (Chromium
+ issue 446097).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-26: Version 4.2.28
+
+ Fixed Hydrogen environment handling for mul-i on ARM and ARM64 (issue
+ 451322).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-25: Version 4.2.27
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-24: Version 4.2.26
+
+ ES6 Array.prototype.toString falls back on Object.prototype.toString if
+ method "join" is not callable (issue 3793).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-23: Version 4.2.25
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-23: Version 4.2.24
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-23: Version 4.2.23
+
+ [x86] Blacklist AVX for Windows versions before 6.1 (Windows 7) (issue
+ 3846).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-23: Version 4.2.22
+
+ Fix run-time ARMv6 detection (issue 3844).
+
+ Support concatenating with zero-size arrays with DICTIONARY_ELEMENTS in
+ Runtime_ArrayConcat (Chromium issue 450895).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-22: Version 4.2.21
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-22: Version 4.2.20
+
+ Add a pretty printer to improve the error message non-function calls
+ (Chromium issue 259443).
+
+ Remove implicit uint8_t to char cast in string replace (Chromium issue
+ 446196).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-21: Version 4.2.19
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-20: Version 4.2.18
+
+ Fix issue with __proto__ when using ES6 object literals (issue 3818).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-20: Version 4.2.17
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-20: Version 4.2.16
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-19: Version 4.2.15
+
+ Unobscurified OFStream (Chromium issue 448102).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-19: Version 4.2.14
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-18: Version 4.2.13
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-18: Version 4.2.12
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-17: Version 4.2.11
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-16: Version 4.2.10
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-16: Version 4.2.9
+
+ MIPS: ES6 computed property names (issue 3754).
+
+ ES6 computed property names (issue 3754).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-15: Version 4.2.8
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-15: Version 4.2.7
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-15: Version 4.2.6
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-15: Version 4.2.5
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-14: Version 4.2.4
+
+ Auto-generate v8 version based on tags (Chromium issue 446166).
+
+ Remove support for signatures with arguments.
+
+ Add proper support for proxies to HType (Chromium issue 448730).
+
+ [turbofan] Fix truncation/representation sloppiness wrt. bool/bit (issue
+ 3812).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-14: Version 4.2.3
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-14: Version 4.2.2
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-14: Version 4.2.1
+
+ Map -0 to integer 0 for typed array constructors (Chromium issue
+ 447756).
+
+ Introduce a gyp variable to control whether or not slow dchecks are on.
+
+ Correctly setup the freelist of the coderange on Win64 (Chromium issue
+ 447555).
+
+ Fast forward V8 to version 4.2.
+
+ Remove "extra checks".
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-08: Version 3.32.7
+
+ Correctly parse line ends for debugging (issue 2825).
+
+ Fixed printing during DCE (issue 3679).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-08: Version 3.32.6
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-08: Version 3.32.5
+
+ Correct handling of exceptions occured during getting of exception stack
+ trace (Chromium issue 444805).
+
+ Fix bug in Runtime_CompileOptimized resulting from stack overflow
+ (Chromium issue 446774).
+
+ Turn on job-based recompilation (issue 3608).
+
+ Performance and stability improvements on all platforms.
+
+
+2015-01-07: Version 3.32.4
+
+ Performance and stability improvements on all platforms.
+
+
2015-01-07: Version 3.32.3
Performance and stability improvements on all platforms.
diff --git a/deps/v8/DEPS b/deps/v8/DEPS
index a81c7ecc38837f..42606acfcfbbf0 100644
--- a/deps/v8/DEPS
+++ b/deps/v8/DEPS
@@ -8,17 +8,17 @@ vars = {
deps = {
"v8/build/gyp":
- Var("git_url") + "/external/gyp.git" + "@" + "fe00999dfaee449d3465a9316778434884da4fa7", # from svn revision 2010
+ Var("git_url") + "/external/gyp.git" + "@" + "d174d75bf69c682cb62af9187879e01513b35e52",
"v8/third_party/icu":
- Var("git_url") + "/chromium/deps/icu.git" + "@" + "51c1a4ce5f362676aa1f1cfdb5b7e52edabfa5aa",
+ Var("git_url") + "/chromium/deps/icu.git" + "@" + "7c81740601355556e630da515b74d889ba2f8d08",
"v8/buildtools":
- Var("git_url") + "/chromium/buildtools.git" + "@" + "23a4e2f545c7b6340d7e5a2b74801941b0a86535",
+ Var("git_url") + "/chromium/buildtools.git" + "@" + "3b302fef93f7cc58d9b8168466905237484b2772",
"v8/testing/gtest":
- Var("git_url") + "/external/googletest.git" + "@" + "8245545b6dc9c4703e6496d1efd19e975ad2b038", # from svn revision 700
+ Var("git_url") + "/external/googletest.git" + "@" + "be1868139ffe0ccd0e8e3b37292b84c821d9c8ad",
"v8/testing/gmock":
Var("git_url") + "/external/googlemock.git" + "@" + "29763965ab52f24565299976b936d1265cb6a271", # from svn revision 501
"v8/tools/clang":
- Var("git_url") + "/chromium/src/tools/clang.git" + "@" + "c945be21f6485fa177b43814f910b76cce921653",
+ Var("git_url") + "/chromium/src/tools/clang.git" + "@" + "ea2f0a2d96ffc6f5a51c034db704ccc1a6543156",
}
deps_os = {
@@ -46,6 +46,17 @@ skip_child_includes = [
]
hooks = [
+ {
+ # This clobbers when necessary (based on get_landmines.py). It must be the
+ # first hook so that other things that get/generate into the output
+ # directory will not subsequently be clobbered.
+ 'name': 'landmines',
+ 'pattern': '.',
+ 'action': [
+ 'python',
+ 'v8/build/landmines.py',
+ ],
+ },
# Pull clang-format binaries using checked-in hashes.
{
"name": "clang_format_win",
@@ -80,6 +91,17 @@ hooks = [
"-s", "v8/buildtools/linux64/clang-format.sha1",
],
},
+ # Pull binutils for linux, enabled debug fission for faster linking /
+ # debugging when used with clang on Ubuntu Precise.
+ # https://code.google.com/p/chromium/issues/detail?id=352046
+ {
+ 'name': 'binutils',
+ 'pattern': 'v8/third_party/binutils',
+ 'action': [
+ 'python',
+ 'v8/third_party/binutils/download.py',
+ ],
+ },
{
# Pull clang if needed or requested via GYP_DEFINES.
# Note: On Win, this should run after win_toolchain, as it may use it.
diff --git a/deps/v8/Makefile b/deps/v8/Makefile
index 606b5d7bf176ab..055a57d286c9ce 100644
--- a/deps/v8/Makefile
+++ b/deps/v8/Makefile
@@ -27,8 +27,6 @@
# Variable default definitions. Override them by exporting them in your shell.
-CXX ?= g++
-LINK ?= g++
OUTDIR ?= out
TESTJOBS ?=
GYPFLAGS ?=
@@ -87,10 +85,17 @@ ifeq ($(snapshot), external)
endif
# extrachecks=on/off
ifeq ($(extrachecks), on)
- GYPFLAGS += -Dv8_enable_extra_checks=1 -Dv8_enable_handle_zapping=1
+ GYPFLAGS += -Ddcheck_always_on=1 -Dv8_enable_handle_zapping=1
endif
ifeq ($(extrachecks), off)
- GYPFLAGS += -Dv8_enable_extra_checks=0 -Dv8_enable_handle_zapping=0
+ GYPFLAGS += -Ddcheck_always_on=0 -Dv8_enable_handle_zapping=0
+endif
+# slowdchecks=on/off
+ifeq ($(slowdchecks), on)
+ GYPFLAGS += -Dv8_enable_slow_dchecks=1
+endif
+ifeq ($(slowdchecks), off)
+ GYPFLAGS += -Dv8_enable_slow_dchecks=0
endif
# gdbjit=on/off
ifeq ($(gdbjit), on)
@@ -103,10 +108,6 @@ endif
ifeq ($(vtunejit), on)
GYPFLAGS += -Dv8_enable_vtunejit=1
endif
-# optdebug=on
-ifeq ($(optdebug), on)
- GYPFLAGS += -Dv8_optimized_debug=2
-endif
# unalignedaccess=on
ifeq ($(unalignedaccess), on)
GYPFLAGS += -Dv8_can_use_unaligned_accesses=true
@@ -144,19 +145,17 @@ endif
ifeq ($(deprecationwarnings), on)
GYPFLAGS += -Dv8_deprecation_warnings=1
endif
-# asan=/path/to/clang++
-ifneq ($(strip $(asan)),)
- GYPFLAGS += -Dasan=1
- export CC=$(dir $(asan))clang
- export CXX=$(asan)
- export CXX_host=$(asan)
- export LINK=$(asan)
- export ASAN_SYMBOLIZER_PATH=$(dir $(asan))llvm-symbolizer
+# asan=on
+ifeq ($(asan), on)
+ GYPFLAGS += -Dasan=1 -Dclang=1
TESTFLAGS += --asan
ifeq ($(lsan), on)
GYPFLAGS += -Dlsan=1
endif
endif
+ifdef embedscript
+ GYPFLAGS += -Dembed_script=$(embedscript)
+endif
# arm specific flags.
# arm_version=
@@ -214,8 +213,6 @@ ifeq ($(arm_test_noprobe), on)
endif
# ----------------- available targets: --------------------
-# - "builddeps": pulls in external dependencies for building
-# - "dependencies": pulls in all external dependencies
# - "grokdump": rebuilds heap constants lists used by grokdump
# - any arch listed in ARCHES (see below)
# - any mode listed in MODES
@@ -233,17 +230,24 @@ endif
# Architectures and modes to be compiled. Consider these to be internal
# variables, don't override them (use the targets instead).
-ARCHES = ia32 x64 x32 arm arm64 mips mipsel mips64el x87
+ARCHES = ia32 x64 x32 arm arm64 mips mipsel mips64el x87 ppc ppc64
DEFAULT_ARCHES = ia32 x64 arm
MODES = release debug optdebug
DEFAULT_MODES = release debug
-ANDROID_ARCHES = android_ia32 android_arm android_arm64 android_mipsel android_x87
+ANDROID_ARCHES = android_ia32 android_x64 android_arm android_arm64 \
+ android_mipsel android_x87
NACL_ARCHES = nacl_ia32 nacl_x64
# List of files that trigger Makefile regeneration:
-GYPFILES = build/all.gyp build/features.gypi build/standalone.gypi \
- build/toolchain.gypi samples/samples.gyp src/d8.gyp \
- test/cctest/cctest.gyp test/unittests/unittests.gyp tools/gyp/v8.gyp
+GYPFILES = third_party/icu/icu.gypi third_party/icu/icu.gyp \
+ build/shim_headers.gypi build/features.gypi build/standalone.gypi \
+ build/toolchain.gypi build/all.gyp build/mac/asan.gyp \
+ build/android.gypi test/cctest/cctest.gyp \
+ test/unittests/unittests.gyp tools/gyp/v8.gyp \
+ tools/parser-shell.gyp testing/gmock.gyp testing/gtest.gyp \
+ buildtools/third_party/libc++abi/libc++abi.gyp \
+ buildtools/third_party/libc++/libc++.gyp samples/samples.gyp \
+ src/third_party/vtune/v8vtune.gyp src/d8.gyp
# If vtunejit=on, the v8vtune.gyp will be appended.
ifeq ($(vtunejit), on)
@@ -291,7 +295,6 @@ $(ARCHES): $(addprefix $$@.,$(DEFAULT_MODES))
# Defines how to build a particular target (e.g. ia32.release).
$(BUILDS): $(OUTDIR)/Makefile.$$@
@$(MAKE) -C "$(OUTDIR)" -f Makefile.$@ \
- CXX="$(CXX)" LINK="$(LINK)" \
BUILDTYPE=$(shell echo $(subst .,,$(suffix $@)) | \
python -c "print \
raw_input().replace('opt', '').capitalize()") \
@@ -299,7 +302,7 @@ $(BUILDS): $(OUTDIR)/Makefile.$$@
native: $(OUTDIR)/Makefile.native
@$(MAKE) -C "$(OUTDIR)" -f Makefile.native \
- CXX="$(CXX)" LINK="$(LINK)" BUILDTYPE=Release \
+ BUILDTYPE=Release \
builddir="$(shell pwd)/$(OUTDIR)/$@"
$(ANDROID_ARCHES): $(addprefix $$@.,$(MODES))
@@ -423,6 +426,7 @@ $(OUT_MAKEFILES): $(GYPFILES) $(ENVFILE)
$(eval CXX_TARGET_ARCH:=$(shell $(CXX) -v 2>&1 | grep ^Target: | \
cut -f 2 -d " " | cut -f 1 -d "-" ))
$(eval CXX_TARGET_ARCH:=$(subst aarch64,arm64,$(CXX_TARGET_ARCH)))
+ $(eval CXX_TARGET_ARCH:=$(subst x86_64,x64,$(CXX_TARGET_ARCH)))
$(eval V8_TARGET_ARCH:=$(subst .,,$(suffix $(basename $@))))
PYTHONPATH="$(shell pwd)/tools/generate_shim_headers:$(shell pwd)/build:$(PYTHONPATH):$(shell pwd)/build/gyp/pylib:$(PYTHONPATH)" \
GYP_GENERATORS=make \
@@ -431,7 +435,7 @@ $(OUT_MAKEFILES): $(GYPFILES) $(ENVFILE)
-Dv8_target_arch=$(V8_TARGET_ARCH) \
$(if $(findstring $(CXX_TARGET_ARCH),$(V8_TARGET_ARCH)), \
-Dtarget_arch=$(V8_TARGET_ARCH),) \
- $(if $(findstring optdebug,$@),-Dv8_optimized_debug=2,) \
+ $(if $(findstring optdebug,$@),-Dv8_optimized_debug=1,) \
-S$(suffix $(basename $@))$(suffix $@) $(GYPFLAGS)
$(OUTDIR)/Makefile.native: $(GYPFILES) $(ENVFILE)
@@ -468,8 +472,11 @@ $(ENVFILE): $(ENVFILE).new
# Stores current GYPFLAGS in a file.
$(ENVFILE).new:
- @mkdir -p $(OUTDIR); echo "GYPFLAGS=$(GYPFLAGS)" > $(ENVFILE).new; \
- echo "CXX=$(CXX)" >> $(ENVFILE).new
+ $(eval CXX_TARGET_ARCH:=$(shell $(CXX) -v 2>&1 | grep ^Target: | \
+ cut -f 2 -d " " | cut -f 1 -d "-" ))
+ $(eval CXX_TARGET_ARCH:=$(subst aarch64,arm64,$(CXX_TARGET_ARCH)))
+ $(eval CXX_TARGET_ARCH:=$(subst x86_64,x64,$(CXX_TARGET_ARCH)))
+ @mkdir -p $(OUTDIR); echo "GYPFLAGS=$(GYPFLAGS) -Dtarget_arch=$(CXX_TARGET_ARCH)" > $(ENVFILE).new;
# Heap constants for grokdump.
DUMP_FILE = tools/v8heapconst.py
@@ -489,26 +496,5 @@ GPATH GRTAGS GSYMS GTAGS: gtags.files $(shell cat gtags.files 2> /dev/null)
gtags.clean:
rm -f gtags.files GPATH GRTAGS GSYMS GTAGS
-# Dependencies. "builddeps" are dependencies required solely for building,
-# "dependencies" includes also dependencies required for development.
-# Remember to keep these in sync with the DEPS file.
-builddeps:
- svn checkout --force https://gyp.googlecode.com/svn/trunk build/gyp \
- --revision 1831
- if svn info third_party/icu 2>&1 | grep -q icu46 ; then \
- svn switch --force \
- https://src.chromium.org/chrome/trunk/deps/third_party/icu52 \
- third_party/icu --revision 277999 ; \
- else \
- svn checkout --force \
- https://src.chromium.org/chrome/trunk/deps/third_party/icu52 \
- third_party/icu --revision 277999 ; \
- fi
- svn checkout --force https://googletest.googlecode.com/svn/trunk \
- testing/gtest --revision 692
- svn checkout --force https://googlemock.googlecode.com/svn/trunk \
- testing/gmock --revision 485
-
-dependencies: builddeps
- # The spec is a copy of the hooks in v8's DEPS file.
- gclient sync -r fb782d4369d5ae04f17a2fceef7de5a63e50f07b --spec="solutions = [{u'managed': False, u'name': u'buildtools', u'url': u'https://chromium.googlesource.com/chromium/buildtools.git', u'custom_deps': {}, u'custom_hooks': [{u'name': u'clang_format_win',u'pattern': u'.',u'action': [u'download_from_google_storage',u'--no_resume',u'--platform=win32',u'--no_auth',u'--bucket',u'chromium-clang-format',u'-s',u'buildtools/win/clang-format.exe.sha1']},{u'name': u'clang_format_mac',u'pattern': u'.',u'action': [u'download_from_google_storage',u'--no_resume',u'--platform=darwin',u'--no_auth',u'--bucket',u'chromium-clang-format',u'-s',u'buildtools/mac/clang-format.sha1']},{u'name': u'clang_format_linux',u'pattern': u'.',u'action': [u'download_from_google_storage',u'--no_resume',u'--platform=linux*',u'--no_auth',u'--bucket',u'chromium-clang-format',u'-s',u'buildtools/linux64/clang-format.sha1']}],u'deps_file': u'.DEPS.git', u'safesync_url': u''}]"
+dependencies builddeps:
+ $(error Use 'gclient sync' instead)
diff --git a/deps/v8/Makefile.android b/deps/v8/Makefile.android
index 2a3640382b4f5f..f89fd21fda004e 100644
--- a/deps/v8/Makefile.android
+++ b/deps/v8/Makefile.android
@@ -26,7 +26,8 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Those definitions should be consistent with the main Makefile
-ANDROID_ARCHES = android_ia32 android_arm android_arm64 android_mipsel android_x87
+ANDROID_ARCHES = android_ia32 android_x64 android_arm android_arm64 \
+ android_mipsel android_x87
MODES = release debug
# Generates all combinations of ANDROID ARCHES and MODES,
@@ -66,6 +67,11 @@ else ifeq ($(ARCH), android_ia32)
TOOLCHAIN_ARCH = x86
TOOLCHAIN_PREFIX = i686-linux-android
TOOLCHAIN_VER = 4.8
+else ifeq ($(ARCH), android_x64)
+ DEFINES = target_arch=x64 v8_target_arch=x64 android_target_arch=x86_64 android_target_platform=21
+ TOOLCHAIN_ARCH = x86_64
+ TOOLCHAIN_PREFIX = x86_64-linux-android
+ TOOLCHAIN_VER = 4.9
else ifeq ($(ARCH), android_x87)
DEFINES = target_arch=x87 v8_target_arch=x87 android_target_arch=x86 android_target_platform=14
TOOLCHAIN_ARCH = x86
diff --git a/deps/v8/OWNERS b/deps/v8/OWNERS
index 22a05cb17782de..d6db77ffe0f0d0 100644
--- a/deps/v8/OWNERS
+++ b/deps/v8/OWNERS
@@ -1,4 +1,5 @@
adamk@chromium.org
+arv@chromium.org
bmeurer@chromium.org
danno@chromium.org
dcarney@chromium.org
diff --git a/deps/v8/PRESUBMIT.py b/deps/v8/PRESUBMIT.py
index 040972e8da2db9..5b3d58d3ba50d8 100644
--- a/deps/v8/PRESUBMIT.py
+++ b/deps/v8/PRESUBMIT.py
@@ -69,6 +69,7 @@ def _V8PresubmitChecks(input_api, output_api):
from presubmit import SourceProcessor
from presubmit import CheckRuntimeVsNativesNameClashes
from presubmit import CheckExternalReferenceRegistration
+ from presubmit import CheckAuthorizedAuthor
results = []
if not CppLintProcessor().Run(input_api.PresubmitLocalPath()):
@@ -83,6 +84,7 @@ def _V8PresubmitChecks(input_api, output_api):
if not CheckExternalReferenceRegistration(input_api.PresubmitLocalPath()):
results.append(output_api.PresubmitError(
"External references registration check failed"))
+ results.extend(CheckAuthorizedAuthor(input_api, output_api))
return results
@@ -243,14 +245,17 @@ def GetPreferredTryMasters(project, change):
'tryserver.v8': {
'v8_linux_rel': set(['defaulttests']),
'v8_linux_dbg': set(['defaulttests']),
- 'v8_linux_nosnap_rel': set(['defaulttests']),
+ 'v8_linux_nodcheck_rel': set(['defaulttests']),
+ 'v8_linux_gcc_compile_rel': set(['defaulttests']),
'v8_linux64_rel': set(['defaulttests']),
- 'v8_linux_arm_dbg': set(['defaulttests']),
+ 'v8_linux64_asan_rel': set(['defaulttests']),
+ 'v8_win_rel': set(['defaulttests']),
+ 'v8_win_compile_dbg': set(['defaulttests']),
+ 'v8_win64_rel': set(['defaulttests']),
+ 'v8_mac_rel': set(['defaulttests']),
+ 'v8_linux_arm_rel': set(['defaulttests']),
'v8_linux_arm64_rel': set(['defaulttests']),
- 'v8_linux_layout_dbg': set(['defaulttests']),
+ 'v8_android_arm_compile_rel': set(['defaulttests']),
'v8_linux_chromium_gn_rel': set(['defaulttests']),
- 'v8_mac_rel': set(['defaulttests']),
- 'v8_win_rel': set(['defaulttests']),
- 'v8_win64_compile_rel': set(['defaulttests']),
},
}
diff --git a/deps/v8/README.md b/deps/v8/README.md
index bc1685affa99dc..5cd4b5811a4b47 100644
--- a/deps/v8/README.md
+++ b/deps/v8/README.md
@@ -18,13 +18,13 @@ Getting the Code
Checkout [depot tools](http://www.chromium.org/developers/how-tos/install-depot-tools), and run
-> `fetch v8`
+ fetch v8
This will checkout V8 into the directory `v8` and fetch all of its dependencies.
To stay up to date, run
-> `git pull origin`
-> `gclient sync`
+ git pull origin
+ gclient sync
For fetching all branches, add the following into your remote
configuration in `.git/config`:
diff --git a/deps/v8/build/android.gypi b/deps/v8/build/android.gypi
index 5d3b25a74636ca..533250e7f56f96 100644
--- a/deps/v8/build/android.gypi
+++ b/deps/v8/build/android.gypi
@@ -43,7 +43,13 @@
'android_stlport': '<(android_toolchain)/sources/cxx-stl/stlport/',
},
'android_include': '<(android_sysroot)/usr/include',
- 'android_lib': '<(android_sysroot)/usr/lib',
+ 'conditions': [
+ ['target_arch=="x64"', {
+ 'android_lib': '<(android_sysroot)/usr/lib64',
+ }, {
+ 'android_lib': '<(android_sysroot)/usr/lib',
+ }],
+ ],
'android_stlport_include': '<(android_stlport)/stlport',
'android_stlport_libs': '<(android_stlport)/libs',
}, {
@@ -52,7 +58,13 @@
'android_stlport': '<(android_ndk_root)/sources/cxx-stl/stlport/',
},
'android_include': '<(android_sysroot)/usr/include',
- 'android_lib': '<(android_sysroot)/usr/lib',
+ 'conditions': [
+ ['target_arch=="x64"', {
+ 'android_lib': '<(android_sysroot)/usr/lib64',
+ }, {
+ 'android_lib': '<(android_sysroot)/usr/lib',
+ }],
+ ],
'android_stlport_include': '<(android_stlport)/stlport',
'android_stlport_libs': '<(android_stlport)/libs',
}],
@@ -227,7 +239,7 @@
'target_conditions': [
['_type=="executable"', {
'conditions': [
- ['target_arch=="arm64"', {
+ ['target_arch=="arm64" or target_arch=="x64"', {
'ldflags': [
'-Wl,-dynamic-linker,/system/bin/linker64',
],
diff --git a/deps/v8/build/detect_v8_host_arch.py b/deps/v8/build/detect_v8_host_arch.py
index 3460a9a404f0ce..89e8286e1fdb45 100644
--- a/deps/v8/build/detect_v8_host_arch.py
+++ b/deps/v8/build/detect_v8_host_arch.py
@@ -41,6 +41,7 @@ def DoMain(_):
"""Hook to be called from gyp without starting a separate python
interpreter."""
host_arch = platform.machine()
+ host_system = platform.system();
# Convert machine type to format recognized by gyp.
if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
@@ -56,6 +57,13 @@ def DoMain(_):
elif host_arch.startswith('mips'):
host_arch = 'mipsel'
+ # Under AIX the value returned by platform.machine is not
+ # the best indicator of the host architecture
+ # AIX 6.1 which is the lowest level supported only provides
+ # a 64 bit kernel
+ if host_system == 'AIX':
+ host_arch = 'ppc64'
+
# platform.machine is based on running kernel. It's possible to use 64-bit
# kernel with 32-bit userland, e.g. to give linker slightly more memory.
# Distinguish between different userland bitness by querying
diff --git a/deps/v8/build/features.gypi b/deps/v8/build/features.gypi
index 465eba91480d72..5c60273a61bc6f 100644
--- a/deps/v8/build/features.gypi
+++ b/deps/v8/build/features.gypi
@@ -102,13 +102,9 @@
'DebugBaseCommon': {
'abstract': 1,
'variables': {
- 'v8_enable_extra_checks%': 1,
- 'v8_enable_handle_zapping%': 1,
+ 'v8_enable_handle_zapping%': 0,
},
'conditions': [
- ['v8_enable_extra_checks==1', {
- 'defines': ['ENABLE_EXTRA_CHECKS',],
- }],
['v8_enable_handle_zapping==1', {
'defines': ['ENABLE_HANDLE_ZAPPING',],
}],
@@ -116,13 +112,9 @@
}, # Debug
'Release': {
'variables': {
- 'v8_enable_extra_checks%': 0,
- 'v8_enable_handle_zapping%': 0,
+ 'v8_enable_handle_zapping%': 1,
},
'conditions': [
- ['v8_enable_extra_checks==1', {
- 'defines': ['ENABLE_EXTRA_CHECKS',],
- }],
['v8_enable_handle_zapping==1', {
'defines': ['ENABLE_HANDLE_ZAPPING',],
}],
diff --git a/deps/v8/build/get_landmines.py b/deps/v8/build/get_landmines.py
index 66a86cbb5082ad..f61c04de44361e 100755
--- a/deps/v8/build/get_landmines.py
+++ b/deps/v8/build/get_landmines.py
@@ -20,6 +20,7 @@ def main():
print 'Activating MSVS 2013.'
print 'Revert activation of MSVS 2013.'
print 'Activating MSVS 2013 again.'
+ print 'Clobber after ICU roll.'
return 0
diff --git a/deps/v8/build/gyp_environment.py b/deps/v8/build/gyp_environment.py
new file mode 100644
index 00000000000000..f1cee6ef8e00f6
--- /dev/null
+++ b/deps/v8/build/gyp_environment.py
@@ -0,0 +1,52 @@
+# Copyright 2015 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Sets up various automatic gyp environment variables. These are used by
+gyp_v8 and landmines.py which run at different stages of runhooks. To
+make sure settings are consistent between them, all setup should happen here.
+"""
+
+import os
+import sys
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+V8_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+
+
+def apply_gyp_environment(file_path=None):
+ """
+ Reads in a *.gyp_env file and applies the valid keys to os.environ.
+ """
+ if not file_path or not os.path.exists(file_path):
+ return
+ file_contents = open(file_path).read()
+ try:
+ file_data = eval(file_contents, {'__builtins__': None}, None)
+ except SyntaxError, e:
+ e.filename = os.path.abspath(file_path)
+ raise
+ supported_vars = ( 'V8_GYP_FILE',
+ 'V8_GYP_SYNTAX_CHECK',
+ 'GYP_DEFINES',
+ 'GYP_GENERATOR_FLAGS',
+ 'GYP_GENERATOR_OUTPUT', )
+ for var in supported_vars:
+ val = file_data.get(var)
+ if val:
+ if var in os.environ:
+ print 'INFO: Environment value for "%s" overrides value in %s.' % (
+ var, os.path.abspath(file_path)
+ )
+ else:
+ os.environ[var] = val
+
+
+def set_environment():
+ """Sets defaults for GYP_* variables."""
+
+ if 'SKIP_V8_GYP_ENV' not in os.environ:
+ # Update the environment based on v8.gyp_env
+ gyp_env_path = os.path.join(os.path.dirname(V8_ROOT), 'v8.gyp_env')
+ apply_gyp_environment(gyp_env_path)
diff --git a/deps/v8/build/gyp_v8 b/deps/v8/build/gyp_v8
index 14467eccaad29f..1e8a5c806ecca6 100755
--- a/deps/v8/build/gyp_v8
+++ b/deps/v8/build/gyp_v8
@@ -31,6 +31,7 @@
# is invoked by V8 beyond what can be done in the gclient hooks.
import glob
+import gyp_environment
import os
import platform
import shlex
@@ -48,34 +49,6 @@ sys.path.insert(
1, os.path.abspath(os.path.join(v8_root, 'tools', 'generate_shim_headers')))
-def apply_gyp_environment(file_path=None):
- """
- Reads in a *.gyp_env file and applies the valid keys to os.environ.
- """
- if not file_path or not os.path.exists(file_path):
- return
- file_contents = open(file_path).read()
- try:
- file_data = eval(file_contents, {'__builtins__': None}, None)
- except SyntaxError, e:
- e.filename = os.path.abspath(file_path)
- raise
- supported_vars = ( 'V8_GYP_FILE',
- 'V8_GYP_SYNTAX_CHECK',
- 'GYP_DEFINES',
- 'GYP_GENERATOR_FLAGS',
- 'GYP_GENERATOR_OUTPUT', )
- for var in supported_vars:
- val = file_data.get(var)
- if val:
- if var in os.environ:
- print 'INFO: Environment value for "%s" overrides value in %s.' % (
- var, os.path.abspath(file_path)
- )
- else:
- os.environ[var] = val
-
-
def additional_include_files(args=[]):
"""
Returns a list of additional (.gypi) files to include, without
@@ -109,13 +82,6 @@ def additional_include_files(args=[]):
def run_gyp(args):
rc = gyp.main(args)
- # Check for landmines (reasons to clobber the build). This must be run here,
- # rather than a separate runhooks step so that any environment modifications
- # from above are picked up.
- print 'Running build/landmines.py...'
- subprocess.check_call(
- [sys.executable, os.path.join(script_dir, 'landmines.py')])
-
if rc != 0:
print 'Error running GYP'
sys.exit(rc)
@@ -124,10 +90,7 @@ def run_gyp(args):
if __name__ == '__main__':
args = sys.argv[1:]
- if 'SKIP_V8_GYP_ENV' not in os.environ:
- # Update the environment based on v8.gyp_env
- gyp_env_path = os.path.join(os.path.dirname(v8_root), 'v8.gyp_env')
- apply_gyp_environment(gyp_env_path)
+ gyp_environment.set_environment()
# This could give false positives since it doesn't actually do real option
# parsing. Oh well.
diff --git a/deps/v8/build/landmine_utils.py b/deps/v8/build/landmine_utils.py
index e8b7c98d5fc6a3..cb3499132a3c2f 100644
--- a/deps/v8/build/landmine_utils.py
+++ b/deps/v8/build/landmine_utils.py
@@ -47,10 +47,19 @@ def gyp_defines():
return dict(arg.split('=', 1)
for arg in shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+@memoize()
+def gyp_generator_flags():
+ """Parses and returns GYP_GENERATOR_FLAGS env var as a dictionary."""
+ return dict(arg.split('=', 1)
+ for arg in shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', '')))
+
+
@memoize()
def gyp_msvs_version():
return os.environ.get('GYP_MSVS_VERSION', '')
+
@memoize()
def distributor():
"""
diff --git a/deps/v8/build/landmines.py b/deps/v8/build/landmines.py
index bd1fb28f719c62..97c63901c1a904 100755
--- a/deps/v8/build/landmines.py
+++ b/deps/v8/build/landmines.py
@@ -4,10 +4,9 @@
# found in the LICENSE file.
"""
-This script runs every build as a hook. If it detects that the build should
-be clobbered, it will touch the file /.landmine_triggered. The
-various build scripts will then check for the presence of this file and clobber
-accordingly. The script will also emit the reasons for the clobber to stdout.
+This script runs every build as the first hook (See DEPS). If it detects that
+the build should be clobbered, it will delete the contents of the build
+directory.
A landmine is tripped when a builder checks out a different revision, and the
diff between the new landmines and the old ones is non-null. At this point, the
@@ -15,9 +14,13 @@
"""
import difflib
+import errno
+import gyp_environment
import logging
import optparse
import os
+import re
+import shutil
import sys
import subprocess
import time
@@ -28,46 +31,109 @@
SRC_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-def get_target_build_dir(build_tool, target):
+def get_build_dir(build_tool, is_iphone=False):
"""
Returns output directory absolute path dependent on build and targets.
Examples:
- r'c:\b\build\slave\win\build\src\out\Release'
- '/mnt/data/b/build/slave/linux/build/src/out/Debug'
- '/b/build/slave/ios_rel_device/build/src/xcodebuild/Release-iphoneos'
+ r'c:\b\build\slave\win\build\src\out'
+ '/mnt/data/b/build/slave/linux/build/src/out'
+ '/b/build/slave/ios_rel_device/build/src/xcodebuild'
Keep this function in sync with tools/build/scripts/slave/compile.py
"""
ret = None
if build_tool == 'xcode':
- ret = os.path.join(SRC_DIR, 'xcodebuild', target)
+ ret = os.path.join(SRC_DIR, 'xcodebuild')
elif build_tool in ['make', 'ninja', 'ninja-ios']: # TODO: Remove ninja-ios.
- ret = os.path.join(SRC_DIR, 'out', target)
+ if 'CHROMIUM_OUT_DIR' in os.environ:
+ output_dir = os.environ.get('CHROMIUM_OUT_DIR').strip()
+ if not output_dir:
+ raise Error('CHROMIUM_OUT_DIR environment variable is set but blank!')
+ else:
+ output_dir = landmine_utils.gyp_generator_flags().get('output_dir', 'out')
+ ret = os.path.join(SRC_DIR, output_dir)
elif build_tool in ['msvs', 'vs', 'ib']:
- ret = os.path.join(SRC_DIR, 'build', target)
+ ret = os.path.join(SRC_DIR, 'build')
else:
raise NotImplementedError('Unexpected GYP_GENERATORS (%s)' % build_tool)
return os.path.abspath(ret)
-def set_up_landmines(target, new_landmines):
- """Does the work of setting, planting, and triggering landmines."""
- out_dir = get_target_build_dir(landmine_utils.builder(), target)
-
- landmines_path = os.path.join(out_dir, '.landmines')
- if not os.path.exists(out_dir):
+def extract_gn_build_commands(build_ninja_file):
+ """Extracts from a build.ninja the commands to run GN.
+
+ The commands to run GN are the gn rule and build.ninja build step at the
+ top of the build.ninja file. We want to keep these when deleting GN builds
+ since we want to preserve the command-line flags to GN.
+
+ On error, returns the empty string."""
+ result = ""
+ with open(build_ninja_file, 'r') as f:
+ # Read until the second blank line. The first thing GN writes to the file
+ # is the "rule gn" and the second is the section for "build build.ninja",
+ # separated by blank lines.
+ num_blank_lines = 0
+ while num_blank_lines < 2:
+ line = f.readline()
+ if len(line) == 0:
+ return '' # Unexpected EOF.
+ result += line
+ if line[0] == '\n':
+ num_blank_lines = num_blank_lines + 1
+ return result
+
+def delete_build_dir(build_dir):
+ # GN writes a build.ninja.d file. Note that not all GN builds have args.gn.
+ build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d')
+ if not os.path.exists(build_ninja_d_file):
+ shutil.rmtree(build_dir)
return
- if not os.path.exists(landmines_path):
- print "Landmines tracker didn't exists."
-
- # FIXME(machenbach): Clobber deletes the .landmines tracker. Difficult
- # to know if we are right after a clobber or if it is first-time landmines
- # deployment. Also, a landmine-triggered clobber right after a clobber is
- # not possible. Different clobber methods for msvs, xcode and make all
- # have different blacklists of files that are not deleted.
+ # GN builds aren't automatically regenerated when you sync. To avoid
+ # messing with the GN workflow, erase everything but the args file, and
+ # write a dummy build.ninja file that will automatically rerun GN the next
+ # time Ninja is run.
+ build_ninja_file = os.path.join(build_dir, 'build.ninja')
+ build_commands = extract_gn_build_commands(build_ninja_file)
+
+ try:
+ gn_args_file = os.path.join(build_dir, 'args.gn')
+ with open(gn_args_file, 'r') as f:
+ args_contents = f.read()
+ except IOError:
+ args_contents = ''
+
+ shutil.rmtree(build_dir)
+
+ # Put back the args file (if any).
+ os.mkdir(build_dir)
+ if args_contents != '':
+ with open(gn_args_file, 'w') as f:
+ f.write(args_contents)
+
+ # Write the build.ninja file sufficiently to regenerate itself.
+ with open(os.path.join(build_dir, 'build.ninja'), 'w') as f:
+ if build_commands != '':
+ f.write(build_commands)
+ else:
+ # Couldn't parse the build.ninja file, write a default thing.
+ f.write('''rule gn
+command = gn -q gen //out/%s/
+description = Regenerating ninja files
+
+build build.ninja: gn
+generator = 1
+depfile = build.ninja.d
+''' % (os.path.split(build_dir)[1]))
+
+ # Write a .d file for the build which references a nonexistant file. This
+ # will make Ninja always mark the build as dirty.
+ with open(build_ninja_d_file, 'w') as f:
+ f.write('build.ninja: nonexistant_file.gn\n')
+
+
+def needs_clobber(landmines_path, new_landmines):
if os.path.exists(landmines_path):
- triggered = os.path.join(out_dir, '.landmines_triggered')
with open(landmines_path, 'r') as f:
old_landmines = f.readlines()
if old_landmines != new_landmines:
@@ -75,14 +141,54 @@ def set_up_landmines(target, new_landmines):
diff = difflib.unified_diff(old_landmines, new_landmines,
fromfile='old_landmines', tofile='new_landmines',
fromfiledate=old_date, tofiledate=time.ctime(), n=0)
+ sys.stdout.write('Clobbering due to:\n')
+ sys.stdout.writelines(diff)
+ return True
+ else:
+ sys.stdout.write('Clobbering due to missing landmines file.\n')
+ return True
+ return False
- with open(triggered, 'w') as f:
- f.writelines(diff)
- print "Setting landmine: %s" % triggered
- elif os.path.exists(triggered):
- # Remove false triggered landmines.
- os.remove(triggered)
- print "Removing landmine: %s" % triggered
+
+def clobber_if_necessary(new_landmines):
+ """Does the work of setting, planting, and triggering landmines."""
+ out_dir = get_build_dir(landmine_utils.builder())
+ landmines_path = os.path.normpath(os.path.join(out_dir, '..', '.landmines'))
+ try:
+ os.makedirs(out_dir)
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ pass
+
+ if needs_clobber(landmines_path, new_landmines):
+ # Clobber contents of build directory but not directory itself: some
+ # checkouts have the build directory mounted.
+ for f in os.listdir(out_dir):
+ path = os.path.join(out_dir, f)
+ if os.path.basename(out_dir) == 'build':
+ # Only delete build directories and files for MSVS builds as the folder
+ # shares some checked out files and directories.
+ if (os.path.isdir(path) and
+ re.search(r'(?:[Rr]elease)|(?:[Dd]ebug)', f)):
+ delete_build_dir(path)
+ elif (os.path.isfile(path) and
+ (path.endswith('.sln') or
+ path.endswith('.vcxproj') or
+ path.endswith('.vcxproj.user'))):
+ os.unlink(path)
+ else:
+ if os.path.isfile(path):
+ os.unlink(path)
+ elif os.path.isdir(path):
+ delete_build_dir(path)
+ if os.path.basename(out_dir) == 'xcodebuild':
+ # Xcodebuild puts an additional project file structure into build,
+ # while the output folder is xcodebuild.
+ project_dir = os.path.join(SRC_DIR, 'build', 'all.xcodeproj')
+ if os.path.exists(project_dir) and os.path.isdir(project_dir):
+ delete_build_dir(project_dir)
+
+ # Save current set of landmines for next time.
with open(landmines_path, 'w') as f:
f.writelines(new_landmines)
@@ -123,14 +229,14 @@ def main():
if landmine_utils.builder() in ('dump_dependency_json', 'eclipse'):
return 0
+ gyp_environment.set_environment()
+
landmines = []
for s in landmine_scripts:
proc = subprocess.Popen([sys.executable, s], stdout=subprocess.PIPE)
output, _ = proc.communicate()
landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
-
- for target in ('Debug', 'Release'):
- set_up_landmines(target, landmines)
+ clobber_if_necessary(landmines)
return 0
diff --git a/deps/v8/build/mac/asan.gyp b/deps/v8/build/mac/asan.gyp
new file mode 100644
index 00000000000000..3fc7f58d434915
--- /dev/null
+++ b/deps/v8/build/mac/asan.gyp
@@ -0,0 +1,31 @@
+# Copyright 2015 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'asan_dynamic_runtime',
+ 'toolsets': ['target', 'host'],
+ 'type': 'none',
+ 'variables': {
+ # Every target is going to depend on asan_dynamic_runtime, so allow
+ # this one to depend on itself.
+ 'prune_self_dependency': 1,
+ # Path is relative to this GYP file.
+ 'asan_rtl_mask_path':
+ '../../third_party/llvm-build/Release+Asserts/lib/clang/*/lib/darwin',
+ 'asan_osx_dynamic':
+ '<(asan_rtl_mask_path)/libclang_rt.asan_osx_dynamic.dylib',
+ },
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)',
+ 'files': [
+ ' Call(v8::Handle fun,
- Handle data = Handle());
+ static V8_DEPRECATE_SOON(
+ "Use maybe version",
+ Local Call(v8::Handle fun,
+ Handle data = Handle()));
+ // TODO(dcarney): data arg should be a MaybeLocal
+ static MaybeLocal Call(Local context,
+ v8::Handle