diff --git a/.eslintrc.yaml b/.eslintrc.yaml index 713e2f1c93d08d..fbd024b7ea16db 100644 --- a/.eslintrc.yaml +++ b/.eslintrc.yaml @@ -62,17 +62,21 @@ rules: no-new-require: 2 no-path-concat: 2 no-restricted-modules: [2, sys, _linklist] - no-restricted-properties: [2, { - object: assert, - property: deepEqual, - message: Please use assert.deepStrictEqual(). - }, { - property: __defineGetter__, - message: __defineGetter__ is deprecated. - }, { - property: __defineSetter__, - message: __defineSetter__ is deprecated. - }] + no-restricted-properties: + - 2 + - object: assert + property: deepEqual + message: Use assert.deepStrictEqual(). + - object: assert + property: equal + message: Use assert.strictEqual() rather than assert.equal(). + - object: assert + property: notEqual + message: Use assert.notStrictEqual() rather than assert.notEqual(). + - property: __defineGetter__ + message: __defineGetter__ is deprecated. + - property: __defineSetter__, + message: __defineSetter__ is deprecated. # Stylistic Issues # http://eslint.org/docs/rules/#stylistic-issues @@ -92,7 +96,7 @@ rules: key-spacing: [2, {mode: minimum}] keyword-spacing: 2 linebreak-style: [2, unix] - max-len: [2, 80, 2] + max-len: [2, {code: 80, ignoreUrls: true, tabWidth: 2}] new-parens: 2 no-mixed-spaces-and-tabs: 2 no-multiple-empty-lines: [2, {max: 2, maxEOF: 0, maxBOF: 0}] diff --git a/.gitignore b/.gitignore index 4f129c4581ba8f..f8f99f5f839389 100644 --- a/.gitignore +++ b/.gitignore @@ -6,7 +6,7 @@ !tools/doc/node_modules/**/.* !.editorconfig !.eslintignore -!.eslintrc +!.eslintrc.yaml !.gitattributes !.github !.gitignore diff --git a/.mailmap b/.mailmap index 36324c6aa2ae9b..722995b6b04b63 100644 --- a/.mailmap +++ b/.mailmap @@ -143,7 +143,7 @@ San-Tai Hsu Scott Blomquist Sergey Kryzhanovsky Shannen Saez -Shigeki Ohtsu +Shigeki Ohtsu Siddharth Mahendraker Simon Willison Stanislav Opichal diff --git a/BUILDING.md b/BUILDING.md index 75ef93f8c73a13..ec56e386595ae3 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -8,6 +8,71 @@ If you consistently can reproduce a test failure, search for it in the [Node.js issue tracker](https://github.com/nodejs/node/issues) or file a new issue. +## Supported platforms + +This list of supported platforms is current as of the branch / release to +which it is attached. + +### Input + +Node.js relies on V8 and libuv. Therefore, we adopt a subset of their +supported platforms. + +### Strategy + +Support is divided into three tiers: + +* **Tier 1**: Full test coverage and maintenance by the Node.js core team and + the broader community. +* **Tier 2**: Full test coverage but more limited maintenance, + often provided by the vendor of the platform. +* **Experimental**: Known to compile but not necessarily reliably or with + a full passing test suite. These are often working to be promoted to Tier + 2 but are not quite ready. There is at least one individual actively + providing maintenance and the team is striving to broaden quality and + reliability of support. 
+ +### Supported platforms + +| System | Support type | Version | Architectures | Notes | +|--------------|--------------|----------------------------------|----------------------|------------------| +| GNU/Linux | Tier 1 | kernel >= 2.6.18, glibc >= 2.5 | x86, x64, arm, arm64 | | +| macOS | Tier 1 | >= 10.10 | x64 | | +| Windows | Tier 1 | >= Windows 7 or >= Windows2008R2 | x86, x64 | | +| SmartOS | Tier 2 | >= 14 < 16.4 | x86, x64 | see note1 | +| FreeBSD | Tier 2 | >= 10 | x64 | | +| GNU/Linux | Tier 2 | kernel >= 4.2.0, glibc >= 2.19 | ppc64be | | +| GNU/Linux | Tier 2 | kernel >= 3.13.0, glibc >= 2.19 | ppc64le | | +| AIX | Tier 2 | >= 6.1 TL09 | ppc64be | | +| GNU/Linux | Tier 2 | kernel >= 3.10, glibc >= 2.17 | s390x | | +| macOS | Experimental | >= 10.8 < 10.10 | x64 | no test coverage | +| Linux (musl) | Experimental | musl >= 1.0 | x64 | | + +note1 - The gcc4.8-libs package needs to be installed, because node + binaries have been built with GCC 4.8, for which runtime libraries are not + installed by default. For these node versions, the recommended binaries + are the ones available in pkgsrc, not the one available from nodejs.org. + Note that the binaries downloaded from the pkgsrc repositories are not + officially supported by the Node.js project, and instead are supported + by Joyent. SmartOS images >= 16.4 are not supported because + GCC 4.8 runtime libraries are not available in their pkgsrc repository + +### Supported toolchains + +Depending on host platform, the selection of toolchains may vary. + +#### Unix + +* GCC 4.8 or newer +* Clang 3.4 or newer + +#### Windows + +* Building Node: Visual Studio 2015 or Visual C++ Build Tools 2015 or newer +* Building native add-ons: Visual Studio 2013 or Visual C++ Build Tools 2015 + or newer + +## Building Node.js on supported platforms ### Unix / OS X @@ -20,9 +85,9 @@ Prerequisites: On OS X, you will also need: * [Xcode](https://developer.apple.com/xcode/download/) - * You also need to install the `Command Line Tools` via Xcode. You can find + - You also need to install the `Command Line Tools` via Xcode. You can find this under the menu `Xcode -> Preferences -> Downloads` - * This step will install `gcc` and the related toolchain containing `make` + - This step will install `gcc` and the related toolchain containing `make` * After building, you may want to setup [firewall rules](tools/macosx-firewall.sh) to avoid popups asking to accept incoming network connections when running tests: @@ -51,7 +116,8 @@ the `-j4` flag. See the [GNU Make Documentation](https://www.gnu.org/software/make/manual/html_node/Parallel.html) for more information. -Note that the above requires that `python` resolve to Python 2.6 or 2.7 and not a newer version. +Note that the above requires that `python` resolve to Python 2.6 or 2.7 +and not a newer version. To run the tests: @@ -252,9 +318,11 @@ It is possible to build Node.js with **Note**: building in this way does **not** allow you to claim that the runtime is FIPS 140-2 validated. Instead you can indicate that the runtime -uses a validated module. See the [security policy](http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf) +uses a validated module. See the +[security policy](http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf) page 60 for more details. In addition, the validation for the underlying module -is only valid if it is deployed in accordance with its [security policy](http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf). 
+is only valid if it is deployed in accordance with its +[security policy](http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf). If you need FIPS validated cryptography it is recommended that you read both the [security policy](http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf) and [user guide](https://openssl.org/docs/fips/UserGuide-2.0.pdf). diff --git a/CHANGELOG.md b/CHANGELOG.md index 99177be7e6e852..c8bcf86958b077 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,7 +26,8 @@ release. -6.10.2
+6.10.3
+6.10.2
6.10.1
6.10.0
6.9.5
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 144fc393b8ac98..ab8217494e080f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,15 +46,6 @@ $ git remote add upstream git://github.com/nodejs/node.git For developing new features and bug fixes, the `master` branch should be pulled and built upon. -#### Respect the stability index - -The rules for the master branch are less strict; consult the -[stability index](./doc/api/documentation.md#stability-index) for details. - -In a nutshell, modules are at varying levels of API stability. Bug fixes are -always welcome but API or behavioral changes to modules at stability level 3 -(Locked) are off-limits. - #### Dependencies Node.js has several bundled dependencies in the *deps/* and the *tools/* diff --git a/Makefile b/Makefile index a2b64e63b4a378..7bc0a33bddbf8c 100644 --- a/Makefile +++ b/Makefile @@ -90,24 +90,24 @@ uninstall: $(PYTHON) tools/install.py $@ '$(DESTDIR)' '$(PREFIX)' clean: - -rm -rf out/Makefile $(NODE_EXE) $(NODE_G_EXE) out/$(BUILDTYPE)/$(NODE_EXE) \ + $(RM) -r out/Makefile $(NODE_EXE) $(NODE_G_EXE) out/$(BUILDTYPE)/$(NODE_EXE) \ out/$(BUILDTYPE)/node.exp - @if [ -d out ]; then find out/ -name '*.o' -o -name '*.a' -o -name '*.d' | xargs rm -rf; fi - -rm -rf node_modules - @if [ -d deps/icu ]; then echo deleting deps/icu; rm -rf deps/icu; fi - -rm -f test.tap + @if [ -d out ]; then find out/ -name '*.o' -o -name '*.a' -o -name '*.d' | xargs $(RM) -r; fi + $(RM) -r node_modules + @if [ -d deps/icu ]; then echo deleting deps/icu; $(RM) -r deps/icu; fi + $(RM) test.tap distclean: - -rm -rf out - -rm -f config.gypi icu_config.gypi config_fips.gypi - -rm -f config.mk - -rm -rf $(NODE_EXE) $(NODE_G_EXE) - -rm -rf node_modules - -rm -rf deps/icu - -rm -rf deps/icu4c*.tgz deps/icu4c*.zip deps/icu-tmp - -rm -f $(BINARYTAR).* $(TARBALL).* - -rm -rf deps/v8/testing/gmock - -rm -rf deps/v8/testing/gtest + $(RM) -r out + $(RM) config.gypi icu_config.gypi config_fips.gypi + $(RM) config.mk + $(RM) -r $(NODE_EXE) $(NODE_G_EXE) + $(RM) -r node_modules + $(RM) -r deps/icu + $(RM) -r deps/icu4c*.tgz deps/icu4c*.zip deps/icu-tmp + $(RM) $(BINARYTAR).* $(TARBALL).* + $(RM) -r deps/v8/testing/gmock + $(RM) -r deps/v8/testing/gtest check: test @@ -367,7 +367,7 @@ docopen: $(apidocs_html) @$(PYTHON) -mwebbrowser file://$(PWD)/out/doc/api/all.html docclean: - -rm -rf out/doc + $(RM) -r out/doc build-ci: $(PYTHON) ./configure $(CONFIG_FLAGS) @@ -542,8 +542,8 @@ release-only: fi $(PKG): release-only - rm -rf $(PKGDIR) - rm -rf out/deps out/Release + $(RM) -r $(PKGDIR) + $(RM) -r out/deps out/Release $(PYTHON) ./configure \ --dest-cpu=x64 \ --tag=$(TAG) \ @@ -574,24 +574,24 @@ $(TARBALL): release-only $(NODE_EXE) doc mkdir -p $(TARNAME)/doc/api cp doc/node.1 $(TARNAME)/doc/node.1 cp -r out/doc/api/* $(TARNAME)/doc/api/ - rm -rf $(TARNAME)/deps/v8/{test,samples,tools/profviz,tools/run-tests.py} - rm -rf $(TARNAME)/doc/images # too big - rm -rf $(TARNAME)/deps/uv/{docs,samples,test} - rm -rf $(TARNAME)/deps/openssl/openssl/{doc,demos,test} - rm -rf $(TARNAME)/deps/zlib/contrib # too big, unused - rm -rf $(TARNAME)/.{editorconfig,git*,mailmap} - rm -rf $(TARNAME)/tools/{eslint,eslint-rules,osx-pkg.pmdoc,pkgsrc} - rm -rf $(TARNAME)/tools/{osx-*,license-builder.sh,cpplint.py} - rm -rf $(TARNAME)/test*.tap - find $(TARNAME)/ -name ".eslint*" -maxdepth 2 | xargs rm - find $(TARNAME)/ -type l | xargs rm # annoying on windows + $(RM) -r $(TARNAME)/deps/v8/{test,samples,tools/profviz,tools/run-tests.py} + $(RM) -r $(TARNAME)/doc/images # too big + $(RM) 
-r $(TARNAME)/deps/uv/{docs,samples,test} + $(RM) -r $(TARNAME)/deps/openssl/openssl/{doc,demos,test} + $(RM) -r $(TARNAME)/deps/zlib/contrib # too big, unused + $(RM) -r $(TARNAME)/.{editorconfig,git*,mailmap} + $(RM) -r $(TARNAME)/tools/{eslint,eslint-rules,osx-pkg.pmdoc,pkgsrc} + $(RM) -r $(TARNAME)/tools/{osx-*,license-builder.sh,cpplint.py} + $(RM) -r $(TARNAME)/test*.tap + find $(TARNAME)/ -name ".eslint*" -maxdepth 2 | xargs $(RM) + find $(TARNAME)/ -type l | xargs $(RM) # annoying on windows tar -cf $(TARNAME).tar $(TARNAME) - rm -rf $(TARNAME) + $(RM) -r $(TARNAME) gzip -c -f -9 $(TARNAME).tar > $(TARNAME).tar.gz ifeq ($(XZ), 0) xz -c -f -$(XZ_COMPRESSION) $(TARNAME).tar > $(TARNAME).tar.xz endif - rm $(TARNAME).tar + $(RM) $(TARNAME).tar tar: $(TARBALL) @@ -620,14 +620,14 @@ $(TARBALL)-headers: release-only --release-urlbase=$(RELEASE_URLBASE) \ $(CONFIG_FLAGS) $(BUILD_RELEASE_FLAGS) HEADERS_ONLY=1 $(PYTHON) tools/install.py install '$(TARNAME)' '/' - find $(TARNAME)/ -type l | xargs rm -f + find $(TARNAME)/ -type l | xargs $(RM) tar -cf $(TARNAME)-headers.tar $(TARNAME) - rm -rf $(TARNAME) + $(RM) -r $(TARNAME) gzip -c -f -9 $(TARNAME)-headers.tar > $(TARNAME)-headers.tar.gz ifeq ($(XZ), 0) xz -c -f -$(XZ_COMPRESSION) $(TARNAME)-headers.tar > $(TARNAME)-headers.tar.xz endif - rm $(TARNAME)-headers.tar + $(RM) $(TARNAME)-headers.tar tar-headers: $(TARBALL)-headers @@ -643,8 +643,8 @@ ifeq ($(XZ), 0) endif $(BINARYTAR): release-only - rm -rf $(BINARYNAME) - rm -rf out/deps out/Release + $(RM) -r $(BINARYNAME) + $(RM) -r out/deps out/Release $(PYTHON) ./configure \ --prefix=/ \ --dest-cpu=$(DESTCPU) \ @@ -656,12 +656,12 @@ $(BINARYTAR): release-only cp LICENSE $(BINARYNAME) cp CHANGELOG.md $(BINARYNAME) tar -cf $(BINARYNAME).tar $(BINARYNAME) - rm -rf $(BINARYNAME) + $(RM) -r $(BINARYNAME) gzip -c -f -9 $(BINARYNAME).tar > $(BINARYNAME).tar.gz ifeq ($(XZ), 0) xz -c -f -$(XZ_COMPRESSION) $(BINARYNAME).tar > $(BINARYNAME).tar.xz endif - rm $(BINARYNAME).tar + $(RM) $(BINARYNAME).tar binary: $(BINARYTAR) diff --git a/README.md b/README.md index 7fe05e05e27ad0..4fba5595caaaf9 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,10 @@ -# Node.js - -[![Gitter](https://badges.gitter.im/Join Chat.svg)](https://gitter.im/nodejs/node?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/29/badge)](https://bestpractices.coreinfrastructure.org/projects/29) +

+<!-- centered HTML header: Node.js logo/title with the Gitter and CII Best Practices badges -->
Node.js is a JavaScript runtime built on Chrome's V8 JavaScript engine. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and @@ -16,6 +20,23 @@ policies, and releases are managed under an If you need help using or installing Node.js, please use the [nodejs/help](https://github.com/nodejs/help) issue tracker. + +# Table of Contents + +* [Resources for Newcomers](#resources-for-newcomers) +* [Release Types](#release-types) + * [Download](#download) + * [Current and LTS Releases](#current-and-lts-releases) + * [Nightly Releases](#nightly-releases) + * [API Documentation](#api-documentation) + * [Verifying Binaries](#verifying-binaries) +* [Building Node.js](#building-nodejs) + * [Security](#security) + * [Current Project Team Members](#current-project-team-members) + * [CTC (Core Technical Committee)](#ctc-core-technical-committee) + * [Collaborators](#collaborators) + * [Release Team](#release-team) + ## Resources for Newcomers ### Official Resources @@ -69,6 +90,7 @@ The Node.js project maintains multiple types of releases: Binaries, installers, and source tarballs are available at . +#### Current and LTS Releases **Current** and **LTS** releases are available at , listed under their version strings. The [latest](https://nodejs.org/download/release/latest/) directory is an @@ -76,11 +98,13 @@ alias for the latest Current release. The latest LTS release from an LTS line is available in the form: latest-_codename_. For example: +#### Nightly Releases **Nightly** builds are available at , listed under their version string which includes their date (in UTC time) and the commit SHA at the HEAD of the release. +#### API Documentation **API documentation** is available in each release and nightly directory under _docs_. points to the API documentation of the latest stable version. @@ -132,7 +156,7 @@ the binary verification command above. ## Building Node.js See [BUILDING.md](BUILDING.md) for instructions on how to build -Node.js from source. +Node.js from source along with a list of officially supported platforms. 
## Security @@ -164,6 +188,8 @@ more information about the governance of the Node.js project, see **Colin Ihrig** <cjihrig@gmail.com> * [evanlucas](https://github.com/evanlucas) - **Evan Lucas** <evanlucas@me.com> (he/him) +* [fhinkel](https://github.com/fhinkel) - +**Franziska Hinkelmann** <franziska.hinkelmann@gmail.com> * [fishrock123](https://github.com/fishrock123) - **Jeremiah Senkpiel** <fishrock123@rocketmail.com> * [indutny](https://github.com/indutny) - @@ -183,7 +209,7 @@ more information about the governance of the Node.js project, see * [rvagg](https://github.com/rvagg) - **Rod Vagg** <rod@vagg.org> * [shigeki](https://github.com/shigeki) - -**Shigeki Ohtsu** <ohtsu@iij.ad.jp> (he/him) +**Shigeki Ohtsu** <ohtsu@ohtsu.org> (he/him) * [targos](https://github.com/targos) - **Michaël Zasso** <targos@protonmail.com> (he/him) * [thefourtheye](https://github.com/thefourtheye) - @@ -203,6 +229,8 @@ more information about the governance of the Node.js project, see **Andras** <andras@kinvey.com> * [AndreasMadsen](https://github.com/AndreasMadsen) - **Andreas Madsen** <amwebdk@gmail.com> (he/him) +* [aqrln](https://github.com/aqrln) - +**Alexey Orlenko** <eaglexrlnk@gmail.com> * [bengl](https://github.com/bengl) - **Bryan English** <bryan@bryanenglish.com> (he/him) * [benjamingr](https://github.com/benjamingr) - @@ -229,8 +257,6 @@ more information about the governance of the Node.js project, see **Alexander Makarenko** <estliberitas@gmail.com> * [eugeneo](https://github.com/eugeneo) - **Eugene Ostroukhov** <eostroukhov@google.com> -* [fhinkel](https://github.com/fhinkel) - -**Franziska Hinkelmann** <franziska.hinkelmann@gmail.com> * [firedfox](https://github.com/firedfox) - **Daniel Wang** <wangyang0123@gmail.com> * [geek](https://github.com/geek) - @@ -299,6 +325,10 @@ more information about the governance of the Node.js project, see **Prince John Wesley** <princejohnwesley@gmail.com> * [qard](https://github.com/qard) - **Stephen Belanger** <admin@stephenbelanger.com> (he/him) +* [refack](https://github.com/refack) - +**Refael Ackermann** <refack@gmail.com> (he/him) +* [richardlau](https://github.com/richardlau) - +**Richard Lau** <riclau@uk.ibm.com> * [rlidwka](https://github.com/rlidwka) - **Alex Kocharin** <alex@kocharin.ru> * [rmg](https://github.com/rmg) - @@ -337,6 +367,10 @@ more information about the governance of the Node.js project, see **Mike Tunnicliffe** <m.j.tunnicliffe@gmail.com> * [vkurchatkin](https://github.com/vkurchatkin) - **Vladimir Kurchatkin** <vladimir.kurchatkin@gmail.com> +* [vsemozhetbyt](https://github.com/vsemozhetbyt) - +**Vse Mozhet Byt** <vsemozhetbyt@gmail.com> (he/him) +* [watilde](https://github.com/watilde) - +**Daijiro Wachi** <daijiro.wachi@gmail.com> (he/him) * [whitlockjc](https://github.com/whitlockjc) - **Jeremy Whitlock** <jwhitlock@apache.org> * [yorkie](https://github.com/yorkie) - @@ -393,6 +427,11 @@ Previous releases may also have been signed with one of the following GPG keys: * **Timothy J Fontaine** <tjfontaine@gmail.com> `7937DFD2AB06298B2293C3187D33FF9D0246406D` +### Working Groups + +Information on the current Node.js Working Groups can be found in the +[CTC repository](https://github.com/nodejs/CTC/blob/master/WORKING_GROUPS.md). 
+ [npm]: https://www.npmjs.com [Website]: https://nodejs.org/en/ [Contributing to the project]: CONTRIBUTING.md diff --git a/ROADMAP.md b/ROADMAP.md deleted file mode 100644 index da8e47941916cc..00000000000000 --- a/ROADMAP.md +++ /dev/null @@ -1,84 +0,0 @@ -# Node.js Roadmap - -***This is a living document, it describes the policy and priorities as they exist today but can evolve over time.*** - -## Stability Policy - -The most important consideration in every code change is the impact it will have, positive or negative, on the ecosystem (modules and applications). - -Node.js does not remove stdlib JS API. - -Shipping with current and well supported dependencies is the best way to ensure long term stability of the platform. - -Node.js will continue to adopt new V8 releases. -* When V8 ships a breaking change to their C++ API that can be handled by [`nan`](https://github.com/nodejs/nan) -the *minor* version of Node.js will be increased. -* When V8 ships a breaking change to their C++ API that can NOT be handled by [`nan`](https://github.com/nodejs/nan) -the *major* version of Node.js will be increased. -* When new features in the JavaScript language are introduced by V8 the -*minor* version number will be increased. TC39 has stated clearly that no -backwards incompatible changes will be made to the language so it is -appropriate to increase the minor rather than major. - -No new API will be added in *patch* releases. - -Any API addition will cause an increase in the *minor* version. - -## Channels - -Channels are points of collaboration with the broader community and are not strictly scoped to a repository or branch. - -* Release - Stable production ready builds. Unique version numbers following semver. -* Canary - Nightly builds w/ V8 version in Chrome Canary + changes landing to Node.js. No version designation. -* NG - "Next Generation." No version designation. - -## NG (Next Generation) - -In order for Node.js to stay competitive we need to work on the next generation of the platform which will more accurately integrate and reflect the advancements in the language and the ecosystem. - -While this constitutes a great leap forward for the platform we will be making this leap without breaking backwards compatibility with the existing ecosystem of modules. - -## Immediate Priorities - -### Debugging and Tracing - -Debugging is one of the first things from everyone's mouth, both developer and enterprise, when describing trouble they've had with Node.js. - -The goal of Node.js' effort is to build a healthy debugging and tracing ecosystem and not to try and build any "silver bullet" features for core (like the domains debacle). - -The [Tracing WG](https://github.com/nodejs/tracing-wg) is driving this effort: - -* AsyncWrap improvements - basically just iterations based on feedback from people using it. -* async-listener - userland module that will dogfood AsyncWrap as well as provide many often requested debugging features. -* Tracing - * Add tracing support for more platforms (LTTng, etc). - * [Unify the Tracing endpoint](https://github.com/nodejs/node/issues/729). - * New Chrome Debugger - Google is working on a version of Chrome's debugger that is without Chrome and can be used with Node.js. - -### Ecosystem Automation - -In order to maintain a good release cadence without harming compatibility we must do a better job of understanding exactly what impact a particular change or release will have on the ecosystem. This requires new automation. 
- -The initial goals for this automation are relatively simple but will create a baseline toolchain we can continue to improve upon. - -* Produce a list of modules that no longer build between two release versions. -* Produce a list of modules that use a particular core API. -* Produce detailed code coverage data for the tests in core. - -### Improve Installation and Upgrades - -* Host and maintain registry endpoints (Homebrew, apt, etc). -* Document installation and upgrade procedures with an emphasis on using nvm or nave for development and our registry endpoints for traditional package managers and production. - -### Streams - -* Fix all existing compatibility issues. -* Simplify stream creation to avoid user error. -* Explore and identify compatibility issues with [WHATWG Streams](https://github.com/whatwg/streams). -* Improve stream performance. - -### Internationalization / Localization - -* Build documentation tooling with localization support built in. -* Reduce size of ICU and ship with it by default. -* Continue growth of our i18n community. diff --git a/WORKING_GROUPS.md b/WORKING_GROUPS.md deleted file mode 100644 index 6390946f5d1899..00000000000000 --- a/WORKING_GROUPS.md +++ /dev/null @@ -1,281 +0,0 @@ -# Node.js Core Working Groups - -Node.js Core Working Groups are autonomous projects created by the -[Core Technical Committee (CTC)](https://github.com/nodejs/node/blob/master/GOVERNANCE.md#core-technical-committee). - -Working Groups can be formed at any time but must be ratified by the CTC. -Once formed the work defined in the Working Group charter is the -responsibility of the WG rather than the CTC. - -It is important that Working Groups are not formed pre-maturely. Working -Groups are not formed to *begin* a set of tasks but instead are formed -once that work is already underway and the contributors -think it would benefit from being done as an autonomous project. - -If the work defined in a Working Group's charter is complete, the charter -should be revoked. - -A Working Group's charter can be revoked either by consensus of the Working -Group's members or by a CTC vote. Once revoked, any future work that arises -becomes the responsibility of the CTC. - -## Joining a WG - -To find out how to join a working group, consult the GOVERNANCE.md in -the working group's repository, or in the working group's repository. - -## Starting A Core Working Group - -The process to start a Core Working Group is identical to [creating a -Top Level Working Group](https://github.com/nodejs/TSC/blob/master/WORKING_GROUPS.md#starting-a-wg). - -## Current Working Groups - -* [Website](#website) -* [Streams](#streams) -* [Build](#build) -* [Diagnostics](#diagnostics) -* [i18n](#i18n) -* [Evangelism](#evangelism) -* [Docker](#docker) -* [Addon API](#addon-api) -* [Benchmarking](#benchmarking) -* [Post-mortem](#post-mortem) -* [Intl](#intl) -* [Documentation](#documentation) -* [Testing](#testing) - - -### [Website](https://github.com/nodejs/nodejs.org) - -The website Working Group's purpose is to build and maintain a public -website for the Node.js project. - -Responsibilities include: -* Developing and maintaining a build and automation system for nodejs.org. -* Ensuring the site is regularly updated with changes made to Node.js, like - releases and features. -* Fostering and enabling a community of translators. 
- -### [Streams](https://github.com/nodejs/readable-stream) - -The Streams Working Group is dedicated to the support and improvement of the -Streams API as used in Node.js and the npm ecosystem. We seek to create a -composable API that solves the problem of representing multiple occurrences -of an event over time in a humane, low-overhead fashion. Improvements to the -API will be driven by the needs of the ecosystem; interoperability and -backwards compatibility with other solutions and prior versions are paramount -in importance. - -Responsibilities include: -* Addressing stream issues on the Node.js issue tracker. -* Authoring and editing stream documentation within the Node.js project. -* Reviewing changes to stream subclasses within the Node.js project. -* Redirecting changes to streams from the Node.js project to this project. -* Assisting in the implementation of stream providers within Node.js. -* Recommending versions of `readable-stream` to be included in Node.js. -* Messaging about the future of streams to give the community advance notice of - changes. - -### [Build](https://github.com/nodejs/build) - -The Build Working Group's purpose is to create and maintain a distributed -automation infrastructure. - -Responsibilities include: -* Producing packages for all target platforms. -* Running tests. -* Running performance testing and comparisons. -* Creating and managing build-containers. - -### [Diagnostics](https://github.com/nodejs/diagnostics) - -The Diagnostics Working Group's purpose is to surface a set of comprehensive, -documented, and extensible diagnostic interfaces for use by Node.js tools and -JavaScript VMs. - -Responsibilities include: -* Collaborating with V8 to integrate `v8_inspector` into Node.js. -* Collaborating with V8 to integrate `trace_event` into Node.js. -* Collaborating with Core to refine `async_wrap` and `async_hooks`. -* Maintaining and improving OS trace system integration (e.g. ETW, LTTNG, dtrace). -* Documenting diagnostic capabilities and APIs in Node.js and its components. -* Exploring opportunities and gaps, discussing feature requests, and addressing - conflicts in Node.js diagnostics. -* Fostering an ecosystem of diagnostics tools for Node.js. - -### i18n - -The i18n Working Groups handle more than just translations. They -are endpoints for community members to collaborate with each -other in their language of choice. - -Each team is organized around a common spoken language. Each -language community might then produce multiple localizations for -various project resources. - -Responsibilities include: -* Translating any Node.js materials they believe are relevant to their - community. -* Reviewing processes for keeping translations up to date and of high quality. -* Managing and monitoring social media channels in their language. -* Promoting Node.js speakers for meetups and conferences in their language. - -Note that the i18n Working Groups are distinct from the [Intl](#Intl) Working Group. - -Each language community maintains its own membership. 
- -* [nodejs-ar - Arabic (اللغة العربية)](https://github.com/nodejs/nodejs-ar) -* [nodejs-bg - Bulgarian (български език)](https://github.com/nodejs/nodejs-bg) -* [nodejs-bn - Bengali (বাংলা)](https://github.com/nodejs/nodejs-bn) -* [nodejs-zh-CN - Chinese (中文)](https://github.com/nodejs/nodejs-zh-CN) -* [nodejs-cs - Czech (Český Jazyk)](https://github.com/nodejs/nodejs-cs) -* [nodejs-da - Danish (Dansk)](https://github.com/nodejs/nodejs-da) -* [nodejs-de - German (Deutsch)](https://github.com/nodejs/nodejs-de) -* [nodejs-el - Greek (Ελληνικά)](https://github.com/nodejs/nodejs-el) -* [nodejs-es - Spanish (Español)](https://github.com/nodejs/nodejs-es) -* [nodejs-fa - Persian (فارسی)](https://github.com/nodejs/nodejs-fa) -* [nodejs-fi - Finnish (Suomi)](https://github.com/nodejs/nodejs-fi) -* [nodejs-fr - French (Français)](https://github.com/nodejs/nodejs-fr) -* [nodejs-he - Hebrew (עברית)](https://github.com/nodejs/nodejs-he) -* [nodejs-hi - Hindi (फिजी बात)](https://github.com/nodejs/nodejs-hi) -* [nodejs-hu - Hungarian (Magyar)](https://github.com/nodejs/nodejs-hu) -* [nodejs-id - Indonesian (Bahasa Indonesia)](https://github.com/nodejs/nodejs-id) -* [nodejs-it - Italian (Italiano)](https://github.com/nodejs/nodejs-it) -* [nodejs-ja - Japanese (日本語)](https://github.com/nodejs/nodejs-ja) -* [nodejs-ka - Georgian (ქართული)](https://github.com/nodejs/nodejs-ka) -* [nodejs-ko - Korean (조선말)](https://github.com/nodejs/nodejs-ko) -* [nodejs-mk - Macedonian (Mакедонски)](https://github.com/nodejs/nodejs-mk) -* [nodejs-ms - Malay (بهاس ملايو)](https://github.com/nodejs/nodejs-ms) -* [nodejs-nl - Dutch (Nederlands)](https://github.com/nodejs/nodejs-nl) -* [nodejs-no - Norwegian (Norsk)](https://github.com/nodejs/nodejs-no) -* [nodejs-pl - Polish (Język Polski)](https://github.com/nodejs/nodejs-pl) -* [nodejs-pt - Portuguese (Português)](https://github.com/nodejs/nodejs-pt) -* [nodejs-ro - Romanian (Română)](https://github.com/nodejs/nodejs-ro) -* [nodejs-ru - Russian (Русский)](https://github.com/nodejs/nodejs-ru) -* [nodejs-sv - Swedish (Svenska)](https://github.com/nodejs/nodejs-sv) -* [nodejs-ta - Tamil (தமிழ்)](https://github.com/nodejs/nodejs-ta) -* [nodejs-tr - Turkish (Türkçe)](https://github.com/nodejs/nodejs-tr) -* [nodejs-zh-TW - Taiwanese (Hō-ló)](https://github.com/nodejs/nodejs-zh-TW) -* [nodejs-uk - Ukrainian (Українська)](https://github.com/nodejs/nodejs-uk) -* [nodejs-vi - Vietnamese (Tiếng Việtnam)](https://github.com/nodejs/nodejs-vi) - -### [Intl](https://github.com/nodejs/Intl) - -The Intl Working Group is dedicated to support and improvement of -Internationalization (i18n) and Localization (l10n) in Node. - -Responsibilities include: -* Ensuring functionality & compliance (standards: ECMA, Unicode…) -* Supporting Globalization and Internationalization issues that come up - in the tracker -* Communicating guidance and best practices -* Refining the existing `Intl` implementation - -The Intl Working Group is not responsible for translation of content. That is the -responsibility of the specific [i18n](#i18n) group for each language. - -### [Evangelism](https://github.com/nodejs/evangelism) - -The Evangelism Working Group promotes the accomplishments -of Node.js and lets the community know how they can get involved. - -Responsibilities include: -* Facilitating project messaging. -* Managing official project social media. -* Handling the promotion of speakers for meetups and conferences. -* Handling the promotion of community events. 
-* Publishing regular update summaries and other promotional - content. - -### [Docker](https://github.com/nodejs/docker-iojs) - -The Docker Working Group's purpose is to build, maintain, and improve official -Docker images for the Node.js project. - -Responsibilities include: -* Keeping the official Docker images updated in line with new Node.js releases. -* Decide and implement image improvements and/or fixes. -* Maintain and improve the images' documentation. - -### [Addon API](https://github.com/nodejs/nan) - -The Addon API Working Group is responsible for maintaining the NAN project and -corresponding _nan_ package in npm. The NAN project makes available an -abstraction layer for native add-on authors for Node.js, -assisting in the writing of code that is compatible with many actively used -versions of Node.js, V8 and libuv. - -Responsibilities include: -* Maintaining the [NAN](https://github.com/nodejs/nan) GitHub repository, - including code, issues and documentation. -* Maintaining the [addon-examples](https://github.com/nodejs/node-addon-examples) - GitHub repository, including code, issues and documentation. -* Maintaining the C++ Addon API within the Node.js project, in subordination to - the Node.js CTC. -* Maintaining the Addon documentation within the Node.js project, in - subordination to the Node.js CTC. -* Maintaining the _nan_ package in npm, releasing new versions as appropriate. -* Messaging about the future of the Node.js and NAN interface to give the - community advance notice of changes. - -The current members can be found in their -[README](https://github.com/nodejs/nan#collaborators). - -### [Benchmarking](https://github.com/nodejs/benchmarking) - -The purpose of the Benchmark Working Group is to gain consensus -on an agreed set of benchmarks that can be used to: - -* track and evangelize performance gains made between Node.js releases -* avoid performance regressions between releases - -Responsibilities include: -* Identifying 1 or more benchmarks that reflect customer usage. - Likely will need more than one to cover typical Node.js use cases - including low-latency and high concurrency -* Working to get community consensus on the list chosen -* Adding regular execution of chosen benchmarks to Node.js builds -* Tracking/publicizing performance between builds/releases - -### [Post-mortem](https://github.com/nodejs/post-mortem) - -The Post-mortem Diagnostics Working Group is dedicated to the support -and improvement of postmortem debugging for Node.js. It seeks to -elevate the role of postmortem debugging for Node, to assist in the -development of techniques and tools, and to make techniques and tools -known and available to Node.js users. - -Responsibilities include: -* Defining and adding interfaces/APIs in order to allow dumps - to be generated when needed. -* Defining and adding common structures to the dumps generated - in order to support tools that want to introspect those dumps. - -### [Documentation](https://github.com/nodejs/docs) - -The Documentation Working Group exists to support the improvement of Node.js -documentation, both in the core API documentation, and elsewhere, such as the -Node.js website. Its intent is to work closely with the Evangelism, Website, and -Intl Working Groups to make excellent documentation available and accessible -to all. - -Responsibilities include: -* Defining and maintaining documentation style and content standards. -* Producing documentation in a format acceptable for the Website Working Group - to consume. 
-* Ensuring that Node's documentation addresses a wide variety of audiences. -* Creating and operating a process for documentation review that produces - quality documentation and avoids impeding the progress of Core work. - -### [Testing](https://github.com/nodejs/testing) - -The Node.js Testing Working Group's purpose is to extend and improve testing of -the Node.js source code. - -Responsibilities include: -* Coordinating an overall strategy for improving testing. -* Documenting guidelines around tests. -* Working with the Build Working Group to improve continuous integration. -* Improving tooling for testing. - diff --git a/benchmark/fs/bench-realpathSync.js b/benchmark/fs/bench-realpathSync.js index ae1c78d30d1b35..bf1a38a746e150 100644 --- a/benchmark/fs/bench-realpathSync.js +++ b/benchmark/fs/bench-realpathSync.js @@ -3,6 +3,8 @@ const common = require('../common'); const fs = require('fs'); const path = require('path'); + +process.chdir(__dirname); const resolved_path = path.resolve(__dirname, '../../lib/'); const relative_path = path.relative(__dirname, '../../lib/'); diff --git a/benchmark/streams/writable-manywrites.js b/benchmark/streams/writable-manywrites.js new file mode 100644 index 00000000000000..fadafe86e4cf70 --- /dev/null +++ b/benchmark/streams/writable-manywrites.js @@ -0,0 +1,23 @@ +'use strict'; + +const common = require('../common'); +const Writable = require('stream').Writable; + +const bench = common.createBenchmark(main, { + n: [2e6] +}); + +function main(conf) { + const n = +conf.n; + const b = Buffer.allocUnsafe(1024); + const s = new Writable(); + s._write = function(chunk, encoding, cb) { + cb(); + }; + + bench.start(); + for (var k = 0; k < n; ++k) { + s.write(b); + } + bench.end(n); +} diff --git a/configure b/configure index 74229bf1120998..c8996b0b4a18d9 100755 --- a/configure +++ b/configure @@ -571,11 +571,11 @@ def get_version_helper(cc, regexp): def get_llvm_version(cc): return get_version_helper( - cc, r"(^clang version|based on LLVM) ([3-9]\.[0-9]+)") + cc, r"(^(?:FreeBSD )?clang version|based on LLVM) ([3-9]\.[0-9]+)") def get_xcode_version(cc): return get_version_helper( - cc, r"(^Apple LLVM version) ([5-9]\.[0-9]+)") + cc, r"(^Apple LLVM version) ([5-9]\.[0-9]+)") def get_gas_version(cc): try: @@ -894,7 +894,6 @@ def configure_node(o): o['variables']['library_files'] = options.linked_module o['variables']['asan'] = int(options.enable_asan or 0) - o['variables']['v8_inspector'] = b(not options.without_inspector) o['variables']['debug_devtools'] = 'node' if options.use_xcode and options.use_ninja: @@ -966,6 +965,15 @@ def configure_openssl(o): if options.without_ssl: + def without_ssl_error(option): + print('Error: --without-ssl is incompatible with %s' % option) + exit(1) + if options.shared_openssl: + without_ssl_error('--shared-openssl') + if options.openssl_no_asm: + without_ssl_error('--openssl-no-asm') + if options.openssl_fips: + without_ssl_error('--openssl-fips') return configure_library('openssl', o) @@ -1034,15 +1042,15 @@ def configure_intl(o): if nodedownload.candownload(auto_downloads, "icu"): nodedownload.retrievefile(url, targetfile) else: - print(' Re-using existing %s' % targetfile) + print('Re-using existing %s' % targetfile) if os.path.isfile(targetfile): - sys.stdout.write(' Checking file integrity with MD5:\r') + print('Checking file integrity with MD5:\r') gotmd5 = nodedownload.md5sum(targetfile) - print(' MD5: %s %s' % (gotmd5, targetfile)) + print('MD5: %s %s' % (gotmd5, targetfile)) if (md5 == gotmd5): return targetfile 
else: - print(' Expected: %s *MISMATCH*' % md5) + print('Expected: %s *MISMATCH*' % md5) print('\n ** Corrupted ZIP? Delete %s to retry download.\n' % targetfile) return None icu_config = { @@ -1180,7 +1188,7 @@ def configure_intl(o): os.rename(tmp_icu, icu_full_path) shutil.rmtree(icu_tmp_path) else: - print(' Error: --with-icu-source=%s did not result in an "icu" dir.' % \ + print('Error: --with-icu-source=%s did not result in an "icu" dir.' % \ with_icu_source) shutil.rmtree(icu_tmp_path) sys.exit(1) @@ -1196,8 +1204,8 @@ def configure_intl(o): if localzip: nodedownload.unpack(localzip, icu_parent_path) if not os.path.isdir(icu_full_path): - print(' Cannot build Intl without ICU in %s.' % icu_full_path) - print(' (Fix, or disable with "--with-intl=none" )') + print('Cannot build Intl without ICU in %s.' % icu_full_path) + print('(Fix, or disable with "--with-intl=none" )') sys.exit(1) else: print('* Using ICU in %s' % icu_full_path) @@ -1205,7 +1213,7 @@ def configure_intl(o): # uvernum.h contains it as a #define. uvernum_h = os.path.join(icu_full_path, 'source/common/unicode/uvernum.h') if not os.path.isfile(uvernum_h): - print(' Error: could not load %s - is ICU installed?' % uvernum_h) + print('Error: could not load %s - is ICU installed?' % uvernum_h) sys.exit(1) icu_ver_major = None matchVerExp = r'^\s*#define\s+U_ICU_VERSION_SHORT\s+"([^"]*)".*' @@ -1215,7 +1223,7 @@ def configure_intl(o): if m: icu_ver_major = m.group(1) if not icu_ver_major: - print(' Could not read U_ICU_VERSION_SHORT version from %s' % uvernum_h) + print('Could not read U_ICU_VERSION_SHORT version from %s' % uvernum_h) sys.exit(1) icu_endianness = sys.byteorder[0]; o['variables']['icu_ver_major'] = icu_ver_major @@ -1242,8 +1250,8 @@ def configure_intl(o): # this is the icudt*.dat file which node will be using (platform endianness) o['variables']['icu_data_file'] = icu_data_file if not os.path.isfile(icu_data_path): - print(' Error: ICU prebuilt data file %s does not exist.' % icu_data_path) - print(' See the README.md.') + print('Error: ICU prebuilt data file %s does not exist.' % icu_data_path) + print('See the README.md.') # .. and we're not about to build it from .gyp! sys.exit(1) # map from variable name to subdirs @@ -1268,6 +1276,12 @@ def configure_intl(o): pprint.pformat(icu_config, indent=2) + '\n') return # end of configure_intl +def configure_inspector(o): + disable_inspector = (options.without_inspector or + options.with_intl in (None, 'none') or + options.without_ssl) + o['variables']['v8_inspector'] = b(not disable_inspector) + output = { 'variables': {}, 'include_dirs': [], @@ -1298,6 +1312,7 @@ configure_v8(output) configure_openssl(output) configure_intl(output) configure_static(output) +configure_inspector(output) # variables should be a root level element, # move everything else to target_defaults diff --git a/deps/v8/include/v8-version.h b/deps/v8/include/v8-version.h index 21d2a6ae9e2655..b76915cf317de3 100644 --- a/deps/v8/include/v8-version.h +++ b/deps/v8/include/v8-version.h @@ -11,7 +11,7 @@ #define V8_MAJOR_VERSION 5 #define V8_MINOR_VERSION 1 #define V8_BUILD_NUMBER 281 -#define V8_PATCH_LEVEL 98 +#define V8_PATCH_LEVEL 101 // Use 1 for candidates and 0 otherwise. // (Boolean macro values are not supported by all preprocessors.) 
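Returning to the configure changes above: the new `without_ssl_error()` checks make previously silent flag conflicts fail fast, and `configure_inspector()` now disables the inspector whenever Intl is not enabled or SSL is disabled. A hypothetical session illustrating the new behavior (the error text comes from the added check; the invocation itself is only an example):

```console
$ ./configure --without-ssl --shared-openssl
Error: --without-ssl is incompatible with --shared-openssl
```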
diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc index 5b4cc7d8241cf0..d0c8317d4bd957 100644 --- a/deps/v8/src/api.cc +++ b/deps/v8/src/api.cc @@ -6763,7 +6763,11 @@ Local v8::ArrayBuffer::New(Isolate* isolate, size_t byte_length) { ENTER_V8(i_isolate); i::Handle obj = i_isolate->factory()->NewJSArrayBuffer(i::SharedFlag::kNotShared); - i::JSArrayBuffer::SetupAllocatingData(obj, i_isolate, byte_length); + // TODO(jbroman): It may be useful in the future to provide a MaybeLocal + // version that throws an exception or otherwise does not crash. + if (!i::JSArrayBuffer::SetupAllocatingData(obj, i_isolate, byte_length)) { + i::FatalProcessOutOfMemory("v8::ArrayBuffer::New"); + } return Utils::ToLocal(obj); } @@ -6959,8 +6963,12 @@ Local v8::SharedArrayBuffer::New(Isolate* isolate, ENTER_V8(i_isolate); i::Handle obj = i_isolate->factory()->NewJSArrayBuffer(i::SharedFlag::kShared); - i::JSArrayBuffer::SetupAllocatingData(obj, i_isolate, byte_length, true, - i::SharedFlag::kShared); + // TODO(jborman): It may be useful in the future to provide a MaybeLocal + // version that throws an exception or otherwise does not crash. + if (!i::JSArrayBuffer::SetupAllocatingData(obj, i_isolate, byte_length, true, + i::SharedFlag::kShared)) { + i::FatalProcessOutOfMemory("v8::SharedArrayBuffer::New"); + } return Utils::ToLocalShared(obj); } diff --git a/deps/v8/src/ast/scopes.cc b/deps/v8/src/ast/scopes.cc index 5d4b80987607b5..445bdf6b1f043b 100644 --- a/deps/v8/src/ast/scopes.cc +++ b/deps/v8/src/ast/scopes.cc @@ -1083,12 +1083,15 @@ Variable* Scope::LookupRecursive(VariableProxy* proxy, if (var != NULL && proxy->is_assigned()) var->set_maybe_assigned(); *binding_kind = DYNAMIC_LOOKUP; return NULL; - } else if (calls_sloppy_eval() && !is_script_scope() && - name_can_be_shadowed) { + } else if (calls_sloppy_eval() && is_declaration_scope() && + !is_script_scope() && name_can_be_shadowed) { // A variable binding may have been found in an outer scope, but the current // scope makes a sloppy 'eval' call, so the found variable may not be // the correct one (the 'eval' may introduce a binding with the same name). // In that case, change the lookup result to reflect this situation. + // Only scopes that can host var bindings (declaration scopes) need be + // considered here (this excludes block and catch scopes), and variable + // lookups at script scope are always dynamic. if (*binding_kind == BOUND) { *binding_kind = BOUND_EVAL_SHADOWED; } else if (*binding_kind == UNBOUND) { diff --git a/deps/v8/test/mjsunit/regress/regress-crbug-608279.js b/deps/v8/test/mjsunit/regress/regress-crbug-608279.js new file mode 100644 index 00000000000000..22c69f252d0c37 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-crbug-608279.js @@ -0,0 +1,18 @@ +// Copyright 2016 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// Flags: --always-opt --no-lazy + +function __f_38() { + try { + throw 0; + } catch (e) { + eval(); + var __v_38 = { a: 'hest' }; + __v_38.m = function () { return __v_38.a; }; + } + return __v_38; +} +var __v_40 = __f_38(); +__v_40.m(); diff --git a/deps/v8/tools/lldb_commands.py b/deps/v8/tools/lldb_commands.py new file mode 100644 index 00000000000000..d8946ee485a237 --- /dev/null +++ b/deps/v8/tools/lldb_commands.py @@ -0,0 +1,72 @@ +# Copyright 2017 the V8 project authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import lldb +import re + +def jst(debugger, *args): + """Print the current JavaScript stack trace""" + target = debugger.GetSelectedTarget() + process = target.GetProcess() + thread = process.GetSelectedThread() + frame = thread.GetSelectedFrame() + frame.EvaluateExpression("_v8_internal_Print_StackTrace();") + print("") + +def jss(debugger, *args): + """Skip the jitted stack on x64 to where we entered JS last""" + target = debugger.GetSelectedTarget() + process = target.GetProcess() + thread = process.GetSelectedThread() + frame = thread.GetSelectedFrame() + js_entry_sp = frame.EvaluateExpression( + "v8::internal::Isolate::Current()->thread_local_top()->js_entry_sp_;") \ + .GetValue() + sizeof_void = frame.EvaluateExpression("sizeof(void*)").GetValue() + rbp = frame.FindRegister("rbp") + rsp = frame.FindRegister("rsp") + pc = frame.FindRegister("pc") + rbp = js_entry_sp + rsp = js_entry_sp + 2 *sizeof_void + pc.value = js_entry_sp + sizeof_void + +def bta(debugger, *args): + """Print stack trace with assertion scopes""" + func_name_re = re.compile("([^(<]+)(?:\(.+\))?") + assert_re = re.compile( + "^v8::internal::Per\w+AssertType::(\w+)_ASSERT, (false|true)>") + target = debugger.GetSelectedTarget() + process = target.GetProcess() + thread = process.GetSelectedThread() + frame = thread.GetSelectedFrame() + for frame in thread: + functionSignature = frame.GetDisplayFunctionName() + if functionSignature is None: + continue + functionName = func_name_re.match(functionSignature) + line = frame.GetLineEntry().GetLine() + sourceFile = frame.GetLineEntry().GetFileSpec().GetFilename() + if line: + sourceFile = sourceFile + ":" + str(line) + + if sourceFile is None: + sourceFile = "" + print("[%-2s] %-60s %-40s" % (frame.GetFrameID(), + functionName.group(1), + sourceFile)) + match = assert_re.match(str(functionSignature)) + if match: + if match.group(3) == "false": + prefix = "Disallow" + color = "\033[91m" + else: + prefix = "Allow" + color = "\033[92m" + print("%s -> %s %s (%s)\033[0m" % ( + color, prefix, match.group(2), match.group(1))) + +def __lldb_init_module (debugger, dict): + debugger.HandleCommand('command script add -f lldb_commands.jst jst') + debugger.HandleCommand('command script add -f lldb_commands.jss jss') + debugger.HandleCommand('command script add -f lldb_commands.bta bta') diff --git a/deps/v8/tools/lldbinit b/deps/v8/tools/lldbinit new file mode 100644 index 00000000000000..b4567a87bcf7ec --- /dev/null +++ b/deps/v8/tools/lldbinit @@ -0,0 +1,26 @@ +# Copyright 2017 the V8 project authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Print HeapObjects. +command regex -h 'Print a v8 JavaScript object' job 's/(.+)/expr -- '_v8_internal_Print_Object((void*)(%1))/' + +# Print v8::Local handle value. +command regex -h 'Print content of a v8::Local handle' jlh 's/(.+)/expr -- '_v8_internal_Print_Object(*(v8::internal::Object**)(*%1))/' + +# Print Code objects containing given PC. +command regex -h 'Print a v8 Code object from an internal code address' jco 's/(.+)/expr -- '_v8_internal_Print_Code((void*)(*%1))/' + +# Print FeedbackVector +command regex -h 'Print a v8 FeedbackVector object' jfv 's/(.+)/expr -- '_v8_internal_Print_FeedbackVector((void*)(%1))/' + +# Print DescriptorArray. +command regex -h 'Print a v8 DescriptorArray object' jda 's/(.+)/expr -- '_v8_internal_Print_DescriptorArray((void*)(%1))/' + +# Print LayoutDescriptor. 
+command regex -h 'Print a v8 LayoutDescriptor object' jld 's/(.+)/expr -- '_v8_internal_Print_LayoutDescriptor((void*)(%1))/' + +# Print TransitionArray. +command regex -h 'Print a v8 TransitionArray object' jta 's/(.+)/expr -- '_v8_internal_Print_TransitionArray((void*)(%1))/' + +command script import ~/lldb_commands.py diff --git a/doc/STYLE_GUIDE.md b/doc/STYLE_GUIDE.md index 10f26421a4ceb5..f087718a6754fd 100644 --- a/doc/STYLE_GUIDE.md +++ b/doc/STYLE_GUIDE.md @@ -57,7 +57,7 @@ * When using underscores, asterisks and backticks please use proper escaping (**\\\_**, **\\\*** and **\\\`** instead of **\_**, **\*** and **\`**) * References to constructor functions should use PascalCase * References to constructor instances should be camelCased -* References to methods should be used with parenthesis: `socket.end()` instead of `socket.end` +* References to methods should be used with parentheses: `socket.end()` instead of `socket.end` [plugin]: http://editorconfig.org/#download [Oxford comma]: https://en.wikipedia.org/wiki/Serial_comma diff --git a/doc/api/assert.md b/doc/api/assert.md index 313b77cfbf149e..873f03e77553b2 100644 --- a/doc/api/assert.md +++ b/doc/api/assert.md @@ -12,22 +12,7 @@ added: v0.5.9 * `value` {any} * `message` {any} -An alias of [`assert.ok()`][] . - -```js -const assert = require('assert'); - -assert(true); -// OK -assert(1); -// OK -assert(false); -// throws "AssertionError: false == true" -assert(0); -// throws "AssertionError: 0 == true" -assert(false, 'it\'s false'); -// throws "AssertionError: it's false" -``` +An alias of [`assert.ok()`][]. ## assert.deepEqual(actual, expected[, message]) -* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a [`TypedArray`] or - [`ArrayBuffer`] -* `byteOffset` {Integer} Where to start copying from `arrayBuffer`. **Default:** `0` -* `length` {Integer} How many bytes to copy from `arrayBuffer`. +* `arrayBuffer` {ArrayBuffer} An [`ArrayBuffer`] or the `.buffer` property of a + [`TypedArray`]. +* `byteOffset` {Integer} Index of first byte to expose. **Default:** `0` +* `length` {Integer} Number of bytes to expose. **Default:** `arrayBuffer.length - byteOffset` -When passed a reference to the `.buffer` property of a [`TypedArray`] instance, -the newly created `Buffer` will share the same allocated memory as the -[`TypedArray`]. +This creates a view of the [`ArrayBuffer`] without copying the underlying +memory. For example, when passed a reference to the `.buffer` property of a +[`TypedArray`] instance, the newly created `Buffer` will share the same +allocated memory as the [`TypedArray`]. Example: @@ -870,6 +872,10 @@ The index operator `[index]` can be used to get and set the octet at position `index` in `buf`. The values refer to individual bytes, so the legal value range is between `0x00` and `0xFF` (hex) or `0` and `255` (decimal). +This operator is inherited from `Uint8Array`, so its behavior on out-of-bounds +access is the same as `UInt8Array` - that is, getting returns `undefined` and +setting does nothing. + Example: Copy an ASCII string into a `Buffer`, one byte at a time ```js diff --git a/doc/api/child_process.md b/doc/api/child_process.md index a5482201091565..74a149455fdd0b 100644 --- a/doc/api/child_process.md +++ b/doc/api/child_process.md @@ -193,7 +193,7 @@ the process is spawned. 
The default options are: } ``` -If `timeout` is greater than `0`, the parent will send the the signal +If `timeout` is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `'SIGTERM'`) if the child runs longer than `timeout` milliseconds. @@ -1067,7 +1067,7 @@ added: v0.1.90 A `Readable Stream` that represents the child process's `stderr`. If the child was spawned with `stdio[2]` set to anything other than `'pipe'`, -then this will be `undefined`. +then this will be `null`. `child.stderr` is an alias for `child.stdio[2]`. Both properties will refer to the same value. @@ -1085,7 +1085,7 @@ A `Writable Stream` that represents the child process's `stdin`. continue until this stream has been closed via `end()`.* If the child was spawned with `stdio[0]` set to anything other than `'pipe'`, -then this will be `undefined`. +then this will be `null`. `child.stdin` is an alias for `child.stdio[0]`. Both properties will refer to the same value. @@ -1140,7 +1140,7 @@ added: v0.1.90 A `Readable Stream` that represents the child process's `stdout`. If the child was spawned with `stdio[1]` set to anything other than `'pipe'`, -then this will be `undefined`. +then this will be `null`. `child.stdout` is an alias for `child.stdio[1]`. Both properties will refer to the same value. diff --git a/doc/api/cli.md b/doc/api/cli.md index 34b2c7aff6adcc..53cfe5a96b4dd6 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -314,7 +314,7 @@ When set, the well known "root" CAs (like VeriSign) will be extended with the extra certificates in `file`. The file should consist of one or more trusted certificates in PEM format. A message will be emitted (once) with [`process.emitWarning()`][emit_warning] if the file is missing or -misformatted, but any errors are otherwise ignored. +malformed, but any errors are otherwise ignored. Note that neither the well known nor extra certificates are used when the `ca` options property is explicitly specified for a TLS or HTTPS client or server. diff --git a/doc/api/documentation.md b/doc/api/documentation.md index 947010d951bdab..5f45c9b56ed387 100644 --- a/doc/api/documentation.md +++ b/doc/api/documentation.md @@ -56,12 +56,6 @@ The API has proven satisfactory. Compatibility with the npm ecosystem is a high priority, and will not be broken unless absolutely necessary. ``` -```txt -Stability: 3 - Locked -Only bug fixes, security fixes, and performance improvements will be accepted. -Please do not suggest API changes in this area; they will be refused. -``` - ## JSON Output > Stability: 1 - Experimental diff --git a/doc/api/errors.md b/doc/api/errors.md index 640935da35e460..e0e0f06ac5900f 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -141,15 +141,15 @@ the first argument will be passed as `null`. const fs = require('fs'); function nodeStyleCallback(err, data) { - if (err) { - console.error('There was an error', err); - return; - } - console.log(data); + if (err) { + console.error('There was an error', err); + return; + } + console.log(data); } fs.readFile('/some/file/that/does-not-exist', nodeStyleCallback); -fs.readFile('/some/file/that/does-exist', nodeStyleCallback) +fs.readFile('/some/file/that/does-exist', nodeStyleCallback); ``` The JavaScript `try / catch` mechanism **cannot** be used to intercept errors @@ -167,15 +167,15 @@ try { throw err; } }); -} catch(err) { +} catch (err) { // This will not catch the throw! 
- console.log(err); + console.error(err); } ``` This will not work because the callback function passed to `fs.readFile()` is called asynchronously. By the time the callback has been called, the -surrounding code (including the `try { } catch(err) { }` block will have +surrounding code (including the `try { } catch (err) { }` block will have already exited. Throwing an error inside the callback **can crash the Node.js process** in most cases. If [domains][] are enabled, or a handler has been registered with `process.on('uncaughtException')`, such errors can be @@ -217,7 +217,7 @@ a string representing the location in the code at which ```js const myObject = {}; Error.captureStackTrace(myObject); -myObject.stack // similar to `new Error().stack` +myObject.stack; // similar to `new Error().stack` ``` The first line of the trace, instead of being prefixed with `ErrorType: @@ -238,7 +238,7 @@ function MyError() { // Without passing MyError to captureStackTrace, the MyError // frame would show up in the .stack property. By passing // the constructor, we omit that frame and all frames above it. -new MyError().stack +new MyError().stack; ``` ### Error.stackTraceLimit @@ -255,7 +255,7 @@ will affect any stack trace captured *after* the value has been changed. If set to a non-number value, or set to a negative number, stack traces will not capture any frames. -#### error.message +### error.message * {String} @@ -267,11 +267,11 @@ the stack trace of the `Error`, however changing this property after the ```js const err = new Error('The message'); -console.log(err.message); +console.error(err.message); // Prints: The message ``` -#### error.stack +### error.stack * {String} @@ -359,7 +359,7 @@ For example: ```js require('net').connect(-1); - // throws RangeError, port should be > 0 && < 65536 + // throws "RangeError: "port" option should be >= 0 and < 65536: -1" ``` Node.js will generate and throw `RangeError` instances *immediately* as a form @@ -379,19 +379,6 @@ doesNotExist; // throws ReferenceError, doesNotExist is not a variable in this program. ``` -`ReferenceError` instances will have an `error.arguments` property whose value -is an array containing a single element: a string representing the variable -that was not defined. - -```js -const assert = require('assert'); -try { - doesNotExist; -} catch(err) { - assert(err.arguments[0], 'doesNotExist'); -} -``` - Unless an application is dynamically generating and running code, `ReferenceError` instances should always be considered a bug in the code or its dependencies. @@ -407,7 +394,7 @@ program. ```js try { require('vm').runInThisContext('binary ! isNotOk'); -} catch(err) { +} catch (err) { // err will be a SyntaxError } ``` diff --git a/doc/api/events.md b/doc/api/events.md index aefcaec1a55f83..8864e78d9fc4c7 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -98,7 +98,7 @@ listener will be invoked _every time_ the named event is emitted. ```js const myEmitter = new MyEmitter(); -var m = 0; +let m = 0; myEmitter.on('event', () => { console.log(++m); }); @@ -114,7 +114,7 @@ the listener is unregistered and *then* called. ```js const myEmitter = new MyEmitter(); -var m = 0; +let m = 0; myEmitter.once('event', () => { console.log(++m); }); @@ -148,7 +148,7 @@ can be used. (_Note, however, that the `domain` module has been deprecated_) const myEmitter = new MyEmitter(); process.on('uncaughtException', (err) => { - console.log('whoops! there was an error'); + console.error('whoops! 
there was an error'); }); myEmitter.emit('error', new Error('whoops!')); @@ -160,7 +160,7 @@ As a best practice, listeners should always be added for the `'error'` events. ```js const myEmitter = new MyEmitter(); myEmitter.on('error', (err) => { - console.log('whoops! there was an error'); + console.error('whoops! there was an error'); }); myEmitter.emit('error', new Error('whoops!')); // Prints: whoops! there was an error @@ -491,7 +491,7 @@ Removes the specified `listener` from the listener array for the event named `eventName`. ```js -var callback = (stream) => { +const callback = (stream) => { console.log('someone connected!'); }; server.on('connection', callback); @@ -513,12 +513,12 @@ events will behave as expected. ```js const myEmitter = new MyEmitter(); -var callbackA = () => { +const callbackA = () => { console.log('A'); myEmitter.removeListener('event', callbackB); }; -var callbackB = () => { +const callbackB = () => { console.log('B'); }; diff --git a/doc/api/fs.md b/doc/api/fs.md index 5471c5f5e646f5..36a653386d230e 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -218,7 +218,7 @@ For a regular file [`util.inspect(stats)`][] would return a string very similar to this: ```js -{ +Stats { dev: 2114, ino: 48064969, mode: 33188, @@ -232,8 +232,7 @@ similar to this: atime: Mon, 10 Oct 2011 23:24:11 GMT, mtime: Mon, 10 Oct 2011 23:24:11 GMT, ctime: Mon, 10 Oct 2011 23:24:11 GMT, - birthtime: Mon, 10 Oct 2011 23:24:11 GMT -} + birthtime: Mon, 10 Oct 2011 23:24:11 GMT } ``` Please note that `atime`, `mtime`, `birthtime`, and `ctime` are @@ -377,12 +376,12 @@ fs.access('myfile', (err) => { ```js fs.open('myfile', 'wx', (err, fd) => { if (err) { - if (err.code === "EEXIST") { + if (err.code === 'EEXIST') { console.error('myfile already exists'); return; - } else { - throw err; } + + throw err; } writeMyData(fd); @@ -394,12 +393,12 @@ fs.open('myfile', 'wx', (err, fd) => { ```js fs.access('myfile', (err) => { if (err) { - if (err.code === "ENOENT") { + if (err.code === 'ENOENT') { console.error('myfile does not exist'); return; - } else { - throw err; } + + throw err; } fs.open('myfile', 'r', (err, fd) => { @@ -414,12 +413,12 @@ fs.access('myfile', (err) => { ```js fs.open('myfile', 'r', (err, fd) => { if (err) { - if (err.code === "ENOENT") { + if (err.code === 'ENOENT') { console.error('myfile does not exist'); return; - } else { - throw err; } + + throw err; } readMyData(fd); @@ -729,13 +728,14 @@ fs.exists('myfile', (exists) => { ```js fs.open('myfile', 'wx', (err, fd) => { if (err) { - if (err.code === "EEXIST") { + if (err.code === 'EEXIST') { console.error('myfile already exists'); return; - } else { - throw err; } + + throw err; } + writeMyData(fd); }); ``` @@ -759,15 +759,15 @@ fs.exists('myfile', (exists) => { ```js fs.open('myfile', 'r', (err, fd) => { if (err) { - if (err.code === "ENOENT") { + if (err.code === 'ENOENT') { console.error('myfile does not exist'); return; - } else { - throw err; } - } else { - readMyData(fd); + + throw err; } + + readMyData(fd); }); ``` @@ -945,7 +945,7 @@ const fd = fs.openSync('temp.txt', 'r+'); // truncate the file to 10 bytes, whereas the actual size is 7 bytes fs.ftruncate(fd, 10, (err) => { - assert.ifError(!err); + assert.ifError(err); console.log(fs.readFileSync('temp.txt')); }); // Prints: @@ -1154,8 +1154,8 @@ fs.mkdtemp(tmpDir, (err, folder) => { }); // This method is *CORRECT*: -const path = require('path'); -fs.mkdtemp(tmpDir + path.sep, (err, folder) => { +const { sep } = require('path'); +fs.mkdtemp(`${tmpDir}${sep}`, (err, 
folder) => { if (err) throw err; console.log(folder); // Will print something similar to `/tmp/abc123`. @@ -1564,7 +1564,7 @@ argument will automatically be normalized to absolute path. Here is an example below: ```js -fs.symlink('./foo', './new-port'); +fs.symlink('./foo', './new-port', callback); ``` It creates a symbolic link named "new-port" that points to "foo". @@ -1910,7 +1910,7 @@ Example: ```js fs.writeFile('message.txt', 'Hello Node.js', (err) => { if (err) throw err; - console.log('It\'s saved!'); + console.log('The file has been saved!'); }); ``` diff --git a/doc/api/https.md b/doc/api/https.md index 355fd7b133a76d..12ebef2bf8ce4b 100644 --- a/doc/api/https.md +++ b/doc/api/https.md @@ -69,7 +69,8 @@ const https = require('https'); const fs = require('fs'); const options = { - pfx: fs.readFileSync('server.pfx') + pfx: fs.readFileSync('test/fixtures/test_cert.pfx'), + passphrase: 'sample' }; https.createServer(options, (req, res) => { @@ -143,14 +144,14 @@ Example: ```js const https = require('https'); -var options = { +const options = { hostname: 'encrypted.google.com', port: 443, path: '/', method: 'GET' }; -var req = https.request(options, (res) => { +const req = https.request(options, (res) => { console.log('statusCode:', res.statusCode); console.log('headers:', res.headers); @@ -218,7 +219,7 @@ In order to specify these options, use a custom [`Agent`][]. Example: ```js -var options = { +const options = { hostname: 'encrypted.google.com', port: 443, path: '/', @@ -228,8 +229,8 @@ var options = { }; options.agent = new https.Agent(options); -var req = https.request(options, (res) => { - ... +const req = https.request(options, (res) => { + // ... }); ``` @@ -238,7 +239,7 @@ Alternatively, opt out of connection pooling by not using an `Agent`. Example: ```js -var options = { +const options = { hostname: 'encrypted.google.com', port: 443, path: '/', @@ -248,8 +249,8 @@ var options = { agent: false }; -var req = https.request(options, (res) => { - ... +const req = https.request(options, (res) => { + // ... }); ``` diff --git a/doc/api/modules.md b/doc/api/modules.md index 43bddab4faa310..cdbbf14fa43310 100644 --- a/doc/api/modules.md +++ b/doc/api/modules.md @@ -1,6 +1,6 @@ # Modules -> Stability: 3 - Locked +> Stability: 2 - Stable @@ -20,7 +20,7 @@ directory as `foo.js`. Here are the contents of `circle.js`: ```js -const PI = Math.PI; +const { PI } = Math; exports.area = (r) => PI * r * r; @@ -44,7 +44,7 @@ Below, `bar.js` makes use of the `square` module, which exports a constructor: ```js const square = require('./square.js'); -var mySquare = square(2); +const mySquare = square(2); console.log(`The area of my square is ${mySquare.area()}`); ``` @@ -56,10 +56,10 @@ module.exports = (width) => { return { area: () => width * width }; -} +}; ``` -The module system is implemented in the `require("module")` module. +The module system is implemented in the `require('module')` module. ## Accessing the main module @@ -142,18 +142,20 @@ To get the exact filename that will be loaded when `require()` is called, use the `require.resolve()` function. Putting together all of the above, here is the high-level algorithm -in pseudocode of what require.resolve does: +in pseudocode of what `require.resolve()` does: ```txt require(X) from module at path Y 1. If X is a core module, a. return the core module b. STOP -2. If X begins with './' or '/' or '../' +2. If X begins with '/' + a. set Y to be the filesystem root +3. If X begins with './' or '/' or '../' a. LOAD_AS_FILE(Y + X) b. 
LOAD_AS_DIRECTORY(Y + X) -3. LOAD_NODE_MODULES(X, dirname(Y)) -4. THROW "not found" +4. LOAD_NODE_MODULES(X, dirname(Y)) +5. THROW "not found" LOAD_AS_FILE(X) 1. If X is a file, load X as JavaScript text. STOP @@ -161,14 +163,18 @@ LOAD_AS_FILE(X) 3. If X.json is a file, parse X.json to a JavaScript Object. STOP 4. If X.node is a file, load X.node as binary addon. STOP +LOAD_INDEX(X) +1. If X/index.js is a file, load X/index.js as JavaScript text. STOP +2. If X/index.json is a file, parse X/index.json to a JavaScript object. STOP +3. If X/index.node is a file, load X/index.node as binary addon. STOP + LOAD_AS_DIRECTORY(X) 1. If X/package.json is a file, a. Parse X/package.json, and look for "main" field. b. let M = X + (json main field) c. LOAD_AS_FILE(M) -2. If X/index.js is a file, load X/index.js as JavaScript text. STOP -3. If X/index.json is a file, parse X/index.json to a JavaScript object. STOP -4. If X/index.node is a file, load X/index.node as binary addon. STOP + d. LOAD_INDEX(M) +2. LOAD_INDEX(X) LOAD_NODE_MODULES(X, START) 1. let DIRS=NODE_MODULES_PATHS(START) @@ -559,16 +565,16 @@ To illustrate the behavior, imagine this hypothetical implementation of `require()`, which is quite similar to what is actually done by `require()`: ```js -function require(...) { - var module = { exports: {} }; +function require(/* ... */) { + const module = { exports: {} }; ((module, exports) => { // Your module code here. In this example, define a function. - function some_func() {}; - exports = some_func; + function someFunc() {} + exports = someFunc; // At this point, exports is no longer a shortcut to module.exports, and // this module will still export an empty default object. - module.exports = some_func; - // At this point, the module will now export some_func, instead of the + module.exports = someFunc; + // At this point, the module will now export someFunc, instead of the // default object. })(module, module.exports); return module.exports; diff --git a/doc/api/net.md b/doc/api/net.md index cd7532c409878b..d7219efe5b3548 100644 --- a/doc/api/net.md +++ b/doc/api/net.md @@ -66,7 +66,7 @@ Returns an object with `port`, `family`, and `address` properties: Example: ```js -var server = net.createServer((socket) => { +const server = net.createServer((socket) => { socket.end('goodbye\n'); }).on('error', (err) => { // handle errors here @@ -211,7 +211,7 @@ double-backslashes, such as: ```js net.createServer().listen( - path.join('\\\\?\\pipe', process.cwd(), 'myctl')) + path.join('\\\\?\\pipe', process.cwd(), 'myctl')); ``` The parameter `backlog` behaves the same as in @@ -334,7 +334,7 @@ Construct a new socket object. `fd` allows you to specify the existing file descriptor of socket. Set `readable` and/or `writable` to `true` to allow reads and/or writes on this socket (NOTE: Works only when `fd` is passed). -About `allowHalfOpen`, refer to `createServer()` and `'end'` event. +About `allowHalfOpen`, refer to [`net.createServer()`][] and [`'end'`][] event. `net.Socket` instances are [`EventEmitter`][] with the following events: @@ -774,7 +774,8 @@ Passing `timeout` as an option will call [`socket.setTimeout()`][] after the soc The `connectListener` parameter will be added as a listener for the [`'connect'`][] event once. 
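As a brief illustrative sketch (the host, port, and timeout values here are
made up and are not part of the original text), the `timeout` option and the
`connectListener` described above can be combined like this:

```js
const net = require('net');

// `timeout` calls socket.setTimeout() before the connection is initiated;
// the socket is not destroyed automatically when 'timeout' fires.
const options = { host: 'localhost', port: 8124, timeout: 5000 };

const socket = net.connect(options, () => {
  console.log('connected to server!');
  socket.end();
});

socket.on('timeout', () => {
  console.error('socket timed out');
  socket.destroy();
});

socket.on('error', (err) => {
  console.error(err);
});
```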
-Here is an example of a client of the previously described echo server: +Following is an example of a client of the echo server described +in the [`net.createServer()`][] section: ```js const net = require('net'); @@ -874,8 +875,8 @@ server.listen(8124, () => { Test this by using `telnet`: -```sh -telnet localhost 8124 +```console +$ telnet localhost 8124 ``` To listen on the socket `/tmp/echo.sock` the third line from the last would @@ -889,8 +890,8 @@ server.listen('/tmp/echo.sock', () => { Use `nc` to connect to a UNIX domain socket server: -```js -nc -U /tmp/echo.sock +```console +$ nc -U /tmp/echo.sock ``` ## net.isIP(input) @@ -933,6 +934,7 @@ Returns true if input is a version 6 IP address, otherwise returns false. [`dns.lookup()` hints]: dns.html#dns_supported_getaddrinfo_flags [`end()`]: #net_socket_end_data_encoding [`EventEmitter`]: events.html#events_class_eventemitter +[`net.createServer()`]: #net_net_createserver_options_connectionlistener [`net.Socket`]: #net_class_net_socket [`pause()`]: #net_socket_pause [`resume()`]: #net_socket_resume diff --git a/doc/api/os.md b/doc/api/os.md index bfe20c1bef5c7f..15b84189c91272 100644 --- a/doc/api/os.md +++ b/doc/api/os.md @@ -26,6 +26,8 @@ A string constant defining the operating system-specific end-of-line marker: added: v0.5.0 --> +* Returns: {String} + The `os.arch()` method returns a string identifying the operating system CPU architecture *for which the Node.js binary was compiled*. @@ -36,6 +38,9 @@ The current possible values are: `'arm'`, `'arm64'`, `'ia32'`, `'mips'`, Equivalent to [`process.arch`][]. ## os.constants + * {Object} diff --git a/doc/api/process.md b/doc/api/process.md index e1beeac4cc6526..bd828634a5b143 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -290,7 +290,7 @@ too many listeners have been added to an event ```txt $ node -> event.defaultMaxListeners = 1; +> events.defaultMaxListeners = 1; > process.on('foo', () => {}); > process.on('foo', () => {}); > (node:38638) Warning: Possible EventEmitter memory leak detected. 2 foo @@ -303,7 +303,7 @@ adds a custom handler to the `'warning'` event: ```txt $ node --no-warnings > var p = process.on('warning', (warning) => console.warn('Do not do that!')); -> event.defaultMaxListeners = 1; +> events.defaultMaxListeners = 1; > process.on('foo', () => {}); > process.on('foo', () => {}); > Do not do that! @@ -1257,9 +1257,12 @@ function maybeSync(arg, cb) { This API is hazardous because in the following case: ```js -maybeSync(true, () => { +const maybeTrue = Math.random() > 0.5; + +maybeSync(maybeTrue, () => { foo(); }); + bar(); ``` @@ -1512,8 +1515,10 @@ Android) * {Stream} -The `process.stderr` property returns a [Writable][] stream connected to -`stderr` (fd `2`). +The `process.stderr` property returns a stream connected to +`stderr` (fd `2`). It is a [`net.Socket`][] (which is a [Duplex][] +stream) unless fd `2` refers to a file, in which case it is +a [Writable][] stream. Note: `process.stderr` differs from other Node.js streams in important ways, see [note on process I/O][] for more information. @@ -1522,8 +1527,10 @@ see [note on process I/O][] for more information. * {Stream} -The `process.stdin` property returns a [Readable][] stream equivalent to or -associated with `stdin` (fd `0`). +The `process.stdin` property returns a stream connected to +`stdin` (fd `0`). It is a [`net.Socket`][] (which is a [Duplex][] +stream) unless fd `0` refers to a file, in which case it is +a [Readable][] stream. 
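As a rough sketch that is not part of the original documentation, the `isTTY`
property offers a quick way to see how `process.stdin` is connected before
using the generic stream interface shown in the example that follows:

```js
// isTTY is set only when stdin is attached to a terminal; otherwise it is
// undefined (for example, when input is piped from another process).
if (process.stdin.isTTY) {
  console.log('stdin is attached to a terminal');
} else {
  console.log('stdin is piped from another process or a file');
}
```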
For example: @@ -1542,7 +1549,7 @@ process.stdin.on('end', () => { }); ``` -As a [Readable][] stream, `process.stdin` can also be used in "old" mode that +As a [Duplex][] stream, `process.stdin` can also be used in "old" mode that is compatible with scripts written for Node.js prior to v0.10. For more information see [Stream compatibility][]. @@ -1554,8 +1561,10 @@ must call `process.stdin.resume()` to read from it. Note also that calling * {Stream} -The `process.stdout` property returns a [Writable][] stream connected to -`stdout` (fd `2`). +The `process.stdout` property returns a stream connected to +`stdout` (fd `1`). It is a [`net.Socket`][] (which is a [Duplex][] +stream) unless fd `1` refers to a file, in which case it is +a [Writable][] stream. For example, to copy process.stdin to process.stdout: @@ -1771,6 +1780,8 @@ cases: [`Error`]: errors.html#errors_class_error [`EventEmitter`]: events.html#events_class_eventemitter [`JSON.stringify()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify +[`console.error()`]: console.html#console_console_error_data_args +[`console.log()`]: console.html#console_console_log_data_args [`net.Server`]: net.html#net_class_net_server [`net.Socket`]: net.html#net_class_net_socket [`process.argv`]: #process_process_argv @@ -1788,6 +1799,7 @@ cases: [TTY]: tty.html#tty_tty [Writable]: stream.html#stream_writable_streams [Readable]: stream.html#stream_readable_streams +[Duplex]: stream.html#stream_duplex_and_transform_streams [Child Process]: child_process.html [Cluster]: cluster.html [`process.exitCode`]: #process_process_exitcode diff --git a/doc/api/stream.md b/doc/api/stream.md index d9a2f47db4271f..6638a8f6533acd 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -21,7 +21,7 @@ const stream = require('stream'); While it is important for all Node.js users to understand how streams work, the `stream` module itself is most useful for developers that are creating new -types of stream instances. Developer's who are primarily *consuming* stream +types of stream instances. Developers who are primarily *consuming* stream objects will rarely (if ever) have need to use the `stream` module directly. ## Organization of this Document @@ -281,7 +281,7 @@ has been called, and all data has been flushed to the underlying system. ```js const writer = getWritableStreamSomehow(); for (var i = 0; i < 100; i ++) { - writer.write('hello, #${i}!\n'); + writer.write(`hello, #${i}!\n`); } writer.end('This is the end\n'); writer.on('finish', () => { @@ -471,7 +471,7 @@ If the data to be written can be generated or fetched on demand, it is recommended to encapsulate the logic into a [Readable][] and use [`stream.pipe()`][]. However, if calling `write()` is preferred, it is possible to respect backpressure and avoid memory issues using the -the [`'drain'`][] event: +[`'drain'`][] event: ```js function write (data, cb) { @@ -547,7 +547,7 @@ that the stream will *remain* paused once those destinations drain and ask for more data. *Note*: If a [Readable][] is switched into flowing mode and there are no -consumers available handle the data, that data will be lost. This can occur, +consumers available to handle the data, that data will be lost. This can occur, for instance, when the `readable.resume()` method is called without a listener attached to the `'data'` event, or when a `'data'` event handler is removed from the stream. @@ -728,7 +728,7 @@ end preferred over the use of the `'readable'` event. 
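As an illustrative sketch (the file name `input.txt` is made up for this
example), a readable stream can be consumed with `readable.pipe()` or the
`'data'` event instead of the `'readable'` event:

```js
const fs = require('fs');

// pipe() manages flow control (backpressure) automatically.
fs.createReadStream('input.txt').pipe(process.stdout);

// Attaching a 'data' listener switches the stream into flowing mode.
fs.createReadStream('input.txt').on('data', (chunk) => {
  console.log(`received ${chunk.length} bytes of data`);
});
```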
##### readable.isPaused() - @@ -1007,7 +1007,7 @@ function parseHeader(stream, callback) { const remaining = split.join('\n\n'); const buf = Buffer.from(remaining, 'utf8'); stream.removeListener('error', callback); - // set the readable listener before unshifting + // remove the readable listener before unshifting stream.removeListener('readable', onReadable); if (buf.length) stream.unshift(buf); diff --git a/doc/api/timers.md b/doc/api/timers.md index 75d6a36737b7d9..df48905001e19b 100644 --- a/doc/api/timers.md +++ b/doc/api/timers.md @@ -1,6 +1,6 @@ # Timers -> Stability: 3 - Locked +> Stability: 2 - Stable The `timer` module exposes a global API for scheduling functions to be called at some future period of time. Because the timer functions are @@ -163,7 +163,7 @@ added: v0.0.1 Cancels a `Timeout` object created by [`setTimeout()`][]. -[the Node.js Event Loop]: https://github.com/nodejs/node/blob/master/doc/topics/event-loop-timers-and-nexttick.md +[the Node.js Event Loop]: https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick [`TypeError`]: errors.html#errors_class_typeerror [`clearImmediate()`]: timers.html#timers_clearimmediate_immediate [`clearInterval()`]: timers.html#timers_clearinterval_timeout diff --git a/doc/api/tls.md b/doc/api/tls.md index 3c8c31a8f11e22..0e47ddac0391be 100644 --- a/doc/api/tls.md +++ b/doc/api/tls.md @@ -483,12 +483,8 @@ added: v0.11.4 will be emitted on the socket before establishing a secure communication * `secureContext`: Optional TLS context object created with [`tls.createSecureContext()`][]. If a `secureContext` is _not_ provided, one - will be created by passing the entire `options` object to - `tls.createSecureContext()`. *Note*: In effect, all - [`tls.createSecureContext()`][] options can be provided, but they will be - _completely ignored_ unless the `secureContext` option is missing. - * ...: Optional [`tls.createSecureContext()`][] options can be provided, see - the `secureContext` option for more information. + will be created by calling [`tls.createSecureContext()`][] with no options. + Construct a new `tls.TLSSocket` object from an existing TCP socket. ### Event: 'OCSPResponse' @@ -587,13 +583,16 @@ For Example: `{ type: 'ECDH', name: 'prime256v1', size: 256 }` added: v0.11.4 --> -* `detailed` {boolean} Specify `true` to request that the full certificate - chain with the `issuer` property be returned; `false` to return only the - top certificate without the `issuer` property. +* `detailed` {boolean} Include the full certificate chain if `true`, otherwise + include just the peer's certificate. Returns an object representing the peer's certificate. The returned object has some properties corresponding to the fields of the certificate. +If the full certificate chain was requested, each certificate will include a +`issuerCertificate` property containing an object representing its issuer's +certificate. + For example: ```text @@ -604,15 +603,15 @@ For example: O: 'node.js', OU: 'Test TLS Certificate', CN: 'localhost' }, - issuerInfo: + issuer: { C: 'UK', ST: 'Acknack Ltd', L: 'Rhys Jones', O: 'node.js', OU: 'Test TLS Certificate', CN: 'localhost' }, - issuer: - { ... another certificate ... }, + issuerCertificate: + { ... another certificate, possibly with a .issuerCertificate ... 
}, raw: < RAW DER buffer >, valid_from: 'Nov 11 09:52:22 2009 GMT', valid_to: 'Nov 6 09:52:22 2029 GMT', @@ -620,8 +619,7 @@ For example: serialNumber: 'B9B0D332A1AA5635' } ``` -If the peer does not provide a certificate, `null` or an empty object will be -returned. +If the peer does not provide a certificate, an empty object will be returned. ### tlsSocket.getProtocol()