diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index a00875881ab59c..a79f7ab22af7e2 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -174,6 +174,7 @@
/.github/workflows/tools.yml @nodejs/security-wg
/.github/workflows/update-openssl.yml @nodejs/security-wg
/.github/workflows/update-v8.yml @nodejs/security-wg @nodejs/v8-update
+/deps @nodejs/security-wg
/tools/dep_updaters/* @nodejs/security-wg
# Web Standards
diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml
index a0c0004af7e293..392b900565504c 100644
--- a/.github/workflows/build-tarball.yml
+++ b/.github/workflows/build-tarball.yml
@@ -90,8 +90,8 @@ jobs:
- name: Build
run: |
cd $TAR_DIR
- make build-ci -j2 V=1
+ make build-ci -j4 V=1
- name: Test
run: |
cd $TAR_DIR
- make run-ci -j2 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9"
+ make run-ci -j4 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9"
diff --git a/.github/workflows/coverage-linux-without-intl.yml b/.github/workflows/coverage-linux-without-intl.yml
index 10677ecaffccbf..012564d7ab14f9 100644
--- a/.github/workflows/coverage-linux-without-intl.yml
+++ b/.github/workflows/coverage-linux-without-intl.yml
@@ -53,11 +53,11 @@ jobs:
- name: Install gcovr
run: pip install gcovr==4.2
- name: Build
- run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn --coverage --without-intl"
+ run: make build-ci -j4 V=1 CONFIG_FLAGS="--error-on-warn --coverage --without-intl"
# TODO(bcoe): fix the couple tests that fail with the inspector enabled.
# The cause is most likely coverage's use of the inspector.
- name: Test
- run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j2 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" || exit 0
+ run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j4 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" || exit 0
- name: Report JS
run: npx c8 report --check-coverage
env:
diff --git a/.github/workflows/coverage-linux.yml b/.github/workflows/coverage-linux.yml
index 0895e029f714c0..83b973c1f752b6 100644
--- a/.github/workflows/coverage-linux.yml
+++ b/.github/workflows/coverage-linux.yml
@@ -53,11 +53,11 @@ jobs:
- name: Install gcovr
run: pip install gcovr==4.2
- name: Build
- run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn --coverage"
+ run: make build-ci -j4 V=1 CONFIG_FLAGS="--error-on-warn --coverage"
# TODO(bcoe): fix the couple tests that fail with the inspector enabled.
# The cause is most likely coverage's use of the inspector.
- name: Test
- run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j2 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" || exit 0
+ run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j4 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" || exit 0
- name: Report JS
run: npx c8 report --check-coverage
env:
diff --git a/.github/workflows/test-asan.yml b/.github/workflows/test-asan.yml
index fd50b61ffef222..c97d55d4e8bef8 100644
--- a/.github/workflows/test-asan.yml
+++ b/.github/workflows/test-asan.yml
@@ -57,6 +57,6 @@ jobs:
- name: Environment Information
run: npx envinfo
- name: Build
- run: make build-ci -j2 V=1
+ run: make build-ci -j4 V=1
- name: Test
- run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' -t 300 --measure-flakiness 9"
+ run: make run-ci -j4 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' -t 300 --measure-flakiness 9"
diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml
index 7c7ebecad8367b..2c2e8060d9847f 100644
--- a/.github/workflows/test-internet.yml
+++ b/.github/workflows/test-internet.yml
@@ -50,6 +50,6 @@ jobs:
- name: Environment Information
run: npx envinfo
- name: Build
- run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn"
+ run: make build-ci -j4 V=1 CONFIG_FLAGS="--error-on-warn"
- name: Test Internet
- run: make test-internet -j2 V=1;
+ run: make test-internet -j4 V=1;
diff --git a/.github/workflows/test-linux.yml b/.github/workflows/test-linux.yml
index 96ed6797d81141..3dd559cff24da0 100644
--- a/.github/workflows/test-linux.yml
+++ b/.github/workflows/test-linux.yml
@@ -44,6 +44,6 @@ jobs:
- name: Environment Information
run: npx envinfo
- name: Build
- run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn"
+ run: make build-ci -j4 V=1 CONFIG_FLAGS="--error-on-warn"
- name: Test
- run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9"
+ run: make run-ci -j4 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9"
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 51e38889abb31d..83e31527eb9d99 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -36,7 +36,9 @@ release.
-21.5.0
+21.6.1
+21.6.0
+21.5.0 21.4.0 21.3.0 21.2.0
diff --git a/README.md b/README.md
index 2325722e35ae12..a5d6d344910e69 100644
--- a/README.md
+++ b/README.md
@@ -190,8 +190,6 @@ For information about the governance of the Node.js project, see
**Moshe Atlow** <> (he/him)
* [RafaelGSS](https://github.com/RafaelGSS) -
**Rafael Gonzaga** <> (he/him)
-* [RaisinTen](https://github.com/RaisinTen) -
- **Darshan Sen** <> (he/him)
* [richardlau](https://github.com/richardlau) -
**Richard Lau** <>
* [ronag](https://github.com/ronag) -
@@ -266,6 +264,8 @@ For information about the governance of the Node.js project, see
**Alexis Campailla** <>
* [piscisaureus](https://github.com/piscisaureus) -
**Bert Belder** <>
+* [RaisinTen](https://github.com/RaisinTen) -
+ **Darshan Sen** <> (he/him)
* [sam-github](https://github.com/sam-github) -
**Sam Roberts** <>
* [shigeki](https://github.com/shigeki) -
@@ -389,6 +389,8 @@ For information about the governance of the Node.js project, see
**Keyhan Vakil** <>
* [legendecas](https://github.com/legendecas) -
**Chengzhong Wu** <> (he/him)
+* [lemire](https://github.com/lemire) -
+ **Daniel Lemire** <>
* [linkgoron](https://github.com/linkgoron) -
**Nitzan Uziely** <>
* [LiviaMedeiros](https://github.com/LiviaMedeiros) -
@@ -425,16 +427,12 @@ For information about the governance of the Node.js project, see
**Myles Borins** <> (he/him)
* [ovflowd](https://github.com/ovflowd) -
**Claudio Wunder** <> (he/they)
-* [oyyd](https://github.com/oyyd) -
- **Ouyang Yadong** <> (he/him)
* [panva](https://github.com/panva) -
**Filip Skokan** <> (he/him)
* [Qard](https://github.com/Qard) -
**Stephen Belanger** <> (he/him)
* [RafaelGSS](https://github.com/RafaelGSS) -
**Rafael Gonzaga** <> (he/him)
-* [RaisinTen](https://github.com/RaisinTen) -
- **Darshan Sen** <> (he/him)
* [rluvaton](https://github.com/rluvaton) -
**Raz Luvaton** <> (he/him)
* [richardlau](https://github.com/richardlau) -
@@ -623,6 +621,8 @@ For information about the governance of the Node.js project, see
**Alexis Campailla** <>
* [othiym23](https://github.com/othiym23) -
**Forrest L Norvell** <> (they/them/themself)
+* [oyyd](https://github.com/oyyd) -
+ **Ouyang Yadong** <> (he/him)
* [petkaantonov](https://github.com/petkaantonov) -
**Petka Antonov** <>
* [phillipj](https://github.com/phillipj) -
@@ -639,6 +639,8 @@ For information about the governance of the Node.js project, see
**Peter Marshall** <> (he/him)
* [puzpuzpuz](https://github.com/puzpuzpuz) -
**Andrey Pechkurov** <> (he/him)
+* [RaisinTen](https://github.com/RaisinTen) -
+ **Darshan Sen** <> (he/him)
* [refack](https://github.com/refack) -
**Refael Ackermann (רפאל פלחי)** <> (he/him/הוא/אתה)
* [rexagod](https://github.com/rexagod) -
@@ -737,8 +739,6 @@ maintaining the Node.js project.
**Preveen Padmanabhan** <> (he/him)
* [PoojaDurgad](https://github.com/PoojaDurgad) -
**Pooja Durgad** <>
-* [RaisinTen](https://github.com/RaisinTen) -
- **Darshan Sen** <>
* [VoltrexKeyva](https://github.com/VoltrexKeyva) -
**Mohammed Keyvanzadeh** <> (he/him)
diff --git a/benchmark/url/url-searchparams-append.js b/benchmark/url/url-searchparams-append.js
new file mode 100644
index 00000000000000..cd8099b517c6f7
--- /dev/null
+++ b/benchmark/url/url-searchparams-append.js
@@ -0,0 +1,19 @@
+'use strict';
+const common = require('../common.js');
+
+const bench = common.createBenchmark(main, {
+ type: ['URL', 'URLSearchParams'],
+ n: [1e3, 1e6],
+});
+
+function main({ type, n }) {
+ const params = type === 'URL' ?
+ new URL('https://nodejs.org').searchParams :
+ new URLSearchParams();
+
+ bench.start();
+ for (let i = 0; i < n; i++) {
+ params.append('test', i);
+ }
+ bench.end(n);
+}
diff --git a/benchmark/url/url-searchparams-update.js b/benchmark/url/url-searchparams-update.js
new file mode 100644
index 00000000000000..082d476a5d2250
--- /dev/null
+++ b/benchmark/url/url-searchparams-update.js
@@ -0,0 +1,29 @@
+'use strict';
+const common = require('../common.js');
+const assert = require('assert');
+
+const bench = common.createBenchmark(main, {
+ searchParams: ['true', 'false'],
+ property: ['pathname', 'search', 'hash'],
+ n: [1e6],
+});
+
+function getMethod(url, property) {
+ if (property === 'pathname') return (x) => url.pathname = `/${x}`;
+ if (property === 'search') return (x) => url.search = `?${x}`;
+ if (property === 'hash') return (x) => url.hash = `#${x}`;
+ throw new Error(`Unsupported property "${property}"`);
+}
+
+function main({ searchParams, property, n }) {
+ const url = new URL('https://nodejs.org');
+ if (searchParams === 'true') assert(url.searchParams);
+
+ const method = getMethod(url, property);
+
+ bench.start();
+ for (let i = 0; i < n; i++) {
+ method(i);
+ }
+ bench.end(n);
+}
diff --git a/deps/acorn/acorn-walk/dist/walk.d.mts b/deps/acorn/acorn-walk/dist/walk.d.mts
index 7bc8c9790fd1a7..e07a6afaf8e336 100644
--- a/deps/acorn/acorn-walk/dist/walk.d.mts
+++ b/deps/acorn/acorn-walk/dist/walk.d.mts
@@ -66,10 +66,10 @@ export function simple(
/**
* does a 'simple' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter.
- * @param node
- * @param visitors
- * @param base
- * @param state
+ * @param node
+ * @param visitors
+ * @param base
+ * @param state
*/
export function ancestor(
node: acorn.Node,
@@ -79,8 +79,8 @@ export function ancestor(
): void
/**
- * does a 'recursive' walk, where the walker functions are responsible for continuing the walk on the child nodes of their target node.
- * @param node
+ * does a 'recursive' walk, where the walker functions are responsible for continuing the walk on the child nodes of their target node.
+ * @param node
* @param state the start state
* @param functions contain an object that maps node types to walker functions
* @param base provides the fallback walker functions for node types that aren't handled in the {@link functions} object. If not given, the default walkers will be used.
@@ -94,10 +94,10 @@ export function recursive(
/**
* does a 'full' walk over a tree, calling the {@link callback} with the arguments (node, state, type) for each node
- * @param node
- * @param callback
- * @param base
- * @param state
+ * @param node
+ * @param callback
+ * @param base
+ * @param state
*/
export function full(
node: acorn.Node,
@@ -108,10 +108,10 @@ export function full(
/**
* does a 'full' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter.
- * @param node
- * @param callback
- * @param base
- * @param state
+ * @param node
+ * @param callback
+ * @param base
+ * @param state
*/
export function fullAncestor(
node: acorn.Node,
@@ -122,8 +122,8 @@ export function fullAncestor(
/**
* builds a new walker object by using the walker functions in {@link functions} and filling in the missing ones by taking defaults from {@link base}.
- * @param functions
- * @param base
+ * @param functions
+ * @param base
*/
export function make(
functions: RecursiveVisitors,
@@ -132,12 +132,12 @@ export function make(
/**
* tries to locate a node in a tree at the given start and/or end offsets, which satisfies the predicate test. {@link start} and {@link end} can be either `null` (as wildcard) or a `number`. {@link test} may be a string (indicating a node type) or a function that takes (nodeType, node) arguments and returns a boolean indicating whether this node is interesting. {@link base} and {@link state} are optional, and can be used to specify a custom walker. Nodes are tested from inner to outer, so if two nodes match the boundaries, the inner one will be preferred.
- * @param node
- * @param start
- * @param end
- * @param type
- * @param base
- * @param state
+ * @param node
+ * @param start
+ * @param end
+ * @param type
+ * @param base
+ * @param state
*/
export function findNodeAt(
node: acorn.Node,
@@ -150,11 +150,11 @@ export function findNodeAt(
/**
* like {@link findNodeAt}, but will match any node that exists 'around' (spanning) the given position.
- * @param node
- * @param start
- * @param type
- * @param base
- * @param state
+ * @param node
+ * @param start
+ * @param type
+ * @param base
+ * @param state
*/
export function findNodeAround(
node: acorn.Node,
@@ -165,8 +165,13 @@ export function findNodeAround(
): Found | undefined
/**
- * similar to {@link findNodeAround}, but will match all nodes after the given position (testing outer nodes before inner nodes).
+ * Find the outermost matching node after a given position.
*/
export const findNodeAfter: typeof findNodeAround
+/**
+ * Find the outermost matching node before a given position.
+ */
+export const findNodeBefore: typeof findNodeAround
+
export const base: RecursiveVisitors
diff --git a/deps/acorn/acorn-walk/dist/walk.d.ts b/deps/acorn/acorn-walk/dist/walk.d.ts
index 7bc8c9790fd1a7..e07a6afaf8e336 100644
--- a/deps/acorn/acorn-walk/dist/walk.d.ts
+++ b/deps/acorn/acorn-walk/dist/walk.d.ts
@@ -66,10 +66,10 @@ export function simple(
/**
* does a 'simple' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter.
- * @param node
- * @param visitors
- * @param base
- * @param state
+ * @param node
+ * @param visitors
+ * @param base
+ * @param state
*/
export function ancestor(
node: acorn.Node,
@@ -79,8 +79,8 @@ export function ancestor(
): void
/**
- * does a 'recursive' walk, where the walker functions are responsible for continuing the walk on the child nodes of their target node.
- * @param node
+ * does a 'recursive' walk, where the walker functions are responsible for continuing the walk on the child nodes of their target node.
+ * @param node
* @param state the start state
* @param functions contain an object that maps node types to walker functions
* @param base provides the fallback walker functions for node types that aren't handled in the {@link functions} object. If not given, the default walkers will be used.
@@ -94,10 +94,10 @@ export function recursive(
/**
* does a 'full' walk over a tree, calling the {@link callback} with the arguments (node, state, type) for each node
- * @param node
- * @param callback
- * @param base
- * @param state
+ * @param node
+ * @param callback
+ * @param base
+ * @param state
*/
export function full(
node: acorn.Node,
@@ -108,10 +108,10 @@ export function full(
/**
* does a 'full' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter.
- * @param node
- * @param callback
- * @param base
- * @param state
+ * @param node
+ * @param callback
+ * @param base
+ * @param state
*/
export function fullAncestor(
node: acorn.Node,
@@ -122,8 +122,8 @@ export function fullAncestor(
/**
* builds a new walker object by using the walker functions in {@link functions} and filling in the missing ones by taking defaults from {@link base}.
- * @param functions
- * @param base
+ * @param functions
+ * @param base
*/
export function make(
functions: RecursiveVisitors,
@@ -132,12 +132,12 @@ export function make(
/**
* tries to locate a node in a tree at the given start and/or end offsets, which satisfies the predicate test. {@link start} and {@link end} can be either `null` (as wildcard) or a `number`. {@link test} may be a string (indicating a node type) or a function that takes (nodeType, node) arguments and returns a boolean indicating whether this node is interesting. {@link base} and {@link state} are optional, and can be used to specify a custom walker. Nodes are tested from inner to outer, so if two nodes match the boundaries, the inner one will be preferred.
- * @param node
- * @param start
- * @param end
- * @param type
- * @param base
- * @param state
+ * @param node
+ * @param start
+ * @param end
+ * @param type
+ * @param base
+ * @param state
*/
export function findNodeAt(
node: acorn.Node,
@@ -150,11 +150,11 @@ export function findNodeAt(
/**
* like {@link findNodeAt}, but will match any node that exists 'around' (spanning) the given position.
- * @param node
- * @param start
- * @param type
- * @param base
- * @param state
+ * @param node
+ * @param start
+ * @param type
+ * @param base
+ * @param state
*/
export function findNodeAround(
node: acorn.Node,
@@ -165,8 +165,13 @@ export function findNodeAround(
): Found | undefined
/**
- * similar to {@link findNodeAround}, but will match all nodes after the given position (testing outer nodes before inner nodes).
+ * Find the outermost matching node after a given position.
*/
export const findNodeAfter: typeof findNodeAround
+/**
+ * Find the outermost matching node before a given position.
+ */
+export const findNodeBefore: typeof findNodeAround
+
export const base: RecursiveVisitors
diff --git a/deps/acorn/acorn-walk/package.json b/deps/acorn/acorn-walk/package.json
index 393c87a39255a0..9d3b7e5248fb83 100644
--- a/deps/acorn/acorn-walk/package.json
+++ b/deps/acorn/acorn-walk/package.json
@@ -16,7 +16,7 @@
],
"./package.json": "./package.json"
},
- "version": "8.3.1",
+ "version": "8.3.2",
"engines": {
"node": ">=0.4.0"
},
diff --git a/deps/base64/base64/CMakeLists.txt b/deps/base64/base64/CMakeLists.txt
index be1de665a2cd59..ff9f6f21e1ee28 100644
--- a/deps/base64/base64/CMakeLists.txt
+++ b/deps/base64/base64/CMakeLists.txt
@@ -17,7 +17,7 @@ if (POLICY CMP0127)
cmake_policy(SET CMP0127 NEW)
endif()
-project(base64 LANGUAGES C VERSION 0.5.1)
+project(base64 LANGUAGES C VERSION 0.5.2)
include(GNUInstallDirs)
include(CMakeDependentOption)
diff --git a/deps/base64/base64/Makefile b/deps/base64/base64/Makefile
index bcb944551ae881..bba3fde4dd05bf 100644
--- a/deps/base64/base64/Makefile
+++ b/deps/base64/base64/Makefile
@@ -1,4 +1,4 @@
-CFLAGS += -std=c99 -O3 -Wall -Wextra -pedantic
+CFLAGS += -std=c99 -O3 -Wall -Wextra -pedantic -DBASE64_STATIC_DEFINE
# Set OBJCOPY if not defined by environment:
OBJCOPY ?= objcopy
@@ -56,6 +56,7 @@ ifdef OPENMP
CFLAGS += -fopenmp
endif
+TARGET := $(shell $(CC) -dumpmachine)
.PHONY: all analyze clean
@@ -64,9 +65,17 @@ all: bin/base64 lib/libbase64.o
bin/base64: bin/base64.o lib/libbase64.o
$(CC) $(CFLAGS) -o $@ $^
-lib/libbase64.o: $(OBJS)
- $(LD) -r -o $@ $^
- $(OBJCOPY) --keep-global-symbols=lib/exports.txt $@
+# Workaround: mangle exported function names on MinGW32.
+lib/exports.build.txt: lib/exports.txt
+ifeq (i686-w64-mingw32, $(TARGET))
+ sed -e 's/^/_/' $< > $@
+else
+ cp -f $< $@
+endif
+
+lib/libbase64.o: lib/exports.build.txt $(OBJS)
+ $(LD) -r -o $@ $(OBJS)
+ $(OBJCOPY) --keep-global-symbols=$< $@
lib/config.h:
@echo "#define HAVE_AVX512 $(HAVE_AVX512)" > $@
@@ -97,4 +106,4 @@ analyze: clean
scan-build --use-analyzer=`which clang` --status-bugs make
clean:
- rm -f bin/base64 bin/base64.o lib/libbase64.o lib/config.h $(OBJS)
+ rm -f bin/base64 bin/base64.o lib/libbase64.o lib/config.h lib/exports.build.txt $(OBJS)
diff --git a/deps/base64/base64/bin/base64.c b/deps/base64/base64/bin/base64.c
index 98d6b3cbab560c..0e32ed03762df7 100644
--- a/deps/base64/base64/bin/base64.c
+++ b/deps/base64/base64/bin/base64.c
@@ -1,4 +1,19 @@
-#define _XOPEN_SOURCE // IOV_MAX
+// Test for MinGW.
+#if defined(__MINGW32__) || defined(__MINGW64__)
+# define MINGW
+#endif
+
+// Decide if the writev(2) system call needs to be emulated as a series of
+// write(2) calls. At least MinGW does not support writev(2).
+#ifdef MINGW
+# define EMULATE_WRITEV
+#endif
+
+// Include the necessary system header when using the system's writev(2).
+#ifndef EMULATE_WRITEV
+# define _XOPEN_SOURCE // Unlock IOV_MAX
+# include
+#endif
#include
#include
@@ -8,7 +23,7 @@
#include
#include
#include
-#include
+
#include "../include/libbase64.h"
// Size of the buffer for the "raw" (not base64-encoded) data in bytes.
@@ -50,6 +65,59 @@ struct buffer {
char *enc;
};
+// Optionally emulate writev(2) as a series of write calls.
+#ifdef EMULATE_WRITEV
+
+// Quick and dirty definition of IOV_MAX as it is probably not defined.
+#ifndef IOV_MAX
+# define IOV_MAX 1024
+#endif
+
+// Quick and dirty definition of this system struct, for local use only.
+struct iovec {
+
+ // Opaque data pointer.
+ void *iov_base;
+
+ // Length of the data in bytes.
+ size_t iov_len;
+};
+
+static ssize_t
+writev (const int fd, const struct iovec *iov, int iovcnt)
+{
+ ssize_t r, nwrite = 0;
+
+ // Reset the error marker.
+ errno = 0;
+
+ while (iovcnt-- > 0) {
+
+ // Write the vector; propagate errors back to the caller. Note
+ // that this loses information about how much vectors have been
+ // successfully written, but that also seems to be the case
+ // with the real function. The API is somewhat flawed.
+ if ((r = write(fd, iov->iov_base, iov->iov_len)) < 0) {
+ return r;
+ }
+
+ // Update the total write count.
+ nwrite += r;
+
+ // Return early after a partial write; the caller should retry.
+ if ((size_t) r != iov->iov_len) {
+ break;
+ }
+
+ // Move to the next vector.
+ iov++;
+ }
+
+ return nwrite;
+}
+
+#endif // EMULATE_WRITEV
+
static bool
buffer_alloc (const struct config *config, struct buffer *buf)
{
@@ -272,10 +340,23 @@ encode (const struct config *config, struct buffer *buf)
return true;
}
-static int
+static inline size_t
+find_newline (const char *p, const size_t avail)
+{
+ // This is very naive and can probably be improved by vectorization.
+ for (size_t len = 0; len < avail; len++) {
+ if (p[len] == '\n') {
+ return len;
+ }
+ }
+
+ return avail;
+}
+
+static bool
decode (const struct config *config, struct buffer *buf)
{
- size_t nread, nout;
+ size_t avail;
struct base64_state state;
// Initialize the decoder's state structure.
@@ -283,18 +364,51 @@ decode (const struct config *config, struct buffer *buf)
// Read encoded data into the buffer. Use the smallest buffer size to
// be on the safe side: the decoded output will fit the raw buffer.
- while ((nread = fread(buf->enc, 1, BUFFER_RAW_SIZE, config->fp)) > 0) {
+ while ((avail = fread(buf->enc, 1, BUFFER_RAW_SIZE, config->fp)) > 0) {
+ char *start = buf->enc;
+ char *outbuf = buf->raw;
+ size_t ototal = 0;
+
+ // By popular demand, this utility tries to be bug-compatible
+ // with GNU `base64'. That includes silently ignoring newlines
+ // in the input. Tokenize the input on newline characters.
+ while (avail > 0) {
+
+ // Find the offset of the next newline character, which
+ // is also the length of the next chunk.
+ size_t outlen, len = find_newline(start, avail);
+
+ // Ignore empty chunks.
+ if (len == 0) {
+ start++;
+ avail--;
+ continue;
+ }
- // Decode the input into the raw buffer.
- if (base64_stream_decode(&state, buf->enc, nread,
- buf->raw, &nout) == 0) {
- fprintf(stderr, "%s: %s: decoding error\n",
- config->name, config->file);
- return false;
+ // Decode the chunk into the raw buffer.
+ if (base64_stream_decode(&state, start, len,
+ outbuf, &outlen) == 0) {
+ fprintf(stderr, "%s: %s: decoding error\n",
+ config->name, config->file);
+ return false;
+ }
+
+ // Update the output buffer pointer and total size.
+ outbuf += outlen;
+ ototal += outlen;
+
+ // Bail out if the whole string has been consumed.
+ if (len == avail) {
+ break;
+ }
+
+ // Move the start pointer past the newline.
+ start += len + 1;
+ avail -= len + 1;
}
// Append the raw data to the output stream.
- if (write_stdout(config, buf->raw, nout) == false) {
+ if (write_stdout(config, buf->raw, ototal) == false) {
return false;
}
}
diff --git a/deps/base64/base64/lib/env.h b/deps/base64/base64/lib/env.h
index d5c2fdb7952735..d489ba54215bbf 100644
--- a/deps/base64/base64/lib/env.h
+++ b/deps/base64/base64/lib/env.h
@@ -1,6 +1,8 @@
#ifndef BASE64_ENV_H
#define BASE64_ENV_H
+#include
+
// This header file contains macro definitions that describe certain aspects of
// the compile-time environment. Compatibility and portability macros go here.
@@ -46,12 +48,10 @@
#if defined (__x86_64__)
// This also works for the x32 ABI, which has a 64-bit word size.
# define BASE64_WORDSIZE 64
-#elif defined (_INTEGRAL_MAX_BITS)
-# define BASE64_WORDSIZE _INTEGRAL_MAX_BITS
-#elif defined (__WORDSIZE)
-# define BASE64_WORDSIZE __WORDSIZE
-#elif defined (__SIZE_WIDTH__)
-# define BASE64_WORDSIZE __SIZE_WIDTH__
+#elif SIZE_MAX == UINT32_MAX
+# define BASE64_WORDSIZE 32
+#elif SIZE_MAX == UINT64_MAX
+# define BASE64_WORDSIZE 64
#else
# error BASE64_WORDSIZE_NOT_DEFINED
#endif
diff --git a/deps/base64/base64/test/CMakeLists.txt b/deps/base64/base64/test/CMakeLists.txt
index ef8787047b2944..f07b65a00c2cb4 100644
--- a/deps/base64/base64/test/CMakeLists.txt
+++ b/deps/base64/base64/test/CMakeLists.txt
@@ -32,12 +32,10 @@ add_base64_test(test_base64
test_base64.c
)
-if (NOT WIN32)
- add_base64_test(benchmark
- codec_supported.c
- benchmark.c
- )
-endif()
+add_base64_test(benchmark
+ codec_supported.c
+ benchmark.c
+)
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
target_link_libraries(benchmark PRIVATE rt)
diff --git a/deps/base64/base64/test/Makefile b/deps/base64/base64/test/Makefile
index c896627e0bd8d6..7ecb893a6363b9 100644
--- a/deps/base64/base64/test/Makefile
+++ b/deps/base64/base64/test/Makefile
@@ -1,4 +1,4 @@
-CFLAGS += -std=c99 -O3 -Wall -Wextra -pedantic
+CFLAGS += -std=c99 -O3 -Wall -Wextra -pedantic -DBASE64_STATIC_DEFINE
ifdef OPENMP
CFLAGS += -fopenmp
endif
@@ -6,6 +6,8 @@ endif
TARGET := $(shell $(CC) -dumpmachine)
ifneq (, $(findstring darwin, $(TARGET)))
BENCH_LDFLAGS=
+else ifneq (, $(findstring mingw, $(TARGET)))
+ BENCH_LDFLAGS=
else
# default to linux, -lrt needed
BENCH_LDFLAGS=-lrt
diff --git a/deps/base64/base64/test/benchmark.c b/deps/base64/base64/test/benchmark.c
index 80d21a389cb98c..e78b696bedb6b3 100644
--- a/deps/base64/base64/test/benchmark.c
+++ b/deps/base64/base64/test/benchmark.c
@@ -8,17 +8,25 @@
#define _XOPEN_SOURCE 600
#endif
+// Standard cross-platform includes.
#include
-#include
-#include
-#include
-#include
#include
#include
-#include
-#ifdef __MACH__
-#include
+// Platform-specific includes.
+#if defined(_WIN32) || defined(_WIN64)
+# include
+# include
+#else
+# include
+# include
+# include
+# include
+# include
+#endif
+
+#if defined(__MACH__)
+# include
#endif
#include "../include/libbase64.h"
@@ -60,6 +68,27 @@ bytes_to_mb (size_t bytes)
static bool
get_random_data (struct buffers *b, char **errmsg)
{
+#if defined(_WIN32) || defined(_WIN64)
+ HCRYPTPROV hProvider = 0;
+
+ if (!CryptAcquireContext(&hProvider, 0, 0, PROV_RSA_FULL, CRYPT_VERIFYCONTEXT | CRYPT_SILENT)) {
+ *errmsg = "Error: CryptAcquireContext";
+ return false;
+ }
+
+ if (!CryptGenRandom(hProvider, b->regsz, b->reg)) {
+ CryptReleaseContext(hProvider, 0);
+ *errmsg = "Error: CryptGenRandom";
+ return false;
+ }
+
+ if (!CryptReleaseContext(hProvider, 0)) {
+ *errmsg = "Error: CryptReleaseContext";
+ return false;
+ }
+
+ return true;
+#else
int fd;
ssize_t nread;
size_t total_read = 0;
@@ -80,16 +109,19 @@ get_random_data (struct buffers *b, char **errmsg)
}
total_read += nread;
}
+
close(fd);
return true;
+#endif
}
-#ifdef __MACH__
+#if defined(__MACH__)
typedef uint64_t base64_timespec;
+
static void
-base64_gettime (base64_timespec * o_time)
+base64_gettime (base64_timespec *t)
{
- *o_time = mach_absolute_time();
+ *t = mach_absolute_time();
}
static float
@@ -101,18 +133,39 @@ timediff_sec (base64_timespec *start, base64_timespec *end)
return (float)((diff * tb.numer) / tb.denom) / 1e9f;
}
+#elif defined(_WIN32) || defined(_WIN64)
+typedef ULARGE_INTEGER base64_timespec;
+
+static void
+base64_gettime (base64_timespec *t)
+{
+ FILETIME current_time_ft;
+
+ GetSystemTimePreciseAsFileTime(¤t_time_ft);
+
+ t->LowPart = current_time_ft.dwLowDateTime;
+ t->HighPart = current_time_ft.dwHighDateTime;
+}
+
+static float
+timediff_sec (base64_timespec *start, base64_timespec *end)
+{
+ // Timer resolution is 100 nanoseconds (10^-7 sec).
+ return (end->QuadPart - start->QuadPart) / 1e7f;
+}
#else
typedef struct timespec base64_timespec;
+
static void
-base64_gettime (base64_timespec * o_time)
+base64_gettime (base64_timespec *t)
{
- clock_gettime(CLOCK_REALTIME, o_time);
+ clock_gettime(CLOCK_REALTIME, t);
}
static float
timediff_sec (base64_timespec *start, base64_timespec *end)
{
- return (end->tv_sec - start->tv_sec) + ((float)(end->tv_nsec - start->tv_nsec)) / 1e9f;
+ return (end->tv_sec - start->tv_sec) + (end->tv_nsec - start->tv_nsec) / 1e9f;
}
#endif
diff --git a/deps/cares/cares.gyp b/deps/cares/cares.gyp
index 587cf7f3d626d5..6963a1c9479f29 100644
--- a/deps/cares/cares.gyp
+++ b/deps/cares/cares.gyp
@@ -181,7 +181,7 @@
}],
[ 'OS not in "win android"', {
'cflags': [
- '--std=gnu89'
+ '--std=gnu11'
],
}],
[ 'OS=="linux"', {
diff --git a/deps/icu-small/source/data/in/icudt74l.dat.bz2 b/deps/icu-small/source/data/in/icudt74l.dat.bz2
index f52401bb1a5d7e..e299a5c8abfc02 100644
Binary files a/deps/icu-small/source/data/in/icudt74l.dat.bz2 and b/deps/icu-small/source/data/in/icudt74l.dat.bz2 differ
diff --git a/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/ngtcp2.h b/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/ngtcp2.h
index a8d4b4afd3a470..f16d15cb39bb52 100644
--- a/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/ngtcp2.h
+++ b/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/ngtcp2.h
@@ -4269,9 +4269,6 @@ NGTCP2_EXTERN int ngtcp2_conn_open_uni_stream(ngtcp2_conn *conn,
*
* |flags| is currently unused, and should be set to 0.
*
- * This function returns 0 if a stream denoted by |stream_id| is not
- * found.
- *
* This function returns 0 if it succeeds, or one of the following
* negative error codes:
*
@@ -4294,9 +4291,6 @@ NGTCP2_EXTERN int ngtcp2_conn_shutdown_stream(ngtcp2_conn *conn, uint32_t flags,
*
* |flags| is currently unused, and should be set to 0.
*
- * This function returns 0 if a stream denoted by |stream_id| is not
- * found.
- *
* This function returns 0 if it succeeds, or one of the following
* negative error codes:
*
@@ -4321,9 +4315,6 @@ NGTCP2_EXTERN int ngtcp2_conn_shutdown_stream_write(ngtcp2_conn *conn,
*
* |flags| is currently unused, and should be set to 0.
*
- * This function returns 0 if a stream denoted by |stream_id| is not
- * found.
- *
* This function returns 0 if it succeeds, or one of the following
* negative error codes:
*
@@ -4684,9 +4675,6 @@ NGTCP2_EXTERN int ngtcp2_conn_in_draining_period(ngtcp2_conn *conn);
* specifies the stream ID. This function only extends stream-level
* flow control window.
*
- * This function returns 0 if a stream denoted by |stream_id| is not
- * found.
- *
* This function returns 0 if it succeeds, or one of the following
* negative error codes:
*
diff --git a/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/version.h b/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/version.h
index 9f7592b84a4585..66a70ffe962964 100644
--- a/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/version.h
+++ b/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/version.h
@@ -36,7 +36,7 @@
*
* Version number of the ngtcp2 library release.
*/
-#define NGTCP2_VERSION "0.8.1"
+#define NGTCP2_VERSION "1.1.0"
/**
* @macro
@@ -46,6 +46,6 @@
* number, 8 bits for minor and 8 bits for patch. Version 1.2.3
* becomes 0x010203.
*/
-#define NGTCP2_VERSION_NUM 0x000801
+#define NGTCP2_VERSION_NUM 0x010100
#endif /* VERSION_H */
diff --git a/deps/npm/docs/README.md b/deps/npm/docs/README.md
deleted file mode 100644
index 5fc7ccf6cd60ac..00000000000000
--- a/deps/npm/docs/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# docs
-
-[![CI - docs](https://github.com/npm/cli/actions/workflows/ci-docs.yml/badge.svg)](https://github.com/npm/cli/actions/workflows/ci-docs.yml)
-
-Scripts to build the npm docs.
diff --git a/deps/npm/docs/content/commands/npm-install-test.md b/deps/npm/docs/content/commands/npm-install-test.md
index 3b94ea27763f8f..b73614de2d4f42 100644
--- a/deps/npm/docs/content/commands/npm-install-test.md
+++ b/deps/npm/docs/content/commands/npm-install-test.md
@@ -290,6 +290,16 @@ field of package.json, which comes from `process.platform`.
+#### `libc`
+
+* Default: null
+* Type: null or String
+
+Override libc of native modules to install. Acceptable values are same as
+`libc` field of package.json
+
+
+
#### `workspace`
* Default:
diff --git a/deps/npm/docs/content/commands/npm-install.md b/deps/npm/docs/content/commands/npm-install.md
index 738ca3372c8e9f..36496954270b30 100644
--- a/deps/npm/docs/content/commands/npm-install.md
+++ b/deps/npm/docs/content/commands/npm-install.md
@@ -680,6 +680,16 @@ field of package.json, which comes from `process.platform`.
+#### `libc`
+
+* Default: null
+* Type: null or String
+
+Override libc of native modules to install. Acceptable values are same as
+`libc` field of package.json
+
+
+
#### `workspace`
* Default:
diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md
index 4ad025dbb5b08e..a21d14e577df61 100644
--- a/deps/npm/docs/content/commands/npm-ls.md
+++ b/deps/npm/docs/content/commands/npm-ls.md
@@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages. For
example, running `npm ls promzard` in npm's source tree will show:
```bash
-npm@10.2.4 /path/to/npm
+npm@10.3.0 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
```
diff --git a/deps/npm/docs/content/commands/npm-publish.md b/deps/npm/docs/content/commands/npm-publish.md
index 0e18cddf8b36d4..a9c368e218543f 100644
--- a/deps/npm/docs/content/commands/npm-publish.md
+++ b/deps/npm/docs/content/commands/npm-publish.md
@@ -22,7 +22,7 @@ scope-configured registry (see
A `package` is interpreted the same way as other commands (like
-`npm install` and can be:
+`npm install`) and can be:
* a) a folder containing a program described by a
[`package.json`](/configuring-npm/package-json) file
diff --git a/deps/npm/docs/content/commands/npm-sbom.md b/deps/npm/docs/content/commands/npm-sbom.md
index ee0d60c6fde790..6e8033b96aedc7 100644
--- a/deps/npm/docs/content/commands/npm-sbom.md
+++ b/deps/npm/docs/content/commands/npm-sbom.md
@@ -266,7 +266,7 @@ SBOM format to use when generating SBOMs.
* Type: "library", "application", or "framework"
The type of package described by the generated SBOM. For SPDX, this is the
-value for the `primaryPackagePurpose` fieled. For CycloneDX, this is the
+value for the `primaryPackagePurpose` field. For CycloneDX, this is the
value for the `type` field.
diff --git a/deps/npm/docs/content/commands/npm-unpublish.md b/deps/npm/docs/content/commands/npm-unpublish.md
index 8ab976e96cb6c7..2421e102325363 100644
--- a/deps/npm/docs/content/commands/npm-unpublish.md
+++ b/deps/npm/docs/content/commands/npm-unpublish.md
@@ -27,8 +27,12 @@ removing the tarball.
The npm registry will return an error if you are not [logged
in](/commands/npm-adduser).
-If you do not specify a version or if you remove all of a package's
-versions then the registry will remove the root package entry entirely.
+If you do not specify a package name at all, the name and version to be
+unpublished will be pulled from the project in the current directory.
+
+If you specify a package name but do not specify a version or if you
+remove all of a package's versions then the registry will remove the
+root package entry entirely.
Even if you unpublish a package version, that specific name and version
combination can never be reused. In order to publish the package again,
diff --git a/deps/npm/docs/content/commands/npm.md b/deps/npm/docs/content/commands/npm.md
index 1528614f69a69a..d92e83e5ccdd19 100644
--- a/deps/npm/docs/content/commands/npm.md
+++ b/deps/npm/docs/content/commands/npm.md
@@ -14,7 +14,7 @@ Note: This command is unaware of workspaces.
### Version
-10.2.4
+10.3.0
### Description
diff --git a/deps/npm/docs/content/commands/npx.md b/deps/npm/docs/content/commands/npx.md
index 5ce300e724b103..e596baa5da4793 100644
--- a/deps/npm/docs/content/commands/npx.md
+++ b/deps/npm/docs/content/commands/npx.md
@@ -150,7 +150,8 @@ This resulted in some shifts in its functionality:
always present in the executed process `PATH`.
- The `--npm` option is removed. `npx` will always use the `npm` it ships
with.
-- The `--node-arg` and `-n` options are removed.
+- The `--node-arg` and `-n` options have been removed. Use [`NODE_OPTIONS`](https://nodejs.org/api/cli.html#node_optionsoptions) instead: e.g.,
+ `NODE_OPTIONS="--trace-warnings --trace-exit" npx foo --random=true`
- The `--always-spawn` option is redundant, and thus removed.
- The `--shell` option is replaced with `--script-shell`, but maintained
in the `npx` executable for backwards compatibility.
diff --git a/deps/npm/docs/content/configuring-npm/npmrc.md b/deps/npm/docs/content/configuring-npm/npmrc.md
index 8cd532abc1c2db..0aa99fc271013b 100644
--- a/deps/npm/docs/content/configuring-npm/npmrc.md
+++ b/deps/npm/docs/content/configuring-npm/npmrc.md
@@ -19,10 +19,10 @@ For a list of available configuration options, see
The four relevant files are:
-* per-project config file (/path/to/my/project/.npmrc)
-* per-user config file (~/.npmrc)
-* global config file ($PREFIX/etc/npmrc)
-* npm builtin config file (/path/to/npm/npmrc)
+* per-project config file (`/path/to/my/project/.npmrc`)
+* per-user config file (`~/.npmrc`)
+* global config file (`$PREFIX/etc/npmrc`)
+* npm builtin config file (`/path/to/npm/npmrc`)
All npm config files are an ini-formatted list of `key = value` parameters.
Environment variables can be replaced using `${VARIABLE_NAME}`. For
diff --git a/deps/npm/docs/content/configuring-npm/package-json.md b/deps/npm/docs/content/configuring-npm/package-json.md
index 630ad453196a0a..2ef888fe1a4d64 100644
--- a/deps/npm/docs/content/configuring-npm/package-json.md
+++ b/deps/npm/docs/content/configuring-npm/package-json.md
@@ -291,25 +291,39 @@ Certain files are always included, regardless of settings:
`README` & `LICENSE` can have any case and extension.
-Conversely, some files are always ignored:
+Some files are always ignored by default:
+* `*.orig`
+* `.*.swp`
+* `.DS_Store`
+* `._*`
* `.git`
-* `CVS`
-* `.svn`
* `.hg`
* `.lock-wscript`
+* `.npmrc`
+* `.svn`
* `.wafpickle-N`
-* `.*.swp`
-* `.DS_Store`
-* `._*`
+* `CVS`
+* `config.gypi`
+* `node_modules`
* `npm-debug.log`
+* `package-lock.json` (use
+ [`npm-shrinkwrap.json`](/configuring-npm/npm-shrinkwrap-json)
+ if you wish it to be published)
+* `pnpm-lock.yaml`
+* `yarn.lock`
+
+Most of these ignored files can be included specifically if included in
+the `files` globs. Exceptions to this are:
+
+* `.git`
* `.npmrc`
* `node_modules`
-* `config.gypi`
-* `*.orig`
-* `package-lock.json` (use
- [`npm-shrinkwrap.json`](/configuring-npm/npm-shrinkwrap-json) if you wish
- it to be published)
+* `package-lock.json`
+* `pnpm-lock.yaml`
+* `yarn.lock`
+
+These can not be included.
### main
diff --git a/deps/npm/docs/content/using-npm/config.md b/deps/npm/docs/content/using-npm/config.md
index 80969ee23e5355..93c820ab3b45ad 100644
--- a/deps/npm/docs/content/using-npm/config.md
+++ b/deps/npm/docs/content/using-npm/config.md
@@ -855,6 +855,16 @@ Use of `legacy-peer-deps` is not recommended, as it will not enforce the
+#### `libc`
+
+* Default: null
+* Type: null or String
+
+Override libc of native modules to install. Acceptable values are same as
+`libc` field of package.json
+
+
+
#### `link`
* Default: false
@@ -1373,7 +1383,7 @@ SBOM format to use when generating SBOMs.
* Type: "library", "application", or "framework"
The type of package described by the generated SBOM. For SPDX, this is the
-value for the `primaryPackagePurpose` fieled. For CycloneDX, this is the
+value for the `primaryPackagePurpose` field. For CycloneDX, this is the
value for the `type` field.
diff --git a/deps/npm/docs/lib/index.js b/deps/npm/docs/lib/index.js
new file mode 100644
index 00000000000000..5d4ae7af3457bb
--- /dev/null
+++ b/deps/npm/docs/lib/index.js
@@ -0,0 +1,189 @@
+const localeCompare = require('@isaacs/string-locale-compare')('en')
+const { join, basename, resolve } = require('path')
+const transformHTML = require('./transform-html.js')
+const { version } = require('../../lib/npm.js')
+const { aliases } = require('../../lib/utils/cmd-list')
+const { shorthands, definitions } = require('@npmcli/config/lib/definitions')
+
+const DOC_EXT = '.md'
+
+const TAGS = {
+ CONFIG: '',
+ USAGE: '',
+ SHORTHANDS: '',
+}
+
+const assertPlaceholder = (src, path, placeholder) => {
+ if (!src.includes(placeholder)) {
+ throw new Error(
+ `Cannot replace ${placeholder} in ${path} due to missing placeholder`
+ )
+ }
+ return placeholder
+}
+
+const getCommandByDoc = (docFile, docExt) => {
+ // Grab the command name from the *.md filename
+ // NOTE: We cannot use the name property command file because in the case of
+ // `npx` the file being used is `lib/commands/exec.js`
+ const name = basename(docFile, docExt).replace('npm-', '')
+
+ if (name === 'npm') {
+ return {
+ name,
+ params: null,
+ usage: 'npm',
+ }
+ }
+
+ // special case for `npx`:
+ // `npx` is not technically a command in and of itself,
+  // so it just needs the usage of npm exec
+ const srcName = name === 'npx' ? 'exec' : name
+ const { params, usage = [''], workspaces } = require(`../../lib/commands/${srcName}`)
+ const usagePrefix = name === 'npx' ? 'npx' : `npm ${name}`
+ if (params) {
+ for (const param of params) {
+ if (definitions[param].exclusive) {
+ for (const e of definitions[param].exclusive) {
+ if (!params.includes(e)) {
+ params.splice(params.indexOf(param) + 1, 0, e)
+ }
+ }
+ }
+ }
+ }
+
+ return {
+ name,
+ workspaces,
+ params: name === 'npx' ? null : params,
+ usage: usage.map(u => `${usagePrefix} ${u}`.trim()).join('\n'),
+ }
+}
+
+const replaceVersion = (src) => src.replace(/@VERSION@/g, version)
+
+const replaceUsage = (src, { path }) => {
+ const replacer = assertPlaceholder(src, path, TAGS.USAGE)
+ const { usage, name, workspaces } = getCommandByDoc(path, DOC_EXT)
+
+ const synopsis = ['```bash', usage]
+
+ const cmdAliases = Object.keys(aliases).reduce((p, c) => {
+ if (aliases[c] === name) {
+ p.push(c)
+ }
+ return p
+ }, [])
+
+ if (cmdAliases.length === 1) {
+ synopsis.push('', `alias: ${cmdAliases[0]}`)
+ } else if (cmdAliases.length > 1) {
+ synopsis.push('', `aliases: ${cmdAliases.join(', ')}`)
+ }
+
+ synopsis.push('```')
+
+ if (!workspaces) {
+ synopsis.push('', 'Note: This command is unaware of workspaces.')
+ }
+
+ return src.replace(replacer, synopsis.join('\n'))
+}
+
+const replaceParams = (src, { path }) => {
+ const { params } = getCommandByDoc(path, DOC_EXT)
+ const replacer = params && assertPlaceholder(src, path, TAGS.CONFIG)
+
+ if (!params) {
+ return src
+ }
+
+ const paramsConfig = params.map((n) => definitions[n].describe())
+
+ return src.replace(replacer, paramsConfig.join('\n\n'))
+}
+
+const replaceConfig = (src, { path }) => {
+ const replacer = assertPlaceholder(src, path, TAGS.CONFIG)
+
+ // sort not-deprecated ones to the top
+ /* istanbul ignore next - typically already sorted in the definitions file,
+ * but this is here so that our help doc will stay consistent if we decide
+ * to move them around. */
+ const sort = ([keya, { deprecated: depa }], [keyb, { deprecated: depb }]) => {
+ return depa && !depb ? 1
+ : !depa && depb ? -1
+ : localeCompare(keya, keyb)
+ }
+
+ const allConfig = Object.entries(definitions).sort(sort)
+ .map(([_, def]) => def.describe())
+ .join('\n\n')
+
+ return src.replace(replacer, allConfig)
+}
+
+const replaceShorthands = (src, { path }) => {
+ const replacer = assertPlaceholder(src, path, TAGS.SHORTHANDS)
+
+ const sh = Object.entries(shorthands)
+ .sort(([shorta, expansiona], [shortb, expansionb]) =>
+ // sort by what they're short FOR
+ localeCompare(expansiona.join(' '), expansionb.join(' ')) || localeCompare(shorta, shortb)
+ )
+ .map(([short, expansion]) => {
+ // XXX: this is incorrect. we have multicharacter flags like `-iwr` that
+ // can only be set with a single dash
+ const dash = short.length === 1 ? '-' : '--'
+ return `* \`${dash}${short}\`: \`${expansion.join(' ')}\``
+ })
+
+ return src.replace(replacer, sh.join('\n'))
+}
+
+const replaceHelpLinks = (src) => {
+ // replaces markdown links with equivalent-ish npm help commands
+ return src.replace(
+ /\[`?([\w\s-]+)`?\]\(\/(?:commands|configuring-npm|using-npm)\/(?:[\w\s-]+)\)/g,
+ (_, p1) => {
+ const term = p1.replace(/npm\s/g, '').replace(/\s+/g, ' ').trim()
+ const help = `npm help ${term.includes(' ') ? `"${term}"` : term}`
+ return help
+ }
+ )
+}
+
+const transformMan = (src, { data, unified, remarkParse, remarkMan }) => unified()
+ .use(remarkParse)
+ .use(remarkMan)
+ .processSync(`# ${data.title}(${data.section}) - ${data.description}\n\n${src}`)
+ .toString()
+
+const manPath = (name, { data }) => join(`man${data.section}`, `${name}.${data.section}`)
+
+const transformMd = (src, { frontmatter }) => ['---', frontmatter, '---', '', src].join('\n')
+
+module.exports = {
+ DOC_EXT,
+ TAGS,
+ paths: {
+ content: resolve(__dirname, 'content'),
+ nav: resolve(__dirname, 'content', 'nav.yml'),
+ template: resolve(__dirname, 'template.html'),
+ man: resolve(__dirname, '..', '..', 'man'),
+ html: resolve(__dirname, '..', 'output'),
+ md: resolve(__dirname, '..', 'content'),
+ },
+ usage: replaceUsage,
+ params: replaceParams,
+ config: replaceConfig,
+ shorthands: replaceShorthands,
+ version: replaceVersion,
+ helpLinks: replaceHelpLinks,
+ man: transformMan,
+ manPath: manPath,
+ md: transformMd,
+ html: transformHTML,
+}
diff --git a/deps/npm/docs/output/commands/npm-install-test.html b/deps/npm/docs/output/commands/npm-install-test.html
index dd78090d2a5db3..b6b16aba6a5ca0 100644
--- a/deps/npm/docs/output/commands/npm-install-test.html
+++ b/deps/npm/docs/output/commands/npm-install-test.html
@@ -142,7 +142,7 @@
the results to only the paths to the packages named. Note that nested
packages will also show the paths to the specified packages. For
example, running npm ls promzard in npm's source tree will show:
-
The type of package described by the generated SBOM. For SPDX, this is the
-value for the primaryPackagePurpose fieled. For CycloneDX, this is the
+value for the primaryPackagePurpose field. For CycloneDX, this is the
value for the type field.
The npm registry will return an error if you are not logged
in.
-
If you do not specify a version or if you remove all of a package's
-versions then the registry will remove the root package entry entirely.
+
If you do not specify a package name at all, the name and version to be
+unpublished will be pulled from the project in the current directory.
+
If you specify a package name but do not specify a version or if you
+remove all of a package's versions then the registry will remove the
+root package entry entirely.
Even if you unpublish a package version, that specific name and version
combination can never be reused. In order to publish the package again,
you must use a new version number. If you unpublish the entire package,
diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html
index 7a530efa07bb17..094e2ef256b937 100644
--- a/deps/npm/docs/output/commands/npm.html
+++ b/deps/npm/docs/output/commands/npm.html
@@ -150,7 +150,7 @@
Table of contents
Note: This command is unaware of workspaces.
Version
-
10.2.4
+
10.3.0
Description
npm is the package manager for the Node JavaScript platform. It puts
modules in place so that node can find them, and manages dependency
diff --git a/deps/npm/docs/output/commands/npx.html b/deps/npm/docs/output/commands/npx.html
index 54d59ca7cdb329..d2bebc34c0c484 100644
--- a/deps/npm/docs/output/commands/npx.html
+++ b/deps/npm/docs/output/commands/npx.html
@@ -252,7 +252,8 @@
Compatibility with Older npx Vers
always present in the executed process PATH.
The --npm option is removed. npx will always use the npm it ships
with.
-
The --node-arg and -n options are removed.
+
The --node-arg and -n options have been removed. Use NODE_OPTIONS instead: e.g.,
+NODE_OPTIONS="--trace-warnings --trace-exit" npx foo --random=true
The --always-spawn option is redundant, and thus removed.
The --shell option is replaced with --script-shell, but maintained
in the npx executable for backwards compatibility.
All npm config files are an ini-formatted list of key = value parameters.
Environment variables can be replaced using ${VARIABLE_NAME}. For
diff --git a/deps/npm/docs/output/configuring-npm/package-json.html b/deps/npm/docs/output/configuring-npm/package-json.html
index 712708ef406391..dd602671238b99 100644
--- a/deps/npm/docs/output/configuring-npm/package-json.html
+++ b/deps/npm/docs/output/configuring-npm/package-json.html
@@ -364,26 +364,39 @@
files
The file(s) in the "bin" field
README & LICENSE can have any case and extension.
-
Conversely, some files are always ignored:
+
Some files are always ignored by default:
+
*.orig
+
.*.swp
+
.DS_Store
+
._*
.git
-
CVS
-
.svn
.hg
.lock-wscript
+
.npmrc
+
.svn
.wafpickle-N
-
.*.swp
-
.DS_Store
-
._*
+
CVS
+
config.gypi
+
node_modules
npm-debug.log
+
package-lock.json (use
+npm-shrinkwrap.json
+if you wish it to be published)
+
pnpm-lock.yaml
+
yarn.lock
+
+
Most of these ignored files can be included specifically if included in
+the files globs. Exceptions to this are:
+
+
.git
.npmrc
node_modules
-
config.gypi
-
*.orig
-
package-lock.json (use
-npm-shrinkwrap.json if you wish
-it to be published)
+
package-lock.json
+
pnpm-lock.yaml
+
yarn.lock
+
These can not be included.
main
The main field is a module ID that is the primary entry point to your
program. That is, if your package is named foo, and a user installs it,
diff --git a/deps/npm/docs/output/using-npm/config.html b/deps/npm/docs/output/using-npm/config.html
index c80d3342fc9049..3015e5d066b473 100644
--- a/deps/npm/docs/output/using-npm/config.html
+++ b/deps/npm/docs/output/using-npm/config.html
@@ -142,7 +142,7 @@
peerDependenciescould be unpacked in a correct place.
Use of legacy-peer-deps is not recommended, as it will not enforce the
peerDependencies contract that meta-dependencies may rely on.
+
libc
+
+
Default: null
+
Type: null or String
+
+
Override libc of native modules to install. Acceptable values are same as
+libc field of package.json
link
Default: false
@@ -1129,7 +1136,7 @@
sbom-type
Type: "library", "application", or "framework"
The type of package described by the generated SBOM. For SPDX, this is the
-value for the primaryPackagePurpose fieled. For CycloneDX, this is the
+value for the primaryPackagePurpose field. For CycloneDX, this is the
value for the type field.
scope
diff --git a/deps/npm/lib/commands/install.js b/deps/npm/lib/commands/install.js
index 6687ec4371dd82..d04a35fbec2a76 100644
--- a/deps/npm/lib/commands/install.js
+++ b/deps/npm/lib/commands/install.js
@@ -37,6 +37,7 @@ class Install extends ArboristWorkspaceCmd {
'dry-run',
'cpu',
'os',
+ 'libc',
...super.params,
]
diff --git a/deps/npm/lib/commands/unpublish.js b/deps/npm/lib/commands/unpublish.js
index 402f8f30efff85..a9c20900534c3a 100644
--- a/deps/npm/lib/commands/unpublish.js
+++ b/deps/npm/lib/commands/unpublish.js
@@ -1,7 +1,7 @@
const libaccess = require('libnpmaccess')
const libunpub = require('libnpmpublish').unpublish
const npa = require('npm-package-arg')
-const npmFetch = require('npm-registry-fetch')
+const pacote = require('pacote')
const pkgJson = require('@npmcli/package-json')
const { flatten } = require('@npmcli/config/lib/definitions')
@@ -23,12 +23,12 @@ class Unpublish extends BaseCommand {
static ignoreImplicitWorkspace = false
static async getKeysOfVersions (name, opts) {
- const pkgUri = npa(name).escapedName
- const json = await npmFetch.json(`${pkgUri}?write=true`, {
+ const packument = await pacote.packument(name, {
...opts,
spec: name,
+ query: { write: true },
})
- return Object.keys(json.versions)
+ return Object.keys(packument.versions)
}
static async completion (args, npm) {
@@ -59,7 +59,7 @@ class Unpublish extends BaseCommand {
return pkgs
}
- const versions = await this.getKeysOfVersions(pkgs[0], opts)
+ const versions = await Unpublish.getKeysOfVersions(pkgs[0], opts)
if (!versions.length) {
return pkgs
} else {
@@ -67,20 +67,35 @@ class Unpublish extends BaseCommand {
}
}
- async exec (args) {
+ async exec (args, { localPrefix } = {}) {
if (args.length > 1) {
throw this.usageError()
}
- let spec = args.length && npa(args[0])
+ // workspace mode
+ if (!localPrefix) {
+ localPrefix = this.npm.localPrefix
+ }
+
const force = this.npm.config.get('force')
const { silent } = this.npm
const dryRun = this.npm.config.get('dry-run')
+ let spec
+ if (args.length) {
+ spec = npa(args[0])
+ if (spec.type !== 'version' && spec.rawSpec !== '*') {
+ throw this.usageError(
+ 'Can only unpublish a single version, or the entire project.\n' +
+ 'Tags and ranges are not supported.'
+ )
+ }
+ }
+
log.silly('unpublish', 'args[0]', args[0])
log.silly('unpublish', 'spec', spec)
- if ((!spec || !spec.rawSpec) && !force) {
+ if (spec?.rawSpec === '*' && !force) {
throw this.usageError(
'Refusing to delete entire project.\n' +
'Run with --force to do this.'
@@ -89,69 +104,67 @@ class Unpublish extends BaseCommand {
const opts = { ...this.npm.flatOptions }
- let pkgName
- let pkgVersion
let manifest
- let manifestErr
try {
- const { content } = await pkgJson.prepare(this.npm.localPrefix)
+ const { content } = await pkgJson.prepare(localPrefix)
manifest = content
} catch (err) {
- manifestErr = err
- }
- if (spec) {
- // If cwd has a package.json with a name that matches the package being
- // unpublished, load up the publishConfig
- if (manifest && manifest.name === spec.name && manifest.publishConfig) {
- flatten(manifest.publishConfig, opts)
- }
- const versions = await Unpublish.getKeysOfVersions(spec.name, opts)
- if (versions.length === 1 && !force) {
- throw this.usageError(LAST_REMAINING_VERSION_ERROR)
- }
- pkgName = spec.name
- pkgVersion = spec.type === 'version' ? `@${spec.rawSpec}` : ''
- } else {
- if (manifestErr) {
- if (manifestErr.code === 'ENOENT' || manifestErr.code === 'ENOTDIR') {
+ if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+ if (!spec) {
+ // We needed a local package.json to figure out what package to
+ // unpublish
throw this.usageError()
- } else {
- throw manifestErr
}
+ } else {
+ // folks should know if ANY local package.json had a parsing error.
+ // They may be relying on `publishConfig` to be loading and we don't
+ // want to ignore errors in that case.
+ throw err
}
+ }
- log.verbose('unpublish', manifest)
-
+ let pkgVersion // for cli output
+ if (spec) {
+ pkgVersion = spec.type === 'version' ? `@${spec.rawSpec}` : ''
+ } else {
spec = npa.resolve(manifest.name, manifest.version)
- if (manifest.publishConfig) {
- flatten(manifest.publishConfig, opts)
+ log.verbose('unpublish', manifest)
+ pkgVersion = manifest.version ? `@${manifest.version}` : ''
+ if (!manifest.version && !force) {
+ throw this.usageError(
+ 'Refusing to delete entire project.\n' +
+ 'Run with --force to do this.'
+ )
}
+ }
- pkgName = manifest.name
- pkgVersion = manifest.version ? `@${manifest.version}` : ''
+ // If localPrefix has a package.json with a name that matches the package
+ // being unpublished, load up the publishConfig
+ if (manifest?.name === spec.name && manifest.publishConfig) {
+ flatten(manifest.publishConfig, opts)
+ }
+
+ const versions = await Unpublish.getKeysOfVersions(spec.name, opts)
+ if (versions.length === 1 && spec.rawSpec === versions[0] && !force) {
+ throw this.usageError(LAST_REMAINING_VERSION_ERROR)
+ }
+ if (versions.length === 1) {
+ pkgVersion = ''
}
if (!dryRun) {
await otplease(this.npm, opts, o => libunpub(spec, o))
}
if (!silent) {
- this.npm.output(`- ${pkgName}${pkgVersion}`)
+ this.npm.output(`- ${spec.name}${pkgVersion}`)
}
}
async execWorkspaces (args) {
await this.setWorkspaces()
- const force = this.npm.config.get('force')
- if (!force) {
- throw this.usageError(
- 'Refusing to delete entire project(s).\n' +
- 'Run with --force to do this.'
- )
- }
-
- for (const name of this.workspaceNames) {
- await this.exec([name])
+ for (const path of this.workspacePaths) {
+ await this.exec(args, { localPrefix: path })
}
}
}
diff --git a/deps/npm/lib/commands/view.js b/deps/npm/lib/commands/view.js
index f118184124db97..214a45e92611c9 100644
--- a/deps/npm/lib/commands/view.js
+++ b/deps/npm/lib/commands/view.js
@@ -392,20 +392,20 @@ class View extends BaseCommand {
if (info.keywords.length) {
this.npm.output('')
- this.npm.output('keywords:', chalk.yellow(info.keywords.join(', ')))
+ this.npm.output(`keywords: ${chalk.yellow(info.keywords.join(', '))}`)
}
if (info.bins.length) {
this.npm.output('')
- this.npm.output('bin:', chalk.yellow(info.bins.join(', ')))
+ this.npm.output(`bin: ${chalk.yellow(info.bins.join(', '))}`)
}
this.npm.output('')
this.npm.output('dist')
- this.npm.output('.tarball:', info.tarball)
- this.npm.output('.shasum:', info.shasum)
- info.integrity && this.npm.output('.integrity:', info.integrity)
- info.unpackedSize && this.npm.output('.unpackedSize:', info.unpackedSize)
+ this.npm.output(`.tarball: ${info.tarball}`)
+ this.npm.output(`.shasum: ${info.shasum}`)
+ info.integrity && this.npm.output(`.integrity: ${info.integrity}`)
+ info.unpackedSize && this.npm.output(`.unpackedSize: ${info.unpackedSize}`)
const maxDeps = 24
if (info.deps.length) {
@@ -420,7 +420,7 @@ class View extends BaseCommand {
if (info.maintainers && info.maintainers.length) {
this.npm.output('')
this.npm.output('maintainers:')
- info.maintainers.forEach((u) => this.npm.output('-', u))
+ info.maintainers.forEach((u) => this.npm.output(`- ${u}`))
}
this.npm.output('')
diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js
index 14706629e79c2e..0a023f4ac8a302 100644
--- a/deps/npm/lib/npm.js
+++ b/deps/npm/lib/npm.js
@@ -216,11 +216,13 @@ class Npm {
fs.mkdir(this.cache, { recursive: true })
.catch((e) => log.verbose('cache', `could not create cache: ${e}`)))
- // its ok if this fails. user might have specified an invalid dir
+ // it's ok if this fails. user might have specified an invalid dir
// which we will tell them about at the end
- await this.time('npm:load:mkdirplogs', () =>
- fs.mkdir(this.logsDir, { recursive: true })
- .catch((e) => log.verbose('logfile', `could not create logs-dir: ${e}`)))
+ if (this.config.get('logs-max') > 0) {
+ await this.time('npm:load:mkdirplogs', () =>
+ fs.mkdir(this.logsDir, { recursive: true })
+ .catch((e) => log.verbose('logfile', `could not create logs-dir: ${e}`)))
+ }
// note: this MUST be shorter than the actual argv length, because it
// uses the same memory, so node will truncate it if it's too long.
@@ -438,7 +440,7 @@ class Npm {
output (...msg) {
log.clearProgress()
// eslint-disable-next-line no-console
- console.log(...msg)
+ console.log(...msg.map(Display.clean))
log.showProgress()
}
@@ -476,7 +478,7 @@ class Npm {
outputError (...msg) {
log.clearProgress()
// eslint-disable-next-line no-console
- console.error(...msg)
+ console.error(...msg.map(Display.clean))
log.showProgress()
}
}
diff --git a/deps/npm/lib/utils/display.js b/deps/npm/lib/utils/display.js
index a41bf903e9a8fa..c5e5ca2b5b874a 100644
--- a/deps/npm/lib/utils/display.js
+++ b/deps/npm/lib/utils/display.js
@@ -3,6 +3,44 @@ const npmlog = require('npmlog')
const log = require('./log-shim.js')
const { explain } = require('./explain-eresolve.js')
+const originalCustomInspect = Symbol('npm.display.original.util.inspect.custom')
+
+// These are most assuredly not a mistake
+// https://eslint.org/docs/latest/rules/no-control-regex
+/* eslint-disable no-control-regex */
+// \x00 through \x1f, \x7f through \x9f, not including \x09 \x0a \x0b \x0d
+const hasC01 = /[\x00-\x08\x0c\x0e-\x1f\x7f-\x9f]/
+// Allows everything up to '[38;5;255m' in 8 bit notation
+const allowedSGR = /^\[[0-9;]{0,8}m/
+// '[38;5;255m'.length
+const sgrMaxLen = 10
+
+// Strips all ANSI C0 and C1 control characters (except for SGR up to 8 bit)
+function stripC01 (str) {
+ if (!hasC01.test(str)) {
+ return str
+ }
+ let result = ''
+ for (let i = 0; i < str.length; i++) {
+ const char = str[i]
+ const code = char.charCodeAt(0)
+ if (!hasC01.test(char)) {
+ // Most characters are in this set so continue early if we can
+ result = `${result}${char}`
+ } else if (code === 27 && allowedSGR.test(str.slice(i + 1, i + sgrMaxLen + 1))) {
+ // \x1b with allowed SGR
+ result = `${result}\x1b`
+ } else if (code <= 31) {
+ // escape all other C0 control characters besides \x7f
+ result = `${result}^${String.fromCharCode(code + 64)}`
+ } else {
+ // hasC01 ensures this is now a C1 control character or \x7f
+ result = `${result}^${String.fromCharCode(code - 64)}`
+ }
+ }
+ return result
+}
+
class Display {
#chalk = null
@@ -12,6 +50,57 @@ class Display {
log.pause()
}
+ static clean (output) {
+ if (typeof output === 'string') {
+ // Strings are cleaned inline
+ return stripC01(output)
+ }
+ if (!output || typeof output !== 'object') {
+ // Numbers, booleans, null all end up here and don't need cleaning
+ return output
+ }
+ // output && typeof output === 'object'
+ // We can't use hasOwn et al for detecting the original but we can use it
+ // for detecting the properties we set via defineProperty
+ if (
+ output[inspect.custom] &&
+ (!Object.hasOwn(output, originalCustomInspect))
+ ) {
+ // Save the old one if we didn't already do it.
+ Object.defineProperty(output, originalCustomInspect, {
+ value: output[inspect.custom],
+ writable: true,
+ })
+ }
+ if (!Object.hasOwn(output, originalCustomInspect)) {
+ // Put a dummy one in for when we run multiple times on the same object
+ Object.defineProperty(output, originalCustomInspect, {
+ value: function () {
+ return this
+ },
+ writable: true,
+ })
+ }
+ // Set the custom inspect to our own function
+ Object.defineProperty(output, inspect.custom, {
+ value: function () {
+ const toClean = this[originalCustomInspect]()
+ // Custom inspect can return things other than objects, check type again
+ if (typeof toClean === 'string') {
+ // Strings are cleaned inline
+ return stripC01(toClean)
+ }
+ if (!toClean || typeof toClean !== 'object') {
+ // Numbers, booleans, null all end up here and don't need cleaning
+ return toClean
+ }
+ return stripC01(inspect(toClean, { customInspect: false }))
+ },
+ writable: true,
+ })
+ return output
+ }
+
on () {
process.on('log', this.#logHandler)
}
@@ -103,7 +192,7 @@ class Display {
// Explicitly call these on npmlog and not log shim
// This is the final place we should call npmlog before removing it.
#npmlog (level, ...args) {
- npmlog[level](...args)
+ npmlog[level](...args.map(Display.clean))
}
// Also (and this is a really inexcusable kludge), we patch the
@@ -112,8 +201,8 @@ class Display {
// highly abbreviated explanation of what's being overridden.
#eresolveWarn (level, heading, message, expl) {
if (level === 'warn' &&
- heading === 'ERESOLVE' &&
- expl && typeof expl === 'object'
+ heading === 'ERESOLVE' &&
+ expl && typeof expl === 'object'
) {
this.#npmlog(level, heading, message)
this.#npmlog(level, '', explain(expl, this.#chalk, 2))
diff --git a/deps/npm/lib/utils/log-file.js b/deps/npm/lib/utils/log-file.js
index 84f86983639ce6..8c06f5647e761e 100644
--- a/deps/npm/lib/utils/log-file.js
+++ b/deps/npm/lib/utils/log-file.js
@@ -6,6 +6,7 @@ const { Minipass } = require('minipass')
const fsMiniPass = require('fs-minipass')
const fs = require('fs/promises')
const log = require('./log-shim')
+const Display = require('./display')
const padZero = (n, length) => n.toString().padStart(length.toString().length, '0')
const globify = pattern => pattern.split('\\').join('/')
@@ -49,6 +50,7 @@ class LogFiles {
return format(...args)
.split(/\r?\n/)
+ .map(Display.clean)
.reduce((lines, line) =>
lines += prefix + (line ? ' ' : '') + line + os.EOL,
''
diff --git a/deps/npm/lib/utils/open-url-prompt.js b/deps/npm/lib/utils/open-url-prompt.js
index df0c9709c07744..71a68c253c0505 100644
--- a/deps/npm/lib/utils/open-url-prompt.js
+++ b/deps/npm/lib/utils/open-url-prompt.js
@@ -1,5 +1,5 @@
const readline = require('readline')
-const promiseSpawn = require('@npmcli/promise-spawn')
+const open = require('./open-url.js')
function print (npm, title, url) {
const json = npm.config.get('json')
@@ -63,8 +63,7 @@ const promptOpen = async (npm, url, title, prompt, emitter) => {
return
}
- const command = browser === true ? null : browser
- await promiseSpawn.open(url, { command })
+ await open(npm, url, 'Browser unavailable. Please open the URL manually')
}
module.exports = promptOpen
diff --git a/deps/npm/lib/utils/reify-output.js b/deps/npm/lib/utils/reify-output.js
index 22036dc8110cfc..3b79fc2be1898e 100644
--- a/deps/npm/lib/utils/reify-output.js
+++ b/deps/npm/lib/utils/reify-output.js
@@ -76,7 +76,7 @@ const reifyOutput = (npm, arb) => {
summary.audit = npm.command === 'audit' ? auditReport
: auditReport.toJSON().metadata
}
- npm.output(JSON.stringify(summary, 0, 2))
+ npm.output(JSON.stringify(summary, null, 2))
} else {
packagesChangedMessage(npm, summary)
packagesFundingMessage(npm, summary)
diff --git a/deps/npm/lib/utils/sbom-spdx.js b/deps/npm/lib/utils/sbom-spdx.js
index 8c91147cb4102b..fdddd8944f32d1 100644
--- a/deps/npm/lib/utils/sbom-spdx.js
+++ b/deps/npm/lib/utils/sbom-spdx.js
@@ -11,10 +11,10 @@ const SPDX_IDENTIFER = 'SPDXRef-DOCUMENT'
const NO_ASSERTION = 'NOASSERTION'
const REL_DESCRIBES = 'DESCRIBES'
-const REL_PREREQ = 'HAS_PREREQUISITE'
+const REL_PREREQ = 'PREREQUISITE_FOR'
const REL_OPTIONAL = 'OPTIONAL_DEPENDENCY_OF'
const REL_DEV = 'DEV_DEPENDENCY_OF'
-const REL_DEP = 'DEPENDS_ON'
+const REL_DEP = 'DEPENDENCY_OF'
const REF_CAT_PACKAGE_MANAGER = 'PACKAGE-MANAGER'
const REF_TYPE_PURL = 'purl'
@@ -147,8 +147,8 @@ const toSpdxRelationship = (node, edge) => {
}
return {
- spdxElementId: toSpdxID(node),
- relatedSpdxElement: toSpdxID(edge.to),
+ spdxElementId: toSpdxID(edge.to),
+ relatedSpdxElement: toSpdxID(node),
relationshipType: type,
}
}
diff --git a/deps/npm/lib/utils/update-notifier.js b/deps/npm/lib/utils/update-notifier.js
index 2c839bfeff8436..1b3e21d878b94d 100644
--- a/deps/npm/lib/utils/update-notifier.js
+++ b/deps/npm/lib/utils/update-notifier.js
@@ -24,6 +24,7 @@ const updateCheck = async (npm, spec, version, current) => {
// always prefer latest, even if doing --tag=whatever on the cmd
defaultTag: 'latest',
...npm.flatOptions,
+ cache: false,
}).catch(() => null)
// if pacote failed, give up
diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1
index 53fd371f3a67e2..01e92a1f4ede44 100644
--- a/deps/npm/man/man1/npm-access.1
+++ b/deps/npm/man/man1/npm-access.1
@@ -1,4 +1,4 @@
-.TH "NPM-ACCESS" "1" "November 2023" "" ""
+.TH "NPM-ACCESS" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-access\fR - Set access level on published packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1
index 2c55c56074528d..48ac7056d06ad9 100644
--- a/deps/npm/man/man1/npm-adduser.1
+++ b/deps/npm/man/man1/npm-adduser.1
@@ -1,4 +1,4 @@
-.TH "NPM-ADDUSER" "1" "November 2023" "" ""
+.TH "NPM-ADDUSER" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-adduser\fR - Add a registry user account
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1
index c3b84eef16325b..10f382f9645c4f 100644
--- a/deps/npm/man/man1/npm-audit.1
+++ b/deps/npm/man/man1/npm-audit.1
@@ -1,4 +1,4 @@
-.TH "NPM-AUDIT" "1" "November 2023" "" ""
+.TH "NPM-AUDIT" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-audit\fR - Run a security audit
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1
index 16a65feaa6d038..a885517d062034 100644
--- a/deps/npm/man/man1/npm-bugs.1
+++ b/deps/npm/man/man1/npm-bugs.1
@@ -1,4 +1,4 @@
-.TH "NPM-BUGS" "1" "November 2023" "" ""
+.TH "NPM-BUGS" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-bugs\fR - Report bugs for a package in a web browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1
index 8f303d21c2c455..2de4d40db0efb3 100644
--- a/deps/npm/man/man1/npm-cache.1
+++ b/deps/npm/man/man1/npm-cache.1
@@ -1,4 +1,4 @@
-.TH "NPM-CACHE" "1" "November 2023" "" ""
+.TH "NPM-CACHE" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-cache\fR - Manipulates packages cache
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1
index 4d7a7a0c658742..4f28c62b79f9e5 100644
--- a/deps/npm/man/man1/npm-ci.1
+++ b/deps/npm/man/man1/npm-ci.1
@@ -1,4 +1,4 @@
-.TH "NPM-CI" "1" "November 2023" "" ""
+.TH "NPM-CI" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-ci\fR - Clean install a project
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1
index 2597a926a124ac..33afabe5da5380 100644
--- a/deps/npm/man/man1/npm-completion.1
+++ b/deps/npm/man/man1/npm-completion.1
@@ -1,4 +1,4 @@
-.TH "NPM-COMPLETION" "1" "November 2023" "" ""
+.TH "NPM-COMPLETION" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-completion\fR - Tab Completion for npm
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1
index 02abc5c9d8f1d7..469bb97e36f668 100644
--- a/deps/npm/man/man1/npm-config.1
+++ b/deps/npm/man/man1/npm-config.1
@@ -1,4 +1,4 @@
-.TH "NPM-CONFIG" "1" "November 2023" "" ""
+.TH "NPM-CONFIG" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-config\fR - Manage the npm configuration files
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1
index 6ed4db0a520e0d..0ffc5c2fe43c36 100644
--- a/deps/npm/man/man1/npm-dedupe.1
+++ b/deps/npm/man/man1/npm-dedupe.1
@@ -1,4 +1,4 @@
-.TH "NPM-DEDUPE" "1" "November 2023" "" ""
+.TH "NPM-DEDUPE" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-dedupe\fR - Reduce duplication in the package tree
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1
index faea73b948544f..fe2e7d0958b8dd 100644
--- a/deps/npm/man/man1/npm-deprecate.1
+++ b/deps/npm/man/man1/npm-deprecate.1
@@ -1,4 +1,4 @@
-.TH "NPM-DEPRECATE" "1" "November 2023" "" ""
+.TH "NPM-DEPRECATE" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-deprecate\fR - Deprecate a version of a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-diff.1 b/deps/npm/man/man1/npm-diff.1
index 217e2ebf607019..7e07d60da652d4 100644
--- a/deps/npm/man/man1/npm-diff.1
+++ b/deps/npm/man/man1/npm-diff.1
@@ -1,4 +1,4 @@
-.TH "NPM-DIFF" "1" "November 2023" "" ""
+.TH "NPM-DIFF" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-diff\fR - The registry diff command
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1
index 668b8963a57384..c89c03b4e0307b 100644
--- a/deps/npm/man/man1/npm-dist-tag.1
+++ b/deps/npm/man/man1/npm-dist-tag.1
@@ -1,4 +1,4 @@
-.TH "NPM-DIST-TAG" "1" "November 2023" "" ""
+.TH "NPM-DIST-TAG" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-dist-tag\fR - Modify package distribution tags
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1
index 0f0d0f731b9bda..2c15c6f78838e2 100644
--- a/deps/npm/man/man1/npm-docs.1
+++ b/deps/npm/man/man1/npm-docs.1
@@ -1,4 +1,4 @@
-.TH "NPM-DOCS" "1" "November 2023" "" ""
+.TH "NPM-DOCS" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-docs\fR - Open documentation for a package in a web browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1
index 4d2c2aa00f4e0f..16c1d1d841ad7d 100644
--- a/deps/npm/man/man1/npm-doctor.1
+++ b/deps/npm/man/man1/npm-doctor.1
@@ -1,4 +1,4 @@
-.TH "NPM-DOCTOR" "1" "November 2023" "" ""
+.TH "NPM-DOCTOR" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-doctor\fR - Check the health of your npm environment
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1
index cc1e9b5c8ede7f..23a92b8a4d3ae0 100644
--- a/deps/npm/man/man1/npm-edit.1
+++ b/deps/npm/man/man1/npm-edit.1
@@ -1,4 +1,4 @@
-.TH "NPM-EDIT" "1" "November 2023" "" ""
+.TH "NPM-EDIT" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-edit\fR - Edit an installed package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-exec.1 b/deps/npm/man/man1/npm-exec.1
index 8cb1010ffb552a..53512a4daecc74 100644
--- a/deps/npm/man/man1/npm-exec.1
+++ b/deps/npm/man/man1/npm-exec.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXEC" "1" "November 2023" "" ""
+.TH "NPM-EXEC" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-exec\fR - Run a command from a local or remote npm package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-explain.1 b/deps/npm/man/man1/npm-explain.1
index 6070bb24a24386..ec20107b04e06c 100644
--- a/deps/npm/man/man1/npm-explain.1
+++ b/deps/npm/man/man1/npm-explain.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXPLAIN" "1" "November 2023" "" ""
+.TH "NPM-EXPLAIN" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-explain\fR - Explain installed packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1
index 27371aa6c9e9e7..f6bb2c51f747d6 100644
--- a/deps/npm/man/man1/npm-explore.1
+++ b/deps/npm/man/man1/npm-explore.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXPLORE" "1" "November 2023" "" ""
+.TH "NPM-EXPLORE" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-explore\fR - Browse an installed package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1
index d27a252baaca8d..b56814b8d2ac68 100644
--- a/deps/npm/man/man1/npm-find-dupes.1
+++ b/deps/npm/man/man1/npm-find-dupes.1
@@ -1,4 +1,4 @@
-.TH "NPM-FIND-DUPES" "1" "November 2023" "" ""
+.TH "NPM-FIND-DUPES" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-find-dupes\fR - Find duplication in the package tree
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-fund.1 b/deps/npm/man/man1/npm-fund.1
index 88bf7caa0d5d83..a9c1ae61f5ef4c 100644
--- a/deps/npm/man/man1/npm-fund.1
+++ b/deps/npm/man/man1/npm-fund.1
@@ -1,4 +1,4 @@
-.TH "NPM-FUND" "1" "November 2023" "" ""
+.TH "NPM-FUND" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-fund\fR - Retrieve funding information
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1
index b98088152314b8..9396188aa721d3 100644
--- a/deps/npm/man/man1/npm-help-search.1
+++ b/deps/npm/man/man1/npm-help-search.1
@@ -1,4 +1,4 @@
-.TH "NPM-HELP-SEARCH" "1" "November 2023" "" ""
+.TH "NPM-HELP-SEARCH" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-help-search\fR - Search npm help documentation
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1
index 949e54c44eab08..e3b322850a728a 100644
--- a/deps/npm/man/man1/npm-help.1
+++ b/deps/npm/man/man1/npm-help.1
@@ -1,4 +1,4 @@
-.TH "NPM-HELP" "1" "November 2023" "" ""
+.TH "NPM-HELP" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-help\fR - Get help on npm
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-hook.1 b/deps/npm/man/man1/npm-hook.1
index 145406b9d09877..c544b23a94a89f 100644
--- a/deps/npm/man/man1/npm-hook.1
+++ b/deps/npm/man/man1/npm-hook.1
@@ -1,4 +1,4 @@
-.TH "NPM-HOOK" "1" "November 2023" "" ""
+.TH "NPM-HOOK" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-hook\fR - Manage registry hooks
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1
index a86a09b910a810..ce674e7d610308 100644
--- a/deps/npm/man/man1/npm-init.1
+++ b/deps/npm/man/man1/npm-init.1
@@ -1,4 +1,4 @@
-.TH "NPM-INIT" "1" "November 2023" "" ""
+.TH "NPM-INIT" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-init\fR - Create a package.json file
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-install-ci-test.1 b/deps/npm/man/man1/npm-install-ci-test.1
index 21320f9378b7d8..cfb93cc754a5a1 100644
--- a/deps/npm/man/man1/npm-install-ci-test.1
+++ b/deps/npm/man/man1/npm-install-ci-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL-CI-TEST" "1" "November 2023" "" ""
+.TH "NPM-INSTALL-CI-TEST" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-install-ci-test\fR - Install a project with a clean slate and run tests
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1
index 3e6efb819bc6e2..9a7b69ff8a4571 100644
--- a/deps/npm/man/man1/npm-install-test.1
+++ b/deps/npm/man/man1/npm-install-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL-TEST" "1" "November 2023" "" ""
+.TH "NPM-INSTALL-TEST" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-install-test\fR - Install package(s) and run tests
.SS "Synopsis"
@@ -257,6 +257,16 @@ Type: null or String
.P
Override OS of native modules to install. Acceptable values are same as \fBos\fR field of package.json, which comes from \fBprocess.platform\fR.
+.SS "\fBlibc\fR"
+.RS 0
+.IP \(bu 4
+Default: null
+.IP \(bu 4
+Type: null or String
+.RE 0
+
+.P
+Override libc of native modules to install. Acceptable values are same as \fBlibc\fR field of package.json
.SS "\fBworkspace\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1
index 0493f5dc9577f0..bd1c0ded701450 100644
--- a/deps/npm/man/man1/npm-install.1
+++ b/deps/npm/man/man1/npm-install.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL" "1" "November 2023" "" ""
+.TH "NPM-INSTALL" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-install\fR - Install a package
.SS "Synopsis"
@@ -619,6 +619,16 @@ Type: null or String
.P
Override OS of native modules to install. Acceptable values are same as \fBos\fR field of package.json, which comes from \fBprocess.platform\fR.
+.SS "\fBlibc\fR"
+.RS 0
+.IP \(bu 4
+Default: null
+.IP \(bu 4
+Type: null or String
+.RE 0
+
+.P
+Override libc of native modules to install. Acceptable values are same as \fBlibc\fR field of package.json
.SS "\fBworkspace\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1
index 491cc6e45f066d..3403d178525184 100644
--- a/deps/npm/man/man1/npm-link.1
+++ b/deps/npm/man/man1/npm-link.1
@@ -1,4 +1,4 @@
-.TH "NPM-LINK" "1" "November 2023" "" ""
+.TH "NPM-LINK" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-link\fR - Symlink a package folder
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-login.1 b/deps/npm/man/man1/npm-login.1
index dff4c54b206299..f3d4934c85b20e 100644
--- a/deps/npm/man/man1/npm-login.1
+++ b/deps/npm/man/man1/npm-login.1
@@ -1,4 +1,4 @@
-.TH "NPM-LOGIN" "1" "November 2023" "" ""
+.TH "NPM-LOGIN" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-login\fR - Login to a registry user account
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1
index f4e3525a9a43ad..b8f521280db4cb 100644
--- a/deps/npm/man/man1/npm-logout.1
+++ b/deps/npm/man/man1/npm-logout.1
@@ -1,4 +1,4 @@
-.TH "NPM-LOGOUT" "1" "November 2023" "" ""
+.TH "NPM-LOGOUT" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-logout\fR - Log out of the registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1
index 7b8ea0771e1a43..5e314fe40308b0 100644
--- a/deps/npm/man/man1/npm-ls.1
+++ b/deps/npm/man/man1/npm-ls.1
@@ -1,4 +1,4 @@
-.TH "NPM-LS" "1" "November 2023" "" ""
+.TH "NPM-LS" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-ls\fR - List installed packages
.SS "Synopsis"
@@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit
.P
.RS 2
.nf
-npm@10.2.4 /path/to/npm
+npm@10.3.0 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
.fi
diff --git a/deps/npm/man/man1/npm-org.1 b/deps/npm/man/man1/npm-org.1
index 182f84af28895f..c4666d623d5f69 100644
--- a/deps/npm/man/man1/npm-org.1
+++ b/deps/npm/man/man1/npm-org.1
@@ -1,4 +1,4 @@
-.TH "NPM-ORG" "1" "November 2023" "" ""
+.TH "NPM-ORG" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-org\fR - Manage orgs
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1
index 84351aeb35d4b9..186dfaeade934f 100644
--- a/deps/npm/man/man1/npm-outdated.1
+++ b/deps/npm/man/man1/npm-outdated.1
@@ -1,4 +1,4 @@
-.TH "NPM-OUTDATED" "1" "November 2023" "" ""
+.TH "NPM-OUTDATED" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-outdated\fR - Check for outdated packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1
index ceda18a6c31758..02afd6627f2c04 100644
--- a/deps/npm/man/man1/npm-owner.1
+++ b/deps/npm/man/man1/npm-owner.1
@@ -1,4 +1,4 @@
-.TH "NPM-OWNER" "1" "November 2023" "" ""
+.TH "NPM-OWNER" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-owner\fR - Manage package owners
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1
index 050c208ffd64e8..075f61681ec84d 100644
--- a/deps/npm/man/man1/npm-pack.1
+++ b/deps/npm/man/man1/npm-pack.1
@@ -1,4 +1,4 @@
-.TH "NPM-PACK" "1" "November 2023" "" ""
+.TH "NPM-PACK" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-pack\fR - Create a tarball from a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1
index 6c6e1e28281664..4abefd19837e32 100644
--- a/deps/npm/man/man1/npm-ping.1
+++ b/deps/npm/man/man1/npm-ping.1
@@ -1,4 +1,4 @@
-.TH "NPM-PING" "1" "November 2023" "" ""
+.TH "NPM-PING" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-ping\fR - Ping npm registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-pkg.1 b/deps/npm/man/man1/npm-pkg.1
index 40b0329fef13f1..c70c95fc27d1ab 100644
--- a/deps/npm/man/man1/npm-pkg.1
+++ b/deps/npm/man/man1/npm-pkg.1
@@ -1,4 +1,4 @@
-.TH "NPM-PKG" "1" "November 2023" "" ""
+.TH "NPM-PKG" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-pkg\fR - Manages your package.json
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1
index a0ab1560881243..dabc60e7e6ef25 100644
--- a/deps/npm/man/man1/npm-prefix.1
+++ b/deps/npm/man/man1/npm-prefix.1
@@ -1,4 +1,4 @@
-.TH "NPM-PREFIX" "1" "November 2023" "" ""
+.TH "NPM-PREFIX" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-prefix\fR - Display prefix
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-profile.1 b/deps/npm/man/man1/npm-profile.1
index 834c26428d94b8..2966cf66d1c3c0 100644
--- a/deps/npm/man/man1/npm-profile.1
+++ b/deps/npm/man/man1/npm-profile.1
@@ -1,4 +1,4 @@
-.TH "NPM-PROFILE" "1" "November 2023" "" ""
+.TH "NPM-PROFILE" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-profile\fR - Change settings on your registry profile
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1
index 32f57580e3fb49..af09d9962f7c93 100644
--- a/deps/npm/man/man1/npm-prune.1
+++ b/deps/npm/man/man1/npm-prune.1
@@ -1,4 +1,4 @@
-.TH "NPM-PRUNE" "1" "November 2023" "" ""
+.TH "NPM-PRUNE" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-prune\fR - Remove extraneous packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1
index 5d8444e2fa2d04..deaead3259a2e1 100644
--- a/deps/npm/man/man1/npm-publish.1
+++ b/deps/npm/man/man1/npm-publish.1
@@ -1,4 +1,4 @@
-.TH "NPM-PUBLISH" "1" "November 2023" "" ""
+.TH "NPM-PUBLISH" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-publish\fR - Publish a package
.SS "Synopsis"
@@ -14,7 +14,7 @@ Publishes a package to the registry so that it can be installed by name.
.P
By default npm will publish to the public registry. This can be overridden by specifying a different default registry or using a npm help scope in the name, combined with a scope-configured registry (see \fB\fBpackage.json\fR\fR \fI\(la/configuring-npm/package-json\(ra\fR).
.P
-A \fBpackage\fR is interpreted the same way as other commands (like \fBnpm install\fR and can be:
+A \fBpackage\fR is interpreted the same way as other commands (like \fBnpm install\fR) and can be:
.RS 0
.IP \(bu 4
a) a folder containing a program described by a \fB\fBpackage.json\fR\fR \fI\(la/configuring-npm/package-json\(ra\fR file
diff --git a/deps/npm/man/man1/npm-query.1 b/deps/npm/man/man1/npm-query.1
index 947a7b78397571..7730ecea472b6c 100644
--- a/deps/npm/man/man1/npm-query.1
+++ b/deps/npm/man/man1/npm-query.1
@@ -1,4 +1,4 @@
-.TH "NPM-QUERY" "1" "November 2023" "" ""
+.TH "NPM-QUERY" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-query\fR - Dependency selector query
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1
index b00eb2816eb116..f3e0c3ff4c6f0e 100644
--- a/deps/npm/man/man1/npm-rebuild.1
+++ b/deps/npm/man/man1/npm-rebuild.1
@@ -1,4 +1,4 @@
-.TH "NPM-REBUILD" "1" "November 2023" "" ""
+.TH "NPM-REBUILD" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-rebuild\fR - Rebuild a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1
index 1d89a9e85c7105..1da82d9b3b52f1 100644
--- a/deps/npm/man/man1/npm-repo.1
+++ b/deps/npm/man/man1/npm-repo.1
@@ -1,4 +1,4 @@
-.TH "NPM-REPO" "1" "November 2023" "" ""
+.TH "NPM-REPO" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-repo\fR - Open package repository page in the browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1
index 5f85577a45ac12..459d99a14026c4 100644
--- a/deps/npm/man/man1/npm-restart.1
+++ b/deps/npm/man/man1/npm-restart.1
@@ -1,4 +1,4 @@
-.TH "NPM-RESTART" "1" "November 2023" "" ""
+.TH "NPM-RESTART" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-restart\fR - Restart a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1
index 1dce718fc0f961..6761a6608a4f5b 100644
--- a/deps/npm/man/man1/npm-root.1
+++ b/deps/npm/man/man1/npm-root.1
@@ -1,4 +1,4 @@
-.TH "NPM-ROOT" "1" "November 2023" "" ""
+.TH "NPM-ROOT" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-root\fR - Display npm root
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1
index 3718c9293d45a1..66fcf9fab78ba2 100644
--- a/deps/npm/man/man1/npm-run-script.1
+++ b/deps/npm/man/man1/npm-run-script.1
@@ -1,4 +1,4 @@
-.TH "NPM-RUN-SCRIPT" "1" "November 2023" "" ""
+.TH "NPM-RUN-SCRIPT" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-run-script\fR - Run arbitrary package scripts
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-sbom.1 b/deps/npm/man/man1/npm-sbom.1
index 21a18ac91dc953..b89c3ff35dc049 100644
--- a/deps/npm/man/man1/npm-sbom.1
+++ b/deps/npm/man/man1/npm-sbom.1
@@ -1,4 +1,4 @@
-.TH "NPM-SBOM" "1" "November 2023" "" ""
+.TH "NPM-SBOM" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-sbom\fR - Generate a Software Bill of Materials (SBOM)
.SS "Synopsis"
@@ -256,7 +256,7 @@ Type: "library", "application", or "framework"
.RE 0
.P
-The type of package described by the generated SBOM. For SPDX, this is the value for the \fBprimaryPackagePurpose\fR fieled. For CycloneDX, this is the value for the \fBtype\fR field.
+The type of package described by the generated SBOM. For SPDX, this is the value for the \fBprimaryPackagePurpose\fR field. For CycloneDX, this is the value for the \fBtype\fR field.
.SS "\fBworkspace\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1
index 5aaeabd24af938..d8ffc4bbe4e8e2 100644
--- a/deps/npm/man/man1/npm-search.1
+++ b/deps/npm/man/man1/npm-search.1
@@ -1,4 +1,4 @@
-.TH "NPM-SEARCH" "1" "November 2023" "" ""
+.TH "NPM-SEARCH" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-search\fR - Search for packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1
index 7e29d4151728c3..d7df87411550f5 100644
--- a/deps/npm/man/man1/npm-shrinkwrap.1
+++ b/deps/npm/man/man1/npm-shrinkwrap.1
@@ -1,4 +1,4 @@
-.TH "NPM-SHRINKWRAP" "1" "November 2023" "" ""
+.TH "NPM-SHRINKWRAP" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-shrinkwrap\fR - Lock down dependency versions for publication
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1
index b6a3f7e8f174a9..610ec54213fe62 100644
--- a/deps/npm/man/man1/npm-star.1
+++ b/deps/npm/man/man1/npm-star.1
@@ -1,4 +1,4 @@
-.TH "NPM-STAR" "1" "November 2023" "" ""
+.TH "NPM-STAR" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-star\fR - Mark your favorite packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1
index ad66d694917f68..eb2391b790d625 100644
--- a/deps/npm/man/man1/npm-stars.1
+++ b/deps/npm/man/man1/npm-stars.1
@@ -1,4 +1,4 @@
-.TH "NPM-STARS" "1" "November 2023" "" ""
+.TH "NPM-STARS" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-stars\fR - View packages marked as favorites
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1
index f35690a145e58c..a4cf36fdcacb75 100644
--- a/deps/npm/man/man1/npm-start.1
+++ b/deps/npm/man/man1/npm-start.1
@@ -1,4 +1,4 @@
-.TH "NPM-START" "1" "November 2023" "" ""
+.TH "NPM-START" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-start\fR - Start a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1
index 2913641d2845d3..8fe1df7fe1f190 100644
--- a/deps/npm/man/man1/npm-stop.1
+++ b/deps/npm/man/man1/npm-stop.1
@@ -1,4 +1,4 @@
-.TH "NPM-STOP" "1" "November 2023" "" ""
+.TH "NPM-STOP" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-stop\fR - Stop a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1
index 729e9d3194dd49..c61904afc1cc82 100644
--- a/deps/npm/man/man1/npm-team.1
+++ b/deps/npm/man/man1/npm-team.1
@@ -1,4 +1,4 @@
-.TH "NPM-TEAM" "1" "November 2023" "" ""
+.TH "NPM-TEAM" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-team\fR - Manage organization teams and team memberships
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1
index bfecac60a9a113..865dfc5ea794ef 100644
--- a/deps/npm/man/man1/npm-test.1
+++ b/deps/npm/man/man1/npm-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-TEST" "1" "November 2023" "" ""
+.TH "NPM-TEST" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-test\fR - Test a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1
index 84e0e8ea0220a3..0a42bd48c5fcbb 100644
--- a/deps/npm/man/man1/npm-token.1
+++ b/deps/npm/man/man1/npm-token.1
@@ -1,4 +1,4 @@
-.TH "NPM-TOKEN" "1" "November 2023" "" ""
+.TH "NPM-TOKEN" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-token\fR - Manage your authentication tokens
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1
index 749ae7533a4f5f..13be7457cf72f5 100644
--- a/deps/npm/man/man1/npm-uninstall.1
+++ b/deps/npm/man/man1/npm-uninstall.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNINSTALL" "1" "November 2023" "" ""
+.TH "NPM-UNINSTALL" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-uninstall\fR - Remove a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1
index 0b059350966a00..12a2fd78133444 100644
--- a/deps/npm/man/man1/npm-unpublish.1
+++ b/deps/npm/man/man1/npm-unpublish.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNPUBLISH" "1" "November 2023" "" ""
+.TH "NPM-UNPUBLISH" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-unpublish\fR - Remove a package from the registry
.SS "Synopsis"
@@ -19,7 +19,9 @@ This removes a package version from the registry, deleting its entry and removin
.P
The npm registry will return an error if you are not npm help "logged in".
.P
-If you do not specify a version or if you remove all of a package's versions then the registry will remove the root package entry entirely.
+If you do not specify a package name at all, the name and version to be unpublished will be pulled from the project in the current directory.
+.P
+If you specify a package name but do not specify a version or if you remove all of a package's versions then the registry will remove the root package entry entirely.
.P
Even if you unpublish a package version, that specific name and version combination can never be reused. In order to publish the package again, you must use a new version number. If you unpublish the entire package, you may not publish any new versions of that package until 24 hours have passed.
.SS "Configuration"
diff --git a/deps/npm/man/man1/npm-unstar.1 b/deps/npm/man/man1/npm-unstar.1
index 0e44709c83da65..8fdc2941fb60f1 100644
--- a/deps/npm/man/man1/npm-unstar.1
+++ b/deps/npm/man/man1/npm-unstar.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNSTAR" "1" "November 2023" "" ""
+.TH "NPM-UNSTAR" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-unstar\fR - Remove an item from your favorite packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1
index 0b247f80d8d9f8..742fbe7f7547af 100644
--- a/deps/npm/man/man1/npm-update.1
+++ b/deps/npm/man/man1/npm-update.1
@@ -1,4 +1,4 @@
-.TH "NPM-UPDATE" "1" "November 2023" "" ""
+.TH "NPM-UPDATE" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-update\fR - Update packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1
index 07f8352de3c7ce..f4a976fafa39d9 100644
--- a/deps/npm/man/man1/npm-version.1
+++ b/deps/npm/man/man1/npm-version.1
@@ -1,4 +1,4 @@
-.TH "NPM-VERSION" "1" "November 2023" "" ""
+.TH "NPM-VERSION" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-version\fR - Bump a package version
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1
index 70c435cdc929c6..7ce90e336e3154 100644
--- a/deps/npm/man/man1/npm-view.1
+++ b/deps/npm/man/man1/npm-view.1
@@ -1,4 +1,4 @@
-.TH "NPM-VIEW" "1" "November 2023" "" ""
+.TH "NPM-VIEW" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-view\fR - View registry info
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1
index 6e182bbff873c2..bead7976024f53 100644
--- a/deps/npm/man/man1/npm-whoami.1
+++ b/deps/npm/man/man1/npm-whoami.1
@@ -1,4 +1,4 @@
-.TH "NPM-WHOAMI" "1" "November 2023" "" ""
+.TH "NPM-WHOAMI" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm-whoami\fR - Display npm username
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1
index 05d06e9f7758a4..a1844237f29f23 100644
--- a/deps/npm/man/man1/npm.1
+++ b/deps/npm/man/man1/npm.1
@@ -1,4 +1,4 @@
-.TH "NPM" "1" "November 2023" "" ""
+.TH "NPM" "1" "January 2024" "" ""
.SH "NAME"
\fBnpm\fR - javascript package manager
.SS "Synopsis"
@@ -12,7 +12,7 @@ npm
Note: This command is unaware of workspaces.
.SS "Version"
.P
-10.2.4
+10.3.0
.SS "Description"
.P
npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently.
diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1
index d318fa2a6ea6c6..44833d0c7a575c 100644
--- a/deps/npm/man/man1/npx.1
+++ b/deps/npm/man/man1/npx.1
@@ -1,4 +1,4 @@
-.TH "NPX" "1" "November 2023" "" ""
+.TH "NPX" "1" "January 2024" "" ""
.SH "NAME"
\fBnpx\fR - Run a command from a local or remote npm package
.SS "Synopsis"
@@ -128,7 +128,7 @@ The \fB--ignore-existing\fR option is removed. Locally installed bins are always
.IP \(bu 4
The \fB--npm\fR option is removed. \fBnpx\fR will always use the \fBnpm\fR it ships with.
.IP \(bu 4
-The \fB--node-arg\fR and \fB-n\fR options are removed.
+The \fB--node-arg\fR and \fB-n\fR options have been removed. Use \fB\fBNODE_OPTIONS\fR\fR \fI\(lahttps://nodejs.org/api/cli.html#node_optionsoptions\(ra\fR instead: e.g., \fBNODE_OPTIONS="--trace-warnings --trace-exit" npx foo --random=true\fR
.IP \(bu 4
The \fB--always-spawn\fR option is redundant, and thus removed.
.IP \(bu 4
diff --git a/deps/npm/man/man5/folders.5 b/deps/npm/man/man5/folders.5
index f6248235a2e9bd..73d1862bab4fb1 100644
--- a/deps/npm/man/man5/folders.5
+++ b/deps/npm/man/man5/folders.5
@@ -1,4 +1,4 @@
-.TH "FOLDERS" "5" "November 2023" "" ""
+.TH "FOLDERS" "5" "January 2024" "" ""
.SH "NAME"
\fBfolders\fR - Folder Structures Used by npm
.SS "Description"
diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5
index 5b32048710b9e0..0e3d7170254218 100644
--- a/deps/npm/man/man5/install.5
+++ b/deps/npm/man/man5/install.5
@@ -1,4 +1,4 @@
-.TH "INSTALL" "5" "November 2023" "" ""
+.TH "INSTALL" "5" "January 2024" "" ""
.SH "NAME"
\fBinstall\fR - Download and install node and npm
.SS "Description"
diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5
index f6248235a2e9bd..73d1862bab4fb1 100644
--- a/deps/npm/man/man5/npm-global.5
+++ b/deps/npm/man/man5/npm-global.5
@@ -1,4 +1,4 @@
-.TH "FOLDERS" "5" "November 2023" "" ""
+.TH "FOLDERS" "5" "January 2024" "" ""
.SH "NAME"
\fBfolders\fR - Folder Structures Used by npm
.SS "Description"
diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5
index 8054048290285e..eb82f2539897a8 100644
--- a/deps/npm/man/man5/npm-json.5
+++ b/deps/npm/man/man5/npm-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE.JSON" "5" "November 2023" "" ""
+.TH "PACKAGE.JSON" "5" "January 2024" "" ""
.SH "NAME"
\fBpackage.json\fR - Specifics of npm's package.json handling
.SS "Description"
@@ -263,40 +263,63 @@ The file(s) in the "bin" field
.P
\fBREADME\fR & \fBLICENSE\fR can have any case and extension.
.P
-Conversely, some files are always ignored:
+Some files are always ignored by default:
.RS 0
.IP \(bu 4
-\fB.git\fR
+\fB*.orig\fR
.IP \(bu 4
-\fBCVS\fR
+\fB.*.swp\fR
.IP \(bu 4
-\fB.svn\fR
+\fB.DS_Store\fR
+.IP \(bu 4
+\fB._*\fR
+.IP \(bu 4
+\fB.git\fR
.IP \(bu 4
\fB.hg\fR
.IP \(bu 4
\fB.lock-wscript\fR
.IP \(bu 4
+\fB.npmrc\fR
+.IP \(bu 4
+\fB.svn\fR
+.IP \(bu 4
\fB.wafpickle-N\fR
.IP \(bu 4
-\fB.*.swp\fR
+\fBCVS\fR
.IP \(bu 4
-\fB.DS_Store\fR
+\fBconfig.gypi\fR
.IP \(bu 4
-\fB._*\fR
+\fBnode_modules\fR
.IP \(bu 4
\fBnpm-debug.log\fR
.IP \(bu 4
+\fBpackage-lock.json\fR (use \fB\fBnpm-shrinkwrap.json\fR\fR \fI\(la/configuring-npm/npm-shrinkwrap-json\(ra\fR if you wish it to be published)
+.IP \(bu 4
+\fBpnpm-lock.yaml\fR
+.IP \(bu 4
+\fByarn.lock\fR
+.RE 0
+
+.P
+Most of these ignored files can be included specifically if included in the \fBfiles\fR globs. Exceptions to this are:
+.RS 0
+.IP \(bu 4
+\fB.git\fR
+.IP \(bu 4
\fB.npmrc\fR
.IP \(bu 4
\fBnode_modules\fR
.IP \(bu 4
-\fBconfig.gypi\fR
+\fBpackage-lock.json\fR
.IP \(bu 4
-\fB*.orig\fR
+\fBpnpm-lock.yaml\fR
.IP \(bu 4
-\fBpackage-lock.json\fR (use \fB\fBnpm-shrinkwrap.json\fR\fR \fI\(la/configuring-npm/npm-shrinkwrap-json\(ra\fR if you wish it to be published)
+\fByarn.lock\fR
.RE 0
+.P
+These cannot be included.
.SS "main"
.P
The main field is a module ID that is the primary entry point to your program. That is, if your package is named \fBfoo\fR, and a user installs it, and then does \fBrequire("foo")\fR, then your main module's exports object will be returned.
diff --git a/deps/npm/man/man5/npm-shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5
index d1f8b923c2c0de..5a18abb999e0ff 100644
--- a/deps/npm/man/man5/npm-shrinkwrap-json.5
+++ b/deps/npm/man/man5/npm-shrinkwrap-json.5
@@ -1,4 +1,4 @@
-.TH "NPM-SHRINKWRAP.JSON" "5" "November 2023" "" ""
+.TH "NPM-SHRINKWRAP.JSON" "5" "January 2024" "" ""
.SH "NAME"
\fBnpm-shrinkwrap.json\fR - A publishable lockfile
.SS "Description"
diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5
index 2fe1d3eeba7cc7..4af73a99fea2cd 100644
--- a/deps/npm/man/man5/npmrc.5
+++ b/deps/npm/man/man5/npmrc.5
@@ -1,4 +1,4 @@
-.TH "NPMRC" "5" "November 2023" "" ""
+.TH "NPMRC" "5" "January 2024" "" ""
.SH "NAME"
\fBnpmrc\fR - The npm config files
.SS "Description"
@@ -13,13 +13,13 @@ For a list of available configuration options, see npm help config.
The four relevant files are:
.RS 0
.IP \(bu 4
-per-project config file (/path/to/my/project/.npmrc)
+per-project config file (\fB/path/to/my/project/.npmrc\fR)
.IP \(bu 4
-per-user config file (~/.npmrc)
+per-user config file (\fB~/.npmrc\fR)
.IP \(bu 4
-global config file ($PREFIX/etc/npmrc)
+global config file (\fB$PREFIX/etc/npmrc\fR)
.IP \(bu 4
-npm builtin config file (/path/to/npm/npmrc)
+npm builtin config file (\fB/path/to/npm/npmrc\fR)
.RE 0
.P
diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5
index 8054048290285e..eb82f2539897a8 100644
--- a/deps/npm/man/man5/package-json.5
+++ b/deps/npm/man/man5/package-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE.JSON" "5" "November 2023" "" ""
+.TH "PACKAGE.JSON" "5" "January 2024" "" ""
.SH "NAME"
\fBpackage.json\fR - Specifics of npm's package.json handling
.SS "Description"
@@ -263,40 +263,63 @@ The file(s) in the "bin" field
.P
\fBREADME\fR & \fBLICENSE\fR can have any case and extension.
.P
-Conversely, some files are always ignored:
+Some files are always ignored by default:
.RS 0
.IP \(bu 4
-\fB.git\fR
+\fB*.orig\fR
.IP \(bu 4
-\fBCVS\fR
+\fB.*.swp\fR
.IP \(bu 4
-\fB.svn\fR
+\fB.DS_Store\fR
+.IP \(bu 4
+\fB._*\fR
+.IP \(bu 4
+\fB.git\fR
.IP \(bu 4
\fB.hg\fR
.IP \(bu 4
\fB.lock-wscript\fR
.IP \(bu 4
+\fB.npmrc\fR
+.IP \(bu 4
+\fB.svn\fR
+.IP \(bu 4
\fB.wafpickle-N\fR
.IP \(bu 4
-\fB.*.swp\fR
+\fBCVS\fR
.IP \(bu 4
-\fB.DS_Store\fR
+\fBconfig.gypi\fR
.IP \(bu 4
-\fB._*\fR
+\fBnode_modules\fR
.IP \(bu 4
\fBnpm-debug.log\fR
.IP \(bu 4
+\fBpackage-lock.json\fR (use \fB\fBnpm-shrinkwrap.json\fR\fR \fI\(la/configuring-npm/npm-shrinkwrap-json\(ra\fR if you wish it to be published)
+.IP \(bu 4
+\fBpnpm-lock.yaml\fR
+.IP \(bu 4
+\fByarn.lock\fR
+.RE 0
+
+.P
+Most of these ignored files can be included specifically if included in the \fBfiles\fR globs. Exceptions to this are:
+.RS 0
+.IP \(bu 4
+\fB.git\fR
+.IP \(bu 4
\fB.npmrc\fR
.IP \(bu 4
\fBnode_modules\fR
.IP \(bu 4
-\fBconfig.gypi\fR
+\fBpackage-lock.json\fR
.IP \(bu 4
-\fB*.orig\fR
+\fBpnpm-lock.yaml\fR
.IP \(bu 4
-\fBpackage-lock.json\fR (use \fB\fBnpm-shrinkwrap.json\fR\fR \fI\(la/configuring-npm/npm-shrinkwrap-json\(ra\fR if you wish it to be published)
+\fByarn.lock\fR
.RE 0
+.P
+These cannot be included.
.SS "main"
.P
The main field is a module ID that is the primary entry point to your program. That is, if your package is named \fBfoo\fR, and a user installs it, and then does \fBrequire("foo")\fR, then your main module's exports object will be returned.
diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5
index 2f303d610ff127..6172a8ba0e8340 100644
--- a/deps/npm/man/man5/package-lock-json.5
+++ b/deps/npm/man/man5/package-lock-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE-LOCK.JSON" "5" "November 2023" "" ""
+.TH "PACKAGE-LOCK.JSON" "5" "January 2024" "" ""
.SH "NAME"
\fBpackage-lock.json\fR - A manifestation of the manifest
.SS "Description"
diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7
index 73552f74e29746..308e1632b43215 100644
--- a/deps/npm/man/man7/config.7
+++ b/deps/npm/man/man7/config.7
@@ -1,4 +1,4 @@
-.TH "CONFIG" "7" "November 2023" "" ""
+.TH "CONFIG" "7" "January 2024" "" ""
.SH "NAME"
\fBconfig\fR - More than you probably want to know about npm configuration
.SS "Description"
@@ -865,6 +865,16 @@ If a package cannot be installed because of overly strict \fBpeerDependencies\fR
This differs from \fB--omit=peer\fR, in that \fB--omit=peer\fR will avoid unpacking \fBpeerDependencies\fR on disk, but will still design a tree such that \fBpeerDependencies\fR \fIcould\fR be unpacked in a correct place.
.P
Use of \fBlegacy-peer-deps\fR is not recommended, as it will not enforce the \fBpeerDependencies\fR contract that meta-dependencies may rely on.
+.SS "\fBlibc\fR"
+.RS 0
+.IP \(bu 4
+Default: null
+.IP \(bu 4
+Type: null or String
+.RE 0
+
+.P
+Override libc of native modules to install. Acceptable values are the same as the \fBlibc\fR field of package.json
.SS "\fBlink\fR"
.RS 0
.IP \(bu 4
@@ -1374,7 +1384,7 @@ Type: "library", "application", or "framework"
.RE 0
.P
-The type of package described by the generated SBOM. For SPDX, this is the value for the \fBprimaryPackagePurpose\fR fieled. For CycloneDX, this is the value for the \fBtype\fR field.
+The type of package described by the generated SBOM. For SPDX, this is the value for the \fBprimaryPackagePurpose\fR field. For CycloneDX, this is the value for the \fBtype\fR field.
.SS "\fBscope\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man7/dependency-selectors.7 b/deps/npm/man/man7/dependency-selectors.7
index 78aaa5a18a9fff..489f33f3f38a3b 100644
--- a/deps/npm/man/man7/dependency-selectors.7
+++ b/deps/npm/man/man7/dependency-selectors.7
@@ -1,4 +1,4 @@
-.TH "QUERYING" "7" "November 2023" "" ""
+.TH "QUERYING" "7" "January 2024" "" ""
.SH "NAME"
\fBQuerying\fR - Dependency Selector Syntax & Querying
.SS "Description"
diff --git a/deps/npm/man/man7/developers.7 b/deps/npm/man/man7/developers.7
index 9516ee1030c22a..b259049ab72e5c 100644
--- a/deps/npm/man/man7/developers.7
+++ b/deps/npm/man/man7/developers.7
@@ -1,4 +1,4 @@
-.TH "DEVELOPERS" "7" "November 2023" "" ""
+.TH "DEVELOPERS" "7" "January 2024" "" ""
.SH "NAME"
\fBdevelopers\fR - Developer Guide
.SS "Description"
diff --git a/deps/npm/man/man7/logging.7 b/deps/npm/man/man7/logging.7
index a6fb699796ea5d..c0340e820fa731 100644
--- a/deps/npm/man/man7/logging.7
+++ b/deps/npm/man/man7/logging.7
@@ -1,4 +1,4 @@
-.TH "LOGGING" "7" "November 2023" "" ""
+.TH "LOGGING" "7" "January 2024" "" ""
.SH "NAME"
\fBLogging\fR - Why, What & How We Log
.SS "Description"
diff --git a/deps/npm/man/man7/orgs.7 b/deps/npm/man/man7/orgs.7
index 2f282bc29165de..b37092693719f6 100644
--- a/deps/npm/man/man7/orgs.7
+++ b/deps/npm/man/man7/orgs.7
@@ -1,4 +1,4 @@
-.TH "ORGS" "7" "November 2023" "" ""
+.TH "ORGS" "7" "January 2024" "" ""
.SH "NAME"
\fBorgs\fR - Working with Teams & Orgs
.SS "Description"
diff --git a/deps/npm/man/man7/package-spec.7 b/deps/npm/man/man7/package-spec.7
index 6c38ccdcbf8434..d189b6273127d6 100644
--- a/deps/npm/man/man7/package-spec.7
+++ b/deps/npm/man/man7/package-spec.7
@@ -1,4 +1,4 @@
-.TH "PACKAGE-SPEC" "7" "November 2023" "" ""
+.TH "PACKAGE-SPEC" "7" "January 2024" "" ""
.SH "NAME"
\fBpackage-spec\fR - Package name specifier
.SS "Description"
diff --git a/deps/npm/man/man7/registry.7 b/deps/npm/man/man7/registry.7
index a1dc9352fc706b..08a7778a18d54d 100644
--- a/deps/npm/man/man7/registry.7
+++ b/deps/npm/man/man7/registry.7
@@ -1,4 +1,4 @@
-.TH "REGISTRY" "7" "November 2023" "" ""
+.TH "REGISTRY" "7" "January 2024" "" ""
.SH "NAME"
\fBregistry\fR - The JavaScript Package Registry
.SS "Description"
diff --git a/deps/npm/man/man7/removal.7 b/deps/npm/man/man7/removal.7
index 0047ac99fd5832..af0e2320303c8d 100644
--- a/deps/npm/man/man7/removal.7
+++ b/deps/npm/man/man7/removal.7
@@ -1,4 +1,4 @@
-.TH "REMOVAL" "7" "November 2023" "" ""
+.TH "REMOVAL" "7" "January 2024" "" ""
.SH "NAME"
\fBremoval\fR - Cleaning the Slate
.SS "Synopsis"
diff --git a/deps/npm/man/man7/scope.7 b/deps/npm/man/man7/scope.7
index b7c6d0104ac3b8..7a753f73aec1dc 100644
--- a/deps/npm/man/man7/scope.7
+++ b/deps/npm/man/man7/scope.7
@@ -1,4 +1,4 @@
-.TH "SCOPE" "7" "November 2023" "" ""
+.TH "SCOPE" "7" "January 2024" "" ""
.SH "NAME"
\fBscope\fR - Scoped packages
.SS "Description"
diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7
index 881e8202e086f3..60f252bd137b62 100644
--- a/deps/npm/man/man7/scripts.7
+++ b/deps/npm/man/man7/scripts.7
@@ -1,4 +1,4 @@
-.TH "SCRIPTS" "7" "November 2023" "" ""
+.TH "SCRIPTS" "7" "January 2024" "" ""
.SH "NAME"
\fBscripts\fR - How npm handles the "scripts" field
.SS "Description"
diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7
index a81dd84a928fb0..4d261469a1835f 100644
--- a/deps/npm/man/man7/workspaces.7
+++ b/deps/npm/man/man7/workspaces.7
@@ -1,4 +1,4 @@
-.TH "WORKSPACES" "7" "November 2023" "" ""
+.TH "WORKSPACES" "7" "January 2024" "" ""
.SH "NAME"
\fBworkspaces\fR - Working with workspaces
.SS "Description"
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js
index 8c4e148464d33a..def00dc74f039c 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js
@@ -333,7 +333,7 @@ module.exports = cls => class ActualLoader extends cls {
async #loadFSTree (node) {
const did = this.#actualTreeLoaded
- if (!did.has(node.target.realpath)) {
+ if (!node.isLink && !did.has(node.target.realpath)) {
did.add(node.target.realpath)
await this.#loadFSChildren(node.target)
return Promise.all(
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
index 0981afdae6ece7..7ce3bc2a9db1dd 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
@@ -628,7 +628,7 @@ module.exports = cls => class Reifier extends cls {
process.emit('time', timer)
this.addTracker('reify', node.name, node.location)
- const { npmVersion, nodeVersion, cpu, os } = this.options
+ const { npmVersion, nodeVersion, cpu, os, libc } = this.options
const p = Promise.resolve().then(async () => {
// when we reify an optional node, check the engine and platform
// first. be sure to ignore the --force and --engine-strict flags,
@@ -638,7 +638,7 @@ module.exports = cls => class Reifier extends cls {
// eslint-disable-next-line promise/always-return
if (node.optional) {
checkEngine(node.package, npmVersion, nodeVersion, false)
- checkPlatform(node.package, false, { cpu, os })
+ checkPlatform(node.package, false, { cpu, os, libc })
}
await this[_checkBins](node)
await this[_extractOrLink](node)
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js
index a0fda5a4b567a9..e6525ffe67b65d 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js
@@ -48,7 +48,7 @@ const { resolve, basename, relative } = require('path')
const specFromLock = require('./spec-from-lock.js')
const versionFromTgz = require('./version-from-tgz.js')
const npa = require('npm-package-arg')
-const rpj = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const parseJSON = require('parse-conflict-json')
const stringify = require('json-stringify-nice')
@@ -81,28 +81,6 @@ const relpath = require('./relpath.js')
const consistentResolve = require('./consistent-resolve.js')
const { overrideResolves } = require('./override-resolves.js')
-const maybeReadFile = file => {
- return readFile(file, 'utf8').then(d => d, er => {
- /* istanbul ignore else - can't test without breaking module itself */
- if (er.code === 'ENOENT') {
- return ''
- } else {
- throw er
- }
- })
-}
-
-const maybeStatFile = file => {
- return stat(file).then(st => st.isFile(), er => {
- /* istanbul ignore else - can't test without breaking module itself */
- if (er.code === 'ENOENT') {
- return null
- } else {
- throw er
- }
- })
-}
-
const pkgMetaKeys = [
// note: name is included if necessary, for alias packages
'version',
@@ -134,81 +112,72 @@ const nodeMetaKeys = [
const metaFieldFromPkg = (pkg, key) => {
const val = pkg[key]
- // get the license type, not an object
- return (key === 'license' && val && typeof val === 'object' && val.type)
- ? val.type
+ if (val) {
+ // get only the license type, not the full object
+ if (key === 'license' && typeof val === 'object' && val.type) {
+ return val.type
+ }
// skip empty objects and falsey values
- : (val && !(typeof val === 'object' && !Object.keys(val).length)) ? val
- : null
+ if (typeof val !== 'object' || Object.keys(val).length) {
+ return val
+ }
+ }
+ return null
}
-// check to make sure that there are no packages newer than the hidden lockfile
-const assertNoNewer = async (path, data, lockTime, dir = path, seen = null) => {
+// check to make sure that there are no packages newer than or missing from the hidden lockfile
+const assertNoNewer = async (path, data, lockTime, dir, seen) => {
const base = basename(dir)
const isNM = dir !== path && base === 'node_modules'
- const isScope = dir !== path && !isNM && base.charAt(0) === '@'
- const isParent = dir === path || isNM || isScope
+ const isScope = dir !== path && base.startsWith('@')
+ const isParent = (dir === path) || isNM || isScope
+ const parent = isParent ? dir : resolve(dir, 'node_modules')
const rel = relpath(path, dir)
- if (dir !== path) {
- const dirTime = (await stat(dir)).mtime
+ seen.add(rel)
+ let entries
+ if (dir === path) {
+ entries = [{ name: 'node_modules', isDirectory: () => true }]
+ } else {
+ const { mtime: dirTime } = await stat(dir)
if (dirTime > lockTime) {
- throw 'out of date, updated: ' + rel
+ throw new Error(`out of date, updated: ${rel}`)
}
if (!isScope && !isNM && !data.packages[rel]) {
- throw 'missing from lockfile: ' + rel
+ throw new Error(`missing from lockfile: ${rel}`)
}
- seen.add(rel)
- } else {
- seen = new Set([rel])
+ entries = await readdir(parent, { withFileTypes: true }).catch(() => [])
}
- const parent = isParent ? dir : resolve(dir, 'node_modules')
- const children = dir === path
- ? Promise.resolve([{ name: 'node_modules', isDirectory: () => true }])
- : readdir(parent, { withFileTypes: true })
-
- const ents = await children.catch(() => [])
- await Promise.all(ents.map(async ent => {
- const child = resolve(parent, ent.name)
- if (ent.isDirectory() && !/^\./.test(ent.name)) {
+ // TODO limit concurrency here, this is recursive
+ await Promise.all(entries.map(async dirent => {
+ const child = resolve(parent, dirent.name)
+ if (dirent.isDirectory() && !dirent.name.startsWith('.')) {
await assertNoNewer(path, data, lockTime, child, seen)
- } else if (ent.isSymbolicLink()) {
+ } else if (dirent.isSymbolicLink()) {
const target = resolve(parent, await readlink(child))
const tstat = await stat(target).catch(
/* istanbul ignore next - windows */ () => null)
seen.add(relpath(path, child))
/* istanbul ignore next - windows cannot do this */
- if (tstat && tstat.isDirectory() && !seen.has(relpath(path, target))) {
+ if (tstat?.isDirectory() && !seen.has(relpath(path, target))) {
await assertNoNewer(path, data, lockTime, target, seen)
}
}
}))
+
if (dir !== path) {
return
}
// assert that all the entries in the lockfile were seen
- for (const loc of new Set(Object.keys(data.packages))) {
+ for (const loc in data.packages) {
if (!seen.has(loc)) {
- throw 'missing from node_modules: ' + loc
+ throw new Error(`missing from node_modules: ${loc}`)
}
}
}
-const _awaitingUpdate = Symbol('_awaitingUpdate')
-const _updateWaitingNode = Symbol('_updateWaitingNode')
-const _lockFromLoc = Symbol('_lockFromLoc')
-const _pathToLoc = Symbol('_pathToLoc')
-const _loadAll = Symbol('_loadAll')
-const _metaFromLock = Symbol('_metaFromLock')
-const _resolveMetaNode = Symbol('_resolveMetaNode')
-const _fixDependencies = Symbol('_fixDependencies')
-const _buildLegacyLockfile = Symbol('_buildLegacyLockfile')
-const _filenameSet = Symbol('_filenameSet')
-const _maybeRead = Symbol('_maybeRead')
-const _maybeStat = Symbol('_maybeStat')
-
class Shrinkwrap {
static get defaultLockfileVersion () {
return defaultLockfileVersion
@@ -228,13 +197,18 @@ class Shrinkwrap {
const s = new Shrinkwrap(options)
s.reset()
- const [sw, lock] = await s[_maybeStat]()
+ const [sw, lock] = await s.resetFiles
- s.filename = resolve(s.path,
- (s.hiddenLockfile ? 'node_modules/.package-lock'
- : s.shrinkwrapOnly || sw ? 'npm-shrinkwrap'
- : 'package-lock') + '.json')
+ // XXX this is duplicated in this.load(), but using loadFiles instead of resetFiles
+ if (s.hiddenLockfile) {
+ s.filename = resolve(s.path, 'node_modules/.package-lock.json')
+ } else if (s.shrinkwrapOnly || sw) {
+ s.filename = resolve(s.path, 'npm-shrinkwrap.json')
+ } else {
+ s.filename = resolve(s.path, 'package-lock.json')
+ }
s.loadedFromDisk = !!(sw || lock)
+ // TODO what uses this?
s.type = basename(s.filename)
return s
@@ -249,12 +223,12 @@ class Shrinkwrap {
}
const meta = {}
- pkgMetaKeys.forEach(key => {
+ for (const key of pkgMetaKeys) {
const val = metaFieldFromPkg(node.package, key)
if (val) {
meta[key.replace(/^_/, '')] = val
}
- })
+ }
// we only include name if different from the node path name, and for the
// root to help prevent churn based on the name of the directory the
// project is in
@@ -267,11 +241,11 @@ class Shrinkwrap {
meta.devDependencies = node.package.devDependencies
}
- nodeMetaKeys.forEach(key => {
+ for (const key of nodeMetaKeys) {
if (node[key]) {
meta[key] = node[key]
}
- })
+ }
const resolved = consistentResolve(node.resolved, node.path, path, true)
// hide resolved from registry dependencies.
@@ -302,6 +276,8 @@ class Shrinkwrap {
return meta
}
+ #awaitingUpdate = new Map()
+
constructor (options = {}) {
const {
path,
@@ -313,11 +289,14 @@ class Shrinkwrap {
resolveOptions = {},
} = options
- this.lockfileVersion = hiddenLockfile ? 3
- : lockfileVersion ? parseInt(lockfileVersion, 10)
- : null
+ if (hiddenLockfile) {
+ this.lockfileVersion = 3
+ } else if (lockfileVersion) {
+ this.lockfileVersion = parseInt(lockfileVersion, 10)
+ } else {
+ this.lockfileVersion = null
+ }
- this[_awaitingUpdate] = new Map()
this.tree = null
this.path = resolve(path || '.')
this.filename = null
@@ -354,9 +333,12 @@ class Shrinkwrap {
// don't use the simple version if the "registry" url is
// something else entirely!
const tgz = isReg && versionFromTgz(spec.name, resolved) || {}
- const yspec = tgz.name === spec.name && tgz.version === version ? version
- : isReg && tgz.name && tgz.version ? `npm:${tgz.name}@${tgz.version}`
- : resolved
+ let yspec = resolved
+ if (tgz.name === spec.name && tgz.version === version) {
+ yspec = version
+ } else if (isReg && tgz.name && tgz.version) {
+ yspec = `npm:${tgz.name}@${tgz.version}`
+ }
if (yspec) {
options.resolved = resolved.replace(yarnRegRe, 'https://registry.npmjs.org/')
options.integrity = integrity
@@ -370,7 +352,7 @@ class Shrinkwrap {
// still worth doing a load() first so we know which files to write.
reset () {
this.tree = null
- this[_awaitingUpdate] = new Map()
+ this.#awaitingUpdate = new Map()
const lockfileVersion = this.lockfileVersion || defaultLockfileVersion
this.originalLockfileVersion = lockfileVersion
@@ -382,58 +364,83 @@ class Shrinkwrap {
}
}
- [_filenameSet] () {
- return this.shrinkwrapOnly ? [
- this.path + '/npm-shrinkwrap.json',
- ] : this.hiddenLockfile ? [
- null,
- this.path + '/node_modules/.package-lock.json',
- ] : [
- this.path + '/npm-shrinkwrap.json',
- this.path + '/package-lock.json',
- this.path + '/yarn.lock',
+ // files to potentially read from and write to, in order of priority
+ get #filenameSet () {
+ if (this.shrinkwrapOnly) {
+ return [`${this.path}/npm-shrinkwrap.json`]
+ }
+ if (this.hiddenLockfile) {
+ return [`${this.path}/node_modules/.package-lock.json`]
+ }
+ return [
+ `${this.path}/npm-shrinkwrap.json`,
+ `${this.path}/package-lock.json`,
+ `${this.path}/yarn.lock`,
]
}
- [_maybeRead] () {
- return Promise.all(this[_filenameSet]().map(fn => fn && maybeReadFile(fn)))
+ get loadFiles () {
+ return Promise.all(
+ this.#filenameSet.map(file => file && readFile(file, 'utf8').then(d => d, er => {
+ /* istanbul ignore else - can't test without breaking module itself */
+ if (er.code === 'ENOENT') {
+ return ''
+ } else {
+ throw er
+ }
+ }))
+ )
}
- [_maybeStat] () {
- // throw away yarn, we only care about lock or shrinkwrap when checking
+ get resetFiles () {
+ // slice out yarn, we only care about lock or shrinkwrap when checking
// this way, since we're not actually loading the full lock metadata
- return Promise.all(this[_filenameSet]().slice(0, 2)
- .map(fn => fn && maybeStatFile(fn)))
+ return Promise.all(this.#filenameSet.slice(0, 2)
+ .map(file => file && stat(file).then(st => st.isFile(), er => {
+ /* istanbul ignore else - can't test without breaking module itself */
+ if (er.code === 'ENOENT') {
+ return null
+ } else {
+ throw er
+ }
+ })
+ )
+ )
}
inferFormattingOptions (packageJSONData) {
- // don't use detect-indent, just pick the first line.
- // if the file starts with {" then we have an indent of '', ie, none
- // which will default to 2 at save time.
const {
[Symbol.for('indent')]: indent,
[Symbol.for('newline')]: newline,
} = packageJSONData
- this.indent = indent !== undefined ? indent : this.indent
- this.newline = newline !== undefined ? newline : this.newline
+ if (indent !== undefined) {
+ this.indent = indent
+ }
+ if (newline !== undefined) {
+ this.newline = newline
+ }
}
async load () {
// we don't need to load package-lock.json except for top of tree nodes,
// only npm-shrinkwrap.json.
- return this[_maybeRead]().then(([sw, lock, yarn]) => {
- const data = sw || lock || ''
+ let data
+ try {
+ const [sw, lock, yarn] = await this.loadFiles
+ data = sw || lock || '{}'
// use shrinkwrap only for deps, otherwise prefer package-lock
// and ignore npm-shrinkwrap if both are present.
// TODO: emit a warning here or something if both are present.
- this.filename = resolve(this.path,
- (this.hiddenLockfile ? 'node_modules/.package-lock'
- : this.shrinkwrapOnly || sw ? 'npm-shrinkwrap'
- : 'package-lock') + '.json')
-
+ if (this.hiddenLockfile) {
+ this.filename = resolve(this.path, 'node_modules/.package-lock.json')
+ } else if (this.shrinkwrapOnly || sw) {
+ this.filename = resolve(this.path, 'npm-shrinkwrap.json')
+ } else {
+ this.filename = resolve(this.path, 'package-lock.json')
+ }
this.type = basename(this.filename)
- this.loadedFromDisk = !!data
+ this.loadedFromDisk = Boolean(sw || lock)
if (yarn) {
this.yarnLock = new YarnLock()
@@ -445,85 +452,84 @@ class Shrinkwrap {
}
}
- return data ? parseJSON(data) : {}
- }).then(async data => {
+ data = parseJSON(data)
this.inferFormattingOptions(data)
- if (!this.hiddenLockfile || !data.packages) {
- return data
+ if (this.hiddenLockfile && data.packages) {
+ // add a few ms just to account for jitter
+ const lockTime = +(await stat(this.filename)).mtime + 10
+ await assertNoNewer(this.path, data, lockTime, this.path, new Set())
}
- // add a few ms just to account for jitter
- const lockTime = +(await stat(this.filename)).mtime + 10
- await assertNoNewer(this.path, data, lockTime)
-
// all good! hidden lockfile is the newest thing in here.
- return data
- }).catch(er => {
+ } catch (er) {
/* istanbul ignore else */
if (typeof this.filename === 'string') {
const rel = relpath(this.path, this.filename)
- log.verbose('shrinkwrap', `failed to load ${rel}`, er)
+ log.verbose('shrinkwrap', `failed to load ${rel}`, er.message)
} else {
- log.verbose('shrinkwrap', `failed to load ${this.path}`, er)
+ log.verbose('shrinkwrap', `failed to load ${this.path}`, er.message)
}
this.loadingError = er
this.loadedFromDisk = false
this.ancientLockfile = false
- return {}
- }).then(lock => {
- // auto convert v1 lockfiles to v3
- // leave v2 in place unless configured
- // v3 by default
- const lockfileVersion =
- this.lockfileVersion ? this.lockfileVersion
- : lock.lockfileVersion === 1 ? defaultLockfileVersion
- : lock.lockfileVersion || defaultLockfileVersion
-
- this.data = {
- ...lock,
- lockfileVersion: lockfileVersion,
- requires: true,
- packages: lock.packages || {},
- dependencies: lock.dependencies || {},
- }
+ data = {}
+ }
+ // auto convert v1 lockfiles to v3
+ // leave v2 in place unless configured
+ // v3 by default
+ let lockfileVersion = defaultLockfileVersion
+ if (this.lockfileVersion) {
+ lockfileVersion = this.lockfileVersion
+ } else if (data.lockfileVersion && data.lockfileVersion !== 1) {
+ lockfileVersion = data.lockfileVersion
+ }
+
+ this.data = {
+ ...data,
+ lockfileVersion,
+ requires: true,
+ packages: data.packages || {},
+ dependencies: data.dependencies || {},
+ }
- this.originalLockfileVersion = lock.lockfileVersion
+ this.originalLockfileVersion = data.lockfileVersion
- // use default if it wasn't explicitly set, and the current file is
- // less than our default. otherwise, keep whatever is in the file,
- // unless we had an explicit setting already.
- if (!this.lockfileVersion) {
- this.lockfileVersion = this.data.lockfileVersion = lockfileVersion
- }
- this.ancientLockfile = this.loadedFromDisk &&
- !(lock.lockfileVersion >= 2) && !lock.requires
-
- // load old lockfile deps into the packages listing
- // eslint-disable-next-line promise/always-return
- if (lock.dependencies && !lock.packages) {
- return rpj(this.path + '/package.json').then(pkg => pkg, er => ({}))
- // eslint-disable-next-line promise/always-return
- .then(pkg => {
- this[_loadAll]('', null, this.data)
- this[_fixDependencies](pkg)
- })
+ // use default if it wasn't explicitly set, and the current file is
+ // less than our default. otherwise, keep whatever is in the file,
+ // unless we had an explicit setting already.
+ if (!this.lockfileVersion) {
+ this.lockfileVersion = this.data.lockfileVersion = lockfileVersion
+ }
+ this.ancientLockfile = this.loadedFromDisk &&
+ !(data.lockfileVersion >= 2) && !data.requires
+
+ // load old lockfile deps into the packages listing
+ if (data.dependencies && !data.packages) {
+ let pkg
+ try {
+ pkg = await pkgJson.normalize(this.path)
+ pkg = pkg.content
+ } catch {
+ pkg = {}
}
- })
- .then(() => this)
+ this.#loadAll('', null, this.data)
+ this.#fixDependencies(pkg)
+ }
+ return this
}
- [_loadAll] (location, name, lock) {
+ #loadAll (location, name, lock) {
// migrate a v1 package lock to the new format.
- const meta = this[_metaFromLock](location, name, lock)
+ const meta = this.#metaFromLock(location, name, lock)
// dependencies nested under a link are actually under the link target
if (meta.link) {
location = meta.resolved
}
if (lock.dependencies) {
- for (const [name, dep] of Object.entries(lock.dependencies)) {
+ for (const name in lock.dependencies) {
const loc = location + (location ? '/' : '') + 'node_modules/' + name
- this[_loadAll](loc, name, dep)
+ this.#loadAll(loc, name, lock.dependencies[name])
}
}
}
@@ -531,20 +537,20 @@ class Shrinkwrap {
// v1 lockfiles track the optional/dev flags, but they don't tell us
// which thing had what kind of dep on what other thing, so we need
// to correct that now, or every link will be considered prod
- [_fixDependencies] (pkg) {
+ #fixDependencies (pkg) {
// we need the root package.json because legacy shrinkwraps just
// have requires:true at the root level, which is even less useful
// than merging all dep types into one object.
const root = this.data.packages['']
- pkgMetaKeys.forEach(key => {
+ for (const key of pkgMetaKeys) {
const val = metaFieldFromPkg(pkg, key)
- const k = key.replace(/^_/, '')
if (val) {
- root[k] = val
+ root[key.replace(/^_/, '')] = val
}
- })
+ }
- for (const [loc, meta] of Object.entries(this.data.packages)) {
+ for (const loc in this.data.packages) {
+ const meta = this.data.packages[loc]
if (!meta.requires || !loc) {
continue
}
@@ -555,25 +561,30 @@ class Shrinkwrap {
// This isn't perfect, but it's a pretty good approximation, and at
// least gets us out of having all 'prod' edges, which throws off the
// buildIdealTree process
- for (const [name, spec] of Object.entries(meta.requires)) {
- const dep = this[_resolveMetaNode](loc, name)
+ for (const name in meta.requires) {
+ const dep = this.#resolveMetaNode(loc, name)
// this overwrites the false value set above
- const depType = dep && dep.optional && !meta.optional
- ? 'optionalDependencies'
- : /* istanbul ignore next - dev deps are only for the root level */
- dep && dep.dev && !meta.dev ? 'devDependencies'
- // also land here if the dep just isn't in the tree, which maybe
- // should be an error, since it means that the shrinkwrap is
- // invalid, but we can't do much better without any info.
- : 'dependencies'
- meta[depType] = meta[depType] || {}
- meta[depType][name] = spec
+ // default to dependencies if the dep just isn't in the tree, which
+ // maybe should be an error, since it means that the shrinkwrap is
+ // invalid, but we can't do much better without any info.
+ let depType = 'dependencies'
+ /* istanbul ignore else - dev deps are only for the root level */
+ if (dep?.optional && !meta.optional) {
+ depType = 'optionalDependencies'
+ } else if (dep?.dev && !meta.dev) {
+ // XXX is this even reachable?
+ depType = 'devDependencies'
+ }
+ if (!meta[depType]) {
+ meta[depType] = {}
+ }
+ meta[depType][name] = meta.requires[name]
}
delete meta.requires
}
}
- [_resolveMetaNode] (loc, name) {
+ #resolveMetaNode (loc, name) {
for (let path = loc; true; path = path.replace(/(^|\/)[^/]*$/, '')) {
const check = `${path}${path ? '/' : ''}node_modules/${name}`
if (this.data.packages[check]) {
@@ -587,7 +598,7 @@ class Shrinkwrap {
return null
}
- [_lockFromLoc] (lock, path, i = 0) {
+ #lockFromLoc (lock, path, i = 0) {
if (!lock) {
return null
}
@@ -604,12 +615,12 @@ class Shrinkwrap {
return null
}
- return this[_lockFromLoc](lock.dependencies[path[i]], path, i + 1)
+ return this.#lockFromLoc(lock.dependencies[path[i]], path, i + 1)
}
// pass in a path relative to the root path, or an absolute path,
// get back a /-normalized location based on root path.
- [_pathToLoc] (path) {
+ #pathToLoc (path) {
return relpath(this.path, resolve(this.path, path))
}
@@ -617,13 +628,13 @@ class Shrinkwrap {
if (!this.data) {
throw new Error('run load() before getting or setting data')
}
- const location = this[_pathToLoc](nodePath)
- this[_awaitingUpdate].delete(location)
+ const location = this.#pathToLoc(nodePath)
+ this.#awaitingUpdate.delete(location)
delete this.data.packages[location]
const path = location.split(/(?:^|\/)node_modules\//)
const name = path.pop()
- const pLock = this[_lockFromLoc](this.data, path)
+ const pLock = this.#lockFromLoc(this.data, path)
if (pLock && pLock.dependencies) {
delete pLock.dependencies[name]
}
@@ -634,9 +645,9 @@ class Shrinkwrap {
throw new Error('run load() before getting or setting data')
}
- const location = this[_pathToLoc](nodePath)
- if (this[_awaitingUpdate].has(location)) {
- this[_updateWaitingNode](location)
+ const location = this.#pathToLoc(nodePath)
+ if (this.#awaitingUpdate.has(location)) {
+ this.#updateWaitingNode(location)
}
// first try to get from the newer spot, which we know has
@@ -649,12 +660,12 @@ class Shrinkwrap {
// get the node in the shrinkwrap corresponding to this spot
const path = location.split(/(?:^|\/)node_modules\//)
const name = path[path.length - 1]
- const lock = this[_lockFromLoc](this.data, path)
+ const lock = this.#lockFromLoc(this.data, path)
- return this[_metaFromLock](location, name, lock)
+ return this.#metaFromLock(location, name, lock)
}
- [_metaFromLock] (location, name, lock) {
+ #metaFromLock (location, name, lock) {
// This function tries as hard as it can to figure out the metadata
// from a lockfile which may be outdated or incomplete. Since v1
// lockfiles used the "version" field to contain a variety of
@@ -679,7 +690,7 @@ class Shrinkwrap {
// also save the link target, omitting version since we don't know
// what it is, but we know it isn't a link to itself!
if (!this.data.packages[target]) {
- this[_metaFromLock](target, name, { ...lock, version: null })
+ this.#metaFromLock(target, name, { ...lock, version: null })
}
return this.data.packages[location]
}
@@ -799,10 +810,14 @@ class Shrinkwrap {
version,
} = this.get(node.path)
- const pathFixed = !resolved ? null
- : !/^file:/.test(resolved) ? resolved
- // resolve onto the metadata path
- : `file:${resolve(this.path, resolved.slice(5)).replace(/#/g, '%23')}`
+ let pathFixed = null
+ if (resolved) {
+ if (!/^file:/.test(resolved)) {
+ pathFixed = resolved
+ } else {
+ pathFixed = `file:${resolve(this.path, resolved.slice(5)).replace(/#/g, '%23')}`
+ }
+ }
// if we have one, only set the other if it matches
// otherwise it could be for a completely different thing.
@@ -831,7 +846,7 @@ class Shrinkwrap {
node.hasShrinkwrap = node.hasShrinkwrap || hasShrinkwrap || false
}
}
- this[_awaitingUpdate].set(loc, node)
+ this.#awaitingUpdate.set(loc, node)
}
addEdge (edge) {
@@ -852,10 +867,15 @@ class Shrinkwrap {
}
// we relativize the path here because that's how it shows up in the lock
- // XXX how is this different from pathFixed above??
- const pathFixed = !node.resolved ? null
- : !/file:/.test(node.resolved) ? node.resolved
- : consistentResolve(node.resolved, node.path, this.path, true)
+ // XXX why is this different from pathFixed in this.add??
+ let pathFixed = null
+ if (node.resolved) {
+ if (!/file:/.test(node.resolved)) {
+ pathFixed = node.resolved
+ } else {
+ pathFixed = consistentResolve(node.resolved, node.path, this.path, true)
+ }
+ }
const spec = npa(`${node.name}@${edge.spec}`)
const entry = this.yarnLock.entries.get(`${node.name}@${edge.spec}`)
@@ -875,12 +895,12 @@ class Shrinkwrap {
node.resolved = node.resolved ||
consistentResolve(entry.resolved, this.path, node.path) || null
- this[_awaitingUpdate].set(relpath(this.path, node.path), node)
+ this.#awaitingUpdate.set(relpath(this.path, node.path), node)
}
- [_updateWaitingNode] (loc) {
- const node = this[_awaitingUpdate].get(loc)
- this[_awaitingUpdate].delete(loc)
+ #updateWaitingNode (loc) {
+ const node = this.#awaitingUpdate.get(loc)
+ this.#awaitingUpdate.delete(loc)
this.data.packages[loc] = Shrinkwrap.metaFromNode(
node,
this.path,
@@ -911,9 +931,9 @@ class Shrinkwrap {
this.path,
this.resolveOptions)
}
- } else if (this[_awaitingUpdate].size > 0) {
- for (const loc of this[_awaitingUpdate].keys()) {
- this[_updateWaitingNode](loc)
+ } else if (this.#awaitingUpdate.size > 0) {
+ for (const loc of this.#awaitingUpdate.keys()) {
+ this.#updateWaitingNode(loc)
}
}
@@ -928,7 +948,7 @@ class Shrinkwrap {
delete this.data.packages['']
delete this.data.dependencies
} else if (this.tree && this.lockfileVersion <= 3) {
- this[_buildLegacyLockfile](this.tree, this.data)
+ this.#buildLegacyLockfile(this.tree, this.data)
}
// lf version 1 = dependencies only
@@ -945,7 +965,7 @@ class Shrinkwrap {
}
}
- [_buildLegacyLockfile] (node, lock, path = []) {
+ #buildLegacyLockfile (node, lock, path = []) {
if (node === this.tree) {
// the root node
lock.name = node.packageName || node.name
@@ -966,9 +986,13 @@ class Shrinkwrap {
const aloc = a.from.location.split('node_modules')
const bloc = b.from.location.split('node_modules')
/* istanbul ignore next - sort calling order is indeterminate */
- return aloc.length > bloc.length ? 1
- : bloc.length > aloc.length ? -1
- : localeCompare(aloc[aloc.length - 1], bloc[bloc.length - 1])
+ if (aloc.length > bloc.length) {
+ return 1
+ }
+ if (bloc.length > aloc.length) {
+ return -1
+ }
+ return localeCompare(aloc[aloc.length - 1], bloc[bloc.length - 1])
})[0]
const res = consistentResolve(node.resolved, this.path, this.path, true)
@@ -979,8 +1003,10 @@ class Shrinkwrap {
// if we don't have either, just an empty object so nothing matches below.
// This will effectively just save the version and resolved, as if it's
// a standard version/range dep, which is a reasonable default.
- const spec = !edge ? rSpec
- : npa.resolve(node.name, edge.spec, edge.from.realpath)
+ let spec = rSpec
+ if (edge) {
+ spec = npa.resolve(node.name, edge.spec, edge.from.realpath)
+ }
if (node.isLink) {
lock.version = `file:${relpath(this.path, node.realpath).replace(/#/g, '%23')}`
@@ -1086,7 +1112,7 @@ class Shrinkwrap {
if (path.includes(kid.realpath)) {
continue
}
- dependencies[name] = this[_buildLegacyLockfile](kid, {}, kidPath)
+ dependencies[name] = this.#buildLegacyLockfile(kid, {}, kidPath)
found = true
}
if (found) {
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/tree-check.js b/deps/npm/node_modules/@npmcli/arborist/lib/tree-check.js
index 44b5484c68240c..62a50bc75bdb58 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/tree-check.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/tree-check.js
@@ -90,7 +90,7 @@ const checkTree = (tree, checkUnreachable = true) => {
})
}
- if (node.path === tree.root.path && node !== tree.root) {
+ if (node.path === tree.root.path && node !== tree.root && !tree.root.isLink) {
throw Object.assign(new Error('node with same path as root'), {
node: node.path,
tree: tree.path,
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js b/deps/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js
index 887d776f85d04e..d5693a3eff943a 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js
@@ -341,10 +341,10 @@ class YarnLock {
}
}
-const _specs = Symbol('_specs')
class YarnLockEntry {
+ #specs
constructor (specs) {
- this[_specs] = new Set(specs)
+ this.#specs = new Set(specs)
this.resolved = null
this.version = null
this.integrity = null
@@ -354,7 +354,7 @@ class YarnLockEntry {
toString () {
// sort objects to the bottom, then alphabetical
- return ([...this[_specs]]
+ return ([...this.#specs]
.sort(localeCompare)
.map(quoteIfNeeded).join(', ') +
':\n' +
@@ -370,7 +370,7 @@ class YarnLockEntry {
}
addSpec (spec) {
- this[_specs].add(spec)
+ this.#specs.add(spec)
}
}
diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json
index a4d47d5627031c..1ba9c92e3fdc01 100644
--- a/deps/npm/node_modules/@npmcli/arborist/package.json
+++ b/deps/npm/node_modules/@npmcli/arborist/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/arborist",
- "version": "7.2.1",
+ "version": "7.3.0",
"description": "Manage node_modules trees",
"dependencies": {
"@isaacs/string-locale-compare": "^1.1.0",
@@ -39,7 +39,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.19.0",
+ "@npmcli/template-oss": "4.21.3",
"benchmark": "^2.1.4",
"minify-registry-metadata": "^3.0.0",
"nock": "^13.3.3",
@@ -49,11 +49,11 @@
},
"scripts": {
"test": "tap",
- "posttest": "node ../.. run lint",
+ "posttest": "npm run lint",
"snap": "tap",
"test-proxy": "ARBORIST_TEST_PROXY=1 tap --snapshot",
- "lint": "eslint \"**/*.js\"",
- "lintfix": "node ../.. run lint -- --fix",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+ "lintfix": "npm run lint -- --fix",
"benchmark": "node scripts/benchmark.js",
"benchclean": "rm -rf scripts/benchmark/*/",
"postlint": "template-oss-check",
@@ -90,7 +90,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.19.0",
+ "version": "4.21.3",
"content": "../../scripts/template-oss/index.js"
}
}
diff --git a/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js b/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js
index c5b8d4f779b92b..6f8760fce1d3e7 100644
--- a/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js
+++ b/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js
@@ -494,6 +494,16 @@ define('os', {
flatten,
})
+define('libc', {
+ default: null,
+ type: [null, String],
+ description: `
+ Override libc of native modules to install.
+ Acceptable values are same as \`libc\` field of package.json
+ `,
+ flatten,
+})
+
define('depth', {
default: null,
defaultDescription: `
@@ -1234,7 +1244,7 @@ define('sbom-type', {
],
description: `
The type of package described by the generated SBOM. For SPDX, this is the
- value for the \`primaryPackagePurpose\` fieled. For CycloneDX, this is the
+ value for the \`primaryPackagePurpose\` field. For CycloneDX, this is the
value for the \`type\` field.
`,
flatten,
diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json
index c4eabca7d1b8c1..80eb210b19e166 100644
--- a/deps/npm/node_modules/@npmcli/config/package.json
+++ b/deps/npm/node_modules/@npmcli/config/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/config",
- "version": "8.0.2",
+ "version": "8.1.0",
"files": [
"bin/",
"lib/"
@@ -17,7 +17,7 @@
"scripts": {
"test": "tap",
"snap": "tap",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"postlint": "template-oss-check",
"lintfix": "npm run lint -- --fix",
"posttest": "npm run lint",
@@ -32,7 +32,7 @@
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/mock-globals": "^1.0.0",
- "@npmcli/template-oss": "4.19.0",
+ "@npmcli/template-oss": "4.21.3",
"tap": "^16.3.8"
},
"dependencies": {
@@ -50,8 +50,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.19.0",
- "content": "../../scripts/template-oss/index.js",
- "npm": "npm"
+ "version": "4.21.3",
+ "content": "../../scripts/template-oss/index.js"
}
}
diff --git a/deps/npm/node_modules/@npmcli/git/lib/spawn.js b/deps/npm/node_modules/@npmcli/git/lib/spawn.js
index 7098d7b8729427..5e96eb5542b5a6 100644
--- a/deps/npm/node_modules/@npmcli/git/lib/spawn.js
+++ b/deps/npm/node_modules/@npmcli/git/lib/spawn.js
@@ -2,10 +2,10 @@ const spawn = require('@npmcli/promise-spawn')
const promiseRetry = require('promise-retry')
const log = require('proc-log')
const makeError = require('./make-error.js')
-const whichGit = require('./which.js')
const makeOpts = require('./opts.js')
module.exports = (gitArgs, opts = {}) => {
+ const whichGit = require('./which.js')
const gitPath = whichGit(opts)
if (gitPath instanceof Error) {
diff --git a/deps/npm/node_modules/@npmcli/git/package.json b/deps/npm/node_modules/@npmcli/git/package.json
index 6ab037d841cc34..485c1f43dddb90 100644
--- a/deps/npm/node_modules/@npmcli/git/package.json
+++ b/deps/npm/node_modules/@npmcli/git/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/git",
- "version": "5.0.3",
+ "version": "5.0.4",
"main": "lib/index.js",
"files": [
"bin/",
@@ -14,7 +14,7 @@
"author": "GitHub Inc.",
"license": "ISC",
"scripts": {
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"snap": "tap",
"test": "tap",
"posttest": "npm run lint",
@@ -31,7 +31,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.18.0",
+ "@npmcli/template-oss": "4.21.3",
"npm-package-arg": "^11.0.0",
"slash": "^3.0.0",
"tap": "^16.0.1"
@@ -51,13 +51,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.18.0",
- "publish": true,
- "ciVersions": [
- "16.14.0",
- "16.x",
- "18.0.0",
- "18.x"
- ]
+ "version": "4.21.3",
+ "publish": true
}
}
diff --git a/deps/npm/node_modules/@npmcli/promise-spawn/lib/index.js b/deps/npm/node_modules/@npmcli/promise-spawn/lib/index.js
index 571ff6b9169c9b..b31395ebb5bcd4 100644
--- a/deps/npm/node_modules/@npmcli/promise-spawn/lib/index.js
+++ b/deps/npm/node_modules/@npmcli/promise-spawn/lib/index.js
@@ -100,8 +100,8 @@ const spawnWithShell = (cmd, args, opts, extra) => {
let pathToInitial
try {
pathToInitial = which.sync(initialCmd, {
- path: (options.env && options.env.PATH) || process.env.PATH,
- pathext: (options.env && options.env.PATHEXT) || process.env.PATHEXT,
+ path: (options.env && findInObject(options.env, 'PATH')) || process.env.PATH,
+ pathext: (options.env && findInObject(options.env, 'PATHEXT')) || process.env.PATHEXT,
}).toLowerCase()
} catch (err) {
pathToInitial = initialCmd.toLowerCase()
@@ -192,4 +192,14 @@ const stdioResult = (stdout, stderr, { stdioString = true, stdio }) => {
return result
}
+// case insensitive lookup in an object
+const findInObject = (obj, key) => {
+ key = key.toLowerCase()
+ for (const objKey of Object.keys(obj).sort()) {
+ if (objKey.toLowerCase() === key) {
+ return obj[objKey]
+ }
+ }
+}
+
module.exports = promiseSpawn
diff --git a/deps/npm/node_modules/@npmcli/promise-spawn/package.json b/deps/npm/node_modules/@npmcli/promise-spawn/package.json
index ffd89f1083341c..6e161b7404b858 100644
--- a/deps/npm/node_modules/@npmcli/promise-spawn/package.json
+++ b/deps/npm/node_modules/@npmcli/promise-spawn/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/promise-spawn",
- "version": "7.0.0",
+ "version": "7.0.1",
"files": [
"bin/",
"lib/"
@@ -16,7 +16,7 @@
"scripts": {
"test": "tap",
"snap": "tap",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"lintfix": "npm run lint -- --fix",
"posttest": "npm run lint",
"postsnap": "npm run lintfix --",
@@ -32,7 +32,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.18.0",
+ "@npmcli/template-oss": "4.21.3",
"spawk": "^1.7.1",
"tap": "^16.0.1"
},
@@ -41,13 +41,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "ciVersions": [
- "16.14.0",
- "16.x",
- "18.0.0",
- "18.x"
- ],
- "version": "4.18.0",
+ "version": "4.21.3",
"publish": true
},
"dependencies": {
diff --git a/deps/npm/node_modules/@npmcli/run-script/lib/signal-manager.js b/deps/npm/node_modules/@npmcli/run-script/lib/signal-manager.js
index efc00b488063ff..a099a4af2b9be3 100644
--- a/deps/npm/node_modules/@npmcli/run-script/lib/signal-manager.js
+++ b/deps/npm/node_modules/@npmcli/run-script/lib/signal-manager.js
@@ -1,9 +1,6 @@
const runningProcs = new Set()
let handlersInstalled = false
-// NOTE: these signals aren't actually forwarded anywhere. they're trapped and
-// ignored until all child processes have exited. in our next breaking change
-// we should rename this
const forwardedSignals = [
'SIGINT',
'SIGTERM',
@@ -12,8 +9,12 @@ const forwardedSignals = [
// no-op, this is so receiving the signal doesn't cause us to exit immediately
// instead, we exit after all children have exited when we re-send the signal
// to ourselves. see the catch handler at the bottom of run-script-pkg.js
-// istanbul ignore next - this function does nothing
-const handleSignal = () => {}
+const handleSignal = signal => {
+ for (const proc of runningProcs) {
+ proc.kill(signal)
+ }
+}
+
const setupListeners = () => {
for (const signal of forwardedSignals) {
process.on(signal, handleSignal)
diff --git a/deps/npm/node_modules/@npmcli/run-script/package.json b/deps/npm/node_modules/@npmcli/run-script/package.json
index 21f00c7f1cbfbb..c090e52cf11278 100644
--- a/deps/npm/node_modules/@npmcli/run-script/package.json
+++ b/deps/npm/node_modules/@npmcli/run-script/package.json
@@ -1,13 +1,13 @@
{
"name": "@npmcli/run-script",
- "version": "7.0.2",
+ "version": "7.0.3",
"description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
"author": "GitHub Inc.",
"license": "ISC",
"scripts": {
"test": "tap",
"eslint": "eslint",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"lintfix": "npm run lint -- --fix",
"postlint": "template-oss-check",
"snap": "tap",
@@ -16,7 +16,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.19.0",
+ "@npmcli/template-oss": "4.21.3",
"require-inject": "^1.4.4",
"tap": "^16.0.1"
},
@@ -41,7 +41,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.19.0",
+ "version": "4.21.3",
"publish": "true"
},
"tap": {
diff --git a/deps/npm/node_modules/abort-controller/LICENSE b/deps/npm/node_modules/abort-controller/LICENSE
deleted file mode 100644
index c914149a6f845c..00000000000000
--- a/deps/npm/node_modules/abort-controller/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2017 Toru Nagashima
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/deps/npm/node_modules/abort-controller/browser.js b/deps/npm/node_modules/abort-controller/browser.js
deleted file mode 100644
index b0c5ec37d9b76c..00000000000000
--- a/deps/npm/node_modules/abort-controller/browser.js
+++ /dev/null
@@ -1,13 +0,0 @@
-/*globals self, window */
-"use strict"
-
-/*eslint-disable @mysticatea/prettier */
-const { AbortController, AbortSignal } =
- typeof self !== "undefined" ? self :
- typeof window !== "undefined" ? window :
- /* otherwise */ undefined
-/*eslint-enable @mysticatea/prettier */
-
-module.exports = AbortController
-module.exports.AbortSignal = AbortSignal
-module.exports.default = AbortController
diff --git a/deps/npm/node_modules/abort-controller/browser.mjs b/deps/npm/node_modules/abort-controller/browser.mjs
deleted file mode 100644
index a8f321afed6755..00000000000000
--- a/deps/npm/node_modules/abort-controller/browser.mjs
+++ /dev/null
@@ -1,11 +0,0 @@
-/*globals self, window */
-
-/*eslint-disable @mysticatea/prettier */
-const { AbortController, AbortSignal } =
- typeof self !== "undefined" ? self :
- typeof window !== "undefined" ? window :
- /* otherwise */ undefined
-/*eslint-enable @mysticatea/prettier */
-
-export default AbortController
-export { AbortController, AbortSignal }
diff --git a/deps/npm/node_modules/abort-controller/dist/abort-controller.js b/deps/npm/node_modules/abort-controller/dist/abort-controller.js
deleted file mode 100644
index 49af73955859f7..00000000000000
--- a/deps/npm/node_modules/abort-controller/dist/abort-controller.js
+++ /dev/null
@@ -1,127 +0,0 @@
-/**
- * @author Toru Nagashima
- * See LICENSE file in root directory for full license.
- */
-'use strict';
-
-Object.defineProperty(exports, '__esModule', { value: true });
-
-var eventTargetShim = require('event-target-shim');
-
-/**
- * The signal class.
- * @see https://dom.spec.whatwg.org/#abortsignal
- */
-class AbortSignal extends eventTargetShim.EventTarget {
- /**
- * AbortSignal cannot be constructed directly.
- */
- constructor() {
- super();
- throw new TypeError("AbortSignal cannot be constructed directly");
- }
- /**
- * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.
- */
- get aborted() {
- const aborted = abortedFlags.get(this);
- if (typeof aborted !== "boolean") {
- throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`);
- }
- return aborted;
- }
-}
-eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort");
-/**
- * Create an AbortSignal object.
- */
-function createAbortSignal() {
- const signal = Object.create(AbortSignal.prototype);
- eventTargetShim.EventTarget.call(signal);
- abortedFlags.set(signal, false);
- return signal;
-}
-/**
- * Abort a given signal.
- */
-function abortSignal(signal) {
- if (abortedFlags.get(signal) !== false) {
- return;
- }
- abortedFlags.set(signal, true);
- signal.dispatchEvent({ type: "abort" });
-}
-/**
- * Aborted flag for each instances.
- */
-const abortedFlags = new WeakMap();
-// Properties should be enumerable.
-Object.defineProperties(AbortSignal.prototype, {
- aborted: { enumerable: true },
-});
-// `toString()` should return `"[object AbortSignal]"`
-if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") {
- Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {
- configurable: true,
- value: "AbortSignal",
- });
-}
-
-/**
- * The AbortController.
- * @see https://dom.spec.whatwg.org/#abortcontroller
- */
-class AbortController {
- /**
- * Initialize this controller.
- */
- constructor() {
- signals.set(this, createAbortSignal());
- }
- /**
- * Returns the `AbortSignal` object associated with this object.
- */
- get signal() {
- return getSignal(this);
- }
- /**
- * Abort and signal to any observers that the associated activity is to be aborted.
- */
- abort() {
- abortSignal(getSignal(this));
- }
-}
-/**
- * Associated signals.
- */
-const signals = new WeakMap();
-/**
- * Get the associated signal of a given controller.
- */
-function getSignal(controller) {
- const signal = signals.get(controller);
- if (signal == null) {
- throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`);
- }
- return signal;
-}
-// Properties should be enumerable.
-Object.defineProperties(AbortController.prototype, {
- signal: { enumerable: true },
- abort: { enumerable: true },
-});
-if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") {
- Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {
- configurable: true,
- value: "AbortController",
- });
-}
-
-exports.AbortController = AbortController;
-exports.AbortSignal = AbortSignal;
-exports.default = AbortController;
-
-module.exports = AbortController
-module.exports.AbortController = module.exports["default"] = AbortController
-module.exports.AbortSignal = AbortSignal
-//# sourceMappingURL=abort-controller.js.map
diff --git a/deps/npm/node_modules/abort-controller/dist/abort-controller.mjs b/deps/npm/node_modules/abort-controller/dist/abort-controller.mjs
deleted file mode 100644
index 88ba22d5574edc..00000000000000
--- a/deps/npm/node_modules/abort-controller/dist/abort-controller.mjs
+++ /dev/null
@@ -1,118 +0,0 @@
-/**
- * @author Toru Nagashima
- * See LICENSE file in root directory for full license.
- */
-import { EventTarget, defineEventAttribute } from 'event-target-shim';
-
-/**
- * The signal class.
- * @see https://dom.spec.whatwg.org/#abortsignal
- */
-class AbortSignal extends EventTarget {
- /**
- * AbortSignal cannot be constructed directly.
- */
- constructor() {
- super();
- throw new TypeError("AbortSignal cannot be constructed directly");
- }
- /**
- * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.
- */
- get aborted() {
- const aborted = abortedFlags.get(this);
- if (typeof aborted !== "boolean") {
- throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`);
- }
- return aborted;
- }
-}
-defineEventAttribute(AbortSignal.prototype, "abort");
-/**
- * Create an AbortSignal object.
- */
-function createAbortSignal() {
- const signal = Object.create(AbortSignal.prototype);
- EventTarget.call(signal);
- abortedFlags.set(signal, false);
- return signal;
-}
-/**
- * Abort a given signal.
- */
-function abortSignal(signal) {
- if (abortedFlags.get(signal) !== false) {
- return;
- }
- abortedFlags.set(signal, true);
- signal.dispatchEvent({ type: "abort" });
-}
-/**
- * Aborted flag for each instances.
- */
-const abortedFlags = new WeakMap();
-// Properties should be enumerable.
-Object.defineProperties(AbortSignal.prototype, {
- aborted: { enumerable: true },
-});
-// `toString()` should return `"[object AbortSignal]"`
-if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") {
- Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {
- configurable: true,
- value: "AbortSignal",
- });
-}
-
-/**
- * The AbortController.
- * @see https://dom.spec.whatwg.org/#abortcontroller
- */
-class AbortController {
- /**
- * Initialize this controller.
- */
- constructor() {
- signals.set(this, createAbortSignal());
- }
- /**
- * Returns the `AbortSignal` object associated with this object.
- */
- get signal() {
- return getSignal(this);
- }
- /**
- * Abort and signal to any observers that the associated activity is to be aborted.
- */
- abort() {
- abortSignal(getSignal(this));
- }
-}
-/**
- * Associated signals.
- */
-const signals = new WeakMap();
-/**
- * Get the associated signal of a given controller.
- */
-function getSignal(controller) {
- const signal = signals.get(controller);
- if (signal == null) {
- throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`);
- }
- return signal;
-}
-// Properties should be enumerable.
-Object.defineProperties(AbortController.prototype, {
- signal: { enumerable: true },
- abort: { enumerable: true },
-});
-if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") {
- Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {
- configurable: true,
- value: "AbortController",
- });
-}
-
-export default AbortController;
-export { AbortController, AbortSignal };
-//# sourceMappingURL=abort-controller.mjs.map
diff --git a/deps/npm/node_modules/abort-controller/dist/abort-controller.umd.js b/deps/npm/node_modules/abort-controller/dist/abort-controller.umd.js
deleted file mode 100644
index f643cfd6b67110..00000000000000
--- a/deps/npm/node_modules/abort-controller/dist/abort-controller.umd.js
+++ /dev/null
@@ -1,5 +0,0 @@
-/**
- * @author Toru Nagashima
- * See LICENSE file in root directory for full license.
- */(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.AbortControllerShim={}))})(this,function(a){'use strict';function b(a){return b="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},b(a)}function c(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")}function d(a,b){for(var c,d=0;d=6.5"
- },
- "dependencies": {
- "event-target-shim": "^5.0.0"
- },
- "browser": "./browser.js",
- "devDependencies": {
- "@babel/core": "^7.2.2",
- "@babel/plugin-transform-modules-commonjs": "^7.2.0",
- "@babel/preset-env": "^7.3.0",
- "@babel/register": "^7.0.0",
- "@mysticatea/eslint-plugin": "^8.0.1",
- "@mysticatea/spy": "^0.1.2",
- "@types/mocha": "^5.2.5",
- "@types/node": "^10.12.18",
- "assert": "^1.4.1",
- "codecov": "^3.1.0",
- "dts-bundle-generator": "^2.0.0",
- "eslint": "^5.12.1",
- "karma": "^3.1.4",
- "karma-chrome-launcher": "^2.2.0",
- "karma-coverage": "^1.1.2",
- "karma-firefox-launcher": "^1.1.0",
- "karma-growl-reporter": "^1.0.0",
- "karma-ie-launcher": "^1.0.0",
- "karma-mocha": "^1.3.0",
- "karma-rollup-preprocessor": "^7.0.0-rc.2",
- "mocha": "^5.2.0",
- "npm-run-all": "^4.1.5",
- "nyc": "^13.1.0",
- "opener": "^1.5.1",
- "rimraf": "^2.6.3",
- "rollup": "^1.1.2",
- "rollup-plugin-babel": "^4.3.2",
- "rollup-plugin-babel-minify": "^7.0.0",
- "rollup-plugin-commonjs": "^9.2.0",
- "rollup-plugin-node-resolve": "^4.0.0",
- "rollup-plugin-sourcemaps": "^0.4.2",
- "rollup-plugin-typescript": "^1.0.0",
- "rollup-watch": "^4.3.1",
- "ts-node": "^8.0.1",
- "type-tester": "^1.0.0",
- "typescript": "^3.2.4"
- },
- "scripts": {
- "preversion": "npm test",
- "version": "npm run -s build && git add dist/*",
- "postversion": "git push && git push --tags",
- "clean": "rimraf .nyc_output coverage",
- "coverage": "opener coverage/lcov-report/index.html",
- "lint": "eslint . --ext .ts",
- "build": "run-s -s build:*",
- "build:rollup": "rollup -c",
- "build:dts": "dts-bundle-generator -o dist/abort-controller.d.ts src/abort-controller.ts && ts-node scripts/fix-dts",
- "test": "run-s -s lint test:*",
- "test:mocha": "nyc mocha test/*.ts",
- "test:karma": "karma start --single-run",
- "watch": "run-p -s watch:*",
- "watch:mocha": "mocha test/*.ts --require ts-node/register --watch-extensions ts --watch --growl",
- "watch:karma": "karma start --watch",
- "codecov": "codecov"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/mysticatea/abort-controller.git"
- },
- "keywords": [
- "w3c",
- "whatwg",
- "event",
- "events",
- "abort",
- "cancel",
- "abortcontroller",
- "abortsignal",
- "controller",
- "signal",
- "shim"
- ],
- "author": "Toru Nagashima (https://github.com/mysticatea)",
- "license": "MIT",
- "bugs": {
- "url": "https://github.com/mysticatea/abort-controller/issues"
- },
- "homepage": "https://github.com/mysticatea/abort-controller#readme"
-}
diff --git a/deps/npm/node_modules/abort-controller/polyfill.js b/deps/npm/node_modules/abort-controller/polyfill.js
deleted file mode 100644
index 3ca892330b1e51..00000000000000
--- a/deps/npm/node_modules/abort-controller/polyfill.js
+++ /dev/null
@@ -1,21 +0,0 @@
-/*globals require, self, window */
-"use strict"
-
-const ac = require("./dist/abort-controller")
-
-/*eslint-disable @mysticatea/prettier */
-const g =
- typeof self !== "undefined" ? self :
- typeof window !== "undefined" ? window :
- typeof global !== "undefined" ? global :
- /* otherwise */ undefined
-/*eslint-enable @mysticatea/prettier */
-
-if (g) {
- if (typeof g.AbortController === "undefined") {
- g.AbortController = ac.AbortController
- }
- if (typeof g.AbortSignal === "undefined") {
- g.AbortSignal = ac.AbortSignal
- }
-}
diff --git a/deps/npm/node_modules/abort-controller/polyfill.mjs b/deps/npm/node_modules/abort-controller/polyfill.mjs
deleted file mode 100644
index 0602a64dddfd2f..00000000000000
--- a/deps/npm/node_modules/abort-controller/polyfill.mjs
+++ /dev/null
@@ -1,19 +0,0 @@
-/*globals self, window */
-import * as ac from "./dist/abort-controller"
-
-/*eslint-disable @mysticatea/prettier */
-const g =
- typeof self !== "undefined" ? self :
- typeof window !== "undefined" ? window :
- typeof global !== "undefined" ? global :
- /* otherwise */ undefined
-/*eslint-enable @mysticatea/prettier */
-
-if (g) {
- if (typeof g.AbortController === "undefined") {
- g.AbortController = ac.AbortController
- }
- if (typeof g.AbortSignal === "undefined") {
- g.AbortSignal = ac.AbortSignal
- }
-}
diff --git a/deps/npm/node_modules/are-we-there-yet/lib/tracker-stream.js b/deps/npm/node_modules/are-we-there-yet/lib/tracker-stream.js
index 4b111b6bae8a81..75e44df309150f 100644
--- a/deps/npm/node_modules/are-we-there-yet/lib/tracker-stream.js
+++ b/deps/npm/node_modules/are-we-there-yet/lib/tracker-stream.js
@@ -1,6 +1,5 @@
'use strict'
-const stream = require('readable-stream')
-const delegate = require('delegates')
+const stream = require('stream')
const Tracker = require('./tracker.js')
class TrackerStream extends stream.Transform {
@@ -9,7 +8,11 @@ class TrackerStream extends stream.Transform {
this.tracker = new Tracker(name, size)
this.name = name
this.id = this.tracker.id
- this.tracker.on('change', delegateChange(this))
+ this.tracker.on('change', this.trackerChange.bind(this))
+ }
+
+ trackerChange (name, completion) {
+ this.emit('change', name, completion, this)
}
_transform (data, encoding, cb) {
@@ -22,17 +25,18 @@ class TrackerStream extends stream.Transform {
this.tracker.finish()
cb()
}
-}
-function delegateChange (trackerStream) {
- return function (name, completion, tracker) {
- trackerStream.emit('change', name, completion, trackerStream)
+ completed () {
+ return this.tracker.completed()
}
-}
-delegate(TrackerStream.prototype, 'tracker')
- .method('completed')
- .method('addWork')
- .method('finish')
+ addWork (work) {
+ return this.tracker.addWork(work)
+ }
+
+ finish () {
+ return this.tracker.finish()
+ }
+}
module.exports = TrackerStream
diff --git a/deps/npm/node_modules/are-we-there-yet/package.json b/deps/npm/node_modules/are-we-there-yet/package.json
index e238c6581df667..f072a21abb444b 100644
--- a/deps/npm/node_modules/are-we-there-yet/package.json
+++ b/deps/npm/node_modules/are-we-there-yet/package.json
@@ -1,11 +1,11 @@
{
"name": "are-we-there-yet",
- "version": "4.0.1",
+ "version": "4.0.2",
"description": "Keep track of the overall completion of many disparate processes",
"main": "lib/index.js",
"scripts": {
"test": "tap",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"lintfix": "npm run lint -- --fix",
"posttest": "npm run lint",
"postsnap": "npm run lintfix --",
@@ -25,13 +25,9 @@
"homepage": "https://github.com/npm/are-we-there-yet",
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.17.0",
+ "@npmcli/template-oss": "4.21.3",
"tap": "^16.0.1"
},
- "dependencies": {
- "delegates": "^1.0.0",
- "readable-stream": "^4.1.0"
- },
"files": [
"bin/",
"lib/"
@@ -51,7 +47,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.17.0",
+ "version": "4.21.3",
"publish": true
}
}
diff --git a/deps/npm/node_modules/base64-js/LICENSE b/deps/npm/node_modules/base64-js/LICENSE
deleted file mode 100644
index 6d52b8acfbe771..00000000000000
--- a/deps/npm/node_modules/base64-js/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Jameson Little
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/deps/npm/node_modules/base64-js/base64js.min.js b/deps/npm/node_modules/base64-js/base64js.min.js
deleted file mode 100644
index 908ac83fd12400..00000000000000
--- a/deps/npm/node_modules/base64-js/base64js.min.js
+++ /dev/null
@@ -1 +0,0 @@
-(function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"==typeof window?"undefined"==typeof global?"undefined"==typeof self?this:self:global:window,b.base64js=a()}})(function(){return function(){function b(d,e,g){function a(j,i){if(!e[j]){if(!d[j]){var f="function"==typeof require&&require;if(!i&&f)return f(j,!0);if(h)return h(j,!0);var c=new Error("Cannot find module '"+j+"'");throw c.code="MODULE_NOT_FOUND",c}var k=e[j]={exports:{}};d[j][0].call(k.exports,function(b){var c=d[j][1][b];return a(c||b)},k,k.exports,b,d,e,g)}return e[j].exports}for(var h="function"==typeof require&&require,c=0;c>16,j[k++]=255&b>>8,j[k++]=255&b;return 2===h&&(b=l[a.charCodeAt(c)]<<2|l[a.charCodeAt(c+1)]>>4,j[k++]=255&b),1===h&&(b=l[a.charCodeAt(c)]<<10|l[a.charCodeAt(c+1)]<<4|l[a.charCodeAt(c+2)]>>2,j[k++]=255&b>>8,j[k++]=255&b),j}function g(a){return k[63&a>>18]+k[63&a>>12]+k[63&a>>6]+k[63&a]}function h(a,b,c){for(var d,e=[],f=b;fj?j:g+f));return 1===d?(b=a[c-1],e.push(k[b>>2]+k[63&b<<4]+"==")):2===d&&(b=(a[c-2]<<8)+a[c-1],e.push(k[b>>10]+k[63&b>>4]+k[63&b<<2]+"=")),e.join("")}c.byteLength=function(a){var b=d(a),c=b[0],e=b[1];return 3*(c+e)/4-e},c.toByteArray=f,c.fromByteArray=j;for(var k=[],l=[],m="undefined"==typeof Uint8Array?Array:Uint8Array,n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",o=0,p=n.length;o
0) {
- throw new Error('Invalid string. Length must be a multiple of 4')
- }
-
- // Trim off extra bytes after placeholder bytes are found
- // See: https://github.com/beatgammit/base64-js/issues/42
- var validLen = b64.indexOf('=')
- if (validLen === -1) validLen = len
-
- var placeHoldersLen = validLen === len
- ? 0
- : 4 - (validLen % 4)
-
- return [validLen, placeHoldersLen]
-}
-
-// base64 is 4/3 + up to two characters of the original data
-function byteLength (b64) {
- var lens = getLens(b64)
- var validLen = lens[0]
- var placeHoldersLen = lens[1]
- return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
-}
-
-function _byteLength (b64, validLen, placeHoldersLen) {
- return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
-}
-
-function toByteArray (b64) {
- var tmp
- var lens = getLens(b64)
- var validLen = lens[0]
- var placeHoldersLen = lens[1]
-
- var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen))
-
- var curByte = 0
-
- // if there are placeholders, only get up to the last complete 4 chars
- var len = placeHoldersLen > 0
- ? validLen - 4
- : validLen
-
- var i
- for (i = 0; i < len; i += 4) {
- tmp =
- (revLookup[b64.charCodeAt(i)] << 18) |
- (revLookup[b64.charCodeAt(i + 1)] << 12) |
- (revLookup[b64.charCodeAt(i + 2)] << 6) |
- revLookup[b64.charCodeAt(i + 3)]
- arr[curByte++] = (tmp >> 16) & 0xFF
- arr[curByte++] = (tmp >> 8) & 0xFF
- arr[curByte++] = tmp & 0xFF
- }
-
- if (placeHoldersLen === 2) {
- tmp =
- (revLookup[b64.charCodeAt(i)] << 2) |
- (revLookup[b64.charCodeAt(i + 1)] >> 4)
- arr[curByte++] = tmp & 0xFF
- }
-
- if (placeHoldersLen === 1) {
- tmp =
- (revLookup[b64.charCodeAt(i)] << 10) |
- (revLookup[b64.charCodeAt(i + 1)] << 4) |
- (revLookup[b64.charCodeAt(i + 2)] >> 2)
- arr[curByte++] = (tmp >> 8) & 0xFF
- arr[curByte++] = tmp & 0xFF
- }
-
- return arr
-}
-
-function tripletToBase64 (num) {
- return lookup[num >> 18 & 0x3F] +
- lookup[num >> 12 & 0x3F] +
- lookup[num >> 6 & 0x3F] +
- lookup[num & 0x3F]
-}
-
-function encodeChunk (uint8, start, end) {
- var tmp
- var output = []
- for (var i = start; i < end; i += 3) {
- tmp =
- ((uint8[i] << 16) & 0xFF0000) +
- ((uint8[i + 1] << 8) & 0xFF00) +
- (uint8[i + 2] & 0xFF)
- output.push(tripletToBase64(tmp))
- }
- return output.join('')
-}
-
-function fromByteArray (uint8) {
- var tmp
- var len = uint8.length
- var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes
- var parts = []
- var maxChunkLength = 16383 // must be multiple of 3
-
- // go through the array every three bytes, we'll deal with trailing stuff later
- for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
- parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))
- }
-
- // pad the end with zeros, but make sure to not forget the extra bytes
- if (extraBytes === 1) {
- tmp = uint8[len - 1]
- parts.push(
- lookup[tmp >> 2] +
- lookup[(tmp << 4) & 0x3F] +
- '=='
- )
- } else if (extraBytes === 2) {
- tmp = (uint8[len - 2] << 8) + uint8[len - 1]
- parts.push(
- lookup[tmp >> 10] +
- lookup[(tmp >> 4) & 0x3F] +
- lookup[(tmp << 2) & 0x3F] +
- '='
- )
- }
-
- return parts.join('')
-}
diff --git a/deps/npm/node_modules/base64-js/package.json b/deps/npm/node_modules/base64-js/package.json
deleted file mode 100644
index c3972e39f2be5d..00000000000000
--- a/deps/npm/node_modules/base64-js/package.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
- "name": "base64-js",
- "description": "Base64 encoding/decoding in pure JS",
- "version": "1.5.1",
- "author": "T. Jameson Little ",
- "typings": "index.d.ts",
- "bugs": {
- "url": "https://github.com/beatgammit/base64-js/issues"
- },
- "devDependencies": {
- "babel-minify": "^0.5.1",
- "benchmark": "^2.1.4",
- "browserify": "^16.3.0",
- "standard": "*",
- "tape": "4.x"
- },
- "homepage": "https://github.com/beatgammit/base64-js",
- "keywords": [
- "base64"
- ],
- "license": "MIT",
- "main": "index.js",
- "repository": {
- "type": "git",
- "url": "git://github.com/beatgammit/base64-js.git"
- },
- "scripts": {
- "build": "browserify -s base64js -r ./ | minify > base64js.min.js",
- "lint": "standard",
- "test": "npm run lint && npm run unit",
- "unit": "tape test/*.js"
- },
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ]
-}
diff --git a/deps/npm/node_modules/buffer/AUTHORS.md b/deps/npm/node_modules/buffer/AUTHORS.md
deleted file mode 100644
index 468aa1908c3796..00000000000000
--- a/deps/npm/node_modules/buffer/AUTHORS.md
+++ /dev/null
@@ -1,73 +0,0 @@
-# Authors
-
-#### Ordered by first contribution.
-
-- Romain Beauxis (toots@rastageeks.org)
-- Tobias Koppers (tobias.koppers@googlemail.com)
-- Janus (ysangkok@gmail.com)
-- Rainer Dreyer (rdrey1@gmail.com)
-- Tõnis Tiigi (tonistiigi@gmail.com)
-- James Halliday (mail@substack.net)
-- Michael Williamson (mike@zwobble.org)
-- elliottcable (github@elliottcable.name)
-- rafael (rvalle@livelens.net)
-- Andrew Kelley (superjoe30@gmail.com)
-- Andreas Madsen (amwebdk@gmail.com)
-- Mike Brevoort (mike.brevoort@pearson.com)
-- Brian White (mscdex@mscdex.net)
-- Feross Aboukhadijeh (feross@feross.org)
-- Ruben Verborgh (ruben@verborgh.org)
-- eliang (eliang.cs@gmail.com)
-- Jesse Tane (jesse.tane@gmail.com)
-- Alfonso Boza (alfonso@cloud.com)
-- Mathias Buus (mathiasbuus@gmail.com)
-- Devon Govett (devongovett@gmail.com)
-- Daniel Cousens (github@dcousens.com)
-- Joseph Dykstra (josephdykstra@gmail.com)
-- Parsha Pourkhomami (parshap+git@gmail.com)
-- Damjan Košir (damjan.kosir@gmail.com)
-- daverayment (dave.rayment@gmail.com)
-- kawanet (u-suke@kawa.net)
-- Linus Unnebäck (linus@folkdatorn.se)
-- Nolan Lawson (nolan.lawson@gmail.com)
-- Calvin Metcalf (calvin.metcalf@gmail.com)
-- Koki Takahashi (hakatasiloving@gmail.com)
-- Guy Bedford (guybedford@gmail.com)
-- Jan Schär (jscissr@gmail.com)
-- RaulTsc (tomescu.raul@gmail.com)
-- Matthieu Monsch (monsch@alum.mit.edu)
-- Dan Ehrenberg (littledan@chromium.org)
-- Kirill Fomichev (fanatid@ya.ru)
-- Yusuke Kawasaki (u-suke@kawa.net)
-- DC (dcposch@dcpos.ch)
-- John-David Dalton (john.david.dalton@gmail.com)
-- adventure-yunfei (adventure030@gmail.com)
-- Emil Bay (github@tixz.dk)
-- Sam Sudar (sudar.sam@gmail.com)
-- Volker Mische (volker.mische@gmail.com)
-- David Walton (support@geekstocks.com)
-- Сковорода Никита Андреевич (chalkerx@gmail.com)
-- greenkeeper[bot] (greenkeeper[bot]@users.noreply.github.com)
-- ukstv (sergey.ukustov@machinomy.com)
-- Renée Kooi (renee@kooi.me)
-- ranbochen (ranbochen@qq.com)
-- Vladimir Borovik (bobahbdb@gmail.com)
-- greenkeeper[bot] (23040076+greenkeeper[bot]@users.noreply.github.com)
-- kumavis (aaron@kumavis.me)
-- Sergey Ukustov (sergey.ukustov@machinomy.com)
-- Fei Liu (liu.feiwood@gmail.com)
-- Blaine Bublitz (blaine.bublitz@gmail.com)
-- clement (clement@seald.io)
-- Koushik Dutta (koushd@gmail.com)
-- Jordan Harband (ljharb@gmail.com)
-- Niklas Mischkulnig (mischnic@users.noreply.github.com)
-- Nikolai Vavilov (vvnicholas@gmail.com)
-- Fedor Nezhivoi (gyzerok@users.noreply.github.com)
-- shuse2 (shus.toda@gmail.com)
-- Peter Newman (peternewman@users.noreply.github.com)
-- mathmakgakpak (44949126+mathmakgakpak@users.noreply.github.com)
-- jkkang (jkkang@smartauth.kr)
-- Deklan Webster (deklanw@gmail.com)
-- Martin Heidegger (martin.heidegger@gmail.com)
-
-#### Generated by bin/update-authors.sh.
diff --git a/deps/npm/node_modules/buffer/LICENSE b/deps/npm/node_modules/buffer/LICENSE
deleted file mode 100644
index d6bf75dcf1f6f7..00000000000000
--- a/deps/npm/node_modules/buffer/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Feross Aboukhadijeh, and other contributors.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/deps/npm/node_modules/buffer/index.js b/deps/npm/node_modules/buffer/index.js
deleted file mode 100644
index 7a0e9c2a123bc9..00000000000000
--- a/deps/npm/node_modules/buffer/index.js
+++ /dev/null
@@ -1,2106 +0,0 @@
-/*!
- * The buffer module from node.js, for the browser.
- *
- * @author Feross Aboukhadijeh
- * @license MIT
- */
-/* eslint-disable no-proto */
-
-'use strict'
-
-const base64 = require('base64-js')
-const ieee754 = require('ieee754')
-const customInspectSymbol =
- (typeof Symbol === 'function' && typeof Symbol['for'] === 'function') // eslint-disable-line dot-notation
- ? Symbol['for']('nodejs.util.inspect.custom') // eslint-disable-line dot-notation
- : null
-
-exports.Buffer = Buffer
-exports.SlowBuffer = SlowBuffer
-exports.INSPECT_MAX_BYTES = 50
-
-const K_MAX_LENGTH = 0x7fffffff
-exports.kMaxLength = K_MAX_LENGTH
-
-/**
- * If `Buffer.TYPED_ARRAY_SUPPORT`:
- * === true Use Uint8Array implementation (fastest)
- * === false Print warning and recommend using `buffer` v4.x which has an Object
- * implementation (most compatible, even IE6)
- *
- * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,
- * Opera 11.6+, iOS 4.2+.
- *
- * We report that the browser does not support typed arrays if the are not subclassable
- * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array`
- * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support
- * for __proto__ and has a buggy typed array implementation.
- */
-Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport()
-
-if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' &&
- typeof console.error === 'function') {
- console.error(
- 'This browser lacks typed array (Uint8Array) support which is required by ' +
- '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.'
- )
-}
-
-function typedArraySupport () {
- // Can typed array instances can be augmented?
- try {
- const arr = new Uint8Array(1)
- const proto = { foo: function () { return 42 } }
- Object.setPrototypeOf(proto, Uint8Array.prototype)
- Object.setPrototypeOf(arr, proto)
- return arr.foo() === 42
- } catch (e) {
- return false
- }
-}
-
-Object.defineProperty(Buffer.prototype, 'parent', {
- enumerable: true,
- get: function () {
- if (!Buffer.isBuffer(this)) return undefined
- return this.buffer
- }
-})
-
-Object.defineProperty(Buffer.prototype, 'offset', {
- enumerable: true,
- get: function () {
- if (!Buffer.isBuffer(this)) return undefined
- return this.byteOffset
- }
-})
-
-function createBuffer (length) {
- if (length > K_MAX_LENGTH) {
- throw new RangeError('The value "' + length + '" is invalid for option "size"')
- }
- // Return an augmented `Uint8Array` instance
- const buf = new Uint8Array(length)
- Object.setPrototypeOf(buf, Buffer.prototype)
- return buf
-}
-
-/**
- * The Buffer constructor returns instances of `Uint8Array` that have their
- * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of
- * `Uint8Array`, so the returned instances will have all the node `Buffer` methods
- * and the `Uint8Array` methods. Square bracket notation works as expected -- it
- * returns a single octet.
- *
- * The `Uint8Array` prototype remains unmodified.
- */
-
-function Buffer (arg, encodingOrOffset, length) {
- // Common case.
- if (typeof arg === 'number') {
- if (typeof encodingOrOffset === 'string') {
- throw new TypeError(
- 'The "string" argument must be of type string. Received type number'
- )
- }
- return allocUnsafe(arg)
- }
- return from(arg, encodingOrOffset, length)
-}
-
-Buffer.poolSize = 8192 // not used by this implementation
-
-function from (value, encodingOrOffset, length) {
- if (typeof value === 'string') {
- return fromString(value, encodingOrOffset)
- }
-
- if (ArrayBuffer.isView(value)) {
- return fromArrayView(value)
- }
-
- if (value == null) {
- throw new TypeError(
- 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
- 'or Array-like Object. Received type ' + (typeof value)
- )
- }
-
- if (isInstance(value, ArrayBuffer) ||
- (value && isInstance(value.buffer, ArrayBuffer))) {
- return fromArrayBuffer(value, encodingOrOffset, length)
- }
-
- if (typeof SharedArrayBuffer !== 'undefined' &&
- (isInstance(value, SharedArrayBuffer) ||
- (value && isInstance(value.buffer, SharedArrayBuffer)))) {
- return fromArrayBuffer(value, encodingOrOffset, length)
- }
-
- if (typeof value === 'number') {
- throw new TypeError(
- 'The "value" argument must not be of type number. Received type number'
- )
- }
-
- const valueOf = value.valueOf && value.valueOf()
- if (valueOf != null && valueOf !== value) {
- return Buffer.from(valueOf, encodingOrOffset, length)
- }
-
- const b = fromObject(value)
- if (b) return b
-
- if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null &&
- typeof value[Symbol.toPrimitive] === 'function') {
- return Buffer.from(value[Symbol.toPrimitive]('string'), encodingOrOffset, length)
- }
-
- throw new TypeError(
- 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
- 'or Array-like Object. Received type ' + (typeof value)
- )
-}
-
-/**
- * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError
- * if value is a number.
- * Buffer.from(str[, encoding])
- * Buffer.from(array)
- * Buffer.from(buffer)
- * Buffer.from(arrayBuffer[, byteOffset[, length]])
- **/
-Buffer.from = function (value, encodingOrOffset, length) {
- return from(value, encodingOrOffset, length)
-}
-
-// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug:
-// https://github.com/feross/buffer/pull/148
-Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype)
-Object.setPrototypeOf(Buffer, Uint8Array)
-
-function assertSize (size) {
- if (typeof size !== 'number') {
- throw new TypeError('"size" argument must be of type number')
- } else if (size < 0) {
- throw new RangeError('The value "' + size + '" is invalid for option "size"')
- }
-}
-
-function alloc (size, fill, encoding) {
- assertSize(size)
- if (size <= 0) {
- return createBuffer(size)
- }
- if (fill !== undefined) {
- // Only pay attention to encoding if it's a string. This
- // prevents accidentally sending in a number that would
- // be interpreted as a start offset.
- return typeof encoding === 'string'
- ? createBuffer(size).fill(fill, encoding)
- : createBuffer(size).fill(fill)
- }
- return createBuffer(size)
-}
-
-/**
- * Creates a new filled Buffer instance.
- * alloc(size[, fill[, encoding]])
- **/
-Buffer.alloc = function (size, fill, encoding) {
- return alloc(size, fill, encoding)
-}
-
-function allocUnsafe (size) {
- assertSize(size)
- return createBuffer(size < 0 ? 0 : checked(size) | 0)
-}
-
-/**
- * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance.
- * */
-Buffer.allocUnsafe = function (size) {
- return allocUnsafe(size)
-}
-/**
- * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
- */
-Buffer.allocUnsafeSlow = function (size) {
- return allocUnsafe(size)
-}
-
-function fromString (string, encoding) {
- if (typeof encoding !== 'string' || encoding === '') {
- encoding = 'utf8'
- }
-
- if (!Buffer.isEncoding(encoding)) {
- throw new TypeError('Unknown encoding: ' + encoding)
- }
-
- const length = byteLength(string, encoding) | 0
- let buf = createBuffer(length)
-
- const actual = buf.write(string, encoding)
-
- if (actual !== length) {
- // Writing a hex string, for example, that contains invalid characters will
- // cause everything after the first invalid character to be ignored. (e.g.
- // 'abxxcd' will be treated as 'ab')
- buf = buf.slice(0, actual)
- }
-
- return buf
-}
-
-function fromArrayLike (array) {
- const length = array.length < 0 ? 0 : checked(array.length) | 0
- const buf = createBuffer(length)
- for (let i = 0; i < length; i += 1) {
- buf[i] = array[i] & 255
- }
- return buf
-}
-
-function fromArrayView (arrayView) {
- if (isInstance(arrayView, Uint8Array)) {
- const copy = new Uint8Array(arrayView)
- return fromArrayBuffer(copy.buffer, copy.byteOffset, copy.byteLength)
- }
- return fromArrayLike(arrayView)
-}
-
-function fromArrayBuffer (array, byteOffset, length) {
- if (byteOffset < 0 || array.byteLength < byteOffset) {
- throw new RangeError('"offset" is outside of buffer bounds')
- }
-
- if (array.byteLength < byteOffset + (length || 0)) {
- throw new RangeError('"length" is outside of buffer bounds')
- }
-
- let buf
- if (byteOffset === undefined && length === undefined) {
- buf = new Uint8Array(array)
- } else if (length === undefined) {
- buf = new Uint8Array(array, byteOffset)
- } else {
- buf = new Uint8Array(array, byteOffset, length)
- }
-
- // Return an augmented `Uint8Array` instance
- Object.setPrototypeOf(buf, Buffer.prototype)
-
- return buf
-}
-
-function fromObject (obj) {
- if (Buffer.isBuffer(obj)) {
- const len = checked(obj.length) | 0
- const buf = createBuffer(len)
-
- if (buf.length === 0) {
- return buf
- }
-
- obj.copy(buf, 0, 0, len)
- return buf
- }
-
- if (obj.length !== undefined) {
- if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) {
- return createBuffer(0)
- }
- return fromArrayLike(obj)
- }
-
- if (obj.type === 'Buffer' && Array.isArray(obj.data)) {
- return fromArrayLike(obj.data)
- }
-}
-
-function checked (length) {
- // Note: cannot use `length < K_MAX_LENGTH` here because that fails when
- // length is NaN (which is otherwise coerced to zero.)
- if (length >= K_MAX_LENGTH) {
- throw new RangeError('Attempt to allocate Buffer larger than maximum ' +
- 'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes')
- }
- return length | 0
-}
-
-function SlowBuffer (length) {
- if (+length != length) { // eslint-disable-line eqeqeq
- length = 0
- }
- return Buffer.alloc(+length)
-}
-
-Buffer.isBuffer = function isBuffer (b) {
- return b != null && b._isBuffer === true &&
- b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false
-}
-
-Buffer.compare = function compare (a, b) {
- if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength)
- if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength)
- if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
- throw new TypeError(
- 'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array'
- )
- }
-
- if (a === b) return 0
-
- let x = a.length
- let y = b.length
-
- for (let i = 0, len = Math.min(x, y); i < len; ++i) {
- if (a[i] !== b[i]) {
- x = a[i]
- y = b[i]
- break
- }
- }
-
- if (x < y) return -1
- if (y < x) return 1
- return 0
-}
-
-Buffer.isEncoding = function isEncoding (encoding) {
- switch (String(encoding).toLowerCase()) {
- case 'hex':
- case 'utf8':
- case 'utf-8':
- case 'ascii':
- case 'latin1':
- case 'binary':
- case 'base64':
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return true
- default:
- return false
- }
-}
-
-Buffer.concat = function concat (list, length) {
- if (!Array.isArray(list)) {
- throw new TypeError('"list" argument must be an Array of Buffers')
- }
-
- if (list.length === 0) {
- return Buffer.alloc(0)
- }
-
- let i
- if (length === undefined) {
- length = 0
- for (i = 0; i < list.length; ++i) {
- length += list[i].length
- }
- }
-
- const buffer = Buffer.allocUnsafe(length)
- let pos = 0
- for (i = 0; i < list.length; ++i) {
- let buf = list[i]
- if (isInstance(buf, Uint8Array)) {
- if (pos + buf.length > buffer.length) {
- if (!Buffer.isBuffer(buf)) buf = Buffer.from(buf)
- buf.copy(buffer, pos)
- } else {
- Uint8Array.prototype.set.call(
- buffer,
- buf,
- pos
- )
- }
- } else if (!Buffer.isBuffer(buf)) {
- throw new TypeError('"list" argument must be an Array of Buffers')
- } else {
- buf.copy(buffer, pos)
- }
- pos += buf.length
- }
- return buffer
-}
-
-function byteLength (string, encoding) {
- if (Buffer.isBuffer(string)) {
- return string.length
- }
- if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) {
- return string.byteLength
- }
- if (typeof string !== 'string') {
- throw new TypeError(
- 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' +
- 'Received type ' + typeof string
- )
- }
-
- const len = string.length
- const mustMatch = (arguments.length > 2 && arguments[2] === true)
- if (!mustMatch && len === 0) return 0
-
- // Use a for loop to avoid recursion
- let loweredCase = false
- for (;;) {
- switch (encoding) {
- case 'ascii':
- case 'latin1':
- case 'binary':
- return len
- case 'utf8':
- case 'utf-8':
- return utf8ToBytes(string).length
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return len * 2
- case 'hex':
- return len >>> 1
- case 'base64':
- return base64ToBytes(string).length
- default:
- if (loweredCase) {
- return mustMatch ? -1 : utf8ToBytes(string).length // assume utf8
- }
- encoding = ('' + encoding).toLowerCase()
- loweredCase = true
- }
- }
-}
-Buffer.byteLength = byteLength
-
-function slowToString (encoding, start, end) {
- let loweredCase = false
-
- // No need to verify that "this.length <= MAX_UINT32" since it's a read-only
- // property of a typed array.
-
- // This behaves neither like String nor Uint8Array in that we set start/end
- // to their upper/lower bounds if the value passed is out of range.
- // undefined is handled specially as per ECMA-262 6th Edition,
- // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.
- if (start === undefined || start < 0) {
- start = 0
- }
- // Return early if start > this.length. Done here to prevent potential uint32
- // coercion fail below.
- if (start > this.length) {
- return ''
- }
-
- if (end === undefined || end > this.length) {
- end = this.length
- }
-
- if (end <= 0) {
- return ''
- }
-
- // Force coercion to uint32. This will also coerce falsey/NaN values to 0.
- end >>>= 0
- start >>>= 0
-
- if (end <= start) {
- return ''
- }
-
- if (!encoding) encoding = 'utf8'
-
- while (true) {
- switch (encoding) {
- case 'hex':
- return hexSlice(this, start, end)
-
- case 'utf8':
- case 'utf-8':
- return utf8Slice(this, start, end)
-
- case 'ascii':
- return asciiSlice(this, start, end)
-
- case 'latin1':
- case 'binary':
- return latin1Slice(this, start, end)
-
- case 'base64':
- return base64Slice(this, start, end)
-
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return utf16leSlice(this, start, end)
-
- default:
- if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
- encoding = (encoding + '').toLowerCase()
- loweredCase = true
- }
- }
-}
-
-// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package)
-// to detect a Buffer instance. It's not possible to use `instanceof Buffer`
-// reliably in a browserify context because there could be multiple different
-// copies of the 'buffer' package in use. This method works even for Buffer
-// instances that were created from another copy of the `buffer` package.
-// See: https://github.com/feross/buffer/issues/154
-Buffer.prototype._isBuffer = true
-
-function swap (b, n, m) {
- const i = b[n]
- b[n] = b[m]
- b[m] = i
-}
-
-Buffer.prototype.swap16 = function swap16 () {
- const len = this.length
- if (len % 2 !== 0) {
- throw new RangeError('Buffer size must be a multiple of 16-bits')
- }
- for (let i = 0; i < len; i += 2) {
- swap(this, i, i + 1)
- }
- return this
-}
-
-Buffer.prototype.swap32 = function swap32 () {
- const len = this.length
- if (len % 4 !== 0) {
- throw new RangeError('Buffer size must be a multiple of 32-bits')
- }
- for (let i = 0; i < len; i += 4) {
- swap(this, i, i + 3)
- swap(this, i + 1, i + 2)
- }
- return this
-}
-
-Buffer.prototype.swap64 = function swap64 () {
- const len = this.length
- if (len % 8 !== 0) {
- throw new RangeError('Buffer size must be a multiple of 64-bits')
- }
- for (let i = 0; i < len; i += 8) {
- swap(this, i, i + 7)
- swap(this, i + 1, i + 6)
- swap(this, i + 2, i + 5)
- swap(this, i + 3, i + 4)
- }
- return this
-}
-
-Buffer.prototype.toString = function toString () {
- const length = this.length
- if (length === 0) return ''
- if (arguments.length === 0) return utf8Slice(this, 0, length)
- return slowToString.apply(this, arguments)
-}
-
-Buffer.prototype.toLocaleString = Buffer.prototype.toString
-
-Buffer.prototype.equals = function equals (b) {
- if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')
- if (this === b) return true
- return Buffer.compare(this, b) === 0
-}
-
-Buffer.prototype.inspect = function inspect () {
- let str = ''
- const max = exports.INSPECT_MAX_BYTES
- str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim()
- if (this.length > max) str += ' ... '
- return ''
-}
-if (customInspectSymbol) {
- Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect
-}
-
-Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {
- if (isInstance(target, Uint8Array)) {
- target = Buffer.from(target, target.offset, target.byteLength)
- }
- if (!Buffer.isBuffer(target)) {
- throw new TypeError(
- 'The "target" argument must be one of type Buffer or Uint8Array. ' +
- 'Received type ' + (typeof target)
- )
- }
-
- if (start === undefined) {
- start = 0
- }
- if (end === undefined) {
- end = target ? target.length : 0
- }
- if (thisStart === undefined) {
- thisStart = 0
- }
- if (thisEnd === undefined) {
- thisEnd = this.length
- }
-
- if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {
- throw new RangeError('out of range index')
- }
-
- if (thisStart >= thisEnd && start >= end) {
- return 0
- }
- if (thisStart >= thisEnd) {
- return -1
- }
- if (start >= end) {
- return 1
- }
-
- start >>>= 0
- end >>>= 0
- thisStart >>>= 0
- thisEnd >>>= 0
-
- if (this === target) return 0
-
- let x = thisEnd - thisStart
- let y = end - start
- const len = Math.min(x, y)
-
- const thisCopy = this.slice(thisStart, thisEnd)
- const targetCopy = target.slice(start, end)
-
- for (let i = 0; i < len; ++i) {
- if (thisCopy[i] !== targetCopy[i]) {
- x = thisCopy[i]
- y = targetCopy[i]
- break
- }
- }
-
- if (x < y) return -1
- if (y < x) return 1
- return 0
-}
-
-// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
-// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
-//
-// Arguments:
-// - buffer - a Buffer to search
-// - val - a string, Buffer, or number
-// - byteOffset - an index into `buffer`; will be clamped to an int32
-// - encoding - an optional encoding, relevant is val is a string
-// - dir - true for indexOf, false for lastIndexOf
-function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {
- // Empty buffer means no match
- if (buffer.length === 0) return -1
-
- // Normalize byteOffset
- if (typeof byteOffset === 'string') {
- encoding = byteOffset
- byteOffset = 0
- } else if (byteOffset > 0x7fffffff) {
- byteOffset = 0x7fffffff
- } else if (byteOffset < -0x80000000) {
- byteOffset = -0x80000000
- }
- byteOffset = +byteOffset // Coerce to Number.
- if (numberIsNaN(byteOffset)) {
- // byteOffset: it it's undefined, null, NaN, "foo", etc, search whole buffer
- byteOffset = dir ? 0 : (buffer.length - 1)
- }
-
- // Normalize byteOffset: negative offsets start from the end of the buffer
- if (byteOffset < 0) byteOffset = buffer.length + byteOffset
- if (byteOffset >= buffer.length) {
- if (dir) return -1
- else byteOffset = buffer.length - 1
- } else if (byteOffset < 0) {
- if (dir) byteOffset = 0
- else return -1
- }
-
- // Normalize val
- if (typeof val === 'string') {
- val = Buffer.from(val, encoding)
- }
-
- // Finally, search either indexOf (if dir is true) or lastIndexOf
- if (Buffer.isBuffer(val)) {
- // Special case: looking for empty string/buffer always fails
- if (val.length === 0) {
- return -1
- }
- return arrayIndexOf(buffer, val, byteOffset, encoding, dir)
- } else if (typeof val === 'number') {
- val = val & 0xFF // Search for a byte value [0-255]
- if (typeof Uint8Array.prototype.indexOf === 'function') {
- if (dir) {
- return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset)
- } else {
- return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)
- }
- }
- return arrayIndexOf(buffer, [val], byteOffset, encoding, dir)
- }
-
- throw new TypeError('val must be string, number or Buffer')
-}
-
-function arrayIndexOf (arr, val, byteOffset, encoding, dir) {
- let indexSize = 1
- let arrLength = arr.length
- let valLength = val.length
-
- if (encoding !== undefined) {
- encoding = String(encoding).toLowerCase()
- if (encoding === 'ucs2' || encoding === 'ucs-2' ||
- encoding === 'utf16le' || encoding === 'utf-16le') {
- if (arr.length < 2 || val.length < 2) {
- return -1
- }
- indexSize = 2
- arrLength /= 2
- valLength /= 2
- byteOffset /= 2
- }
- }
-
- function read (buf, i) {
- if (indexSize === 1) {
- return buf[i]
- } else {
- return buf.readUInt16BE(i * indexSize)
- }
- }
-
- let i
- if (dir) {
- let foundIndex = -1
- for (i = byteOffset; i < arrLength; i++) {
- if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
- if (foundIndex === -1) foundIndex = i
- if (i - foundIndex + 1 === valLength) return foundIndex * indexSize
- } else {
- if (foundIndex !== -1) i -= i - foundIndex
- foundIndex = -1
- }
- }
- } else {
- if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength
- for (i = byteOffset; i >= 0; i--) {
- let found = true
- for (let j = 0; j < valLength; j++) {
- if (read(arr, i + j) !== read(val, j)) {
- found = false
- break
- }
- }
- if (found) return i
- }
- }
-
- return -1
-}
-
-Buffer.prototype.includes = function includes (val, byteOffset, encoding) {
- return this.indexOf(val, byteOffset, encoding) !== -1
-}
-
-Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) {
- return bidirectionalIndexOf(this, val, byteOffset, encoding, true)
-}
-
-Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) {
- return bidirectionalIndexOf(this, val, byteOffset, encoding, false)
-}
-
-function hexWrite (buf, string, offset, length) {
- offset = Number(offset) || 0
- const remaining = buf.length - offset
- if (!length) {
- length = remaining
- } else {
- length = Number(length)
- if (length > remaining) {
- length = remaining
- }
- }
-
- const strLen = string.length
-
- if (length > strLen / 2) {
- length = strLen / 2
- }
- let i
- for (i = 0; i < length; ++i) {
- const parsed = parseInt(string.substr(i * 2, 2), 16)
- if (numberIsNaN(parsed)) return i
- buf[offset + i] = parsed
- }
- return i
-}
-
-function utf8Write (buf, string, offset, length) {
- return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)
-}
-
-function asciiWrite (buf, string, offset, length) {
- return blitBuffer(asciiToBytes(string), buf, offset, length)
-}
-
-function base64Write (buf, string, offset, length) {
- return blitBuffer(base64ToBytes(string), buf, offset, length)
-}
-
-function ucs2Write (buf, string, offset, length) {
- return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)
-}
-
-Buffer.prototype.write = function write (string, offset, length, encoding) {
- // Buffer#write(string)
- if (offset === undefined) {
- encoding = 'utf8'
- length = this.length
- offset = 0
- // Buffer#write(string, encoding)
- } else if (length === undefined && typeof offset === 'string') {
- encoding = offset
- length = this.length
- offset = 0
- // Buffer#write(string, offset[, length][, encoding])
- } else if (isFinite(offset)) {
- offset = offset >>> 0
- if (isFinite(length)) {
- length = length >>> 0
- if (encoding === undefined) encoding = 'utf8'
- } else {
- encoding = length
- length = undefined
- }
- } else {
- throw new Error(
- 'Buffer.write(string, encoding, offset[, length]) is no longer supported'
- )
- }
-
- const remaining = this.length - offset
- if (length === undefined || length > remaining) length = remaining
-
- if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) {
- throw new RangeError('Attempt to write outside buffer bounds')
- }
-
- if (!encoding) encoding = 'utf8'
-
- let loweredCase = false
- for (;;) {
- switch (encoding) {
- case 'hex':
- return hexWrite(this, string, offset, length)
-
- case 'utf8':
- case 'utf-8':
- return utf8Write(this, string, offset, length)
-
- case 'ascii':
- case 'latin1':
- case 'binary':
- return asciiWrite(this, string, offset, length)
-
- case 'base64':
- // Warning: maxLength not taken into account in base64Write
- return base64Write(this, string, offset, length)
-
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return ucs2Write(this, string, offset, length)
-
- default:
- if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
- encoding = ('' + encoding).toLowerCase()
- loweredCase = true
- }
- }
-}
-
-Buffer.prototype.toJSON = function toJSON () {
- return {
- type: 'Buffer',
- data: Array.prototype.slice.call(this._arr || this, 0)
- }
-}
-
-function base64Slice (buf, start, end) {
- if (start === 0 && end === buf.length) {
- return base64.fromByteArray(buf)
- } else {
- return base64.fromByteArray(buf.slice(start, end))
- }
-}
-
-function utf8Slice (buf, start, end) {
- end = Math.min(buf.length, end)
- const res = []
-
- let i = start
- while (i < end) {
- const firstByte = buf[i]
- let codePoint = null
- let bytesPerSequence = (firstByte > 0xEF)
- ? 4
- : (firstByte > 0xDF)
- ? 3
- : (firstByte > 0xBF)
- ? 2
- : 1
-
- if (i + bytesPerSequence <= end) {
- let secondByte, thirdByte, fourthByte, tempCodePoint
-
- switch (bytesPerSequence) {
- case 1:
- if (firstByte < 0x80) {
- codePoint = firstByte
- }
- break
- case 2:
- secondByte = buf[i + 1]
- if ((secondByte & 0xC0) === 0x80) {
- tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F)
- if (tempCodePoint > 0x7F) {
- codePoint = tempCodePoint
- }
- }
- break
- case 3:
- secondByte = buf[i + 1]
- thirdByte = buf[i + 2]
- if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) {
- tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F)
- if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) {
- codePoint = tempCodePoint
- }
- }
- break
- case 4:
- secondByte = buf[i + 1]
- thirdByte = buf[i + 2]
- fourthByte = buf[i + 3]
- if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) {
- tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F)
- if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) {
- codePoint = tempCodePoint
- }
- }
- }
- }
-
- if (codePoint === null) {
- // we did not generate a valid codePoint so insert a
- // replacement char (U+FFFD) and advance only 1 byte
- codePoint = 0xFFFD
- bytesPerSequence = 1
- } else if (codePoint > 0xFFFF) {
- // encode to utf16 (surrogate pair dance)
- codePoint -= 0x10000
- res.push(codePoint >>> 10 & 0x3FF | 0xD800)
- codePoint = 0xDC00 | codePoint & 0x3FF
- }
-
- res.push(codePoint)
- i += bytesPerSequence
- }
-
- return decodeCodePointsArray(res)
-}
-
-// Based on http://stackoverflow.com/a/22747272/680742, the browser with
-// the lowest limit is Chrome, with 0x10000 args.
-// We go 1 magnitude less, for safety
-const MAX_ARGUMENTS_LENGTH = 0x1000
-
-function decodeCodePointsArray (codePoints) {
- const len = codePoints.length
- if (len <= MAX_ARGUMENTS_LENGTH) {
- return String.fromCharCode.apply(String, codePoints) // avoid extra slice()
- }
-
- // Decode in chunks to avoid "call stack size exceeded".
- let res = ''
- let i = 0
- while (i < len) {
- res += String.fromCharCode.apply(
- String,
- codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)
- )
- }
- return res
-}
-
-function asciiSlice (buf, start, end) {
- let ret = ''
- end = Math.min(buf.length, end)
-
- for (let i = start; i < end; ++i) {
- ret += String.fromCharCode(buf[i] & 0x7F)
- }
- return ret
-}
-
-function latin1Slice (buf, start, end) {
- let ret = ''
- end = Math.min(buf.length, end)
-
- for (let i = start; i < end; ++i) {
- ret += String.fromCharCode(buf[i])
- }
- return ret
-}
-
-function hexSlice (buf, start, end) {
- const len = buf.length
-
- if (!start || start < 0) start = 0
- if (!end || end < 0 || end > len) end = len
-
- let out = ''
- for (let i = start; i < end; ++i) {
- out += hexSliceLookupTable[buf[i]]
- }
- return out
-}
-
-function utf16leSlice (buf, start, end) {
- const bytes = buf.slice(start, end)
- let res = ''
- // If bytes.length is odd, the last 8 bits must be ignored (same as node.js)
- for (let i = 0; i < bytes.length - 1; i += 2) {
- res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256))
- }
- return res
-}
-
-Buffer.prototype.slice = function slice (start, end) {
- const len = this.length
- start = ~~start
- end = end === undefined ? len : ~~end
-
- if (start < 0) {
- start += len
- if (start < 0) start = 0
- } else if (start > len) {
- start = len
- }
-
- if (end < 0) {
- end += len
- if (end < 0) end = 0
- } else if (end > len) {
- end = len
- }
-
- if (end < start) end = start
-
- const newBuf = this.subarray(start, end)
- // Return an augmented `Uint8Array` instance
- Object.setPrototypeOf(newBuf, Buffer.prototype)
-
- return newBuf
-}
-
-/*
- * Need to make sure that buffer isn't trying to write out of bounds.
- */
-function checkOffset (offset, ext, length) {
- if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')
- if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')
-}
-
-Buffer.prototype.readUintLE =
-Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {
- offset = offset >>> 0
- byteLength = byteLength >>> 0
- if (!noAssert) checkOffset(offset, byteLength, this.length)
-
- let val = this[offset]
- let mul = 1
- let i = 0
- while (++i < byteLength && (mul *= 0x100)) {
- val += this[offset + i] * mul
- }
-
- return val
-}
-
-Buffer.prototype.readUintBE =
-Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {
- offset = offset >>> 0
- byteLength = byteLength >>> 0
- if (!noAssert) {
- checkOffset(offset, byteLength, this.length)
- }
-
- let val = this[offset + --byteLength]
- let mul = 1
- while (byteLength > 0 && (mul *= 0x100)) {
- val += this[offset + --byteLength] * mul
- }
-
- return val
-}
-
-Buffer.prototype.readUint8 =
-Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 1, this.length)
- return this[offset]
-}
-
-Buffer.prototype.readUint16LE =
-Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 2, this.length)
- return this[offset] | (this[offset + 1] << 8)
-}
-
-Buffer.prototype.readUint16BE =
-Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 2, this.length)
- return (this[offset] << 8) | this[offset + 1]
-}
-
-Buffer.prototype.readUint32LE =
-Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 4, this.length)
-
- return ((this[offset]) |
- (this[offset + 1] << 8) |
- (this[offset + 2] << 16)) +
- (this[offset + 3] * 0x1000000)
-}
-
-Buffer.prototype.readUint32BE =
-Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 4, this.length)
-
- return (this[offset] * 0x1000000) +
- ((this[offset + 1] << 16) |
- (this[offset + 2] << 8) |
- this[offset + 3])
-}
-
-Buffer.prototype.readBigUInt64LE = defineBigIntMethod(function readBigUInt64LE (offset) {
- offset = offset >>> 0
- validateNumber(offset, 'offset')
- const first = this[offset]
- const last = this[offset + 7]
- if (first === undefined || last === undefined) {
- boundsError(offset, this.length - 8)
- }
-
- const lo = first +
- this[++offset] * 2 ** 8 +
- this[++offset] * 2 ** 16 +
- this[++offset] * 2 ** 24
-
- const hi = this[++offset] +
- this[++offset] * 2 ** 8 +
- this[++offset] * 2 ** 16 +
- last * 2 ** 24
-
- return BigInt(lo) + (BigInt(hi) << BigInt(32))
-})
-
-Buffer.prototype.readBigUInt64BE = defineBigIntMethod(function readBigUInt64BE (offset) {
- offset = offset >>> 0
- validateNumber(offset, 'offset')
- const first = this[offset]
- const last = this[offset + 7]
- if (first === undefined || last === undefined) {
- boundsError(offset, this.length - 8)
- }
-
- const hi = first * 2 ** 24 +
- this[++offset] * 2 ** 16 +
- this[++offset] * 2 ** 8 +
- this[++offset]
-
- const lo = this[++offset] * 2 ** 24 +
- this[++offset] * 2 ** 16 +
- this[++offset] * 2 ** 8 +
- last
-
- return (BigInt(hi) << BigInt(32)) + BigInt(lo)
-})
-
-Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {
- offset = offset >>> 0
- byteLength = byteLength >>> 0
- if (!noAssert) checkOffset(offset, byteLength, this.length)
-
- let val = this[offset]
- let mul = 1
- let i = 0
- while (++i < byteLength && (mul *= 0x100)) {
- val += this[offset + i] * mul
- }
- mul *= 0x80
-
- if (val >= mul) val -= Math.pow(2, 8 * byteLength)
-
- return val
-}
-
-Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {
- offset = offset >>> 0
- byteLength = byteLength >>> 0
- if (!noAssert) checkOffset(offset, byteLength, this.length)
-
- let i = byteLength
- let mul = 1
- let val = this[offset + --i]
- while (i > 0 && (mul *= 0x100)) {
- val += this[offset + --i] * mul
- }
- mul *= 0x80
-
- if (val >= mul) val -= Math.pow(2, 8 * byteLength)
-
- return val
-}
-
-Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 1, this.length)
- if (!(this[offset] & 0x80)) return (this[offset])
- return ((0xff - this[offset] + 1) * -1)
-}
-
-Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 2, this.length)
- const val = this[offset] | (this[offset + 1] << 8)
- return (val & 0x8000) ? val | 0xFFFF0000 : val
-}
-
-Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 2, this.length)
- const val = this[offset + 1] | (this[offset] << 8)
- return (val & 0x8000) ? val | 0xFFFF0000 : val
-}
-
-Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 4, this.length)
-
- return (this[offset]) |
- (this[offset + 1] << 8) |
- (this[offset + 2] << 16) |
- (this[offset + 3] << 24)
-}
-
-Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 4, this.length)
-
- return (this[offset] << 24) |
- (this[offset + 1] << 16) |
- (this[offset + 2] << 8) |
- (this[offset + 3])
-}
-
-Buffer.prototype.readBigInt64LE = defineBigIntMethod(function readBigInt64LE (offset) {
- offset = offset >>> 0
- validateNumber(offset, 'offset')
- const first = this[offset]
- const last = this[offset + 7]
- if (first === undefined || last === undefined) {
- boundsError(offset, this.length - 8)
- }
-
- const val = this[offset + 4] +
- this[offset + 5] * 2 ** 8 +
- this[offset + 6] * 2 ** 16 +
- (last << 24) // Overflow
-
- return (BigInt(val) << BigInt(32)) +
- BigInt(first +
- this[++offset] * 2 ** 8 +
- this[++offset] * 2 ** 16 +
- this[++offset] * 2 ** 24)
-})
-
-Buffer.prototype.readBigInt64BE = defineBigIntMethod(function readBigInt64BE (offset) {
- offset = offset >>> 0
- validateNumber(offset, 'offset')
- const first = this[offset]
- const last = this[offset + 7]
- if (first === undefined || last === undefined) {
- boundsError(offset, this.length - 8)
- }
-
- const val = (first << 24) + // Overflow
- this[++offset] * 2 ** 16 +
- this[++offset] * 2 ** 8 +
- this[++offset]
-
- return (BigInt(val) << BigInt(32)) +
- BigInt(this[++offset] * 2 ** 24 +
- this[++offset] * 2 ** 16 +
- this[++offset] * 2 ** 8 +
- last)
-})
-
-Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 4, this.length)
- return ieee754.read(this, offset, true, 23, 4)
-}
-
-Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 4, this.length)
- return ieee754.read(this, offset, false, 23, 4)
-}
-
-Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 8, this.length)
- return ieee754.read(this, offset, true, 52, 8)
-}
-
-Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 8, this.length)
- return ieee754.read(this, offset, false, 52, 8)
-}
-
-function checkInt (buf, value, offset, ext, max, min) {
- if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance')
- if (value > max || value < min) throw new RangeError('"value" argument is out of bounds')
- if (offset + ext > buf.length) throw new RangeError('Index out of range')
-}
-
-Buffer.prototype.writeUintLE =
-Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {
- value = +value
- offset = offset >>> 0
- byteLength = byteLength >>> 0
- if (!noAssert) {
- const maxBytes = Math.pow(2, 8 * byteLength) - 1
- checkInt(this, value, offset, byteLength, maxBytes, 0)
- }
-
- let mul = 1
- let i = 0
- this[offset] = value & 0xFF
- while (++i < byteLength && (mul *= 0x100)) {
- this[offset + i] = (value / mul) & 0xFF
- }
-
- return offset + byteLength
-}
-
-Buffer.prototype.writeUintBE =
-Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) {
- value = +value
- offset = offset >>> 0
- byteLength = byteLength >>> 0
- if (!noAssert) {
- const maxBytes = Math.pow(2, 8 * byteLength) - 1
- checkInt(this, value, offset, byteLength, maxBytes, 0)
- }
-
- let i = byteLength - 1
- let mul = 1
- this[offset + i] = value & 0xFF
- while (--i >= 0 && (mul *= 0x100)) {
- this[offset + i] = (value / mul) & 0xFF
- }
-
- return offset + byteLength
-}
-
-Buffer.prototype.writeUint8 =
-Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0)
- this[offset] = (value & 0xff)
- return offset + 1
-}
-
-Buffer.prototype.writeUint16LE =
-Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
- this[offset] = (value & 0xff)
- this[offset + 1] = (value >>> 8)
- return offset + 2
-}
-
-Buffer.prototype.writeUint16BE =
-Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
- this[offset] = (value >>> 8)
- this[offset + 1] = (value & 0xff)
- return offset + 2
-}
-
-Buffer.prototype.writeUint32LE =
-Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
- this[offset + 3] = (value >>> 24)
- this[offset + 2] = (value >>> 16)
- this[offset + 1] = (value >>> 8)
- this[offset] = (value & 0xff)
- return offset + 4
-}
-
-Buffer.prototype.writeUint32BE =
-Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
- this[offset] = (value >>> 24)
- this[offset + 1] = (value >>> 16)
- this[offset + 2] = (value >>> 8)
- this[offset + 3] = (value & 0xff)
- return offset + 4
-}
-
-function wrtBigUInt64LE (buf, value, offset, min, max) {
- checkIntBI(value, min, max, buf, offset, 7)
-
- let lo = Number(value & BigInt(0xffffffff))
- buf[offset++] = lo
- lo = lo >> 8
- buf[offset++] = lo
- lo = lo >> 8
- buf[offset++] = lo
- lo = lo >> 8
- buf[offset++] = lo
- let hi = Number(value >> BigInt(32) & BigInt(0xffffffff))
- buf[offset++] = hi
- hi = hi >> 8
- buf[offset++] = hi
- hi = hi >> 8
- buf[offset++] = hi
- hi = hi >> 8
- buf[offset++] = hi
- return offset
-}
-
-function wrtBigUInt64BE (buf, value, offset, min, max) {
- checkIntBI(value, min, max, buf, offset, 7)
-
- let lo = Number(value & BigInt(0xffffffff))
- buf[offset + 7] = lo
- lo = lo >> 8
- buf[offset + 6] = lo
- lo = lo >> 8
- buf[offset + 5] = lo
- lo = lo >> 8
- buf[offset + 4] = lo
- let hi = Number(value >> BigInt(32) & BigInt(0xffffffff))
- buf[offset + 3] = hi
- hi = hi >> 8
- buf[offset + 2] = hi
- hi = hi >> 8
- buf[offset + 1] = hi
- hi = hi >> 8
- buf[offset] = hi
- return offset + 8
-}
-
-Buffer.prototype.writeBigUInt64LE = defineBigIntMethod(function writeBigUInt64LE (value, offset = 0) {
- return wrtBigUInt64LE(this, value, offset, BigInt(0), BigInt('0xffffffffffffffff'))
-})
-
-Buffer.prototype.writeBigUInt64BE = defineBigIntMethod(function writeBigUInt64BE (value, offset = 0) {
- return wrtBigUInt64BE(this, value, offset, BigInt(0), BigInt('0xffffffffffffffff'))
-})
-
-Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) {
- const limit = Math.pow(2, (8 * byteLength) - 1)
-
- checkInt(this, value, offset, byteLength, limit - 1, -limit)
- }
-
- let i = 0
- let mul = 1
- let sub = 0
- this[offset] = value & 0xFF
- while (++i < byteLength && (mul *= 0x100)) {
- if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
- sub = 1
- }
- this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
- }
-
- return offset + byteLength
-}
-
-Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) {
- const limit = Math.pow(2, (8 * byteLength) - 1)
-
- checkInt(this, value, offset, byteLength, limit - 1, -limit)
- }
-
- let i = byteLength - 1
- let mul = 1
- let sub = 0
- this[offset + i] = value & 0xFF
- while (--i >= 0 && (mul *= 0x100)) {
- if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
- sub = 1
- }
- this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
- }
-
- return offset + byteLength
-}
-
-Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80)
- if (value < 0) value = 0xff + value + 1
- this[offset] = (value & 0xff)
- return offset + 1
-}
-
-Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
- this[offset] = (value & 0xff)
- this[offset + 1] = (value >>> 8)
- return offset + 2
-}
-
-Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
- this[offset] = (value >>> 8)
- this[offset + 1] = (value & 0xff)
- return offset + 2
-}
-
-Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
- this[offset] = (value & 0xff)
- this[offset + 1] = (value >>> 8)
- this[offset + 2] = (value >>> 16)
- this[offset + 3] = (value >>> 24)
- return offset + 4
-}
-
-Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
- if (value < 0) value = 0xffffffff + value + 1
- this[offset] = (value >>> 24)
- this[offset + 1] = (value >>> 16)
- this[offset + 2] = (value >>> 8)
- this[offset + 3] = (value & 0xff)
- return offset + 4
-}
-
-Buffer.prototype.writeBigInt64LE = defineBigIntMethod(function writeBigInt64LE (value, offset = 0) {
- return wrtBigUInt64LE(this, value, offset, -BigInt('0x8000000000000000'), BigInt('0x7fffffffffffffff'))
-})
-
-Buffer.prototype.writeBigInt64BE = defineBigIntMethod(function writeBigInt64BE (value, offset = 0) {
- return wrtBigUInt64BE(this, value, offset, -BigInt('0x8000000000000000'), BigInt('0x7fffffffffffffff'))
-})
-
-function checkIEEE754 (buf, value, offset, ext, max, min) {
- if (offset + ext > buf.length) throw new RangeError('Index out of range')
- if (offset < 0) throw new RangeError('Index out of range')
-}
-
-function writeFloat (buf, value, offset, littleEndian, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) {
- checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38)
- }
- ieee754.write(buf, value, offset, littleEndian, 23, 4)
- return offset + 4
-}
-
-Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {
- return writeFloat(this, value, offset, true, noAssert)
-}
-
-Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {
- return writeFloat(this, value, offset, false, noAssert)
-}
-
-function writeDouble (buf, value, offset, littleEndian, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) {
- checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308)
- }
- ieee754.write(buf, value, offset, littleEndian, 52, 8)
- return offset + 8
-}
-
-Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {
- return writeDouble(this, value, offset, true, noAssert)
-}
-
-Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {
- return writeDouble(this, value, offset, false, noAssert)
-}
-
-// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
-Buffer.prototype.copy = function copy (target, targetStart, start, end) {
- if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer')
- if (!start) start = 0
- if (!end && end !== 0) end = this.length
- if (targetStart >= target.length) targetStart = target.length
- if (!targetStart) targetStart = 0
- if (end > 0 && end < start) end = start
-
- // Copy 0 bytes; we're done
- if (end === start) return 0
- if (target.length === 0 || this.length === 0) return 0
-
- // Fatal error conditions
- if (targetStart < 0) {
- throw new RangeError('targetStart out of bounds')
- }
- if (start < 0 || start >= this.length) throw new RangeError('Index out of range')
- if (end < 0) throw new RangeError('sourceEnd out of bounds')
-
- // Are we oob?
- if (end > this.length) end = this.length
- if (target.length - targetStart < end - start) {
- end = target.length - targetStart + start
- }
-
- const len = end - start
-
- if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') {
- // Use built-in when available, missing from IE11
- this.copyWithin(targetStart, start, end)
- } else {
- Uint8Array.prototype.set.call(
- target,
- this.subarray(start, end),
- targetStart
- )
- }
-
- return len
-}
-
-// Usage:
-// buffer.fill(number[, offset[, end]])
-// buffer.fill(buffer[, offset[, end]])
-// buffer.fill(string[, offset[, end]][, encoding])
-Buffer.prototype.fill = function fill (val, start, end, encoding) {
- // Handle string cases:
- if (typeof val === 'string') {
- if (typeof start === 'string') {
- encoding = start
- start = 0
- end = this.length
- } else if (typeof end === 'string') {
- encoding = end
- end = this.length
- }
- if (encoding !== undefined && typeof encoding !== 'string') {
- throw new TypeError('encoding must be a string')
- }
- if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
- throw new TypeError('Unknown encoding: ' + encoding)
- }
- if (val.length === 1) {
- const code = val.charCodeAt(0)
- if ((encoding === 'utf8' && code < 128) ||
- encoding === 'latin1') {
- // Fast path: If `val` fits into a single byte, use that numeric value.
- val = code
- }
- }
- } else if (typeof val === 'number') {
- val = val & 255
- } else if (typeof val === 'boolean') {
- val = Number(val)
- }
-
- // Invalid ranges are not set to a default, so can range check early.
- if (start < 0 || this.length < start || this.length < end) {
- throw new RangeError('Out of range index')
- }
-
- if (end <= start) {
- return this
- }
-
- start = start >>> 0
- end = end === undefined ? this.length : end >>> 0
-
- if (!val) val = 0
-
- let i
- if (typeof val === 'number') {
- for (i = start; i < end; ++i) {
- this[i] = val
- }
- } else {
- const bytes = Buffer.isBuffer(val)
- ? val
- : Buffer.from(val, encoding)
- const len = bytes.length
- if (len === 0) {
- throw new TypeError('The value "' + val +
- '" is invalid for argument "value"')
- }
- for (i = 0; i < end - start; ++i) {
- this[i + start] = bytes[i % len]
- }
- }
-
- return this
-}
-
-// CUSTOM ERRORS
-// =============
-
-// Simplified versions from Node, changed for Buffer-only usage
-const errors = {}
-function E (sym, getMessage, Base) {
- errors[sym] = class NodeError extends Base {
- constructor () {
- super()
-
- Object.defineProperty(this, 'message', {
- value: getMessage.apply(this, arguments),
- writable: true,
- configurable: true
- })
-
- // Add the error code to the name to include it in the stack trace.
- this.name = `${this.name} [${sym}]`
- // Access the stack to generate the error message including the error code
- // from the name.
- this.stack // eslint-disable-line no-unused-expressions
- // Reset the name to the actual name.
- delete this.name
- }
-
- get code () {
- return sym
- }
-
- set code (value) {
- Object.defineProperty(this, 'code', {
- configurable: true,
- enumerable: true,
- value,
- writable: true
- })
- }
-
- toString () {
- return `${this.name} [${sym}]: ${this.message}`
- }
- }
-}
-
-E('ERR_BUFFER_OUT_OF_BOUNDS',
- function (name) {
- if (name) {
- return `${name} is outside of buffer bounds`
- }
-
- return 'Attempt to access memory outside buffer bounds'
- }, RangeError)
-E('ERR_INVALID_ARG_TYPE',
- function (name, actual) {
- return `The "${name}" argument must be of type number. Received type ${typeof actual}`
- }, TypeError)
-E('ERR_OUT_OF_RANGE',
- function (str, range, input) {
- let msg = `The value of "${str}" is out of range.`
- let received = input
- if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) {
- received = addNumericalSeparator(String(input))
- } else if (typeof input === 'bigint') {
- received = String(input)
- if (input > BigInt(2) ** BigInt(32) || input < -(BigInt(2) ** BigInt(32))) {
- received = addNumericalSeparator(received)
- }
- received += 'n'
- }
- msg += ` It must be ${range}. Received ${received}`
- return msg
- }, RangeError)
-
-function addNumericalSeparator (val) {
- let res = ''
- let i = val.length
- const start = val[0] === '-' ? 1 : 0
- for (; i >= start + 4; i -= 3) {
- res = `_${val.slice(i - 3, i)}${res}`
- }
- return `${val.slice(0, i)}${res}`
-}
-
-// CHECK FUNCTIONS
-// ===============
-
-function checkBounds (buf, offset, byteLength) {
- validateNumber(offset, 'offset')
- if (buf[offset] === undefined || buf[offset + byteLength] === undefined) {
- boundsError(offset, buf.length - (byteLength + 1))
- }
-}
-
-function checkIntBI (value, min, max, buf, offset, byteLength) {
- if (value > max || value < min) {
- const n = typeof min === 'bigint' ? 'n' : ''
- let range
- if (byteLength > 3) {
- if (min === 0 || min === BigInt(0)) {
- range = `>= 0${n} and < 2${n} ** ${(byteLength + 1) * 8}${n}`
- } else {
- range = `>= -(2${n} ** ${(byteLength + 1) * 8 - 1}${n}) and < 2 ** ` +
- `${(byteLength + 1) * 8 - 1}${n}`
- }
- } else {
- range = `>= ${min}${n} and <= ${max}${n}`
- }
- throw new errors.ERR_OUT_OF_RANGE('value', range, value)
- }
- checkBounds(buf, offset, byteLength)
-}
-
-function validateNumber (value, name) {
- if (typeof value !== 'number') {
- throw new errors.ERR_INVALID_ARG_TYPE(name, 'number', value)
- }
-}
-
-function boundsError (value, length, type) {
- if (Math.floor(value) !== value) {
- validateNumber(value, type)
- throw new errors.ERR_OUT_OF_RANGE(type || 'offset', 'an integer', value)
- }
-
- if (length < 0) {
- throw new errors.ERR_BUFFER_OUT_OF_BOUNDS()
- }
-
- throw new errors.ERR_OUT_OF_RANGE(type || 'offset',
- `>= ${type ? 1 : 0} and <= ${length}`,
- value)
-}
-
-// HELPER FUNCTIONS
-// ================
-
-const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g
-
-function base64clean (str) {
- // Node takes equal signs as end of the Base64 encoding
- str = str.split('=')[0]
- // Node strips out invalid characters like \n and \t from the string, base64-js does not
- str = str.trim().replace(INVALID_BASE64_RE, '')
- // Node converts strings with length < 2 to ''
- if (str.length < 2) return ''
- // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not
- while (str.length % 4 !== 0) {
- str = str + '='
- }
- return str
-}
-
-function utf8ToBytes (string, units) {
- units = units || Infinity
- let codePoint
- const length = string.length
- let leadSurrogate = null
- const bytes = []
-
- for (let i = 0; i < length; ++i) {
- codePoint = string.charCodeAt(i)
-
- // is surrogate component
- if (codePoint > 0xD7FF && codePoint < 0xE000) {
- // last char was a lead
- if (!leadSurrogate) {
- // no lead yet
- if (codePoint > 0xDBFF) {
- // unexpected trail
- if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
- continue
- } else if (i + 1 === length) {
- // unpaired lead
- if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
- continue
- }
-
- // valid lead
- leadSurrogate = codePoint
-
- continue
- }
-
- // 2 leads in a row
- if (codePoint < 0xDC00) {
- if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
- leadSurrogate = codePoint
- continue
- }
-
- // valid surrogate pair
- codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000
- } else if (leadSurrogate) {
- // valid bmp char, but last char was a lead
- if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
- }
-
- leadSurrogate = null
-
- // encode utf8
- if (codePoint < 0x80) {
- if ((units -= 1) < 0) break
- bytes.push(codePoint)
- } else if (codePoint < 0x800) {
- if ((units -= 2) < 0) break
- bytes.push(
- codePoint >> 0x6 | 0xC0,
- codePoint & 0x3F | 0x80
- )
- } else if (codePoint < 0x10000) {
- if ((units -= 3) < 0) break
- bytes.push(
- codePoint >> 0xC | 0xE0,
- codePoint >> 0x6 & 0x3F | 0x80,
- codePoint & 0x3F | 0x80
- )
- } else if (codePoint < 0x110000) {
- if ((units -= 4) < 0) break
- bytes.push(
- codePoint >> 0x12 | 0xF0,
- codePoint >> 0xC & 0x3F | 0x80,
- codePoint >> 0x6 & 0x3F | 0x80,
- codePoint & 0x3F | 0x80
- )
- } else {
- throw new Error('Invalid code point')
- }
- }
-
- return bytes
-}
-
-function asciiToBytes (str) {
- const byteArray = []
- for (let i = 0; i < str.length; ++i) {
- // Node's code seems to be doing this and not & 0x7F..
- byteArray.push(str.charCodeAt(i) & 0xFF)
- }
- return byteArray
-}
-
-function utf16leToBytes (str, units) {
- let c, hi, lo
- const byteArray = []
- for (let i = 0; i < str.length; ++i) {
- if ((units -= 2) < 0) break
-
- c = str.charCodeAt(i)
- hi = c >> 8
- lo = c % 256
- byteArray.push(lo)
- byteArray.push(hi)
- }
-
- return byteArray
-}
-
-function base64ToBytes (str) {
- return base64.toByteArray(base64clean(str))
-}
-
-function blitBuffer (src, dst, offset, length) {
- let i
- for (i = 0; i < length; ++i) {
- if ((i + offset >= dst.length) || (i >= src.length)) break
- dst[i + offset] = src[i]
- }
- return i
-}
-
-// ArrayBuffer or Uint8Array objects from other contexts (i.e. iframes) do not pass
-// the `instanceof` check but they should be treated as of that type.
-// See: https://github.com/feross/buffer/issues/166
-function isInstance (obj, type) {
- return obj instanceof type ||
- (obj != null && obj.constructor != null && obj.constructor.name != null &&
- obj.constructor.name === type.name)
-}
-function numberIsNaN (obj) {
- // For IE11 support
- return obj !== obj // eslint-disable-line no-self-compare
-}
-
-// Create lookup table for `toString('hex')`
-// See: https://github.com/feross/buffer/issues/219
-const hexSliceLookupTable = (function () {
- const alphabet = '0123456789abcdef'
- const table = new Array(256)
- for (let i = 0; i < 16; ++i) {
- const i16 = i * 16
- for (let j = 0; j < 16; ++j) {
- table[i16 + j] = alphabet[i] + alphabet[j]
- }
- }
- return table
-})()
-
-// Return not function with Error if BigInt not supported
-function defineBigIntMethod (fn) {
- return typeof BigInt === 'undefined' ? BufferBigIntNotDefined : fn
-}
-
-function BufferBigIntNotDefined () {
- throw new Error('BigInt not supported')
-}
diff --git a/deps/npm/node_modules/buffer/package.json b/deps/npm/node_modules/buffer/package.json
deleted file mode 100644
index ca1ad9a7078842..00000000000000
--- a/deps/npm/node_modules/buffer/package.json
+++ /dev/null
@@ -1,93 +0,0 @@
-{
- "name": "buffer",
- "description": "Node.js Buffer API, for the browser",
- "version": "6.0.3",
- "author": {
- "name": "Feross Aboukhadijeh",
- "email": "feross@feross.org",
- "url": "https://feross.org"
- },
- "bugs": {
- "url": "https://github.com/feross/buffer/issues"
- },
- "contributors": [
- "Romain Beauxis ",
- "James Halliday "
- ],
- "dependencies": {
- "base64-js": "^1.3.1",
- "ieee754": "^1.2.1"
- },
- "devDependencies": {
- "airtap": "^3.0.0",
- "benchmark": "^2.1.4",
- "browserify": "^17.0.0",
- "concat-stream": "^2.0.0",
- "hyperquest": "^2.1.3",
- "is-buffer": "^2.0.5",
- "is-nan": "^1.3.0",
- "split": "^1.0.1",
- "standard": "*",
- "tape": "^5.0.1",
- "through2": "^4.0.2",
- "uglify-js": "^3.11.5"
- },
- "homepage": "https://github.com/feross/buffer",
- "jspm": {
- "map": {
- "./index.js": {
- "node": "@node/buffer"
- }
- }
- },
- "keywords": [
- "arraybuffer",
- "browser",
- "browserify",
- "buffer",
- "compatible",
- "dataview",
- "uint8array"
- ],
- "license": "MIT",
- "main": "index.js",
- "types": "index.d.ts",
- "repository": {
- "type": "git",
- "url": "git://github.com/feross/buffer.git"
- },
- "scripts": {
- "perf": "browserify --debug perf/bracket-notation.js > perf/bundle.js && open perf/index.html",
- "perf-node": "node perf/bracket-notation.js && node perf/concat.js && node perf/copy-big.js && node perf/copy.js && node perf/new-big.js && node perf/new.js && node perf/readDoubleBE.js && node perf/readFloatBE.js && node perf/readUInt32LE.js && node perf/slice.js && node perf/writeFloatBE.js",
- "size": "browserify -r ./ | uglifyjs -c -m | gzip | wc -c",
- "test": "standard && node ./bin/test.js",
- "test-browser-old": "airtap -- test/*.js",
- "test-browser-old-local": "airtap --local -- test/*.js",
- "test-browser-new": "airtap -- test/*.js test/node/*.js",
- "test-browser-new-local": "airtap --local -- test/*.js test/node/*.js",
- "test-node": "tape test/*.js test/node/*.js",
- "update-authors": "./bin/update-authors.sh"
- },
- "standard": {
- "ignore": [
- "test/node/**/*.js",
- "test/common.js",
- "test/_polyfill.js",
- "perf/**/*.js"
- ]
- },
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ]
-}
diff --git a/deps/npm/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/cacache/lib/content/read.js
index f41b539df65dce..a1fa8a08cc0f93 100644
--- a/deps/npm/node_modules/cacache/lib/content/read.js
+++ b/deps/npm/node_modules/cacache/lib/content/read.js
@@ -13,18 +13,20 @@ async function read (cache, integrity, opts = {}) {
const { size } = opts
const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
// get size
- const stat = await fs.stat(cpath)
+ const stat = size ? { size } : await fs.stat(cpath)
return { stat, cpath, sri }
})
- if (typeof size === 'number' && stat.size !== size) {
- throw sizeError(size, stat.size)
- }
if (stat.size > MAX_SINGLE_READ_SIZE) {
return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
}
const data = await fs.readFile(cpath, { encoding: null })
+
+ if (stat.size !== data.length) {
+ throw sizeError(stat.size, data.length)
+ }
+
if (!ssri.checkData(data, sri)) {
throw integrityError(sri, cpath)
}
@@ -55,13 +57,10 @@ function readStream (cache, integrity, opts = {}) {
// Set all this up to run on the stream and then just return the stream
Promise.resolve().then(async () => {
const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
- // just stat to ensure it exists
- const stat = await fs.stat(cpath)
+ // get size
+ const stat = size ? { size } : await fs.stat(cpath)
return { stat, cpath, sri }
})
- if (typeof size === 'number' && size !== stat.size) {
- return stream.emit('error', sizeError(size, stat.size))
- }
return readPipeline(cpath, stat.size, sri, stream)
}).catch(err => stream.emit('error', err))
diff --git a/deps/npm/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/cacache/lib/content/write.js
index 71461465812878..09ca4e4e5a4d3f 100644
--- a/deps/npm/node_modules/cacache/lib/content/write.js
+++ b/deps/npm/node_modules/cacache/lib/content/write.js
@@ -67,6 +67,7 @@ class CacacheWriteStream extends Flush {
this.cache,
this.opts
)
+ this.handleContentP.catch(error => this.emit('error', error))
}
return this.inputStream.write(chunk, encoding, cb)
}
diff --git a/deps/npm/node_modules/cacache/package.json b/deps/npm/node_modules/cacache/package.json
index 1b14bf4bd14904..3f87af3e7dbcee 100644
--- a/deps/npm/node_modules/cacache/package.json
+++ b/deps/npm/node_modules/cacache/package.json
@@ -1,6 +1,6 @@
{
"name": "cacache",
- "version": "18.0.0",
+ "version": "18.0.2",
"cache-version": {
"content": "2",
"index": "5"
@@ -16,7 +16,7 @@
"snap": "tap",
"coverage": "tap",
"test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"npmclilint": "npmcli-lint",
"lintfix": "npm run lint -- --fix",
"postsnap": "npm run lintfix --",
@@ -50,7 +50,7 @@
"glob": "^10.2.2",
"lru-cache": "^10.0.1",
"minipass": "^7.0.3",
- "minipass-collect": "^1.0.2",
+ "minipass-collect": "^2.0.1",
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.4",
"p-map": "^4.0.0",
@@ -60,7 +60,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.18.0",
+ "@npmcli/template-oss": "4.21.3",
"tap": "^16.0.0"
},
"engines": {
@@ -69,14 +69,8 @@
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"windowsCI": false,
- "version": "4.18.0",
- "publish": "true",
- "ciVersions": [
- "16.14.0",
- "16.x",
- "18.0.0",
- "18.x"
- ]
+ "version": "4.21.3",
+ "publish": "true"
},
"author": "GitHub Inc.",
"tap": {
diff --git a/deps/npm/node_modules/delegates/History.md b/deps/npm/node_modules/delegates/History.md
deleted file mode 100644
index 25959eab67b840..00000000000000
--- a/deps/npm/node_modules/delegates/History.md
+++ /dev/null
@@ -1,22 +0,0 @@
-
-1.0.0 / 2015-12-14
-==================
-
- * Merge pull request #12 from kasicka/master
- * Add license text
-
-0.1.0 / 2014-10-17
-==================
-
- * adds `.fluent()` to api
-
-0.0.3 / 2014-01-13
-==================
-
- * fix receiver for .method()
-
-0.0.2 / 2014-01-13
-==================
-
- * Object.defineProperty() sucks
- * Initial commit
diff --git a/deps/npm/node_modules/delegates/License b/deps/npm/node_modules/delegates/License
deleted file mode 100644
index 60de60addbe7e9..00000000000000
--- a/deps/npm/node_modules/delegates/License
+++ /dev/null
@@ -1,20 +0,0 @@
-Copyright (c) 2015 TJ Holowaychuk
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/delegates/Makefile b/deps/npm/node_modules/delegates/Makefile
deleted file mode 100644
index a9dcfd50dbdb22..00000000000000
--- a/deps/npm/node_modules/delegates/Makefile
+++ /dev/null
@@ -1,8 +0,0 @@
-
-test:
- @./node_modules/.bin/mocha \
- --require should \
- --reporter spec \
- --bail
-
-.PHONY: test
\ No newline at end of file
diff --git a/deps/npm/node_modules/delegates/index.js b/deps/npm/node_modules/delegates/index.js
deleted file mode 100644
index 17c222d52935c6..00000000000000
--- a/deps/npm/node_modules/delegates/index.js
+++ /dev/null
@@ -1,121 +0,0 @@
-
-/**
- * Expose `Delegator`.
- */
-
-module.exports = Delegator;
-
-/**
- * Initialize a delegator.
- *
- * @param {Object} proto
- * @param {String} target
- * @api public
- */
-
-function Delegator(proto, target) {
- if (!(this instanceof Delegator)) return new Delegator(proto, target);
- this.proto = proto;
- this.target = target;
- this.methods = [];
- this.getters = [];
- this.setters = [];
- this.fluents = [];
-}
-
-/**
- * Delegate method `name`.
- *
- * @param {String} name
- * @return {Delegator} self
- * @api public
- */
-
-Delegator.prototype.method = function(name){
- var proto = this.proto;
- var target = this.target;
- this.methods.push(name);
-
- proto[name] = function(){
- return this[target][name].apply(this[target], arguments);
- };
-
- return this;
-};
-
-/**
- * Delegator accessor `name`.
- *
- * @param {String} name
- * @return {Delegator} self
- * @api public
- */
-
-Delegator.prototype.access = function(name){
- return this.getter(name).setter(name);
-};
-
-/**
- * Delegator getter `name`.
- *
- * @param {String} name
- * @return {Delegator} self
- * @api public
- */
-
-Delegator.prototype.getter = function(name){
- var proto = this.proto;
- var target = this.target;
- this.getters.push(name);
-
- proto.__defineGetter__(name, function(){
- return this[target][name];
- });
-
- return this;
-};
-
-/**
- * Delegator setter `name`.
- *
- * @param {String} name
- * @return {Delegator} self
- * @api public
- */
-
-Delegator.prototype.setter = function(name){
- var proto = this.proto;
- var target = this.target;
- this.setters.push(name);
-
- proto.__defineSetter__(name, function(val){
- return this[target][name] = val;
- });
-
- return this;
-};
-
-/**
- * Delegator fluent accessor
- *
- * @param {String} name
- * @return {Delegator} self
- * @api public
- */
-
-Delegator.prototype.fluent = function (name) {
- var proto = this.proto;
- var target = this.target;
- this.fluents.push(name);
-
- proto[name] = function(val){
- if ('undefined' != typeof val) {
- this[target][name] = val;
- return this;
- } else {
- return this[target][name];
- }
- };
-
- return this;
-};
diff --git a/deps/npm/node_modules/delegates/package.json b/deps/npm/node_modules/delegates/package.json
deleted file mode 100644
index 17240384fd43b4..00000000000000
--- a/deps/npm/node_modules/delegates/package.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "name": "delegates",
- "version": "1.0.0",
- "repository": "visionmedia/node-delegates",
- "description": "delegate methods and accessors to another property",
- "keywords": ["delegate", "delegation"],
- "dependencies": {},
- "devDependencies": {
- "mocha": "*",
- "should": "*"
- },
- "license": "MIT"
-}
diff --git a/deps/npm/node_modules/delegates/test/index.js b/deps/npm/node_modules/delegates/test/index.js
deleted file mode 100644
index 7b6e3d4df19d90..00000000000000
--- a/deps/npm/node_modules/delegates/test/index.js
+++ /dev/null
@@ -1,94 +0,0 @@
-
-var assert = require('assert');
-var delegate = require('..');
-
-describe('.method(name)', function(){
- it('should delegate methods', function(){
- var obj = {};
-
- obj.request = {
- foo: function(bar){
- assert(this == obj.request);
- return bar;
- }
- };
-
- delegate(obj, 'request').method('foo');
-
- obj.foo('something').should.equal('something');
- })
-})
-
-describe('.getter(name)', function(){
- it('should delegate getters', function(){
- var obj = {};
-
- obj.request = {
- get type() {
- return 'text/html';
- }
- }
-
- delegate(obj, 'request').getter('type');
-
- obj.type.should.equal('text/html');
- })
-})
-
-describe('.setter(name)', function(){
- it('should delegate setters', function(){
- var obj = {};
-
- obj.request = {
- get type() {
- return this._type.toUpperCase();
- },
-
- set type(val) {
- this._type = val;
- }
- }
-
- delegate(obj, 'request').setter('type');
-
- obj.type = 'hey';
- obj.request.type.should.equal('HEY');
- })
-})
-
-describe('.access(name)', function(){
- it('should delegate getters and setters', function(){
- var obj = {};
-
- obj.request = {
- get type() {
- return this._type.toUpperCase();
- },
-
- set type(val) {
- this._type = val;
- }
- }
-
- delegate(obj, 'request').access('type');
-
- obj.type = 'hey';
- obj.type.should.equal('HEY');
- })
-})
-
-describe('.fluent(name)', function () {
- it('should delegate in a fluent fashion', function () {
- var obj = {
- settings: {
- env: 'development'
- }
- };
-
- delegate(obj, 'settings').fluent('env');
-
- obj.env().should.equal('development');
- obj.env('production').should.equal(obj);
- obj.settings.env.should.equal('production');
- })
-})
diff --git a/deps/npm/node_modules/event-target-shim/LICENSE b/deps/npm/node_modules/event-target-shim/LICENSE
deleted file mode 100644
index b71bf4e29d62c5..00000000000000
--- a/deps/npm/node_modules/event-target-shim/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2015 Toru Nagashima
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/deps/npm/node_modules/event-target-shim/dist/event-target-shim.js b/deps/npm/node_modules/event-target-shim/dist/event-target-shim.js
deleted file mode 100644
index 53ce22036e35ef..00000000000000
--- a/deps/npm/node_modules/event-target-shim/dist/event-target-shim.js
+++ /dev/null
@@ -1,871 +0,0 @@
-/**
- * @author Toru Nagashima
- * @copyright 2015 Toru Nagashima. All rights reserved.
- * See LICENSE file in root directory for full license.
- */
-'use strict';
-
-Object.defineProperty(exports, '__esModule', { value: true });
-
-/**
- * @typedef {object} PrivateData
- * @property {EventTarget} eventTarget The event target.
- * @property {{type:string}} event The original event object.
- * @property {number} eventPhase The current event phase.
- * @property {EventTarget|null} currentTarget The current event target.
- * @property {boolean} canceled The flag to prevent default.
- * @property {boolean} stopped The flag to stop propagation.
- * @property {boolean} immediateStopped The flag to stop propagation immediately.
- * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.
- * @property {number} timeStamp The unix time.
- * @private
- */
-
-/**
- * Private data for event wrappers.
- * @type {WeakMap}
- * @private
- */
-const privateData = new WeakMap();
-
-/**
- * Cache for wrapper classes.
- * @type {WeakMap
+21.6.1
+21.6.0 21.5.0 21.4.0 21.3.0
@@ -41,6 +43,189 @@
* [io.js](CHANGELOG_IOJS.md)
* [Archive](CHANGELOG_ARCHIVE.md)
+
+
+## 2024-01-22, Version 21.6.1 (Current), @RafaelGSS
+
+### Notable Changes
+
+This release fixes a bug in `undici` using WebStreams
+
+### Commits
+
+* \[[`662ac95729`](https://github.com/nodejs/node/commit/662ac95729)] - _**Revert**_ "**stream**: fix cloned webstreams not being unref'd" (Matteo Collina) [#51491](https://github.com/nodejs/node/pull/51491)
+* \[[`1b8bba8aee`](https://github.com/nodejs/node/commit/1b8bba8aee)] - **test**: add regression test for 51586 (Matteo Collina) [#51491](https://github.com/nodejs/node/pull/51491)
+
+
+
+## 2024-01-15, Version 21.6.0 (Current), @RafaelGSS
+
+### New connection attempt events
+
+Three new events were added in the `net.createConnection` flow:
+
+* `connectionAttempt`: Emitted when a new connection attempt is established. In case of Happy Eyeballs, this might be emitted multiple times.
+* `connectionAttemptFailed`: Emitted when a connection attempt failed. In case of Happy Eyeballs, this might be emitted multiple times.
+* `connectionAttemptTimeout`: Emitted when a connection attempt timed out. In case of Happy Eyeballs, this will not be emitted for the last attempt. This is not emitted at all if Happy Eyeballs is not used.
+
+Additionally, a previous bug has been fixed where a new connection attempt could have been started after a previous one failed and after the connection was destroyed by the user.
+This led to a failed assertion.
+
+Contributed by Paolo Insogna in [#51045](https://github.com/nodejs/node/pull/51045).
+
+### Changes to the Permission Model
+
+Node.js 21.6.0 comes with several fixes for the experimental permission model and two new semver-minor commits.
+We're adding a new flag `--allow-addons` to enable addon usage when using the Permission Model.
+
+```console
+$ node --experimental-permission --allow-addons
+```
+
+Contributed by Rafael Gonzaga in [#51183](https://github.com/nodejs/node/pull/51183)
+
+And relative paths are now supported through the `--allow-fs-*` flags.
+Therefore, with this release one can use:
+
+```console
+$ node --experimental-permission --allow-fs-read=./index.js
+```
+
+To give only read access to the entrypoint of the application.
+
+Contributed by Rafael Gonzaga and Carlos Espa in [#50758](https://github.com/nodejs/node/pull/50758)
+
+### Support configurable snapshot through `--build-snapshot-config` flag
+
+We are adding a new flag `--build-snapshot-config` to configure snapshots through a custom JSON configuration file.
+
+```console
+$ node --build-snapshot-config=/path/to/myconfig.json
+```
+
+When using this flag, additional script files provided on the command line will
+not be executed and instead be interpreted as regular command line arguments.
+
+These changes were contributed by Joyee Cheung and Anna Henningsen in [#50453](https://github.com/nodejs/node/pull/50453)
+
+### Other Notable Changes
+
+* \[[`c31ed51373`](https://github.com/nodejs/node/commit/c31ed51373)] - **(SEMVER-MINOR)** **timers**: export timers.promises (Marco Ippolito) [#51246](https://github.com/nodejs/node/pull/51246)
+
+### Commits
+
+* \[[`13a1241b83`](https://github.com/nodejs/node/commit/13a1241b83)] - **assert,crypto**: make KeyObject and CryptoKey testable for equality (Filip Skokan) [#50897](https://github.com/nodejs/node/pull/50897)
+* \[[`4dcc5114aa`](https://github.com/nodejs/node/commit/4dcc5114aa)] - **benchmark**: remove dependency on unshipped tools (Adam Majer) [#51146](https://github.com/nodejs/node/pull/51146)
+* \[[`2eb41f86b3`](https://github.com/nodejs/node/commit/2eb41f86b3)] - **build**: fix for VScode "Reopen in Container" (Serg Kryvonos) [#51271](https://github.com/nodejs/node/pull/51271)
+* \[[`e03ac83c19`](https://github.com/nodejs/node/commit/e03ac83c19)] - **build**: fix arm64 cross-compilation (Michaël Zasso) [#51256](https://github.com/nodejs/node/pull/51256)
+* \[[`cd61fce34e`](https://github.com/nodejs/node/commit/cd61fce34e)] - **build**: add `-flax-vector-conversions` to V8 build (Michaël Zasso) [#51257](https://github.com/nodejs/node/pull/51257)
+* \[[`e5017a522e`](https://github.com/nodejs/node/commit/e5017a522e)] - **crypto**: update CryptoKey symbol properties (Filip Skokan) [#50897](https://github.com/nodejs/node/pull/50897)
+* \[[`c0d2e8be11`](https://github.com/nodejs/node/commit/c0d2e8be11)] - **deps**: update corepack to 0.24.0 (Node.js GitHub Bot) [#51318](https://github.com/nodejs/node/pull/51318)
+* \[[`24a9a72492`](https://github.com/nodejs/node/commit/24a9a72492)] - **deps**: update acorn to 8.11.3 (Node.js GitHub Bot) [#51317](https://github.com/nodejs/node/pull/51317)
+* \[[`e53cbb22c2`](https://github.com/nodejs/node/commit/e53cbb22c2)] - **deps**: update ngtcp2 and nghttp3 (James M Snell) [#51291](https://github.com/nodejs/node/pull/51291)
+* \[[`f00f1204f1`](https://github.com/nodejs/node/commit/f00f1204f1)] - **deps**: update brotli to 1.1.0 (Node.js GitHub Bot) [#50804](https://github.com/nodejs/node/pull/50804)
+* \[[`a41dca0c51`](https://github.com/nodejs/node/commit/a41dca0c51)] - **deps**: update zlib to 1.3.0.1-motley-40e35a7 (Node.js GitHub Bot) [#51274](https://github.com/nodejs/node/pull/51274)
+* \[[`efa12a89c6`](https://github.com/nodejs/node/commit/efa12a89c6)] - **deps**: update simdutf to 4.0.8 (Node.js GitHub Bot) [#51000](https://github.com/nodejs/node/pull/51000)
+* \[[`25eba3d20b`](https://github.com/nodejs/node/commit/25eba3d20b)] - **deps**: V8: cherry-pick de611e69ad51 (Keyhan Vakil) [#51200](https://github.com/nodejs/node/pull/51200)
+* \[[`a07d6e23e4`](https://github.com/nodejs/node/commit/a07d6e23e4)] - **deps**: update simdjson to 3.6.3 (Node.js GitHub Bot) [#51104](https://github.com/nodejs/node/pull/51104)
+* \[[`6d1bfcb2dd`](https://github.com/nodejs/node/commit/6d1bfcb2dd)] - **deps**: update googletest to 530d5c8 (Node.js GitHub Bot) [#51191](https://github.com/nodejs/node/pull/51191)
+* \[[`75e5615c43`](https://github.com/nodejs/node/commit/75e5615c43)] - **deps**: update acorn-walk to 8.3.1 (Node.js GitHub Bot) [#50457](https://github.com/nodejs/node/pull/50457)
+* \[[`3ecc7dcc00`](https://github.com/nodejs/node/commit/3ecc7dcc00)] - **deps**: update acorn-walk to 8.3.0 (Node.js GitHub Bot) [#50457](https://github.com/nodejs/node/pull/50457)
+* \[[`e2f8d741c8`](https://github.com/nodejs/node/commit/e2f8d741c8)] - **deps**: update zlib to 1.3.0.1-motley-dd5fc13 (Node.js GitHub Bot) [#51105](https://github.com/nodejs/node/pull/51105)
+* \[[`4a5d3bda72`](https://github.com/nodejs/node/commit/4a5d3bda72)] - **doc**: the GN files should use Node's license (Cheng Zhao) [#50694](https://github.com/nodejs/node/pull/50694)
+* \[[`84127514ba`](https://github.com/nodejs/node/commit/84127514ba)] - **doc**: improve localWindowSize event descriptions (Davy Landman) [#51071](https://github.com/nodejs/node/pull/51071)
+* \[[`8ee882a49c`](https://github.com/nodejs/node/commit/8ee882a49c)] - **doc**: mark `--jitless` as experimental (Antoine du Hamel) [#51247](https://github.com/nodejs/node/pull/51247)
+* \[[`876743ece1`](https://github.com/nodejs/node/commit/876743ece1)] - **doc**: run license-builder (github-actions\[bot]) [#51199](https://github.com/nodejs/node/pull/51199)
+* \[[`ec6fcff009`](https://github.com/nodejs/node/commit/ec6fcff009)] - **doc**: fix limitations and known issues in pm (Rafael Gonzaga) [#51184](https://github.com/nodejs/node/pull/51184)
+* \[[`c13a5c0373`](https://github.com/nodejs/node/commit/c13a5c0373)] - **doc**: mention node:wasi in the Threat Model (Rafael Gonzaga) [#51211](https://github.com/nodejs/node/pull/51211)
+* \[[`4b19e62444`](https://github.com/nodejs/node/commit/4b19e62444)] - **doc**: remove ambiguous 'considered' (Rich Trott) [#51207](https://github.com/nodejs/node/pull/51207)
+* \[[`5453abd6ad`](https://github.com/nodejs/node/commit/5453abd6ad)] - **doc**: set exit code in custom test runner example (Matteo Collina) [#51056](https://github.com/nodejs/node/pull/51056)
+* \[[`f9d4e07faf`](https://github.com/nodejs/node/commit/f9d4e07faf)] - **doc**: remove version from `maintaining-dependencies.md` (Antoine du Hamel) [#51195](https://github.com/nodejs/node/pull/51195)
+* \[[`df8927a073`](https://github.com/nodejs/node/commit/df8927a073)] - **doc**: mention native addons are restricted in pm (Rafael Gonzaga) [#51185](https://github.com/nodejs/node/pull/51185)
+* \[[`e636d83914`](https://github.com/nodejs/node/commit/e636d83914)] - **doc**: correct note on behavior of stats.isDirectory (Nick Reilingh) [#50946](https://github.com/nodejs/node/pull/50946)
+* \[[`1c71435c2a`](https://github.com/nodejs/node/commit/1c71435c2a)] - **doc**: fix `TestsStream` parent class (Jungku Lee) [#51181](https://github.com/nodejs/node/pull/51181)
+* \[[`2c227b0d64`](https://github.com/nodejs/node/commit/2c227b0d64)] - **doc**: fix simdjson wrong link (Marco Ippolito) [#51177](https://github.com/nodejs/node/pull/51177)
+* \[[`efa13e1943`](https://github.com/nodejs/node/commit/efa13e1943)] - **(SEMVER-MINOR)** **doc**: add documentation for --build-snapshot-config (Anna Henningsen) [#50453](https://github.com/nodejs/node/pull/50453)
+* \[[`941aedc6fc`](https://github.com/nodejs/node/commit/941aedc6fc)] - **errors**: fix stacktrace of SystemError (uzlopak) [#49956](https://github.com/nodejs/node/pull/49956)
+* \[[`47548d9e61`](https://github.com/nodejs/node/commit/47548d9e61)] - **esm**: fix hint on invalid module specifier (Antoine du Hamel) [#51223](https://github.com/nodejs/node/pull/51223)
+* \[[`091098f40a`](https://github.com/nodejs/node/commit/091098f40a)] - **fs**: fix fs.promises.realpath for long paths on Windows (翠 / green) [#51032](https://github.com/nodejs/node/pull/51032)
+* \[[`e5a8fa01aa`](https://github.com/nodejs/node/commit/e5a8fa01aa)] - **fs**: make offset, position & length args in fh.read() optional (Pulkit Gupta) [#51087](https://github.com/nodejs/node/pull/51087)
+* \[[`c87e5d51cc`](https://github.com/nodejs/node/commit/c87e5d51cc)] - **fs**: add missing jsdoc parameters to `readSync` (Yagiz Nizipli) [#51225](https://github.com/nodejs/node/pull/51225)
+* \[[`e24249cf37`](https://github.com/nodejs/node/commit/e24249cf37)] - **fs**: remove `internalModuleReadJSON` binding (Yagiz Nizipli) [#51224](https://github.com/nodejs/node/pull/51224)
+* \[[`7421467812`](https://github.com/nodejs/node/commit/7421467812)] - **fs**: improve mkdtemp performance for buffer prefix (Yagiz Nizipli) [#51078](https://github.com/nodejs/node/pull/51078)
+* \[[`5b229d775f`](https://github.com/nodejs/node/commit/5b229d775f)] - **fs**: validate fd synchronously on c++ (Yagiz Nizipli) [#51027](https://github.com/nodejs/node/pull/51027)
+* \[[`c7a135962d`](https://github.com/nodejs/node/commit/c7a135962d)] - **http**: remove misleading warning (Luigi Pinca) [#51204](https://github.com/nodejs/node/pull/51204)
+* \[[`a325746ff4`](https://github.com/nodejs/node/commit/a325746ff4)] - **http**: do not override user-provided options object (KuthorX) [#33633](https://github.com/nodejs/node/pull/33633)
+* \[[`89eee7763f`](https://github.com/nodejs/node/commit/89eee7763f)] - **http2**: addtl http/2 settings (Marten Richter) [#49025](https://github.com/nodejs/node/pull/49025)
+* \[[`624142947f`](https://github.com/nodejs/node/commit/624142947f)] - **lib**: fix use of `--frozen-intrinsics` with `--jitless` (Antoine du Hamel) [#51248](https://github.com/nodejs/node/pull/51248)
+* \[[`8f845eb001`](https://github.com/nodejs/node/commit/8f845eb001)] - **lib**: move function declaration outside of loop (Sanjaiyan Parthipan) [#51242](https://github.com/nodejs/node/pull/51242)
+* \[[`ed7305e49b`](https://github.com/nodejs/node/commit/ed7305e49b)] - **lib**: reduce overhead of `SafePromiseAllSettledReturnVoid` calls (Antoine du Hamel) [#51243](https://github.com/nodejs/node/pull/51243)
+* \[[`291265ce27`](https://github.com/nodejs/node/commit/291265ce27)] - **lib**: expose default prepareStackTrace (Chengzhong Wu) [#50827](https://github.com/nodejs/node/pull/50827)
+* \[[`8ff6bc45ca`](https://github.com/nodejs/node/commit/8ff6bc45ca)] - **lib,permission**: handle buffer on fs.symlink (Rafael Gonzaga) [#51212](https://github.com/nodejs/node/pull/51212)
+* \[[`416b4f8063`](https://github.com/nodejs/node/commit/416b4f8063)] - **(SEMVER-MINOR)** **lib,src,permission**: port path.resolve to C++ (Rafael Gonzaga) [#50758](https://github.com/nodejs/node/pull/50758)
+* \[[`6648a5c576`](https://github.com/nodejs/node/commit/6648a5c576)] - **meta**: notify tsc on changes in SECURITY.md (Rafael Gonzaga) [#51259](https://github.com/nodejs/node/pull/51259)
+* \[[`83a99ccedd`](https://github.com/nodejs/node/commit/83a99ccedd)] - **meta**: update artifact actions to v4 (Michaël Zasso) [#51219](https://github.com/nodejs/node/pull/51219)
+* \[[`b621ada69a`](https://github.com/nodejs/node/commit/b621ada69a)] - **module**: move the CJS exports cache to internal/modules/cjs/loader (Joyee Cheung) [#51157](https://github.com/nodejs/node/pull/51157)
+* \[[`e4be5b60f0`](https://github.com/nodejs/node/commit/e4be5b60f0)] - **(SEMVER-MINOR)** **net**: add connection attempt events (Paolo Insogna) [#51045](https://github.com/nodejs/node/pull/51045)
+* \[[`3a492056e2`](https://github.com/nodejs/node/commit/3a492056e2)] - **node-api**: type tag external values without v8::Private (Chengzhong Wu) [#51149](https://github.com/nodejs/node/pull/51149)
+* \[[`b2135ae7dc`](https://github.com/nodejs/node/commit/b2135ae7dc)] - **node-api**: segregate nogc APIs from rest via type system (Gabriel Schulhof) [#50060](https://github.com/nodejs/node/pull/50060)
+* \[[`8f4325dcd5`](https://github.com/nodejs/node/commit/8f4325dcd5)] - **permission**: fix wildcard when children > 1 (Rafael Gonzaga) [#51209](https://github.com/nodejs/node/pull/51209)
+* \[[`7ecf99404e`](https://github.com/nodejs/node/commit/7ecf99404e)] - **quic**: update quic impl to use latest ngtcp2/nghttp3 (James M Snell) [#51291](https://github.com/nodejs/node/pull/51291)
+* \[[`5b32e21f3b`](https://github.com/nodejs/node/commit/5b32e21f3b)] - **quic**: add quic internalBinding, refine Endpoint, add types (James M Snell) [#51112](https://github.com/nodejs/node/pull/51112)
+* \[[`3310095bea`](https://github.com/nodejs/node/commit/3310095bea)] - **repl**: fix prepareStackTrace frames array order (Chengzhong Wu) [#50827](https://github.com/nodejs/node/pull/50827)
+* \[[`a0ff00b526`](https://github.com/nodejs/node/commit/a0ff00b526)] - **src**: avoid draining platform tasks at FreeEnvironment (Chengzhong Wu) [#51290](https://github.com/nodejs/node/pull/51290)
+* \[[`115e0585cd`](https://github.com/nodejs/node/commit/115e0585cd)] - **src**: add fast api for Histogram (James M Snell) [#51296](https://github.com/nodejs/node/pull/51296)
+* \[[`29b81576c6`](https://github.com/nodejs/node/commit/29b81576c6)] - **src**: refactor `GetCreationContext` calls (Yagiz Nizipli) [#51287](https://github.com/nodejs/node/pull/51287)
+* \[[`54dd978400`](https://github.com/nodejs/node/commit/54dd978400)] - **src**: enter isolate before destructing IsolateData (Ben Noordhuis) [#51138](https://github.com/nodejs/node/pull/51138)
+* \[[`864ecb0dfa`](https://github.com/nodejs/node/commit/864ecb0dfa)] - **src**: do not treat all paths ending with node\_modules as such (Michaël Zasso) [#51269](https://github.com/nodejs/node/pull/51269)
+* \[[`df31c8114c`](https://github.com/nodejs/node/commit/df31c8114c)] - **src**: eliminate duplicate code in histogram.cc (James M Snell) [#51263](https://github.com/nodejs/node/pull/51263)
+* \[[`17c73e6d0c`](https://github.com/nodejs/node/commit/17c73e6d0c)] - **src**: fix unix abstract socket path for trace event (theanarkh) [#50858](https://github.com/nodejs/node/pull/50858)
+* \[[`96d64edc94`](https://github.com/nodejs/node/commit/96d64edc94)] - **src**: use BignumPointer and use BN\_clear\_free (James M Snell) [#50454](https://github.com/nodejs/node/pull/50454)
+* \[[`8a2dd93a14`](https://github.com/nodejs/node/commit/8a2dd93a14)] - **src**: implement FastByteLengthUtf8 with simdutf::utf8\_length\_from\_latin1 (Daniel Lemire) [#50840](https://github.com/nodejs/node/pull/50840)
+* \[[`e54ddf898f`](https://github.com/nodejs/node/commit/e54ddf898f)] - **(SEMVER-MINOR)** **src**: support configurable snapshot (Joyee Cheung) [#50453](https://github.com/nodejs/node/pull/50453)
+* \[[`a69c7d7bc3`](https://github.com/nodejs/node/commit/a69c7d7bc3)] - **(SEMVER-MINOR)** **src,permission**: add --allow-addon flag (Rafael Gonzaga) [#51183](https://github.com/nodejs/node/pull/51183)
+* \[[`e7925e66fc`](https://github.com/nodejs/node/commit/e7925e66fc)] - **src,stream**: improve WriteString (ywave620) [#51155](https://github.com/nodejs/node/pull/51155)
+* \[[`82de6603af`](https://github.com/nodejs/node/commit/82de6603af)] - **stream**: fix code style (Mattias Buelens) [#51168](https://github.com/nodejs/node/pull/51168)
+* \[[`e443953656`](https://github.com/nodejs/node/commit/e443953656)] - **stream**: fix cloned webstreams not being unref'd (James M Snell) [#51255](https://github.com/nodejs/node/pull/51255)
+* \[[`757a84c9ea`](https://github.com/nodejs/node/commit/757a84c9ea)] - **test**: fix flaky conditions for ppc64 SEA tests (Richard Lau) [#51422](https://github.com/nodejs/node/pull/51422)
+* \[[`85ee2f7255`](https://github.com/nodejs/node/commit/85ee2f7255)] - **test**: replace forEach() with for...of (Alexander Jones) [#50608](https://github.com/nodejs/node/pull/50608)
+* \[[`549e4b4142`](https://github.com/nodejs/node/commit/549e4b4142)] - **test**: replace forEach with for...of (Ospite Privilegiato) [#50787](https://github.com/nodejs/node/pull/50787)
+* \[[`ef44f9bef2`](https://github.com/nodejs/node/commit/ef44f9bef2)] - **test**: replace foreach with for of (lucacapocci94-dev) [#50790](https://github.com/nodejs/node/pull/50790)
+* \[[`652af45485`](https://github.com/nodejs/node/commit/652af45485)] - **test**: replace forEach() with for...of (Jia) [#50610](https://github.com/nodejs/node/pull/50610)
+* \[[`684dd9db2f`](https://github.com/nodejs/node/commit/684dd9db2f)] - **test**: fix inconsistency write size in `test-fs-readfile-tostring-fail` (Jungku Lee) [#51141](https://github.com/nodejs/node/pull/51141)
+* \[[`aaf710f535`](https://github.com/nodejs/node/commit/aaf710f535)] - **test**: replace forEach test-http-server-multiheaders2 (Marco Mac) [#50794](https://github.com/nodejs/node/pull/50794)
+* \[[`57c64550cc`](https://github.com/nodejs/node/commit/57c64550cc)] - **test**: replace forEach with for-of in test-webcrypto-export-import-ec (Chiara Ricciardi) [#51249](https://github.com/nodejs/node/pull/51249)
+* \[[`88e865181b`](https://github.com/nodejs/node/commit/88e865181b)] - **test**: move to for of loop in test-http-hostname-typechecking.js (Luca Del Puppo) [#50782](https://github.com/nodejs/node/pull/50782)
+* \[[`3db376f67a`](https://github.com/nodejs/node/commit/3db376f67a)] - **test**: skip test-watch-mode-inspect on arm (Michael Dawson) [#51210](https://github.com/nodejs/node/pull/51210)
+* \[[`38232d1c52`](https://github.com/nodejs/node/commit/38232d1c52)] - **test**: replace forEach with for of in file test-trace-events-net.js (Ianna83) [#50789](https://github.com/nodejs/node/pull/50789)
+* \[[`f1cb58355a`](https://github.com/nodejs/node/commit/f1cb58355a)] - **test**: replace forEach() with for...of in test/parallel/test-util-log.js (Edoardo Dusi) [#50783](https://github.com/nodejs/node/pull/50783)
+* \[[`9bfd84c117`](https://github.com/nodejs/node/commit/9bfd84c117)] - **test**: replace forEach with for of in test-trace-events-api.js (Andrea Pavone) [#50784](https://github.com/nodejs/node/pull/50784)
+* \[[`7e9834915a`](https://github.com/nodejs/node/commit/7e9834915a)] - **test**: replace forEach with for-of in test-v8-serders.js (Mattia Iannone) [#50791](https://github.com/nodejs/node/pull/50791)
+* \[[`b6f232e841`](https://github.com/nodejs/node/commit/b6f232e841)] - **test**: add URL tests to fs-read in pm (Rafael Gonzaga) [#51213](https://github.com/nodejs/node/pull/51213)
+* \[[`8a2178c5f5`](https://github.com/nodejs/node/commit/8a2178c5f5)] - **test**: use tmpdir.refresh() in test-esm-loader-resolve-type.mjs (Luigi Pinca) [#51206](https://github.com/nodejs/node/pull/51206)
+* \[[`7e9a0b192a`](https://github.com/nodejs/node/commit/7e9a0b192a)] - **test**: use tmpdir.refresh() in test-esm-json.mjs (Luigi Pinca) [#51205](https://github.com/nodejs/node/pull/51205)
+* \[[`d7c2572fe0`](https://github.com/nodejs/node/commit/d7c2572fe0)] - **test**: fix flakiness in worker\*.test-free-called (Jithil P Ponnan) [#51013](https://github.com/nodejs/node/pull/51013)
+* \[[`979cebc955`](https://github.com/nodejs/node/commit/979cebc955)] - **test\_runner**: fixed test object is incorrectly passed to setup() (Pulkit Gupta) [#50982](https://github.com/nodejs/node/pull/50982)
+* \[[`63db82abe6`](https://github.com/nodejs/node/commit/63db82abe6)] - **test\_runner**: fixed to run after hook if before throws an error (Pulkit Gupta) [#51062](https://github.com/nodejs/node/pull/51062)
+* \[[`c31ed51373`](https://github.com/nodejs/node/commit/c31ed51373)] - **(SEMVER-MINOR)** **timers**: export timers.promises (Marco Ippolito) [#51246](https://github.com/nodejs/node/pull/51246)
+* \[[`fc10f889eb`](https://github.com/nodejs/node/commit/fc10f889eb)] - **tools**: update lint-md-dependencies to rollup\@4.9.2 (Node.js GitHub Bot) [#51320](https://github.com/nodejs/node/pull/51320)
+* \[[`d5a5f12d15`](https://github.com/nodejs/node/commit/d5a5f12d15)] - **tools**: fix dep\_updaters dir updates (Michaël Zasso) [#51294](https://github.com/nodejs/node/pull/51294)
+* \[[`bdcb5ed510`](https://github.com/nodejs/node/commit/bdcb5ed510)] - **tools**: update inspector\_protocol to c488ba2 (cola119) [#51293](https://github.com/nodejs/node/pull/51293)
+* \[[`69a46add77`](https://github.com/nodejs/node/commit/69a46add77)] - **tools**: update inspector\_protocol to 9b4a4aa (cola119) [#51293](https://github.com/nodejs/node/pull/51293)
+* \[[`e325f49d19`](https://github.com/nodejs/node/commit/e325f49d19)] - **tools**: update inspector\_protocol to 2f51e05 (cola119) [#51293](https://github.com/nodejs/node/pull/51293)
+* \[[`60d804851b`](https://github.com/nodejs/node/commit/60d804851b)] - **tools**: update inspector\_protocol to d7b099b (cola119) [#51293](https://github.com/nodejs/node/pull/51293)
+* \[[`d18168489f`](https://github.com/nodejs/node/commit/d18168489f)] - **tools**: update inspector\_protocol to 912eb68 (cola119) [#51293](https://github.com/nodejs/node/pull/51293)
+* \[[`ef4f46fc39`](https://github.com/nodejs/node/commit/ef4f46fc39)] - **tools**: update inspector\_protocol to 547c5b8 (cola119) [#51293](https://github.com/nodejs/node/pull/51293)
+* \[[`c3126fc016`](https://github.com/nodejs/node/commit/c3126fc016)] - **tools**: update inspector\_protocol to ca525fc (cola119) [#51293](https://github.com/nodejs/node/pull/51293)
+* \[[`917d887dde`](https://github.com/nodejs/node/commit/917d887dde)] - **tools**: update lint-md-dependencies to rollup\@4.9.1 (Node.js GitHub Bot) [#51276](https://github.com/nodejs/node/pull/51276)
+* \[[`37594918e0`](https://github.com/nodejs/node/commit/37594918e0)] - **tools**: check timezone current version (Marco Ippolito) [#51178](https://github.com/nodejs/node/pull/51178)
+* \[[`d0d2faf899`](https://github.com/nodejs/node/commit/d0d2faf899)] - **tools**: update lint-md-dependencies to rollup\@4.9.0 (Node.js GitHub Bot) [#51193](https://github.com/nodejs/node/pull/51193)
+* \[[`c96ef6533c`](https://github.com/nodejs/node/commit/c96ef6533c)] - **tools**: update eslint to 8.56.0 (Node.js GitHub Bot) [#51194](https://github.com/nodejs/node/pull/51194)
+* \[[`f4f781d493`](https://github.com/nodejs/node/commit/f4f781d493)] - **util**: pass invalidSubtypeIndex instead of trimmedSubtype to error (Gaurish Sethia) [#51264](https://github.com/nodejs/node/pull/51264)
+* \[[`867b484429`](https://github.com/nodejs/node/commit/867b484429)] - **watch**: clarify that the fileName parameter can be null (Luigi Pinca) [#51305](https://github.com/nodejs/node/pull/51305)
+* \[[`56e8969b65`](https://github.com/nodejs/node/commit/56e8969b65)] - **watch**: fix null `fileName` on windows systems (vnc5) [#49891](https://github.com/nodejs/node/pull/49891)
+* \[[`3f4fd6efbb`](https://github.com/nodejs/node/commit/3f4fd6efbb)] - **watch**: fix infinite loop when passing --watch=true flag (Pulkit Gupta) [#51160](https://github.com/nodejs/node/pull/51160)
+
## 2023-12-19, Version 21.5.0 (Current), @RafaelGSS
diff --git a/doc/contributing/offboarding.md b/doc/contributing/offboarding.md
index f30688a33d0d87..f9d8140b54b4d8 100644
--- a/doc/contributing/offboarding.md
+++ b/doc/contributing/offboarding.md
@@ -3,7 +3,7 @@
This document is a checklist of things to do when a collaborator becomes
emeritus or leaves the project.
-* Remove the collaborator from the @nodejs/collaborators team.
+* Remove the collaborator from the [`@nodejs/collaborators`][] team.
* Open a fast-track pull request to move the collaborator to the collaborator
emeriti list in README.md.
* Determine what GitHub teams the collaborator belongs to. In consultation with
@@ -15,6 +15,11 @@ emeritus or leaves the project.
* When in doubt, especially if you are unable to get in contact with the
collaborator, remove them from all teams. It is easy enough to add them
back later, so we err on the side of privacy and security.
-* Open an issue in the [build](https://github.com/nodejs/build) repository
- titled `Remove Collaborator from Coverity` asking that the collaborator
- be removed from the Node.js coverity project if they had access.
+* Remove them from the [`@nodejs`](https://github.com/orgs/nodejs/people) GitHub
+ org unless they are members for a reason other than being a Collaborator.
+* [Open an issue](https://github.com/nodejs/build/issues/new) in the
+ nodejs/build repository titled `Remove Collaborator from Coverity` asking that
+ the collaborator be removed from the Node.js coverity project if they had
+ access.
+
+[`@nodejs/collaborators`]: https://github.com/orgs/nodejs/teams/collaborators/members
diff --git a/doc/contributing/security-release-process.md b/doc/contributing/security-release-process.md
index 4408782069970f..4524df2a90047d 100644
--- a/doc/contributing/security-release-process.md
+++ b/doc/contributing/security-release-process.md
@@ -124,7 +124,7 @@ out a better way, forward the email you receive to
`oss-security@lists.openwall.com` as a CC.
* [ ] Post in the [nodejs-social channel][]
- in the OpenJS slack asking for amplication of the blog post.
+ in the OpenJS slack asking for amplification of the blog post.
```text
Security release pre-alert:
@@ -184,7 +184,7 @@ out a better way, forward the email you receive to
```
* [ ] Post in the [nodejs-social channel][]
- in the OpenJS slack asking for amplication of the blog post.
+ in the OpenJS slack asking for amplification of the blog post.
```text
Security release:
diff --git a/doc/contributing/strategic-initiatives.md b/doc/contributing/strategic-initiatives.md
index ca4308daa3ebb8..5af550441db29a 100644
--- a/doc/contributing/strategic-initiatives.md
+++ b/doc/contributing/strategic-initiatives.md
@@ -14,7 +14,7 @@ agenda to ensure they are active and have the support they need.
| Startup Snapshot | [Joyee Cheung][joyeecheung] | |
| V8 Currency | [Michaël Zasso][targos] | |
| Next-10 | [Michael Dawson][mhdawson] | |
-| Single executable apps | [Darshan Sen][RaisinTen] | |
+| Single executable apps | | |
| Performance | | |
| Primordials | [Benjamin Gruenbaum][benjamingr] | |
@@ -39,7 +39,6 @@ agenda to ensure they are active and have the support they need.
-[RaisinTen]: https://github.com/RaisinTen
[aduh95]: https://github.com/aduh95
[benjamingr]: https://github.com/benjamingr
[jasnell]: https://github.com/jasnell
diff --git a/lib/fs.js b/lib/fs.js
index 00ca3f50acef53..22688f631daf88 100644
--- a/lib/fs.js
+++ b/lib/fs.js
@@ -2379,6 +2379,7 @@ function writeFileSync(path, data, options) {
* encoding?: string | null;
* mode?: number;
* flag?: string;
+ * flush?: boolean;
* } | string} [options]
* @param {(err?: Error) => any} callback
* @returns {void}
diff --git a/lib/http2.js b/lib/http2.js
index 8db4d918629a1b..8c6ebebb7b6a4c 100644
--- a/lib/http2.js
+++ b/lib/http2.js
@@ -8,6 +8,7 @@ const {
getDefaultSettings,
getPackedSettings,
getUnpackedSettings,
+ performServerHandshake,
sensitiveHeaders,
Http2ServerRequest,
Http2ServerResponse,
@@ -21,6 +22,7 @@ module.exports = {
getDefaultSettings,
getPackedSettings,
getUnpackedSettings,
+ performServerHandshake,
sensitiveHeaders,
Http2ServerRequest,
Http2ServerResponse,
diff --git a/lib/internal/bootstrap/node.js b/lib/internal/bootstrap/node.js
index 7a773d5208e250..8812c3b35184b2 100644
--- a/lib/internal/bootstrap/node.js
+++ b/lib/internal/bootstrap/node.js
@@ -172,6 +172,7 @@ const rawMethods = internalBinding('process_methods');
process._kill = rawMethods._kill;
const wrapped = perThreadSetup.wrapProcessMethods(rawMethods);
+ process.loadEnvFile = wrapped.loadEnvFile;
process._rawDebug = wrapped._rawDebug;
process.cpuUsage = wrapped.cpuUsage;
process.resourceUsage = wrapped.resourceUsage;
diff --git a/lib/internal/bootstrap/realm.js b/lib/internal/bootstrap/realm.js
index 57ab47178d033d..f030f537a084d7 100644
--- a/lib/internal/bootstrap/realm.js
+++ b/lib/internal/bootstrap/realm.js
@@ -350,7 +350,9 @@ class BuiltinModule {
const url = `node:${this.id}`;
const builtin = this;
const exportsKeys = ArrayPrototypeSlice(this.exportKeys);
- ArrayPrototypePush(exportsKeys, 'default');
+ if (!ArrayPrototypeIncludes(exportsKeys, 'default')) {
+ ArrayPrototypePush(exportsKeys, 'default');
+ }
this.module = new ModuleWrap(
url, undefined, exportsKeys,
function() {
diff --git a/lib/internal/fs/promises.js b/lib/internal/fs/promises.js
index 42364c78c667cb..3272608bf7d04d 100644
--- a/lib/internal/fs/promises.js
+++ b/lib/internal/fs/promises.js
@@ -365,6 +365,7 @@ class FileHandle extends EventEmitter {
* autoClose?: boolean;
* emitClose?: boolean;
* start: number;
+ * highWaterMark?: number;
* flush?: boolean;
* }} [options]
* @returns {WriteStream}
@@ -1254,7 +1255,7 @@ async function* _watch(filename, options = kEmptyObject) {
// e.g. Linux due to the limitations of inotify.
if (options.recursive && !isOSX && !isWindows) {
const watcher = new nonNativeWatcher.FSWatcher(options);
- await watcher[kFSWatchStart](filename);
+ watcher[kFSWatchStart](filename);
yield* watcher;
return;
}
diff --git a/lib/internal/fs/recursive_watch.js b/lib/internal/fs/recursive_watch.js
index 54298832da5a1b..7d8b12eeb93445 100644
--- a/lib/internal/fs/recursive_watch.js
+++ b/lib/internal/fs/recursive_watch.js
@@ -1,10 +1,7 @@
'use strict';
const {
- ArrayPrototypePush,
- SafePromiseAllReturnVoid,
Promise,
- PromisePrototypeThen,
SafeMap,
SafeSet,
StringPrototypeStartsWith,
@@ -31,47 +28,19 @@ const {
} = require('path');
let internalSync;
-let internalPromises;
-
-function lazyLoadFsPromises() {
- internalPromises ??= require('fs/promises');
- return internalPromises;
-}
function lazyLoadFsSync() {
internalSync ??= require('fs');
return internalSync;
}
-let kResistStopPropagation;
-
-async function traverse(dir, files = new SafeMap(), symbolicLinks = new SafeSet()) {
- const { opendir } = lazyLoadFsPromises();
-
- const filenames = await opendir(dir);
- const subdirectories = [];
-
- for await (const file of filenames) {
- const f = pathJoin(dir, file.name);
-
- files.set(f, file);
-
- // Do not follow symbolic links
- if (file.isSymbolicLink()) {
- symbolicLinks.add(f);
- } else if (file.isDirectory()) {
- ArrayPrototypePush(subdirectories, traverse(f, files));
- }
- }
-
- await SafePromiseAllReturnVoid(subdirectories);
- return files;
-}
+let kResistStopPropagation;
class FSWatcher extends EventEmitter {
#options = null;
#closed = false;
#files = new SafeMap();
+ #watchers = new SafeMap();
#symbolicFiles = new SafeSet();
#rootPath = pathResolve();
#watchingFile = false;
@@ -111,11 +80,11 @@ class FSWatcher extends EventEmitter {
return;
}
- const { unwatchFile } = lazyLoadFsSync();
this.#closed = true;
for (const file of this.#files.keys()) {
- unwatchFile(file);
+ this.#watchers.get(file).close();
+ this.#watchers.delete(file);
}
this.#files.clear();
@@ -124,24 +93,26 @@ class FSWatcher extends EventEmitter {
}
#unwatchFiles(file) {
- const { unwatchFile } = lazyLoadFsSync();
-
this.#symbolicFiles.delete(file);
for (const filename of this.#files.keys()) {
if (StringPrototypeStartsWith(filename, file)) {
- unwatchFile(filename);
+ this.#files.delete(filename);
+ this.#watchers.get(filename).close();
+ this.#watchers.delete(filename);
}
}
}
- async #watchFolder(folder) {
- const { opendir } = lazyLoadFsPromises();
+ #watchFolder(folder) {
+ const { readdirSync } = lazyLoadFsSync();
try {
- const files = await opendir(folder);
+ const files = readdirSync(folder, {
+ withFileTypes: true,
+ });
- for await (const file of files) {
+ for (const file of files) {
if (this.#closed) {
break;
}
@@ -155,11 +126,9 @@ class FSWatcher extends EventEmitter {
this.#symbolicFiles.add(f);
}
- this.#files.set(f, file);
- if (file.isFile()) {
- this.#watchFile(f);
- } else if (file.isDirectory() && !file.isSymbolicLink()) {
- await this.#watchFolder(f);
+ this.#watchFile(f);
+ if (file.isDirectory() && !file.isSymbolicLink()) {
+ this.#watchFolder(f);
}
}
}
@@ -173,22 +142,30 @@ class FSWatcher extends EventEmitter {
return;
}
- const { watchFile } = lazyLoadFsSync();
- const existingStat = this.#files.get(file);
+ const { watch, statSync } = lazyLoadFsSync();
+
+ if (this.#files.has(file)) {
+ return;
+ }
+
+ {
+ const existingStat = statSync(file);
+ this.#files.set(file, existingStat);
+ }
- watchFile(file, {
+ const watcher = watch(file, {
persistent: this.#options.persistent,
- }, (currentStats, previousStats) => {
- if (existingStat && !existingStat.isDirectory() &&
- currentStats.nlink !== 0 && existingStat.mtimeMs === currentStats.mtimeMs) {
- return;
- }
+ }, (eventType, filename) => {
+ const existingStat = this.#files.get(file);
+ const currentStats = statSync(file);
this.#files.set(file, currentStats);
- if (currentStats.birthtimeMs === 0 && previousStats.birthtimeMs !== 0) {
+ if (currentStats.birthtimeMs === 0 && existingStat.birthtimeMs !== 0) {
// The file is now deleted
this.#files.delete(file);
+ this.#watchers.delete(file);
+ watcher.close();
this.emit('change', 'rename', pathRelative(this.#rootPath, file));
this.#unwatchFiles(file);
} else if (file === this.#rootPath && this.#watchingFile) {
@@ -205,6 +182,7 @@ class FSWatcher extends EventEmitter {
this.emit('change', 'change', pathRelative(this.#rootPath, file));
}
});
+ this.#watchers.set(file, watcher);
}
[kFSWatchStart](filename) {
@@ -217,19 +195,9 @@ class FSWatcher extends EventEmitter {
this.#closed = false;
this.#watchingFile = file.isFile();
+ this.#watchFile(filename);
if (file.isDirectory()) {
- this.#files.set(filename, file);
-
- PromisePrototypeThen(
- traverse(filename, this.#files, this.#symbolicFiles),
- () => {
- for (const f of this.#files.keys()) {
- this.#watchFile(f);
- }
- },
- );
- } else {
- this.#watchFile(filename);
+ this.#watchFolder(filename);
}
} catch (error) {
if (error.code === 'ENOENT') {
@@ -264,7 +232,10 @@ class FSWatcher extends EventEmitter {
resolve({ __proto__: null, value: { eventType, filename } });
});
} : (resolve, reject) => {
- const onAbort = () => reject(new AbortError(undefined, { cause: signal.reason }));
+ const onAbort = () => {
+ this.close();
+ reject(new AbortError(undefined, { cause: signal.reason }));
+ };
if (signal.aborted) return onAbort();
kResistStopPropagation ??= require('internal/event_target').kResistStopPropagation;
signal.addEventListener('abort', onAbort, { __proto__: null, once: true, [kResistStopPropagation]: true });
@@ -277,6 +248,10 @@ class FSWatcher extends EventEmitter {
next: () => (this.#closed ?
{ __proto__: null, done: true } :
new Promise(promiseExecutor)),
+ return: () => {
+ this.close();
+ return { __proto__: null, done: true };
+ },
[SymbolAsyncIterator]() { return this; },
};
}
diff --git a/lib/internal/http2/compat.js b/lib/internal/http2/compat.js
index 7bf079900c652f..f0dd26de0d6359 100644
--- a/lib/internal/http2/compat.js
+++ b/lib/internal/http2/compat.js
@@ -76,6 +76,7 @@ const kRawHeaders = Symbol('rawHeaders');
const kTrailers = Symbol('trailers');
const kRawTrailers = Symbol('rawTrailers');
const kSetHeader = Symbol('setHeader');
+const kAppendHeader = Symbol('appendHeader');
const kAborted = Symbol('aborted');
let statusMessageWarned = false;
@@ -652,6 +653,47 @@ class Http2ServerResponse extends Stream {
this[kHeaders][name] = value;
}
+ appendHeader(name, value) {
+ validateString(name, 'name');
+ if (this[kStream].headersSent)
+ throw new ERR_HTTP2_HEADERS_SENT();
+
+ this[kAppendHeader](name, value);
+ }
+
+ [kAppendHeader](name, value) {
+ name = StringPrototypeToLowerCase(StringPrototypeTrim(name));
+ assertValidHeader(name, value);
+
+ if (!isConnectionHeaderAllowed(name, value)) {
+ return;
+ }
+
+ if (name[0] === ':')
+ assertValidPseudoHeader(name);
+ else if (!checkIsHttpToken(name))
+ this.destroy(new ERR_INVALID_HTTP_TOKEN('Header name', name));
+
+ // Handle various possible cases the same as OutgoingMessage.appendHeader:
+ const headers = this[kHeaders];
+ if (headers === null || !headers[name]) {
+ return this.setHeader(name, value);
+ }
+
+ if (!ArrayIsArray(headers[name])) {
+ headers[name] = [headers[name]];
+ }
+
+ const existingValues = headers[name];
+ if (ArrayIsArray(value)) {
+ for (let i = 0, length = value.length; i < length; i++) {
+ existingValues.push(value[i]);
+ }
+ } else {
+ existingValues.push(value);
+ }
+ }
+
get statusMessage() {
statusMessageWarn();
@@ -684,10 +726,33 @@ class Http2ServerResponse extends Stream {
let i;
if (ArrayIsArray(headers)) {
+ if (this[kHeaders]) {
+ // Headers in obj should override previous headers but still
+ // allow explicit duplicates. To do so, we first remove any
+ // existing conflicts, then use appendHeader. This is the
+ // slow path, which only applies when you use setHeader and
+ // then pass headers in writeHead too.
+
+ // We need to handle both the tuple and flat array formats, just
+ // like the logic further below.
+ if (headers.length && ArrayIsArray(headers[0])) {
+ for (let n = 0; n < headers.length; n += 1) {
+ const key = headers[n + 0][0];
+ this.removeHeader(key);
+ }
+ } else {
+ for (let n = 0; n < headers.length; n += 2) {
+ const key = headers[n + 0];
+ this.removeHeader(key);
+ }
+ }
+ }
+
+ // Append all the headers provided in the array:
if (headers.length && ArrayIsArray(headers[0])) {
for (i = 0; i < headers.length; i++) {
const header = headers[i];
- this[kSetHeader](header[0], header[1]);
+ this[kAppendHeader](header[0], header[1]);
}
} else {
if (headers.length % 2 !== 0) {
@@ -695,7 +760,7 @@ class Http2ServerResponse extends Stream {
}
for (i = 0; i < headers.length; i += 2) {
- this[kSetHeader](headers[i], headers[i + 1]);
+ this[kAppendHeader](headers[i], headers[i + 1]);
}
}
} else if (typeof headers === 'object') {
diff --git a/lib/internal/http2/core.js b/lib/internal/http2/core.js
index 69956d2885e1f6..4e67eba4ecac40 100644
--- a/lib/internal/http2/core.js
+++ b/lib/internal/http2/core.js
@@ -1228,12 +1228,6 @@ class Http2Session extends EventEmitter {
constructor(type, options, socket) {
super();
- if (!socket._handle || !socket._handle.isStreamBase) {
- socket = new JSStreamSocket(socket);
- }
- socket.on('error', socketOnError);
- socket.on('close', socketOnClose);
-
// No validation is performed on the input parameters because this
// constructor is not exported directly for users.
@@ -1245,6 +1239,12 @@ class Http2Session extends EventEmitter {
socket[kSession] = this;
+ if (!socket._handle || !socket._handle.isStreamBase) {
+ socket = new JSStreamSocket(socket);
+ }
+ socket.on('error', socketOnError);
+ socket.on('close', socketOnClose);
+
this[kState] = {
destroyCode: NGHTTP2_NO_ERROR,
flags: SESSION_FLAGS_PENDING,
@@ -1644,7 +1644,7 @@ class ServerHttp2Session extends Http2Session {
// not be an issue in practice. Additionally, the 'priority' event on
// server instances (or any other object) is fully undocumented.
this[kNativeFields][kSessionPriorityListenerCount] =
- server.listenerCount('priority');
+ server ? server.listenerCount('priority') : 0;
}
get server() {
@@ -3435,6 +3435,11 @@ function getUnpackedSettings(buf, options = kEmptyObject) {
return settings;
}
+function performServerHandshake(socket, options = {}) {
+ options = initializeOptions(options);
+ return new ServerHttp2Session(options, socket, undefined);
+}
+
binding.setCallbackFunctions(
onSessionInternalError,
onPriority,
@@ -3458,6 +3463,7 @@ module.exports = {
getDefaultSettings,
getPackedSettings,
getUnpackedSettings,
+ performServerHandshake,
sensitiveHeaders: kSensitiveHeaders,
Http2Session,
Http2Stream,
diff --git a/lib/internal/js_stream_socket.js b/lib/internal/js_stream_socket.js
index a6aee73f468b08..3e01327202be1a 100644
--- a/lib/internal/js_stream_socket.js
+++ b/lib/internal/js_stream_socket.js
@@ -17,6 +17,7 @@ let debug = require('internal/util/debuglog').debuglog(
);
const { owner_symbol } = require('internal/async_hooks').symbols;
const { ERR_STREAM_WRAP } = require('internal/errors').codes;
+const { kSession } = require('internal/stream_base_commons');
const kCurrentWriteRequest = Symbol('kCurrentWriteRequest');
const kCurrentShutdownRequest = Symbol('kCurrentShutdownRequest');
@@ -263,6 +264,14 @@ class JSStreamSocket extends Socket {
cb();
});
}
+
+ get [kSession]() {
+ return this.stream[kSession];
+ }
+
+ set [kSession](session) {
+ this.stream[kSession] = session;
+ }
}
module.exports = JSStreamSocket;
diff --git a/lib/internal/modules/cjs/loader.js b/lib/internal/modules/cjs/loader.js
index 44abacb41a3430..e5b47d8874aeb7 100644
--- a/lib/internal/modules/cjs/loader.js
+++ b/lib/internal/modules/cjs/loader.js
@@ -469,15 +469,15 @@ function tryPackage(requestPath, exts, isMain, originalPath) {
}
/**
- * Check if the file exists and is not a directory if using `--preserve-symlinks` and `isMain` is false, keep symlinks
- * intact, otherwise resolve to the absolute realpath.
+ * Check if the file exists and is not a directory if using `--preserve-symlinks` and `isMain` is false or
+ * `--preserve-symlinks-main` and `isMain` is true, keep symlinks intact, otherwise resolve to the absolute realpath.
* @param {string} requestPath The path to the file to load.
* @param {boolean} isMain Whether the file is the main module.
*/
function tryFile(requestPath, isMain) {
const rc = _stat(requestPath);
if (rc !== 0) { return; }
- if (getOptionValue('--preserve-symlinks') && !isMain) {
+ if (getOptionValue(isMain ? '--preserve-symlinks-main' : '--preserve-symlinks')) {
return path.resolve(requestPath);
}
return toRealPath(requestPath);
diff --git a/lib/internal/process/per_thread.js b/lib/internal/process/per_thread.js
index 9b86f20053da3b..b45f2a61e0ddaf 100644
--- a/lib/internal/process/per_thread.js
+++ b/lib/internal/process/per_thread.js
@@ -46,6 +46,8 @@ const {
validateNumber,
validateObject,
} = require('internal/validators');
+const { getValidatedPath } = require('internal/fs/utils');
+const { toNamespacedPath } = require('path');
const constants = internalBinding('constants').os.signals;
const kInternal = Symbol('internal properties');
@@ -57,21 +59,13 @@ const { exitCodes: { kNoFailure } } = internalBinding('errors');
const binding = internalBinding('process_methods');
-let hrValues;
-let hrBigintValues;
-
-function refreshHrtimeBuffer() {
- // The 3 entries filled in by the original process.hrtime contains
- // the upper/lower 32 bits of the second part of the value,
- // and the remaining nanoseconds of the value.
- hrValues = binding.hrtimeBuffer;
- // Use a BigUint64Array in the closure because this is actually a bit
- // faster than simply returning a BigInt from C++ in V8 7.1.
- hrBigintValues = new BigUint64Array(binding.hrtimeBuffer.buffer, 0, 1);
-}
-
-// Create the buffers.
-refreshHrtimeBuffer();
+// The 3 entries filled in by the original process.hrtime contains
+// the upper/lower 32 bits of the second part of the value,
+// and the remaining nanoseconds of the value.
+const hrValues = binding.hrtimeBuffer;
+// Use a BigUint64Array because this is actually a bit
+// faster than simply returning a BigInt from C++ in V8 7.1.
+const hrBigintValues = new BigUint64Array(binding.hrtimeBuffer.buffer, 0, 1);
function hrtime(time) {
binding.hrtime();
@@ -108,6 +102,7 @@ function wrapProcessMethods(binding) {
memoryUsage: _memoryUsage,
rss,
resourceUsage: _resourceUsage,
+ loadEnvFile: _loadEnvFile,
} = binding;
function _rawDebug(...args) {
@@ -258,6 +253,19 @@ function wrapProcessMethods(binding) {
};
}
+ /**
+ * Loads the `.env` file to process.env.
+ * @param {string | URL | Buffer | undefined} path
+ */
+ function loadEnvFile(path = undefined) { // Provide optional value so that `loadEnvFile.length` returns 0
+ if (path != null) {
+ path = getValidatedPath(path);
+ _loadEnvFile(toNamespacedPath(path));
+ } else {
+ _loadEnvFile();
+ }
+ }
+
return {
_rawDebug,
@@ -266,6 +274,7 @@ function wrapProcessMethods(binding) {
memoryUsage,
kill,
exit,
+ loadEnvFile,
};
}
@@ -425,5 +434,4 @@ module.exports = {
wrapProcessMethods,
hrtime,
hrtimeBigInt,
- refreshHrtimeBuffer,
};
diff --git a/lib/internal/process/pre_execution.js b/lib/internal/process/pre_execution.js
index b6bdb4785003f7..98533b7828d3ff 100644
--- a/lib/internal/process/pre_execution.js
+++ b/lib/internal/process/pre_execution.js
@@ -211,8 +211,6 @@ function patchProcessObject(expandArgv1) {
const binding = internalBinding('process_methods');
binding.patchProcessObject(process);
- require('internal/process/per_thread').refreshHrtimeBuffer();
-
// Since we replace process.argv[0] below, preserve the original value in case the user needs it.
ObjectDefineProperty(process, 'argv0', {
__proto__: null,
diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js
index e38b1a67c09660..ee2c345e7a0403 100644
--- a/lib/internal/test_runner/test.js
+++ b/lib/internal/test_runner/test.js
@@ -132,6 +132,14 @@ class TestContext {
return this.#test.name;
}
+ get error() {
+ return this.#test.error;
+ }
+
+ get passed() {
+ return this.#test.passed;
+ }
+
diagnostic(message) {
this.#test.diagnostic(message);
}
@@ -639,12 +647,17 @@ class Test extends AsyncResource {
return;
}
- await afterEach();
- await after();
this.pass();
+ try {
+ await afterEach();
+ await after();
+ } catch (err) {
+ // If one of the after hooks has thrown unset endTime so that the
+ // catch below can do its cancel/fail logic.
+ this.endTime = null;
+ throw err;
+ }
} catch (err) {
- try { await afterEach(); } catch { /* test is already failing, let's ignore the error */ }
- try { await after(); } catch { /* Ignore error. */ }
if (isTestFailureError(err)) {
if (err.failureType === kTestTimeoutFailure) {
this.#cancel(err);
@@ -654,6 +667,8 @@ class Test extends AsyncResource {
} else {
this.fail(new ERR_TEST_FAILURE(err, kTestCodeFailure));
}
+ try { await afterEach(); } catch { /* test is already failing, let's ignore the error */ }
+ try { await after(); } catch { /* Ignore error. */ }
} finally {
stopPromise?.[SymbolDispose]();
diff --git a/lib/internal/url.js b/lib/internal/url.js
index 38f97926064595..0e69ff52b5edef 100644
--- a/lib/internal/url.js
+++ b/lib/internal/url.js
@@ -206,6 +206,7 @@ class URLContext {
}
}
+let setURLSearchParamsModified;
let setURLSearchParamsContext;
let getURLSearchParamsList;
let setURLSearchParams;
@@ -475,8 +476,9 @@ class URLSearchParams {
name = StringPrototypeToWellFormed(`${name}`);
value = StringPrototypeToWellFormed(`${value}`);
ArrayPrototypePush(this.#searchParams, name, value);
+
if (this.#context) {
- this.#context.search = this.toString();
+ setURLSearchParamsModified(this.#context);
}
}
@@ -509,8 +511,9 @@ class URLSearchParams {
}
}
}
+
if (this.#context) {
- this.#context.search = this.toString();
+ setURLSearchParamsModified(this.#context);
}
}
@@ -615,7 +618,7 @@ class URLSearchParams {
}
if (this.#context) {
- this.#context.search = this.toString();
+ setURLSearchParamsModified(this.#context);
}
}
@@ -664,7 +667,7 @@ class URLSearchParams {
}
if (this.#context) {
- this.#context.search = this.toString();
+ setURLSearchParamsModified(this.#context);
}
}
@@ -769,6 +772,20 @@ function isURL(self) {
class URL {
#context = new URLContext();
#searchParams;
+ #searchParamsModified;
+
+ static {
+ setURLSearchParamsModified = (obj) => {
+ // When URLSearchParams changes, we lazily update URL on the next read/write for performance.
+ obj.#searchParamsModified = true;
+
+ // If URL has an existing search, remove it without cascading back to URLSearchParams.
+ // Do this to avoid any internal confusion about whether URLSearchParams or URL is up-to-date.
+ if (obj.#context.hasSearch) {
+ obj.#updateContext(bindingUrl.update(obj.#context.href, updateActions.kSearch, ''));
+ }
+ };
+ }
constructor(input, base = undefined) {
markTransferMode(this, false, false);
@@ -814,7 +831,37 @@ class URL {
return `${constructor.name} ${inspect(obj, opts)}`;
}
- #updateContext(href) {
+ #getSearchFromContext() {
+ if (!this.#context.hasSearch) return '';
+ let endsAt = this.#context.href.length;
+ if (this.#context.hasHash) endsAt = this.#context.hash_start;
+ if (endsAt - this.#context.search_start <= 1) return '';
+ return StringPrototypeSlice(this.#context.href, this.#context.search_start, endsAt);
+ }
+
+ #getSearchFromParams() {
+ if (!this.#searchParams?.size) return '';
+ return `?${this.#searchParams}`;
+ }
+
+ #ensureSearchParamsUpdated() {
+ // URL is updated lazily to greatly improve performance when URLSearchParams is updated repeatedly.
+ // If URLSearchParams has been modified, reflect that back into URL, without cascading back.
+ if (this.#searchParamsModified) {
+ this.#searchParamsModified = false;
+ this.#updateContext(bindingUrl.update(this.#context.href, updateActions.kSearch, this.#getSearchFromParams()));
+ }
+ }
+
+ /**
+ * Update the internal context state for URL.
+ * @param {string} href New href string from `bindingUrl.update`.
+ * @param {boolean} [shouldUpdateSearchParams] If the update has potential to update search params (href/search).
+ */
+ #updateContext(href, shouldUpdateSearchParams = false) {
+ const previousSearch = shouldUpdateSearchParams && this.#searchParams &&
+ (this.#searchParamsModified ? this.#getSearchFromParams() : this.#getSearchFromContext());
+
this.#context.href = href;
const {
@@ -840,19 +887,31 @@ class URL {
this.#context.scheme_type = scheme_type;
if (this.#searchParams) {
- if (this.#context.hasSearch) {
- setURLSearchParams(this.#searchParams, this.search);
- } else {
- setURLSearchParams(this.#searchParams, undefined);
+ // If the search string has updated, URL becomes the source of truth, and we update URLSearchParams.
+ // Only do this when we're expecting it to have changed, otherwise a change to hash etc.
+ // would incorrectly compare the URLSearchParams state to the empty URL search state.
+ if (shouldUpdateSearchParams) {
+ const currentSearch = this.#getSearchFromContext();
+ if (previousSearch !== currentSearch) {
+ setURLSearchParams(this.#searchParams, currentSearch);
+ this.#searchParamsModified = false;
+ }
}
+
+ // If we have a URLSearchParams, ensure that URL is up-to-date with any modification to it.
+ this.#ensureSearchParamsUpdated();
}
}
toString() {
+ // Updates to URLSearchParams are lazily propagated to URL, so we need to check we're in sync.
+ this.#ensureSearchParamsUpdated();
return this.#context.href;
}
get href() {
+ // Updates to URLSearchParams are lazily propagated to URL, so we need to check we're in sync.
+ this.#ensureSearchParamsUpdated();
return this.#context.href;
}
@@ -860,7 +919,7 @@ class URL {
value = `${value}`;
const href = bindingUrl.update(this.#context.href, updateActions.kHref, value);
if (!href) { throw new ERR_INVALID_URL(value); }
- this.#updateContext(href);
+ this.#updateContext(href, true);
}
// readonly
@@ -1002,17 +1061,15 @@ class URL {
}
get search() {
- if (!this.#context.hasSearch) { return ''; }
- let endsAt = this.#context.href.length;
- if (this.#context.hasHash) { endsAt = this.#context.hash_start; }
- if (endsAt - this.#context.search_start <= 1) { return ''; }
- return StringPrototypeSlice(this.#context.href, this.#context.search_start, endsAt);
+ // Updates to URLSearchParams are lazily propagated to URL, so we need to check we're in sync.
+ this.#ensureSearchParamsUpdated();
+ return this.#getSearchFromContext();
}
set search(value) {
const href = bindingUrl.update(this.#context.href, updateActions.kSearch, StringPrototypeToWellFormed(`${value}`));
if (href) {
- this.#updateContext(href);
+ this.#updateContext(href, true);
}
}
@@ -1020,8 +1077,9 @@ class URL {
get searchParams() {
// Create URLSearchParams on demand to greatly improve the URL performance.
if (this.#searchParams == null) {
- this.#searchParams = new URLSearchParams(this.search);
+ this.#searchParams = new URLSearchParams(this.#getSearchFromContext());
setURLSearchParamsContext(this.#searchParams, this);
+ this.#searchParamsModified = false;
}
return this.#searchParams;
}
@@ -1041,6 +1099,8 @@ class URL {
}
toJSON() {
+ // Updates to URLSearchParams are lazily propagated to URL, so we need to check we're in sync.
+ this.#ensureSearchParamsUpdated();
return this.#context.href;
}
diff --git a/lib/internal/webstreams/readablestream.js b/lib/internal/webstreams/readablestream.js
index 5103591312e479..d4526011bec46d 100644
--- a/lib/internal/webstreams/readablestream.js
+++ b/lib/internal/webstreams/readablestream.js
@@ -598,8 +598,6 @@ class ReadableStream {
[kTransferList]() {
const { port1, port2 } = new MessageChannel();
- port1.unref();
- port2.unref();
this[kState].transfer.port1 = port1;
this[kState].transfer.port2 = port2;
return [ port2 ];
@@ -609,7 +607,11 @@ class ReadableStream {
const transfer = lazyTransfer();
setupReadableStreamDefaultControllerFromSource(
this,
- new transfer.CrossRealmTransformReadableSource(port),
+ // The MessagePort is set to be referenced when reading.
+ // After two MessagePorts are closed, there is a problem with
+ // a lingering promise not being properly resolved.
+ // https://github.com/nodejs/node/issues/51486
+ new transfer.CrossRealmTransformReadableSource(port, true),
0, () => 1);
}
}
diff --git a/lib/internal/webstreams/transfer.js b/lib/internal/webstreams/transfer.js
index c4cb4077f88403..9835e6ab272c98 100644
--- a/lib/internal/webstreams/transfer.js
+++ b/lib/internal/webstreams/transfer.js
@@ -102,10 +102,11 @@ function InternalCloneableDOMException() {
InternalCloneableDOMException[kDeserialize] = () => {};
class CrossRealmTransformReadableSource {
- constructor(port) {
+ constructor(port, unref) {
this[kState] = {
port,
controller: undefined,
+ unref,
};
port.onmessage = ({ data }) => {
@@ -152,6 +153,10 @@ class CrossRealmTransformReadableSource {
}
async pull() {
+ if (this[kState].unref) {
+ this[kState].unref = false;
+ this[kState].port.ref();
+ }
this[kState].port.postMessage({ type: 'pull' });
}
@@ -172,11 +177,12 @@ class CrossRealmTransformReadableSource {
}
class CrossRealmTransformWritableSink {
- constructor(port) {
+ constructor(port, unref) {
this[kState] = {
port,
controller: undefined,
backpressurePromise: createDeferredPromise(),
+ unref,
};
port.onmessage = ({ data }) => {
@@ -212,6 +218,7 @@ class CrossRealmTransformWritableSink {
error);
port.close();
};
+
port.unref();
}
@@ -220,6 +227,10 @@ class CrossRealmTransformWritableSink {
}
async write(chunk) {
+ if (this[kState].unref) {
+ this[kState].unref = false;
+ this[kState].port.ref();
+ }
if (this[kState].backpressurePromise === undefined) {
this[kState].backpressurePromise = {
promise: PromiseResolve(),
@@ -264,12 +275,12 @@ class CrossRealmTransformWritableSink {
}
function newCrossRealmReadableStream(writable, port) {
- const readable =
- new ReadableStream(
- new CrossRealmTransformReadableSource(port));
+ // The MessagePort should always be unref'd here; otherwise
+ // there is a problem with the process not terminating.
+ // https://github.com/nodejs/node/issues/44985
+ const readable = new ReadableStream(new CrossRealmTransformReadableSource(port, false));
- const promise =
- readableStreamPipeTo(readable, writable, false, false, false);
+ const promise = readableStreamPipeTo(readable, writable, false, false, false);
setPromiseHandled(promise);
@@ -280,12 +291,15 @@ function newCrossRealmReadableStream(writable, port) {
}
function newCrossRealmWritableSink(readable, port) {
- const writable =
- new WritableStream(
- new CrossRealmTransformWritableSink(port));
+ // The MessagePort should always be unref'd here; otherwise
+ // there is a problem with the process not terminating.
+ // https://github.com/nodejs/node/issues/44985
+ const writable = new WritableStream(new CrossRealmTransformWritableSink(port, false));
const promise = readableStreamPipeTo(readable, writable, false, false, false);
+
setPromiseHandled(promise);
+
return {
writable,
promise,
diff --git a/lib/internal/webstreams/writablestream.js b/lib/internal/webstreams/writablestream.js
index 3bc77fc6fb7067..121afc0e181ec6 100644
--- a/lib/internal/webstreams/writablestream.js
+++ b/lib/internal/webstreams/writablestream.js
@@ -263,8 +263,6 @@ class WritableStream {
this[kState].transfer.readable = readable;
this[kState].transfer.promise = promise;
- setPromiseHandled(this[kState].transfer.promise);
-
return {
data: { port: this[kState].transfer.port2 },
deserializeInfo:
@@ -274,8 +272,6 @@ class WritableStream {
[kTransferList]() {
const { port1, port2 } = new MessageChannel();
- port1.unref();
- port2.unref();
this[kState].transfer.port1 = port1;
this[kState].transfer.port2 = port2;
return [ port2 ];
@@ -285,7 +281,11 @@ class WritableStream {
const transfer = lazyTransfer();
setupWritableStreamDefaultControllerFromSink(
this,
- new transfer.CrossRealmTransformWritableSink(port),
+ // The MessagePort is set to be referenced when writing.
+ // After two MessagePorts are closed, there is a problem with
+ // a lingering promise not being properly resolved.
+ // https://github.com/nodejs/node/issues/51486
+ new transfer.CrossRealmTransformWritableSink(port, true),
1,
() => 1);
}
diff --git a/lib/util.js b/lib/util.js
index 13a437c9318d05..e4cb01a8bd13f2 100644
--- a/lib/util.js
+++ b/lib/util.js
@@ -67,9 +67,11 @@ const { debuglog } = require('internal/util/debuglog');
const {
validateFunction,
validateNumber,
+ validateString,
} = require('internal/validators');
const { isBuffer } = require('buffer').Buffer;
const types = require('internal/util/types');
+const binding = internalBinding('util');
const {
deprecate,
@@ -371,6 +373,16 @@ function _exceptionWithHostPort(...args) {
return new ExceptionWithHostPort(...args);
}
+/**
+ * Parses the content of a `.env` file.
+ * @param {string} content
+ * @returns {Record}
+ */
+function parseEnv(content) {
+ validateString(content, 'content');
+ return binding.parseEnv(content);
+}
+
// Keep the `exports =` so that various functions can still be monkeypatched
module.exports = {
_errnoException,
@@ -465,6 +477,7 @@ module.exports = {
return lazyAbortController().aborted;
},
types,
+ parseEnv,
};
defineLazyProperties(
diff --git a/node.gni b/node.gni
index 2be97a17a2f710..2445d09b8179a6 100644
--- a/node.gni
+++ b/node.gni
@@ -1,8 +1,3 @@
-# Copyright 2019 the V8 project authors. All rights reserved.
-# Copyright 2023 Microsoft Inc.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
# This file is used by GN for building, which is NOT the build system used for
# building official binaries.
# Please take a look at node.gyp if you are making changes to build system.
diff --git a/src/async_wrap.h b/src/async_wrap.h
index 01e981aa671a23..7234d88b67a961 100644
--- a/src/async_wrap.h
+++ b/src/async_wrap.h
@@ -102,17 +102,9 @@ namespace node {
#define NODE_ASYNC_CRYPTO_PROVIDER_TYPES(V)
#endif // HAVE_OPENSSL
-#if HAVE_INSPECTOR
-#define NODE_ASYNC_INSPECTOR_PROVIDER_TYPES(V) \
- V(INSPECTORJSBINDING)
-#else
-#define NODE_ASYNC_INSPECTOR_PROVIDER_TYPES(V)
-#endif // HAVE_INSPECTOR
-
-#define NODE_ASYNC_PROVIDER_TYPES(V) \
- NODE_ASYNC_NON_CRYPTO_PROVIDER_TYPES(V) \
- NODE_ASYNC_CRYPTO_PROVIDER_TYPES(V) \
- NODE_ASYNC_INSPECTOR_PROVIDER_TYPES(V)
+#define NODE_ASYNC_PROVIDER_TYPES(V) \
+ NODE_ASYNC_NON_CRYPTO_PROVIDER_TYPES(V) \
+ NODE_ASYNC_CRYPTO_PROVIDER_TYPES(V)
class Environment;
class DestroyParam;
diff --git a/src/base64_version.h b/src/base64_version.h
index c3737f4beebfcb..ce3d7c03f8c979 100644
--- a/src/base64_version.h
+++ b/src/base64_version.h
@@ -2,5 +2,5 @@
// Refer to tools/dep_updaters/update-base64.sh
#ifndef SRC_BASE64_VERSION_H_
#define SRC_BASE64_VERSION_H_
-#define BASE64_VERSION "0.5.1"
+#define BASE64_VERSION "0.5.2"
#endif // SRC_BASE64_VERSION_H_
diff --git a/src/inspector/node_inspector.gypi b/src/inspector/node_inspector.gypi
index ba24e6acfc0744..a2dfdcb42db196 100644
--- a/src/inspector/node_inspector.gypi
+++ b/src/inspector/node_inspector.gypi
@@ -39,7 +39,6 @@
],
'node_protocol_files': [
'<(protocol_tool_path)/lib/Allocator_h.template',
- '<(protocol_tool_path)/lib/Array_h.template',
'<(protocol_tool_path)/lib/base_string_adapter_cc.template',
'<(protocol_tool_path)/lib/base_string_adapter_h.template',
'<(protocol_tool_path)/lib/DispatcherBase_cpp.template',
diff --git a/src/inspector/tracing_agent.cc b/src/inspector/tracing_agent.cc
index cdbdd63f8aff9f..e7b6d3b3ea63bd 100644
--- a/src/inspector/tracing_agent.cc
+++ b/src/inspector/tracing_agent.cc
@@ -147,8 +147,8 @@ DispatchResponse TracingAgent::start(
std::set categories_set;
protocol::Array* categories =
traceConfig->getIncludedCategories();
- for (size_t i = 0; i < categories->length(); i++)
- categories_set.insert(categories->get(i));
+ for (size_t i = 0; i < categories->size(); i++)
+ categories_set.insert((*categories)[i]);
if (categories_set.empty())
return DispatchResponse::Error("At least one category should be enabled");
@@ -172,29 +172,29 @@ DispatchResponse TracingAgent::stop() {
DispatchResponse TracingAgent::getCategories(
std::unique_ptr>* categories) {
- *categories = Array::create();
+ *categories = std::make_unique>();
protocol::Array* categories_list = categories->get();
// In alphabetical order
- categories_list->addItem("node");
- categories_list->addItem("node.async_hooks");
- categories_list->addItem("node.bootstrap");
- categories_list->addItem("node.console");
- categories_list->addItem("node.dns.native");
- categories_list->addItem("node.environment");
- categories_list->addItem("node.fs.async");
- categories_list->addItem("node.fs.sync");
- categories_list->addItem("node.fs_dir.async");
- categories_list->addItem("node.fs_dir.sync");
- categories_list->addItem("node.http");
- categories_list->addItem("node.net.native");
- categories_list->addItem("node.perf");
- categories_list->addItem("node.perf.timerify");
- categories_list->addItem("node.perf.usertiming");
- categories_list->addItem("node.promises.rejections");
- categories_list->addItem("node.threadpoolwork.async");
- categories_list->addItem("node.threadpoolwork.sync");
- categories_list->addItem("node.vm.script");
- categories_list->addItem("v8");
+ categories_list->emplace_back("node");
+ categories_list->emplace_back("node.async_hooks");
+ categories_list->emplace_back("node.bootstrap");
+ categories_list->emplace_back("node.console");
+ categories_list->emplace_back("node.dns.native");
+ categories_list->emplace_back("node.environment");
+ categories_list->emplace_back("node.fs.async");
+ categories_list->emplace_back("node.fs.sync");
+ categories_list->emplace_back("node.fs_dir.async");
+ categories_list->emplace_back("node.fs_dir.sync");
+ categories_list->emplace_back("node.http");
+ categories_list->emplace_back("node.net.native");
+ categories_list->emplace_back("node.perf");
+ categories_list->emplace_back("node.perf.timerify");
+ categories_list->emplace_back("node.perf.usertiming");
+ categories_list->emplace_back("node.promises.rejections");
+ categories_list->emplace_back("node.threadpoolwork.async");
+ categories_list->emplace_back("node.threadpoolwork.sync");
+ categories_list->emplace_back("node.vm.script");
+ categories_list->emplace_back("v8");
return DispatchResponse::OK();
}
diff --git a/src/inspector_js_api.cc b/src/inspector_js_api.cc
index 5c66757afd1a7a..0a2d9e2ec84b08 100644
--- a/src/inspector_js_api.cc
+++ b/src/inspector_js_api.cc
@@ -1,4 +1,3 @@
-#include "async_wrap-inl.h"
#include "base_object-inl.h"
#include "inspector_agent.h"
#include "inspector_io.h"
@@ -61,7 +60,7 @@ struct MainThreadConnection {
};
template
-class JSBindingsConnection : public AsyncWrap {
+class JSBindingsConnection : public BaseObject {
public:
class JSBindingsSessionDelegate : public InspectorSessionDelegate {
public:
@@ -91,15 +90,16 @@ class JSBindingsConnection : public AsyncWrap {
JSBindingsConnection(Environment* env,
Local wrap,
Local callback)
- : AsyncWrap(env, wrap, PROVIDER_INSPECTORJSBINDING),
- callback_(env->isolate(), callback) {
+ : BaseObject(env, wrap), callback_(env->isolate(), callback) {
Agent* inspector = env->inspector_agent();
session_ = ConnectionType::Connect(
inspector, std::make_unique(env, this));
}
void OnMessage(Local value) {
- MakeCallback(callback_.Get(env()->isolate()), 1, &value);
+ auto result = callback_.Get(env()->isolate())
+ ->Call(env()->context(), object(), 1, &value);
+ (void)result;
}
static void Bind(Environment* env, Local target) {
@@ -108,7 +108,6 @@ class JSBindingsConnection : public AsyncWrap {
NewFunctionTemplate(isolate, JSBindingsConnection::New);
tmpl->InstanceTemplate()->SetInternalFieldCount(
JSBindingsConnection::kInternalFieldCount);
- tmpl->Inherit(AsyncWrap::GetConstructorTemplate(env));
SetProtoMethod(isolate, tmpl, "dispatch", JSBindingsConnection::Dispatch);
SetProtoMethod(
isolate, tmpl, "disconnect", JSBindingsConnection::Disconnect);
diff --git a/src/node.cc b/src/node.cc
index cec88c4f4ecd84..f053474decc238 100644
--- a/src/node.cc
+++ b/src/node.cc
@@ -869,13 +869,20 @@ static ExitCode InitializeNodeWithArgsInternal(
if (!file_paths.empty()) {
CHECK(!per_process::v8_initialized);
- auto cwd = Environment::GetCwd(Environment::GetExecPath(*argv));
for (const auto& file_path : file_paths) {
- std::string path = cwd + kPathSeparator + file_path;
- auto path_exists = per_process::dotenv_file.ParsePath(path);
-
- if (!path_exists) errors->push_back(file_path + ": not found");
+ switch (per_process::dotenv_file.ParsePath(file_path)) {
+ case Dotenv::ParseResult::Valid:
+ break;
+ case Dotenv::ParseResult::InvalidContent:
+ errors->push_back(file_path + ": invalid format");
+ break;
+ case Dotenv::ParseResult::FileError:
+ errors->push_back(file_path + ": not found");
+ break;
+ default:
+ UNREACHABLE();
+ }
}
per_process::dotenv_file.AssignNodeOptionsIfAvailable(&node_options);
diff --git a/src/node_dotenv.cc b/src/node_dotenv.cc
index 992633c50b9a14..718e5407040505 100644
--- a/src/node_dotenv.cc
+++ b/src/node_dotenv.cc
@@ -5,7 +5,9 @@
namespace node {
+using v8::Local;
using v8::NewStringType;
+using v8::Object;
using v8::String;
std::vector Dotenv::GetPathFromArgs(
@@ -64,14 +66,47 @@ void Dotenv::SetEnvironment(node::Environment* env) {
}
}
-bool Dotenv::ParsePath(const std::string_view path) {
+Local Dotenv::ToObject(Environment* env) {
+ Local result = Object::New(env->isolate());
+
+ for (const auto& entry : store_) {
+ auto key = entry.first;
+ auto value = entry.second;
+
+ result
+ ->Set(
+ env->context(),
+ v8::String::NewFromUtf8(
+ env->isolate(), key.data(), NewStringType::kNormal, key.size())
+ .ToLocalChecked(),
+ v8::String::NewFromUtf8(env->isolate(),
+ value.data(),
+ NewStringType::kNormal,
+ value.size())
+ .ToLocalChecked())
+ .Check();
+ }
+
+ return result;
+}
+
+void Dotenv::ParseContent(const std::string_view content) {
+ using std::string_view_literals::operator""sv;
+ auto lines = SplitString(content, "\n"sv);
+
+ for (const auto& line : lines) {
+ ParseLine(line);
+ }
+}
+
+Dotenv::ParseResult Dotenv::ParsePath(const std::string_view path) {
uv_fs_t req;
auto defer_req_cleanup = OnScopeLeave([&req]() { uv_fs_req_cleanup(&req); });
uv_file file = uv_fs_open(nullptr, &req, path.data(), 0, 438, nullptr);
if (req.result < 0) {
// req will be cleaned up by scope leave.
- return false;
+ return ParseResult::FileError;
}
uv_fs_req_cleanup(&req);
@@ -89,7 +124,7 @@ bool Dotenv::ParsePath(const std::string_view path) {
auto r = uv_fs_read(nullptr, &req, file, &buf, 1, -1, nullptr);
if (req.result < 0) {
// req will be cleaned up by scope leave.
- return false;
+ return ParseResult::InvalidContent;
}
uv_fs_req_cleanup(&req);
if (r <= 0) {
@@ -98,13 +133,8 @@ bool Dotenv::ParsePath(const std::string_view path) {
result.append(buf.base, r);
}
- using std::string_view_literals::operator""sv;
- auto lines = SplitString(result, "\n"sv);
-
- for (const auto& line : lines) {
- ParseLine(line);
- }
- return true;
+ ParseContent(result);
+ return ParseResult::Valid;
}
void Dotenv::AssignNodeOptionsIfAvailable(std::string* node_options) {
@@ -142,7 +172,7 @@ void Dotenv::ParseLine(const std::string_view line) {
auto quote_character = value[quotation_index];
value.erase(0, 1);
- auto end_quotation_index = value.find_last_of(quote_character);
+ auto end_quotation_index = value.find(quote_character);
// We couldn't find the closing quotation character. Terminate.
if (end_quotation_index == std::string::npos) {
diff --git a/src/node_dotenv.h b/src/node_dotenv.h
index cc87008d149f43..f2a9ce56f41570 100644
--- a/src/node_dotenv.h
+++ b/src/node_dotenv.h
@@ -4,6 +4,7 @@
#if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS
#include "util-inl.h"
+#include "v8.h"
#include