diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index a00875881ab59c..a79f7ab22af7e2 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -174,6 +174,7 @@ /.github/workflows/tools.yml @nodejs/security-wg /.github/workflows/update-openssl.yml @nodejs/security-wg /.github/workflows/update-v8.yml @nodejs/security-wg @nodejs/v8-update +/deps @nodejs/security-wg /tools/dep_updaters/* @nodejs/security-wg # Web Standards diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index a0c0004af7e293..392b900565504c 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -90,8 +90,8 @@ jobs: - name: Build run: | cd $TAR_DIR - make build-ci -j2 V=1 + make build-ci -j4 V=1 - name: Test run: | cd $TAR_DIR - make run-ci -j2 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" + make run-ci -j4 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" diff --git a/.github/workflows/coverage-linux-without-intl.yml b/.github/workflows/coverage-linux-without-intl.yml index 10677ecaffccbf..012564d7ab14f9 100644 --- a/.github/workflows/coverage-linux-without-intl.yml +++ b/.github/workflows/coverage-linux-without-intl.yml @@ -53,11 +53,11 @@ jobs: - name: Install gcovr run: pip install gcovr==4.2 - name: Build - run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn --coverage --without-intl" + run: make build-ci -j4 V=1 CONFIG_FLAGS="--error-on-warn --coverage --without-intl" # TODO(bcoe): fix the couple tests that fail with the inspector enabled. # The cause is most likely coverage's use of the inspector. - name: Test - run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j2 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" || exit 0 + run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j4 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" || exit 0 - name: Report JS run: npx c8 report --check-coverage env: diff --git a/.github/workflows/coverage-linux.yml b/.github/workflows/coverage-linux.yml index 0895e029f714c0..83b973c1f752b6 100644 --- a/.github/workflows/coverage-linux.yml +++ b/.github/workflows/coverage-linux.yml @@ -53,11 +53,11 @@ jobs: - name: Install gcovr run: pip install gcovr==4.2 - name: Build - run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn --coverage" + run: make build-ci -j4 V=1 CONFIG_FLAGS="--error-on-warn --coverage" # TODO(bcoe): fix the couple tests that fail with the inspector enabled. # The cause is most likely coverage's use of the inspector. 
- name: Test - run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j2 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" || exit 0 + run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j4 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" || exit 0 - name: Report JS run: npx c8 report --check-coverage env: diff --git a/.github/workflows/test-asan.yml b/.github/workflows/test-asan.yml index fd50b61ffef222..c97d55d4e8bef8 100644 --- a/.github/workflows/test-asan.yml +++ b/.github/workflows/test-asan.yml @@ -57,6 +57,6 @@ jobs: - name: Environment Information run: npx envinfo - name: Build - run: make build-ci -j2 V=1 + run: make build-ci -j4 V=1 - name: Test - run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' -t 300 --measure-flakiness 9" + run: make run-ci -j4 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' -t 300 --measure-flakiness 9" diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml index 7c7ebecad8367b..2c2e8060d9847f 100644 --- a/.github/workflows/test-internet.yml +++ b/.github/workflows/test-internet.yml @@ -50,6 +50,6 @@ jobs: - name: Environment Information run: npx envinfo - name: Build - run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn" + run: make build-ci -j4 V=1 CONFIG_FLAGS="--error-on-warn" - name: Test Internet - run: make test-internet -j2 V=1; + run: make test-internet -j4 V=1; diff --git a/.github/workflows/test-linux.yml b/.github/workflows/test-linux.yml index 96ed6797d81141..3dd559cff24da0 100644 --- a/.github/workflows/test-linux.yml +++ b/.github/workflows/test-linux.yml @@ -44,6 +44,6 @@ jobs: - name: Environment Information run: npx envinfo - name: Build - run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn" + run: make build-ci -j4 V=1 CONFIG_FLAGS="--error-on-warn" - name: Test - run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9" + run: make run-ci -j4 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9" diff --git a/CHANGELOG.md b/CHANGELOG.md index 51e38889abb31d..83e31527eb9d99 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -36,7 +36,9 @@ release. -21.5.0
+21.6.1
+21.6.0
+21.5.0
21.4.0
21.3.0
21.2.0
diff --git a/README.md b/README.md index 2325722e35ae12..a5d6d344910e69 100644 --- a/README.md +++ b/README.md @@ -190,8 +190,6 @@ For information about the governance of the Node.js project, see **Moshe Atlow** <> (he/him) * [RafaelGSS](https://github.com/RafaelGSS) - **Rafael Gonzaga** <> (he/him) -* [RaisinTen](https://github.com/RaisinTen) - - **Darshan Sen** <> (he/him) * [richardlau](https://github.com/richardlau) - **Richard Lau** <> * [ronag](https://github.com/ronag) - @@ -266,6 +264,8 @@ For information about the governance of the Node.js project, see **Alexis Campailla** <> * [piscisaureus](https://github.com/piscisaureus) - **Bert Belder** <> +* [RaisinTen](https://github.com/RaisinTen) - + **Darshan Sen** <> (he/him) * [sam-github](https://github.com/sam-github) - **Sam Roberts** <> * [shigeki](https://github.com/shigeki) - @@ -389,6 +389,8 @@ For information about the governance of the Node.js project, see **Keyhan Vakil** <> * [legendecas](https://github.com/legendecas) - **Chengzhong Wu** <> (he/him) +* [lemire](https://github.com/lemire) - + **Daniel Lemire** <> * [linkgoron](https://github.com/linkgoron) - **Nitzan Uziely** <> * [LiviaMedeiros](https://github.com/LiviaMedeiros) - @@ -425,16 +427,12 @@ For information about the governance of the Node.js project, see **Myles Borins** <> (he/him) * [ovflowd](https://github.com/ovflowd) - **Claudio Wunder** <> (he/they) -* [oyyd](https://github.com/oyyd) - - **Ouyang Yadong** <> (he/him) * [panva](https://github.com/panva) - **Filip Skokan** <> (he/him) * [Qard](https://github.com/Qard) - **Stephen Belanger** <> (he/him) * [RafaelGSS](https://github.com/RafaelGSS) - **Rafael Gonzaga** <> (he/him) -* [RaisinTen](https://github.com/RaisinTen) - - **Darshan Sen** <> (he/him) * [rluvaton](https://github.com/rluvaton) - **Raz Luvaton** <> (he/him) * [richardlau](https://github.com/richardlau) - @@ -623,6 +621,8 @@ For information about the governance of the Node.js project, see **Alexis Campailla** <> * [othiym23](https://github.com/othiym23) - **Forrest L Norvell** <> (they/them/themself) +* [oyyd](https://github.com/oyyd) - + **Ouyang Yadong** <> (he/him) * [petkaantonov](https://github.com/petkaantonov) - **Petka Antonov** <> * [phillipj](https://github.com/phillipj) - @@ -639,6 +639,8 @@ For information about the governance of the Node.js project, see **Peter Marshall** <> (he/him) * [puzpuzpuz](https://github.com/puzpuzpuz) - **Andrey Pechkurov** <> (he/him) +* [RaisinTen](https://github.com/RaisinTen) - + **Darshan Sen** <> (he/him) * [refack](https://github.com/refack) - **Refael Ackermann (רפאל פלחי)** <> (he/him/הוא/אתה) * [rexagod](https://github.com/rexagod) - @@ -737,8 +739,6 @@ maintaining the Node.js project. **Preveen Padmanabhan** <> (he/him) * [PoojaDurgad](https://github.com/PoojaDurgad) - **Pooja Durgad** <> -* [RaisinTen](https://github.com/RaisinTen) - - **Darshan Sen** <> * [VoltrexKeyva](https://github.com/VoltrexKeyva) - **Mohammed Keyvanzadeh** <> (he/him) diff --git a/benchmark/url/url-searchparams-append.js b/benchmark/url/url-searchparams-append.js new file mode 100644 index 00000000000000..cd8099b517c6f7 --- /dev/null +++ b/benchmark/url/url-searchparams-append.js @@ -0,0 +1,19 @@ +'use strict'; +const common = require('../common.js'); + +const bench = common.createBenchmark(main, { + type: ['URL', 'URLSearchParams'], + n: [1e3, 1e6], +}); + +function main({ type, n }) { + const params = type === 'URL' ? 
+ new URL('https://nodejs.org').searchParams : + new URLSearchParams(); + + bench.start(); + for (let i = 0; i < n; i++) { + params.append('test', i); + } + bench.end(n); +} diff --git a/benchmark/url/url-searchparams-update.js b/benchmark/url/url-searchparams-update.js new file mode 100644 index 00000000000000..082d476a5d2250 --- /dev/null +++ b/benchmark/url/url-searchparams-update.js @@ -0,0 +1,29 @@ +'use strict'; +const common = require('../common.js'); +const assert = require('assert'); + +const bench = common.createBenchmark(main, { + searchParams: ['true', 'false'], + property: ['pathname', 'search', 'hash'], + n: [1e6], +}); + +function getMethod(url, property) { + if (property === 'pathname') return (x) => url.pathname = `/${x}`; + if (property === 'search') return (x) => url.search = `?${x}`; + if (property === 'hash') return (x) => url.hash = `#${x}`; + throw new Error(`Unsupported property "${property}"`); +} + +function main({ searchParams, property, n }) { + const url = new URL('https://nodejs.org'); + if (searchParams === 'true') assert(url.searchParams); + + const method = getMethod(url, property); + + bench.start(); + for (let i = 0; i < n; i++) { + method(i); + } + bench.end(n); +} diff --git a/deps/acorn/acorn-walk/dist/walk.d.mts b/deps/acorn/acorn-walk/dist/walk.d.mts index 7bc8c9790fd1a7..e07a6afaf8e336 100644 --- a/deps/acorn/acorn-walk/dist/walk.d.mts +++ b/deps/acorn/acorn-walk/dist/walk.d.mts @@ -66,10 +66,10 @@ export function simple( /** * does a 'simple' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter. - * @param node - * @param visitors - * @param base - * @param state + * @param node + * @param visitors + * @param base + * @param state */ export function ancestor( node: acorn.Node, @@ -79,8 +79,8 @@ export function ancestor( ): void /** - * does a 'recursive' walk, where the walker functions are responsible for continuing the walk on the child nodes of their target node. - * @param node + * does a 'recursive' walk, where the walker functions are responsible for continuing the walk on the child nodes of their target node. + * @param node * @param state the start state * @param functions contain an object that maps node types to walker functions * @param base provides the fallback walker functions for node types that aren't handled in the {@link functions} object. If not given, the default walkers will be used. @@ -94,10 +94,10 @@ export function recursive( /** * does a 'full' walk over a tree, calling the {@link callback} with the arguments (node, state, type) for each node - * @param node - * @param callback - * @param base - * @param state + * @param node + * @param callback + * @param base + * @param state */ export function full( node: acorn.Node, @@ -108,10 +108,10 @@ export function full( /** * does a 'full' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter. - * @param node - * @param callback - * @param base - * @param state + * @param node + * @param callback + * @param base + * @param state */ export function fullAncestor( node: acorn.Node, @@ -122,8 +122,8 @@ export function fullAncestor( /** * builds a new walker object by using the walker functions in {@link functions} and filling in the missing ones by taking defaults from {@link base}. 
- * @param functions - * @param base + * @param functions + * @param base */ export function make( functions: RecursiveVisitors, @@ -132,12 +132,12 @@ export function make( /** * tries to locate a node in a tree at the given start and/or end offsets, which satisfies the predicate test. {@link start} and {@link end} can be either `null` (as wildcard) or a `number`. {@link test} may be a string (indicating a node type) or a function that takes (nodeType, node) arguments and returns a boolean indicating whether this node is interesting. {@link base} and {@link state} are optional, and can be used to specify a custom walker. Nodes are tested from inner to outer, so if two nodes match the boundaries, the inner one will be preferred. - * @param node - * @param start - * @param end - * @param type - * @param base - * @param state + * @param node + * @param start + * @param end + * @param type + * @param base + * @param state */ export function findNodeAt( node: acorn.Node, @@ -150,11 +150,11 @@ export function findNodeAt( /** * like {@link findNodeAt}, but will match any node that exists 'around' (spanning) the given position. - * @param node - * @param start - * @param type - * @param base - * @param state + * @param node + * @param start + * @param type + * @param base + * @param state */ export function findNodeAround( node: acorn.Node, @@ -165,8 +165,13 @@ export function findNodeAround( ): Found | undefined /** - * similar to {@link findNodeAround}, but will match all nodes after the given position (testing outer nodes before inner nodes). + * Find the outermost matching node after a given position. */ export const findNodeAfter: typeof findNodeAround +/** + * Find the outermost matching node before a given position. + */ +export const findNodeBefore: typeof findNodeAround + export const base: RecursiveVisitors diff --git a/deps/acorn/acorn-walk/dist/walk.d.ts b/deps/acorn/acorn-walk/dist/walk.d.ts index 7bc8c9790fd1a7..e07a6afaf8e336 100644 --- a/deps/acorn/acorn-walk/dist/walk.d.ts +++ b/deps/acorn/acorn-walk/dist/walk.d.ts @@ -66,10 +66,10 @@ export function simple( /** * does a 'simple' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter. - * @param node - * @param visitors - * @param base - * @param state + * @param node + * @param visitors + * @param base + * @param state */ export function ancestor( node: acorn.Node, @@ -79,8 +79,8 @@ export function ancestor( ): void /** - * does a 'recursive' walk, where the walker functions are responsible for continuing the walk on the child nodes of their target node. - * @param node + * does a 'recursive' walk, where the walker functions are responsible for continuing the walk on the child nodes of their target node. + * @param node * @param state the start state * @param functions contain an object that maps node types to walker functions * @param base provides the fallback walker functions for node types that aren't handled in the {@link functions} object. If not given, the default walkers will be used. 
@@ -94,10 +94,10 @@ export function recursive( /** * does a 'full' walk over a tree, calling the {@link callback} with the arguments (node, state, type) for each node - * @param node - * @param callback - * @param base - * @param state + * @param node + * @param callback + * @param base + * @param state */ export function full( node: acorn.Node, @@ -108,10 +108,10 @@ export function full( /** * does a 'full' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter. - * @param node - * @param callback - * @param base - * @param state + * @param node + * @param callback + * @param base + * @param state */ export function fullAncestor( node: acorn.Node, @@ -122,8 +122,8 @@ export function fullAncestor( /** * builds a new walker object by using the walker functions in {@link functions} and filling in the missing ones by taking defaults from {@link base}. - * @param functions - * @param base + * @param functions + * @param base */ export function make( functions: RecursiveVisitors, @@ -132,12 +132,12 @@ export function make( /** * tries to locate a node in a tree at the given start and/or end offsets, which satisfies the predicate test. {@link start} and {@link end} can be either `null` (as wildcard) or a `number`. {@link test} may be a string (indicating a node type) or a function that takes (nodeType, node) arguments and returns a boolean indicating whether this node is interesting. {@link base} and {@link state} are optional, and can be used to specify a custom walker. Nodes are tested from inner to outer, so if two nodes match the boundaries, the inner one will be preferred. - * @param node - * @param start - * @param end - * @param type - * @param base - * @param state + * @param node + * @param start + * @param end + * @param type + * @param base + * @param state */ export function findNodeAt( node: acorn.Node, @@ -150,11 +150,11 @@ export function findNodeAt( /** * like {@link findNodeAt}, but will match any node that exists 'around' (spanning) the given position. - * @param node - * @param start - * @param type - * @param base - * @param state + * @param node + * @param start + * @param type + * @param base + * @param state */ export function findNodeAround( node: acorn.Node, @@ -165,8 +165,13 @@ export function findNodeAround( ): Found | undefined /** - * similar to {@link findNodeAround}, but will match all nodes after the given position (testing outer nodes before inner nodes). + * Find the outermost matching node after a given position. */ export const findNodeAfter: typeof findNodeAround +/** + * Find the outermost matching node before a given position. 
+ */ +export const findNodeBefore: typeof findNodeAround + export const base: RecursiveVisitors diff --git a/deps/acorn/acorn-walk/package.json b/deps/acorn/acorn-walk/package.json index 393c87a39255a0..9d3b7e5248fb83 100644 --- a/deps/acorn/acorn-walk/package.json +++ b/deps/acorn/acorn-walk/package.json @@ -16,7 +16,7 @@ ], "./package.json": "./package.json" }, - "version": "8.3.1", + "version": "8.3.2", "engines": { "node": ">=0.4.0" }, diff --git a/deps/base64/base64/CMakeLists.txt b/deps/base64/base64/CMakeLists.txt index be1de665a2cd59..ff9f6f21e1ee28 100644 --- a/deps/base64/base64/CMakeLists.txt +++ b/deps/base64/base64/CMakeLists.txt @@ -17,7 +17,7 @@ if (POLICY CMP0127) cmake_policy(SET CMP0127 NEW) endif() -project(base64 LANGUAGES C VERSION 0.5.1) +project(base64 LANGUAGES C VERSION 0.5.2) include(GNUInstallDirs) include(CMakeDependentOption) diff --git a/deps/base64/base64/Makefile b/deps/base64/base64/Makefile index bcb944551ae881..bba3fde4dd05bf 100644 --- a/deps/base64/base64/Makefile +++ b/deps/base64/base64/Makefile @@ -1,4 +1,4 @@ -CFLAGS += -std=c99 -O3 -Wall -Wextra -pedantic +CFLAGS += -std=c99 -O3 -Wall -Wextra -pedantic -DBASE64_STATIC_DEFINE # Set OBJCOPY if not defined by environment: OBJCOPY ?= objcopy @@ -56,6 +56,7 @@ ifdef OPENMP CFLAGS += -fopenmp endif +TARGET := $(shell $(CC) -dumpmachine) .PHONY: all analyze clean @@ -64,9 +65,17 @@ all: bin/base64 lib/libbase64.o bin/base64: bin/base64.o lib/libbase64.o $(CC) $(CFLAGS) -o $@ $^ -lib/libbase64.o: $(OBJS) - $(LD) -r -o $@ $^ - $(OBJCOPY) --keep-global-symbols=lib/exports.txt $@ +# Workaround: mangle exported function names on MinGW32. +lib/exports.build.txt: lib/exports.txt +ifeq (i686-w64-mingw32, $(TARGET)) + sed -e 's/^/_/' $< > $@ +else + cp -f $< $@ +endif + +lib/libbase64.o: lib/exports.build.txt $(OBJS) + $(LD) -r -o $@ $(OBJS) + $(OBJCOPY) --keep-global-symbols=$< $@ lib/config.h: @echo "#define HAVE_AVX512 $(HAVE_AVX512)" > $@ @@ -97,4 +106,4 @@ analyze: clean scan-build --use-analyzer=`which clang` --status-bugs make clean: - rm -f bin/base64 bin/base64.o lib/libbase64.o lib/config.h $(OBJS) + rm -f bin/base64 bin/base64.o lib/libbase64.o lib/config.h lib/exports.build.txt $(OBJS) diff --git a/deps/base64/base64/bin/base64.c b/deps/base64/base64/bin/base64.c index 98d6b3cbab560c..0e32ed03762df7 100644 --- a/deps/base64/base64/bin/base64.c +++ b/deps/base64/base64/bin/base64.c @@ -1,4 +1,19 @@ -#define _XOPEN_SOURCE // IOV_MAX +// Test for MinGW. +#if defined(__MINGW32__) || defined(__MINGW64__) +# define MINGW +#endif + +// Decide if the writev(2) system call needs to be emulated as a series of +// write(2) calls. At least MinGW does not support writev(2). +#ifdef MINGW +# define EMULATE_WRITEV +#endif + +// Include the necessary system header when using the system's writev(2). +#ifndef EMULATE_WRITEV +# define _XOPEN_SOURCE // Unlock IOV_MAX +# include +#endif #include #include @@ -8,7 +23,7 @@ #include #include #include -#include + #include "../include/libbase64.h" // Size of the buffer for the "raw" (not base64-encoded) data in bytes. @@ -50,6 +65,59 @@ struct buffer { char *enc; }; +// Optionally emulate writev(2) as a series of write calls. +#ifdef EMULATE_WRITEV + +// Quick and dirty definition of IOV_MAX as it is probably not defined. +#ifndef IOV_MAX +# define IOV_MAX 1024 +#endif + +// Quick and dirty definition of this system struct, for local use only. +struct iovec { + + // Opaque data pointer. + void *iov_base; + + // Length of the data in bytes. 
+ size_t iov_len; +}; + +static ssize_t +writev (const int fd, const struct iovec *iov, int iovcnt) +{ + ssize_t r, nwrite = 0; + + // Reset the error marker. + errno = 0; + + while (iovcnt-- > 0) { + + // Write the vector; propagate errors back to the caller. Note + // that this loses information about how much vectors have been + // successfully written, but that also seems to be the case + // with the real function. The API is somewhat flawed. + if ((r = write(fd, iov->iov_base, iov->iov_len)) < 0) { + return r; + } + + // Update the total write count. + nwrite += r; + + // Return early after a partial write; the caller should retry. + if ((size_t) r != iov->iov_len) { + break; + } + + // Move to the next vector. + iov++; + } + + return nwrite; +} + +#endif // EMULATE_WRITEV + static bool buffer_alloc (const struct config *config, struct buffer *buf) { @@ -272,10 +340,23 @@ encode (const struct config *config, struct buffer *buf) return true; } -static int +static inline size_t +find_newline (const char *p, const size_t avail) +{ + // This is very naive and can probably be improved by vectorization. + for (size_t len = 0; len < avail; len++) { + if (p[len] == '\n') { + return len; + } + } + + return avail; +} + +static bool decode (const struct config *config, struct buffer *buf) { - size_t nread, nout; + size_t avail; struct base64_state state; // Initialize the decoder's state structure. @@ -283,18 +364,51 @@ decode (const struct config *config, struct buffer *buf) // Read encoded data into the buffer. Use the smallest buffer size to // be on the safe side: the decoded output will fit the raw buffer. - while ((nread = fread(buf->enc, 1, BUFFER_RAW_SIZE, config->fp)) > 0) { + while ((avail = fread(buf->enc, 1, BUFFER_RAW_SIZE, config->fp)) > 0) { + char *start = buf->enc; + char *outbuf = buf->raw; + size_t ototal = 0; + + // By popular demand, this utility tries to be bug-compatible + // with GNU `base64'. That includes silently ignoring newlines + // in the input. Tokenize the input on newline characters. + while (avail > 0) { + + // Find the offset of the next newline character, which + // is also the length of the next chunk. + size_t outlen, len = find_newline(start, avail); + + // Ignore empty chunks. + if (len == 0) { + start++; + avail--; + continue; + } - // Decode the input into the raw buffer. - if (base64_stream_decode(&state, buf->enc, nread, - buf->raw, &nout) == 0) { - fprintf(stderr, "%s: %s: decoding error\n", - config->name, config->file); - return false; + // Decode the chunk into the raw buffer. + if (base64_stream_decode(&state, start, len, + outbuf, &outlen) == 0) { + fprintf(stderr, "%s: %s: decoding error\n", + config->name, config->file); + return false; + } + + // Update the output buffer pointer and total size. + outbuf += outlen; + ototal += outlen; + + // Bail out if the whole string has been consumed. + if (len == avail) { + break; + } + + // Move the start pointer past the newline. + start += len + 1; + avail -= len + 1; } // Append the raw data to the output stream. 
- if (write_stdout(config, buf->raw, nout) == false) { + if (write_stdout(config, buf->raw, ototal) == false) { return false; } } diff --git a/deps/base64/base64/lib/env.h b/deps/base64/base64/lib/env.h index d5c2fdb7952735..d489ba54215bbf 100644 --- a/deps/base64/base64/lib/env.h +++ b/deps/base64/base64/lib/env.h @@ -1,6 +1,8 @@ #ifndef BASE64_ENV_H #define BASE64_ENV_H +#include + // This header file contains macro definitions that describe certain aspects of // the compile-time environment. Compatibility and portability macros go here. @@ -46,12 +48,10 @@ #if defined (__x86_64__) // This also works for the x32 ABI, which has a 64-bit word size. # define BASE64_WORDSIZE 64 -#elif defined (_INTEGRAL_MAX_BITS) -# define BASE64_WORDSIZE _INTEGRAL_MAX_BITS -#elif defined (__WORDSIZE) -# define BASE64_WORDSIZE __WORDSIZE -#elif defined (__SIZE_WIDTH__) -# define BASE64_WORDSIZE __SIZE_WIDTH__ +#elif SIZE_MAX == UINT32_MAX +# define BASE64_WORDSIZE 32 +#elif SIZE_MAX == UINT64_MAX +# define BASE64_WORDSIZE 64 #else # error BASE64_WORDSIZE_NOT_DEFINED #endif diff --git a/deps/base64/base64/test/CMakeLists.txt b/deps/base64/base64/test/CMakeLists.txt index ef8787047b2944..f07b65a00c2cb4 100644 --- a/deps/base64/base64/test/CMakeLists.txt +++ b/deps/base64/base64/test/CMakeLists.txt @@ -32,12 +32,10 @@ add_base64_test(test_base64 test_base64.c ) -if (NOT WIN32) - add_base64_test(benchmark - codec_supported.c - benchmark.c - ) -endif() +add_base64_test(benchmark + codec_supported.c + benchmark.c +) if(CMAKE_SYSTEM_NAME STREQUAL "Linux") target_link_libraries(benchmark PRIVATE rt) diff --git a/deps/base64/base64/test/Makefile b/deps/base64/base64/test/Makefile index c896627e0bd8d6..7ecb893a6363b9 100644 --- a/deps/base64/base64/test/Makefile +++ b/deps/base64/base64/test/Makefile @@ -1,4 +1,4 @@ -CFLAGS += -std=c99 -O3 -Wall -Wextra -pedantic +CFLAGS += -std=c99 -O3 -Wall -Wextra -pedantic -DBASE64_STATIC_DEFINE ifdef OPENMP CFLAGS += -fopenmp endif @@ -6,6 +6,8 @@ endif TARGET := $(shell $(CC) -dumpmachine) ifneq (, $(findstring darwin, $(TARGET))) BENCH_LDFLAGS= +else ifneq (, $(findstring mingw, $(TARGET))) + BENCH_LDFLAGS= else # default to linux, -lrt needed BENCH_LDFLAGS=-lrt diff --git a/deps/base64/base64/test/benchmark.c b/deps/base64/base64/test/benchmark.c index 80d21a389cb98c..e78b696bedb6b3 100644 --- a/deps/base64/base64/test/benchmark.c +++ b/deps/base64/base64/test/benchmark.c @@ -8,17 +8,25 @@ #define _XOPEN_SOURCE 600 #endif +// Standard cross-platform includes. #include -#include -#include -#include -#include #include #include -#include -#ifdef __MACH__ -#include +// Platform-specific includes. 
+#if defined(_WIN32) || defined(_WIN64) +# include +# include +#else +# include +# include +# include +# include +# include +#endif + +#if defined(__MACH__) +# include #endif #include "../include/libbase64.h" @@ -60,6 +68,27 @@ bytes_to_mb (size_t bytes) static bool get_random_data (struct buffers *b, char **errmsg) { +#if defined(_WIN32) || defined(_WIN64) + HCRYPTPROV hProvider = 0; + + if (!CryptAcquireContext(&hProvider, 0, 0, PROV_RSA_FULL, CRYPT_VERIFYCONTEXT | CRYPT_SILENT)) { + *errmsg = "Error: CryptAcquireContext"; + return false; + } + + if (!CryptGenRandom(hProvider, b->regsz, b->reg)) { + CryptReleaseContext(hProvider, 0); + *errmsg = "Error: CryptGenRandom"; + return false; + } + + if (!CryptReleaseContext(hProvider, 0)) { + *errmsg = "Error: CryptReleaseContext"; + return false; + } + + return true; +#else int fd; ssize_t nread; size_t total_read = 0; @@ -80,16 +109,19 @@ get_random_data (struct buffers *b, char **errmsg) } total_read += nread; } + close(fd); return true; +#endif } -#ifdef __MACH__ +#if defined(__MACH__) typedef uint64_t base64_timespec; + static void -base64_gettime (base64_timespec * o_time) +base64_gettime (base64_timespec *t) { - *o_time = mach_absolute_time(); + *t = mach_absolute_time(); } static float @@ -101,18 +133,39 @@ timediff_sec (base64_timespec *start, base64_timespec *end) return (float)((diff * tb.numer) / tb.denom) / 1e9f; } +#elif defined(_WIN32) || defined(_WIN64) +typedef ULARGE_INTEGER base64_timespec; + +static void +base64_gettime (base64_timespec *t) +{ + FILETIME current_time_ft; + + GetSystemTimePreciseAsFileTime(¤t_time_ft); + + t->LowPart = current_time_ft.dwLowDateTime; + t->HighPart = current_time_ft.dwHighDateTime; +} + +static float +timediff_sec (base64_timespec *start, base64_timespec *end) +{ + // Timer resolution is 100 nanoseconds (10^-7 sec). + return (end->QuadPart - start->QuadPart) / 1e7f; +} #else typedef struct timespec base64_timespec; + static void -base64_gettime (base64_timespec * o_time) +base64_gettime (base64_timespec *t) { - clock_gettime(CLOCK_REALTIME, o_time); + clock_gettime(CLOCK_REALTIME, t); } static float timediff_sec (base64_timespec *start, base64_timespec *end) { - return (end->tv_sec - start->tv_sec) + ((float)(end->tv_nsec - start->tv_nsec)) / 1e9f; + return (end->tv_sec - start->tv_sec) + (end->tv_nsec - start->tv_nsec) / 1e9f; } #endif diff --git a/deps/cares/cares.gyp b/deps/cares/cares.gyp index 587cf7f3d626d5..6963a1c9479f29 100644 --- a/deps/cares/cares.gyp +++ b/deps/cares/cares.gyp @@ -181,7 +181,7 @@ }], [ 'OS not in "win android"', { 'cflags': [ - '--std=gnu89' + '--std=gnu11' ], }], [ 'OS=="linux"', { diff --git a/deps/icu-small/source/data/in/icudt74l.dat.bz2 b/deps/icu-small/source/data/in/icudt74l.dat.bz2 index f52401bb1a5d7e..e299a5c8abfc02 100644 Binary files a/deps/icu-small/source/data/in/icudt74l.dat.bz2 and b/deps/icu-small/source/data/in/icudt74l.dat.bz2 differ diff --git a/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/ngtcp2.h b/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/ngtcp2.h index a8d4b4afd3a470..f16d15cb39bb52 100644 --- a/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/ngtcp2.h +++ b/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/ngtcp2.h @@ -4269,9 +4269,6 @@ NGTCP2_EXTERN int ngtcp2_conn_open_uni_stream(ngtcp2_conn *conn, * * |flags| is currently unused, and should be set to 0. * - * This function returns 0 if a stream denoted by |stream_id| is not - * found. 
- * * This function returns 0 if it succeeds, or one of the following * negative error codes: * @@ -4294,9 +4291,6 @@ NGTCP2_EXTERN int ngtcp2_conn_shutdown_stream(ngtcp2_conn *conn, uint32_t flags, * * |flags| is currently unused, and should be set to 0. * - * This function returns 0 if a stream denoted by |stream_id| is not - * found. - * * This function returns 0 if it succeeds, or one of the following * negative error codes: * @@ -4321,9 +4315,6 @@ NGTCP2_EXTERN int ngtcp2_conn_shutdown_stream_write(ngtcp2_conn *conn, * * |flags| is currently unused, and should be set to 0. * - * This function returns 0 if a stream denoted by |stream_id| is not - * found. - * * This function returns 0 if it succeeds, or one of the following * negative error codes: * @@ -4684,9 +4675,6 @@ NGTCP2_EXTERN int ngtcp2_conn_in_draining_period(ngtcp2_conn *conn); * specifies the stream ID. This function only extends stream-level * flow control window. * - * This function returns 0 if a stream denoted by |stream_id| is not - * found. - * * This function returns 0 if it succeeds, or one of the following * negative error codes: * diff --git a/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/version.h b/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/version.h index 9f7592b84a4585..66a70ffe962964 100644 --- a/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/version.h +++ b/deps/ngtcp2/ngtcp2/lib/includes/ngtcp2/version.h @@ -36,7 +36,7 @@ * * Version number of the ngtcp2 library release. */ -#define NGTCP2_VERSION "0.8.1" +#define NGTCP2_VERSION "1.1.0" /** * @macro @@ -46,6 +46,6 @@ * number, 8 bits for minor and 8 bits for patch. Version 1.2.3 * becomes 0x010203. */ -#define NGTCP2_VERSION_NUM 0x000801 +#define NGTCP2_VERSION_NUM 0x010100 #endif /* VERSION_H */ diff --git a/deps/npm/docs/README.md b/deps/npm/docs/README.md deleted file mode 100644 index 5fc7ccf6cd60ac..00000000000000 --- a/deps/npm/docs/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# docs - -[![CI - docs](https://github.com/npm/cli/actions/workflows/ci-docs.yml/badge.svg)](https://github.com/npm/cli/actions/workflows/ci-docs.yml) - -Scripts to build the npm docs. diff --git a/deps/npm/docs/content/commands/npm-install-test.md b/deps/npm/docs/content/commands/npm-install-test.md index 3b94ea27763f8f..b73614de2d4f42 100644 --- a/deps/npm/docs/content/commands/npm-install-test.md +++ b/deps/npm/docs/content/commands/npm-install-test.md @@ -290,6 +290,16 @@ field of package.json, which comes from `process.platform`. +#### `libc` + +* Default: null +* Type: null or String + +Override libc of native modules to install. Acceptable values are same as +`libc` field of package.json + + + #### `workspace` * Default: diff --git a/deps/npm/docs/content/commands/npm-install.md b/deps/npm/docs/content/commands/npm-install.md index 738ca3372c8e9f..36496954270b30 100644 --- a/deps/npm/docs/content/commands/npm-install.md +++ b/deps/npm/docs/content/commands/npm-install.md @@ -680,6 +680,16 @@ field of package.json, which comes from `process.platform`. +#### `libc` + +* Default: null +* Type: null or String + +Override libc of native modules to install. Acceptable values are same as +`libc` field of package.json + + + #### `workspace` * Default: diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md index 4ad025dbb5b08e..a21d14e577df61 100644 --- a/deps/npm/docs/content/commands/npm-ls.md +++ b/deps/npm/docs/content/commands/npm-ls.md @@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages. 
For example, running `npm ls promzard` in npm's source tree will show: ```bash -npm@10.2.4 /path/to/npm +npm@10.3.0 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 ``` diff --git a/deps/npm/docs/content/commands/npm-publish.md b/deps/npm/docs/content/commands/npm-publish.md index 0e18cddf8b36d4..a9c368e218543f 100644 --- a/deps/npm/docs/content/commands/npm-publish.md +++ b/deps/npm/docs/content/commands/npm-publish.md @@ -22,7 +22,7 @@ scope-configured registry (see A `package` is interpreted the same way as other commands (like -`npm install` and can be: +`npm install`) and can be: * a) a folder containing a program described by a [`package.json`](/configuring-npm/package-json) file diff --git a/deps/npm/docs/content/commands/npm-sbom.md b/deps/npm/docs/content/commands/npm-sbom.md index ee0d60c6fde790..6e8033b96aedc7 100644 --- a/deps/npm/docs/content/commands/npm-sbom.md +++ b/deps/npm/docs/content/commands/npm-sbom.md @@ -266,7 +266,7 @@ SBOM format to use when generating SBOMs. * Type: "library", "application", or "framework" The type of package described by the generated SBOM. For SPDX, this is the -value for the `primaryPackagePurpose` fieled. For CycloneDX, this is the +value for the `primaryPackagePurpose` field. For CycloneDX, this is the value for the `type` field. diff --git a/deps/npm/docs/content/commands/npm-unpublish.md b/deps/npm/docs/content/commands/npm-unpublish.md index 8ab976e96cb6c7..2421e102325363 100644 --- a/deps/npm/docs/content/commands/npm-unpublish.md +++ b/deps/npm/docs/content/commands/npm-unpublish.md @@ -27,8 +27,12 @@ removing the tarball. The npm registry will return an error if you are not [logged in](/commands/npm-adduser). -If you do not specify a version or if you remove all of a package's -versions then the registry will remove the root package entry entirely. +If you do not specify a package name at all, the name and version to be +unpublished will be pulled from the project in the current directory. + +If you specify a package name but do not specify a version or if you +remove all of a package's versions then the registry will remove the +root package entry entirely. Even if you unpublish a package version, that specific name and version combination can never be reused. In order to publish the package again, diff --git a/deps/npm/docs/content/commands/npm.md b/deps/npm/docs/content/commands/npm.md index 1528614f69a69a..d92e83e5ccdd19 100644 --- a/deps/npm/docs/content/commands/npm.md +++ b/deps/npm/docs/content/commands/npm.md @@ -14,7 +14,7 @@ Note: This command is unaware of workspaces. ### Version -10.2.4 +10.3.0 ### Description diff --git a/deps/npm/docs/content/commands/npx.md b/deps/npm/docs/content/commands/npx.md index 5ce300e724b103..e596baa5da4793 100644 --- a/deps/npm/docs/content/commands/npx.md +++ b/deps/npm/docs/content/commands/npx.md @@ -150,7 +150,8 @@ This resulted in some shifts in its functionality: always present in the executed process `PATH`. - The `--npm` option is removed. `npx` will always use the `npm` it ships with. -- The `--node-arg` and `-n` options are removed. +- The `--node-arg` and `-n` options have been removed. Use [`NODE_OPTIONS`](https://nodejs.org/api/cli.html#node_optionsoptions) instead: e.g., + `NODE_OPTIONS="--trace-warnings --trace-exit" npx foo --random=true` - The `--always-spawn` option is redundant, and thus removed. - The `--shell` option is replaced with `--script-shell`, but maintained in the `npx` executable for backwards compatibility. 
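The updated `npm unpublish` documentation above, together with the reworked `deps/npm/lib/commands/unpublish.js` later in this diff, narrows what the command accepts: with no argument it falls back to the project in the current directory, only a single version or the entire project can be targeted, and removing the entire project (or its last remaining version) is refused without `--force`. A minimal usage sketch of the two allowed forms; the package name `my-pkg` is purely illustrative:

```bash
# Remove one published version; no --force needed unless it is the last version left.
npm unpublish my-pkg@1.2.3

# Remove the whole package (every version); npm refuses this without --force.
npm unpublish my-pkg --force
```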
diff --git a/deps/npm/docs/content/configuring-npm/npmrc.md b/deps/npm/docs/content/configuring-npm/npmrc.md index 8cd532abc1c2db..0aa99fc271013b 100644 --- a/deps/npm/docs/content/configuring-npm/npmrc.md +++ b/deps/npm/docs/content/configuring-npm/npmrc.md @@ -19,10 +19,10 @@ For a list of available configuration options, see The four relevant files are: -* per-project config file (/path/to/my/project/.npmrc) -* per-user config file (~/.npmrc) -* global config file ($PREFIX/etc/npmrc) -* npm builtin config file (/path/to/npm/npmrc) +* per-project config file (`/path/to/my/project/.npmrc`) +* per-user config file (`~/.npmrc`) +* global config file (`$PREFIX/etc/npmrc`) +* npm builtin config file (`/path/to/npm/npmrc`) All npm config files are an ini-formatted list of `key = value` parameters. Environment variables can be replaced using `${VARIABLE_NAME}`. For diff --git a/deps/npm/docs/content/configuring-npm/package-json.md b/deps/npm/docs/content/configuring-npm/package-json.md index 630ad453196a0a..2ef888fe1a4d64 100644 --- a/deps/npm/docs/content/configuring-npm/package-json.md +++ b/deps/npm/docs/content/configuring-npm/package-json.md @@ -291,25 +291,39 @@ Certain files are always included, regardless of settings: `README` & `LICENSE` can have any case and extension. -Conversely, some files are always ignored: +Some files are always ignored by default: +* `*.orig` +* `.*.swp` +* `.DS_Store` +* `._*` * `.git` -* `CVS` -* `.svn` * `.hg` * `.lock-wscript` +* `.npmrc` +* `.svn` * `.wafpickle-N` -* `.*.swp` -* `.DS_Store` -* `._*` +* `CVS` +* `config.gypi` +* `node_modules` * `npm-debug.log` +* `package-lock.json` (use + [`npm-shrinkwrap.json`](/configuring-npm/npm-shrinkwrap-json) + if you wish it to be published) +* `pnpm-lock.yaml` +* `yarn.lock` + +Most of these ignored files can be included specifically if included in +the `files` globs. Exceptions to this are: + +* `.git` * `.npmrc` * `node_modules` -* `config.gypi` -* `*.orig` -* `package-lock.json` (use - [`npm-shrinkwrap.json`](/configuring-npm/npm-shrinkwrap-json) if you wish - it to be published) +* `package-lock.json` +* `pnpm-lock.yaml` +* `yarn.lock` + +These can not be included. ### main diff --git a/deps/npm/docs/content/using-npm/config.md b/deps/npm/docs/content/using-npm/config.md index 80969ee23e5355..93c820ab3b45ad 100644 --- a/deps/npm/docs/content/using-npm/config.md +++ b/deps/npm/docs/content/using-npm/config.md @@ -855,6 +855,16 @@ Use of `legacy-peer-deps` is not recommended, as it will not enforce the +#### `libc` + +* Default: null +* Type: null or String + +Override libc of native modules to install. Acceptable values are same as +`libc` field of package.json + + + #### `link` * Default: false @@ -1373,7 +1383,7 @@ SBOM format to use when generating SBOMs. * Type: "library", "application", or "framework" The type of package described by the generated SBOM. For SPDX, this is the -value for the `primaryPackagePurpose` fieled. For CycloneDX, this is the +value for the `primaryPackagePurpose` field. For CycloneDX, this is the value for the `type` field. 
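The new `libc` option documented in the install and config pages above overrides which C-library variant of a native module npm selects, mirroring the `libc` field of `package.json`. A hedged example of how it might be invoked; the package name and the `musl` value are assumptions (the docs only state that accepted values match the `package.json` `libc` field):

```bash
# Override the libc of native modules to install, e.g. when resolving
# dependencies for an Alpine (musl) target from a glibc host.
npm install sharp --libc=musl
```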
diff --git a/deps/npm/docs/lib/index.js b/deps/npm/docs/lib/index.js new file mode 100644 index 00000000000000..5d4ae7af3457bb --- /dev/null +++ b/deps/npm/docs/lib/index.js @@ -0,0 +1,189 @@ +const localeCompare = require('@isaacs/string-locale-compare')('en') +const { join, basename, resolve } = require('path') +const transformHTML = require('./transform-html.js') +const { version } = require('../../lib/npm.js') +const { aliases } = require('../../lib/utils/cmd-list') +const { shorthands, definitions } = require('@npmcli/config/lib/definitions') + +const DOC_EXT = '.md' + +const TAGS = { + CONFIG: '', + USAGE: '', + SHORTHANDS: '', +} + +const assertPlaceholder = (src, path, placeholder) => { + if (!src.includes(placeholder)) { + throw new Error( + `Cannot replace ${placeholder} in ${path} due to missing placeholder` + ) + } + return placeholder +} + +const getCommandByDoc = (docFile, docExt) => { + // Grab the command name from the *.md filename + // NOTE: We cannot use the name property command file because in the case of + // `npx` the file being used is `lib/commands/exec.js` + const name = basename(docFile, docExt).replace('npm-', '') + + if (name === 'npm') { + return { + name, + params: null, + usage: 'npm', + } + } + + // special case for `npx`: + // `npx` is not technically a command in and of itself, + // so it just needs the usage of npm exex + const srcName = name === 'npx' ? 'exec' : name + const { params, usage = [''], workspaces } = require(`../../lib/commands/${srcName}`) + const usagePrefix = name === 'npx' ? 'npx' : `npm ${name}` + if (params) { + for (const param of params) { + if (definitions[param].exclusive) { + for (const e of definitions[param].exclusive) { + if (!params.includes(e)) { + params.splice(params.indexOf(param) + 1, 0, e) + } + } + } + } + } + + return { + name, + workspaces, + params: name === 'npx' ? null : params, + usage: usage.map(u => `${usagePrefix} ${u}`.trim()).join('\n'), + } +} + +const replaceVersion = (src) => src.replace(/@VERSION@/g, version) + +const replaceUsage = (src, { path }) => { + const replacer = assertPlaceholder(src, path, TAGS.USAGE) + const { usage, name, workspaces } = getCommandByDoc(path, DOC_EXT) + + const synopsis = ['```bash', usage] + + const cmdAliases = Object.keys(aliases).reduce((p, c) => { + if (aliases[c] === name) { + p.push(c) + } + return p + }, []) + + if (cmdAliases.length === 1) { + synopsis.push('', `alias: ${cmdAliases[0]}`) + } else if (cmdAliases.length > 1) { + synopsis.push('', `aliases: ${cmdAliases.join(', ')}`) + } + + synopsis.push('```') + + if (!workspaces) { + synopsis.push('', 'Note: This command is unaware of workspaces.') + } + + return src.replace(replacer, synopsis.join('\n')) +} + +const replaceParams = (src, { path }) => { + const { params } = getCommandByDoc(path, DOC_EXT) + const replacer = params && assertPlaceholder(src, path, TAGS.CONFIG) + + if (!params) { + return src + } + + const paramsConfig = params.map((n) => definitions[n].describe()) + + return src.replace(replacer, paramsConfig.join('\n\n')) +} + +const replaceConfig = (src, { path }) => { + const replacer = assertPlaceholder(src, path, TAGS.CONFIG) + + // sort not-deprecated ones to the top + /* istanbul ignore next - typically already sorted in the definitions file, + * but this is here so that our help doc will stay consistent if we decide + * to move them around. */ + const sort = ([keya, { deprecated: depa }], [keyb, { deprecated: depb }]) => { + return depa && !depb ? 1 + : !depa && depb ? 
-1 + : localeCompare(keya, keyb) + } + + const allConfig = Object.entries(definitions).sort(sort) + .map(([_, def]) => def.describe()) + .join('\n\n') + + return src.replace(replacer, allConfig) +} + +const replaceShorthands = (src, { path }) => { + const replacer = assertPlaceholder(src, path, TAGS.SHORTHANDS) + + const sh = Object.entries(shorthands) + .sort(([shorta, expansiona], [shortb, expansionb]) => + // sort by what they're short FOR + localeCompare(expansiona.join(' '), expansionb.join(' ')) || localeCompare(shorta, shortb) + ) + .map(([short, expansion]) => { + // XXX: this is incorrect. we have multicharacter flags like `-iwr` that + // can only be set with a single dash + const dash = short.length === 1 ? '-' : '--' + return `* \`${dash}${short}\`: \`${expansion.join(' ')}\`` + }) + + return src.replace(replacer, sh.join('\n')) +} + +const replaceHelpLinks = (src) => { + // replaces markdown links with equivalent-ish npm help commands + return src.replace( + /\[`?([\w\s-]+)`?\]\(\/(?:commands|configuring-npm|using-npm)\/(?:[\w\s-]+)\)/g, + (_, p1) => { + const term = p1.replace(/npm\s/g, '').replace(/\s+/g, ' ').trim() + const help = `npm help ${term.includes(' ') ? `"${term}"` : term}` + return help + } + ) +} + +const transformMan = (src, { data, unified, remarkParse, remarkMan }) => unified() + .use(remarkParse) + .use(remarkMan) + .processSync(`# ${data.title}(${data.section}) - ${data.description}\n\n${src}`) + .toString() + +const manPath = (name, { data }) => join(`man${data.section}`, `${name}.${data.section}`) + +const transformMd = (src, { frontmatter }) => ['---', frontmatter, '---', '', src].join('\n') + +module.exports = { + DOC_EXT, + TAGS, + paths: { + content: resolve(__dirname, 'content'), + nav: resolve(__dirname, 'content', 'nav.yml'), + template: resolve(__dirname, 'template.html'), + man: resolve(__dirname, '..', '..', 'man'), + html: resolve(__dirname, '..', 'output'), + md: resolve(__dirname, '..', 'content'), + }, + usage: replaceUsage, + params: replaceParams, + config: replaceConfig, + shorthands: replaceShorthands, + version: replaceVersion, + helpLinks: replaceHelpLinks, + man: transformMan, + manPath: manPath, + md: transformMd, + html: transformHTML, +} diff --git a/deps/npm/docs/output/commands/npm-install-test.html b/deps/npm/docs/output/commands/npm-install-test.html index dd78090d2a5db3..b6b16aba6a5ca0 100644 --- a/deps/npm/docs/output/commands/npm-install-test.html +++ b/deps/npm/docs/output/commands/npm-install-test.html @@ -142,7 +142,7 @@
[Generated HTML documentation under deps/npm/docs/output/ is rebuilt here: commands/npm-install-test.html, commands/npm-ls.html, commands/npm-publish.html, commands/npm-sbom.html, commands/npm-unpublish.html, commands/npm.html, commands/npx.html, configuring-npm/npmrc.html, configuring-npm/package-json.html, and using-npm/config.html. The rendered changes mirror the markdown updates above: the new `libc` option, the npm 10.2.4 -> 10.3.0 version bump, and the accompanying wording fixes.]

    diff --git a/deps/npm/lib/commands/install.js b/deps/npm/lib/commands/install.js index 6687ec4371dd82..d04a35fbec2a76 100644 --- a/deps/npm/lib/commands/install.js +++ b/deps/npm/lib/commands/install.js @@ -37,6 +37,7 @@ class Install extends ArboristWorkspaceCmd { 'dry-run', 'cpu', 'os', + 'libc', ...super.params, ] diff --git a/deps/npm/lib/commands/unpublish.js b/deps/npm/lib/commands/unpublish.js index 402f8f30efff85..a9c20900534c3a 100644 --- a/deps/npm/lib/commands/unpublish.js +++ b/deps/npm/lib/commands/unpublish.js @@ -1,7 +1,7 @@ const libaccess = require('libnpmaccess') const libunpub = require('libnpmpublish').unpublish const npa = require('npm-package-arg') -const npmFetch = require('npm-registry-fetch') +const pacote = require('pacote') const pkgJson = require('@npmcli/package-json') const { flatten } = require('@npmcli/config/lib/definitions') @@ -23,12 +23,12 @@ class Unpublish extends BaseCommand { static ignoreImplicitWorkspace = false static async getKeysOfVersions (name, opts) { - const pkgUri = npa(name).escapedName - const json = await npmFetch.json(`${pkgUri}?write=true`, { + const packument = await pacote.packument(name, { ...opts, spec: name, + query: { write: true }, }) - return Object.keys(json.versions) + return Object.keys(packument.versions) } static async completion (args, npm) { @@ -59,7 +59,7 @@ class Unpublish extends BaseCommand { return pkgs } - const versions = await this.getKeysOfVersions(pkgs[0], opts) + const versions = await Unpublish.getKeysOfVersions(pkgs[0], opts) if (!versions.length) { return pkgs } else { @@ -67,20 +67,35 @@ class Unpublish extends BaseCommand { } } - async exec (args) { + async exec (args, { localPrefix } = {}) { if (args.length > 1) { throw this.usageError() } - let spec = args.length && npa(args[0]) + // workspace mode + if (!localPrefix) { + localPrefix = this.npm.localPrefix + } + const force = this.npm.config.get('force') const { silent } = this.npm const dryRun = this.npm.config.get('dry-run') + let spec + if (args.length) { + spec = npa(args[0]) + if (spec.type !== 'version' && spec.rawSpec !== '*') { + throw this.usageError( + 'Can only unpublish a single version, or the entire project.\n' + + 'Tags and ranges are not supported.' + ) + } + } + log.silly('unpublish', 'args[0]', args[0]) log.silly('unpublish', 'spec', spec) - if ((!spec || !spec.rawSpec) && !force) { + if (spec?.rawSpec === '*' && !force) { throw this.usageError( 'Refusing to delete entire project.\n' + 'Run with --force to do this.' @@ -89,69 +104,67 @@ class Unpublish extends BaseCommand { const opts = { ...this.npm.flatOptions } - let pkgName - let pkgVersion let manifest - let manifestErr try { - const { content } = await pkgJson.prepare(this.npm.localPrefix) + const { content } = await pkgJson.prepare(localPrefix) manifest = content } catch (err) { - manifestErr = err - } - if (spec) { - // If cwd has a package.json with a name that matches the package being - // unpublished, load up the publishConfig - if (manifest && manifest.name === spec.name && manifest.publishConfig) { - flatten(manifest.publishConfig, opts) - } - const versions = await Unpublish.getKeysOfVersions(spec.name, opts) - if (versions.length === 1 && !force) { - throw this.usageError(LAST_REMAINING_VERSION_ERROR) - } - pkgName = spec.name - pkgVersion = spec.type === 'version' ? 
`@${spec.rawSpec}` : '' - } else { - if (manifestErr) { - if (manifestErr.code === 'ENOENT' || manifestErr.code === 'ENOTDIR') { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + if (!spec) { + // We needed a local package.json to figure out what package to + // unpublish throw this.usageError() - } else { - throw manifestErr } + } else { + // folks should know if ANY local package.json had a parsing error. + // They may be relying on `publishConfig` to be loading and we don't + // want to ignore errors in that case. + throw err } + } - log.verbose('unpublish', manifest) - + let pkgVersion // for cli output + if (spec) { + pkgVersion = spec.type === 'version' ? `@${spec.rawSpec}` : '' + } else { spec = npa.resolve(manifest.name, manifest.version) - if (manifest.publishConfig) { - flatten(manifest.publishConfig, opts) + log.verbose('unpublish', manifest) + pkgVersion = manifest.version ? `@${manifest.version}` : '' + if (!manifest.version && !force) { + throw this.usageError( + 'Refusing to delete entire project.\n' + + 'Run with --force to do this.' + ) } + } - pkgName = manifest.name - pkgVersion = manifest.version ? `@${manifest.version}` : '' + // If localPrefix has a package.json with a name that matches the package + // being unpublished, load up the publishConfig + if (manifest?.name === spec.name && manifest.publishConfig) { + flatten(manifest.publishConfig, opts) + } + + const versions = await Unpublish.getKeysOfVersions(spec.name, opts) + if (versions.length === 1 && spec.rawSpec === versions[0] && !force) { + throw this.usageError(LAST_REMAINING_VERSION_ERROR) + } + if (versions.length === 1) { + pkgVersion = '' } if (!dryRun) { await otplease(this.npm, opts, o => libunpub(spec, o)) } if (!silent) { - this.npm.output(`- ${pkgName}${pkgVersion}`) + this.npm.output(`- ${spec.name}${pkgVersion}`) } } async execWorkspaces (args) { await this.setWorkspaces() - const force = this.npm.config.get('force') - if (!force) { - throw this.usageError( - 'Refusing to delete entire project(s).\n' + - 'Run with --force to do this.' 
- ) - } - - for (const name of this.workspaceNames) { - await this.exec([name]) + for (const path of this.workspacePaths) { + await this.exec(args, { localPrefix: path }) } } } diff --git a/deps/npm/lib/commands/view.js b/deps/npm/lib/commands/view.js index f118184124db97..214a45e92611c9 100644 --- a/deps/npm/lib/commands/view.js +++ b/deps/npm/lib/commands/view.js @@ -392,20 +392,20 @@ class View extends BaseCommand { if (info.keywords.length) { this.npm.output('') - this.npm.output('keywords:', chalk.yellow(info.keywords.join(', '))) + this.npm.output(`keywords: ${chalk.yellow(info.keywords.join(', '))}`) } if (info.bins.length) { this.npm.output('') - this.npm.output('bin:', chalk.yellow(info.bins.join(', '))) + this.npm.output(`bin: ${chalk.yellow(info.bins.join(', '))}`) } this.npm.output('') this.npm.output('dist') - this.npm.output('.tarball:', info.tarball) - this.npm.output('.shasum:', info.shasum) - info.integrity && this.npm.output('.integrity:', info.integrity) - info.unpackedSize && this.npm.output('.unpackedSize:', info.unpackedSize) + this.npm.output(`.tarball: ${info.tarball}`) + this.npm.output(`.shasum: ${info.shasum}`) + info.integrity && this.npm.output(`.integrity: ${info.integrity}`) + info.unpackedSize && this.npm.output(`.unpackedSize: ${info.unpackedSize}`) const maxDeps = 24 if (info.deps.length) { @@ -420,7 +420,7 @@ class View extends BaseCommand { if (info.maintainers && info.maintainers.length) { this.npm.output('') this.npm.output('maintainers:') - info.maintainers.forEach((u) => this.npm.output('-', u)) + info.maintainers.forEach((u) => this.npm.output(`- ${u}`)) } this.npm.output('') diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js index 14706629e79c2e..0a023f4ac8a302 100644 --- a/deps/npm/lib/npm.js +++ b/deps/npm/lib/npm.js @@ -216,11 +216,13 @@ class Npm { fs.mkdir(this.cache, { recursive: true }) .catch((e) => log.verbose('cache', `could not create cache: ${e}`))) - // its ok if this fails. user might have specified an invalid dir + // it's ok if this fails. user might have specified an invalid dir // which we will tell them about at the end - await this.time('npm:load:mkdirplogs', () => - fs.mkdir(this.logsDir, { recursive: true }) - .catch((e) => log.verbose('logfile', `could not create logs-dir: ${e}`))) + if (this.config.get('logs-max') > 0) { + await this.time('npm:load:mkdirplogs', () => + fs.mkdir(this.logsDir, { recursive: true }) + .catch((e) => log.verbose('logfile', `could not create logs-dir: ${e}`))) + } // note: this MUST be shorter than the actual argv length, because it // uses the same memory, so node will truncate it if it's too long. 
@@ -438,7 +440,7 @@ class Npm { output (...msg) { log.clearProgress() // eslint-disable-next-line no-console - console.log(...msg) + console.log(...msg.map(Display.clean)) log.showProgress() } @@ -476,7 +478,7 @@ class Npm { outputError (...msg) { log.clearProgress() // eslint-disable-next-line no-console - console.error(...msg) + console.error(...msg.map(Display.clean)) log.showProgress() } } diff --git a/deps/npm/lib/utils/display.js b/deps/npm/lib/utils/display.js index a41bf903e9a8fa..c5e5ca2b5b874a 100644 --- a/deps/npm/lib/utils/display.js +++ b/deps/npm/lib/utils/display.js @@ -3,6 +3,44 @@ const npmlog = require('npmlog') const log = require('./log-shim.js') const { explain } = require('./explain-eresolve.js') +const originalCustomInspect = Symbol('npm.display.original.util.inspect.custom') + +// These are most assuredly not a mistake +// https://eslint.org/docs/latest/rules/no-control-regex +/* eslint-disable no-control-regex */ +// \x00 through \x1f, \x7f through \x9f, not including \x09 \x0a \x0b \x0d +const hasC01 = /[\x00-\x08\x0c\x0e-\x1f\x7f-\x9f]/ +// Allows everything up to '[38;5;255m' in 8 bit notation +const allowedSGR = /^\[[0-9;]{0,8}m/ +// '[38;5;255m'.length +const sgrMaxLen = 10 + +// Strips all ANSI C0 and C1 control characters (except for SGR up to 8 bit) +function stripC01 (str) { + if (!hasC01.test(str)) { + return str + } + let result = '' + for (let i = 0; i < str.length; i++) { + const char = str[i] + const code = char.charCodeAt(0) + if (!hasC01.test(char)) { + // Most characters are in this set so continue early if we can + result = `${result}${char}` + } else if (code === 27 && allowedSGR.test(str.slice(i + 1, i + sgrMaxLen + 1))) { + // \x1b with allowed SGR + result = `${result}\x1b` + } else if (code <= 31) { + // escape all other C0 control characters besides \x7f + result = `${result}^${String.fromCharCode(code + 64)}` + } else { + // hasC01 ensures this is now a C1 control character or \x7f + result = `${result}^${String.fromCharCode(code - 64)}` + } + } + return result +} + class Display { #chalk = null @@ -12,6 +50,57 @@ class Display { log.pause() } + static clean (output) { + if (typeof output === 'string') { + // Strings are cleaned inline + return stripC01(output) + } + if (!output || typeof output !== 'object') { + // Numbers, booleans, null all end up here and don't need cleaning + return output + } + // output && typeof output === 'object' + // We can't use hasOwn et al for detecting the original but we can use it + // for detecting the properties we set via defineProperty + if ( + output[inspect.custom] && + (!Object.hasOwn(output, originalCustomInspect)) + ) { + // Save the old one if we didn't already do it. 
+ Object.defineProperty(output, originalCustomInspect, { + value: output[inspect.custom], + writable: true, + }) + } + if (!Object.hasOwn(output, originalCustomInspect)) { + // Put a dummy one in for when we run multiple times on the same object + Object.defineProperty(output, originalCustomInspect, { + value: function () { + return this + }, + writable: true, + }) + } + // Set the custom inspect to our own function + Object.defineProperty(output, inspect.custom, { + value: function () { + const toClean = this[originalCustomInspect]() + // Custom inspect can return things other than objects, check type again + if (typeof toClean === 'string') { + // Strings are cleaned inline + return stripC01(toClean) + } + if (!toClean || typeof toClean !== 'object') { + // Numbers, booleans, null all end up here and don't need cleaning + return toClean + } + return stripC01(inspect(toClean, { customInspect: false })) + }, + writable: true, + }) + return output + } + on () { process.on('log', this.#logHandler) } @@ -103,7 +192,7 @@ class Display { // Explicitly call these on npmlog and not log shim // This is the final place we should call npmlog before removing it. #npmlog (level, ...args) { - npmlog[level](...args) + npmlog[level](...args.map(Display.clean)) } // Also (and this is a really inexcusable kludge), we patch the @@ -112,8 +201,8 @@ class Display { // highly abbreviated explanation of what's being overridden. #eresolveWarn (level, heading, message, expl) { if (level === 'warn' && - heading === 'ERESOLVE' && - expl && typeof expl === 'object' + heading === 'ERESOLVE' && + expl && typeof expl === 'object' ) { this.#npmlog(level, heading, message) this.#npmlog(level, '', explain(expl, this.#chalk, 2)) diff --git a/deps/npm/lib/utils/log-file.js b/deps/npm/lib/utils/log-file.js index 84f86983639ce6..8c06f5647e761e 100644 --- a/deps/npm/lib/utils/log-file.js +++ b/deps/npm/lib/utils/log-file.js @@ -6,6 +6,7 @@ const { Minipass } = require('minipass') const fsMiniPass = require('fs-minipass') const fs = require('fs/promises') const log = require('./log-shim') +const Display = require('./display') const padZero = (n, length) => n.toString().padStart(length.toString().length, '0') const globify = pattern => pattern.split('\\').join('/') @@ -49,6 +50,7 @@ class LogFiles { return format(...args) .split(/\r?\n/) + .map(Display.clean) .reduce((lines, line) => lines += prefix + (line ? ' ' : '') + line + os.EOL, '' diff --git a/deps/npm/lib/utils/open-url-prompt.js b/deps/npm/lib/utils/open-url-prompt.js index df0c9709c07744..71a68c253c0505 100644 --- a/deps/npm/lib/utils/open-url-prompt.js +++ b/deps/npm/lib/utils/open-url-prompt.js @@ -1,5 +1,5 @@ const readline = require('readline') -const promiseSpawn = require('@npmcli/promise-spawn') +const open = require('./open-url.js') function print (npm, title, url) { const json = npm.config.get('json') @@ -63,8 +63,7 @@ const promptOpen = async (npm, url, title, prompt, emitter) => { return } - const command = browser === true ? null : browser - await promiseSpawn.open(url, { command }) + await open(npm, url, 'Browser unavailable. Please open the URL manually') } module.exports = promptOpen diff --git a/deps/npm/lib/utils/reify-output.js b/deps/npm/lib/utils/reify-output.js index 22036dc8110cfc..3b79fc2be1898e 100644 --- a/deps/npm/lib/utils/reify-output.js +++ b/deps/npm/lib/utils/reify-output.js @@ -76,7 +76,7 @@ const reifyOutput = (npm, arb) => { summary.audit = npm.command === 'audit' ? 
auditReport : auditReport.toJSON().metadata } - npm.output(JSON.stringify(summary, 0, 2)) + npm.output(JSON.stringify(summary, null, 2)) } else { packagesChangedMessage(npm, summary) packagesFundingMessage(npm, summary) diff --git a/deps/npm/lib/utils/sbom-spdx.js b/deps/npm/lib/utils/sbom-spdx.js index 8c91147cb4102b..fdddd8944f32d1 100644 --- a/deps/npm/lib/utils/sbom-spdx.js +++ b/deps/npm/lib/utils/sbom-spdx.js @@ -11,10 +11,10 @@ const SPDX_IDENTIFER = 'SPDXRef-DOCUMENT' const NO_ASSERTION = 'NOASSERTION' const REL_DESCRIBES = 'DESCRIBES' -const REL_PREREQ = 'HAS_PREREQUISITE' +const REL_PREREQ = 'PREREQUISITE_FOR' const REL_OPTIONAL = 'OPTIONAL_DEPENDENCY_OF' const REL_DEV = 'DEV_DEPENDENCY_OF' -const REL_DEP = 'DEPENDS_ON' +const REL_DEP = 'DEPENDENCY_OF' const REF_CAT_PACKAGE_MANAGER = 'PACKAGE-MANAGER' const REF_TYPE_PURL = 'purl' @@ -147,8 +147,8 @@ const toSpdxRelationship = (node, edge) => { } return { - spdxElementId: toSpdxID(node), - relatedSpdxElement: toSpdxID(edge.to), + spdxElementId: toSpdxID(edge.to), + relatedSpdxElement: toSpdxID(node), relationshipType: type, } } diff --git a/deps/npm/lib/utils/update-notifier.js b/deps/npm/lib/utils/update-notifier.js index 2c839bfeff8436..1b3e21d878b94d 100644 --- a/deps/npm/lib/utils/update-notifier.js +++ b/deps/npm/lib/utils/update-notifier.js @@ -24,6 +24,7 @@ const updateCheck = async (npm, spec, version, current) => { // always prefer latest, even if doing --tag=whatever on the cmd defaultTag: 'latest', ...npm.flatOptions, + cache: false, }).catch(() => null) // if pacote failed, give up diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1 index 53fd371f3a67e2..01e92a1f4ede44 100644 --- a/deps/npm/man/man1/npm-access.1 +++ b/deps/npm/man/man1/npm-access.1 @@ -1,4 +1,4 @@ -.TH "NPM-ACCESS" "1" "November 2023" "" "" +.TH "NPM-ACCESS" "1" "January 2024" "" "" .SH "NAME" \fBnpm-access\fR - Set access level on published packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1 index 2c55c56074528d..48ac7056d06ad9 100644 --- a/deps/npm/man/man1/npm-adduser.1 +++ b/deps/npm/man/man1/npm-adduser.1 @@ -1,4 +1,4 @@ -.TH "NPM-ADDUSER" "1" "November 2023" "" "" +.TH "NPM-ADDUSER" "1" "January 2024" "" "" .SH "NAME" \fBnpm-adduser\fR - Add a registry user account .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1 index c3b84eef16325b..10f382f9645c4f 100644 --- a/deps/npm/man/man1/npm-audit.1 +++ b/deps/npm/man/man1/npm-audit.1 @@ -1,4 +1,4 @@ -.TH "NPM-AUDIT" "1" "November 2023" "" "" +.TH "NPM-AUDIT" "1" "January 2024" "" "" .SH "NAME" \fBnpm-audit\fR - Run a security audit .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1 index 16a65feaa6d038..a885517d062034 100644 --- a/deps/npm/man/man1/npm-bugs.1 +++ b/deps/npm/man/man1/npm-bugs.1 @@ -1,4 +1,4 @@ -.TH "NPM-BUGS" "1" "November 2023" "" "" +.TH "NPM-BUGS" "1" "January 2024" "" "" .SH "NAME" \fBnpm-bugs\fR - Report bugs for a package in a web browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1 index 8f303d21c2c455..2de4d40db0efb3 100644 --- a/deps/npm/man/man1/npm-cache.1 +++ b/deps/npm/man/man1/npm-cache.1 @@ -1,4 +1,4 @@ -.TH "NPM-CACHE" "1" "November 2023" "" "" +.TH "NPM-CACHE" "1" "January 2024" "" "" .SH "NAME" \fBnpm-cache\fR - Manipulates packages cache .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1 index 4d7a7a0c658742..4f28c62b79f9e5 100644 --- 
a/deps/npm/man/man1/npm-ci.1 +++ b/deps/npm/man/man1/npm-ci.1 @@ -1,4 +1,4 @@ -.TH "NPM-CI" "1" "November 2023" "" "" +.TH "NPM-CI" "1" "January 2024" "" "" .SH "NAME" \fBnpm-ci\fR - Clean install a project .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1 index 2597a926a124ac..33afabe5da5380 100644 --- a/deps/npm/man/man1/npm-completion.1 +++ b/deps/npm/man/man1/npm-completion.1 @@ -1,4 +1,4 @@ -.TH "NPM-COMPLETION" "1" "November 2023" "" "" +.TH "NPM-COMPLETION" "1" "January 2024" "" "" .SH "NAME" \fBnpm-completion\fR - Tab Completion for npm .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1 index 02abc5c9d8f1d7..469bb97e36f668 100644 --- a/deps/npm/man/man1/npm-config.1 +++ b/deps/npm/man/man1/npm-config.1 @@ -1,4 +1,4 @@ -.TH "NPM-CONFIG" "1" "November 2023" "" "" +.TH "NPM-CONFIG" "1" "January 2024" "" "" .SH "NAME" \fBnpm-config\fR - Manage the npm configuration files .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1 index 6ed4db0a520e0d..0ffc5c2fe43c36 100644 --- a/deps/npm/man/man1/npm-dedupe.1 +++ b/deps/npm/man/man1/npm-dedupe.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEDUPE" "1" "November 2023" "" "" +.TH "NPM-DEDUPE" "1" "January 2024" "" "" .SH "NAME" \fBnpm-dedupe\fR - Reduce duplication in the package tree .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1 index faea73b948544f..fe2e7d0958b8dd 100644 --- a/deps/npm/man/man1/npm-deprecate.1 +++ b/deps/npm/man/man1/npm-deprecate.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEPRECATE" "1" "November 2023" "" "" +.TH "NPM-DEPRECATE" "1" "January 2024" "" "" .SH "NAME" \fBnpm-deprecate\fR - Deprecate a version of a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-diff.1 b/deps/npm/man/man1/npm-diff.1 index 217e2ebf607019..7e07d60da652d4 100644 --- a/deps/npm/man/man1/npm-diff.1 +++ b/deps/npm/man/man1/npm-diff.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIFF" "1" "November 2023" "" "" +.TH "NPM-DIFF" "1" "January 2024" "" "" .SH "NAME" \fBnpm-diff\fR - The registry diff command .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1 index 668b8963a57384..c89c03b4e0307b 100644 --- a/deps/npm/man/man1/npm-dist-tag.1 +++ b/deps/npm/man/man1/npm-dist-tag.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIST-TAG" "1" "November 2023" "" "" +.TH "NPM-DIST-TAG" "1" "January 2024" "" "" .SH "NAME" \fBnpm-dist-tag\fR - Modify package distribution tags .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1 index 0f0d0f731b9bda..2c15c6f78838e2 100644 --- a/deps/npm/man/man1/npm-docs.1 +++ b/deps/npm/man/man1/npm-docs.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCS" "1" "November 2023" "" "" +.TH "NPM-DOCS" "1" "January 2024" "" "" .SH "NAME" \fBnpm-docs\fR - Open documentation for a package in a web browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1 index 4d2c2aa00f4e0f..16c1d1d841ad7d 100644 --- a/deps/npm/man/man1/npm-doctor.1 +++ b/deps/npm/man/man1/npm-doctor.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCTOR" "1" "November 2023" "" "" +.TH "NPM-DOCTOR" "1" "January 2024" "" "" .SH "NAME" \fBnpm-doctor\fR - Check the health of your npm environment .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1 index cc1e9b5c8ede7f..23a92b8a4d3ae0 100644 --- a/deps/npm/man/man1/npm-edit.1 +++ b/deps/npm/man/man1/npm-edit.1 @@ -1,4 +1,4 @@ -.TH "NPM-EDIT" "1" "November 2023" "" "" +.TH "NPM-EDIT" "1" "January 
2024" "" "" .SH "NAME" \fBnpm-edit\fR - Edit an installed package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-exec.1 b/deps/npm/man/man1/npm-exec.1 index 8cb1010ffb552a..53512a4daecc74 100644 --- a/deps/npm/man/man1/npm-exec.1 +++ b/deps/npm/man/man1/npm-exec.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXEC" "1" "November 2023" "" "" +.TH "NPM-EXEC" "1" "January 2024" "" "" .SH "NAME" \fBnpm-exec\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-explain.1 b/deps/npm/man/man1/npm-explain.1 index 6070bb24a24386..ec20107b04e06c 100644 --- a/deps/npm/man/man1/npm-explain.1 +++ b/deps/npm/man/man1/npm-explain.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLAIN" "1" "November 2023" "" "" +.TH "NPM-EXPLAIN" "1" "January 2024" "" "" .SH "NAME" \fBnpm-explain\fR - Explain installed packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1 index 27371aa6c9e9e7..f6bb2c51f747d6 100644 --- a/deps/npm/man/man1/npm-explore.1 +++ b/deps/npm/man/man1/npm-explore.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLORE" "1" "November 2023" "" "" +.TH "NPM-EXPLORE" "1" "January 2024" "" "" .SH "NAME" \fBnpm-explore\fR - Browse an installed package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1 index d27a252baaca8d..b56814b8d2ac68 100644 --- a/deps/npm/man/man1/npm-find-dupes.1 +++ b/deps/npm/man/man1/npm-find-dupes.1 @@ -1,4 +1,4 @@ -.TH "NPM-FIND-DUPES" "1" "November 2023" "" "" +.TH "NPM-FIND-DUPES" "1" "January 2024" "" "" .SH "NAME" \fBnpm-find-dupes\fR - Find duplication in the package tree .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-fund.1 b/deps/npm/man/man1/npm-fund.1 index 88bf7caa0d5d83..a9c1ae61f5ef4c 100644 --- a/deps/npm/man/man1/npm-fund.1 +++ b/deps/npm/man/man1/npm-fund.1 @@ -1,4 +1,4 @@ -.TH "NPM-FUND" "1" "November 2023" "" "" +.TH "NPM-FUND" "1" "January 2024" "" "" .SH "NAME" \fBnpm-fund\fR - Retrieve funding information .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1 index b98088152314b8..9396188aa721d3 100644 --- a/deps/npm/man/man1/npm-help-search.1 +++ b/deps/npm/man/man1/npm-help-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP-SEARCH" "1" "November 2023" "" "" +.TH "NPM-HELP-SEARCH" "1" "January 2024" "" "" .SH "NAME" \fBnpm-help-search\fR - Search npm help documentation .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1 index 949e54c44eab08..e3b322850a728a 100644 --- a/deps/npm/man/man1/npm-help.1 +++ b/deps/npm/man/man1/npm-help.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP" "1" "November 2023" "" "" +.TH "NPM-HELP" "1" "January 2024" "" "" .SH "NAME" \fBnpm-help\fR - Get help on npm .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-hook.1 b/deps/npm/man/man1/npm-hook.1 index 145406b9d09877..c544b23a94a89f 100644 --- a/deps/npm/man/man1/npm-hook.1 +++ b/deps/npm/man/man1/npm-hook.1 @@ -1,4 +1,4 @@ -.TH "NPM-HOOK" "1" "November 2023" "" "" +.TH "NPM-HOOK" "1" "January 2024" "" "" .SH "NAME" \fBnpm-hook\fR - Manage registry hooks .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1 index a86a09b910a810..ce674e7d610308 100644 --- a/deps/npm/man/man1/npm-init.1 +++ b/deps/npm/man/man1/npm-init.1 @@ -1,4 +1,4 @@ -.TH "NPM-INIT" "1" "November 2023" "" "" +.TH "NPM-INIT" "1" "January 2024" "" "" .SH "NAME" \fBnpm-init\fR - Create a package.json file .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-install-ci-test.1 b/deps/npm/man/man1/npm-install-ci-test.1 index 
21320f9378b7d8..cfb93cc754a5a1 100644 --- a/deps/npm/man/man1/npm-install-ci-test.1 +++ b/deps/npm/man/man1/npm-install-ci-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-CI-TEST" "1" "November 2023" "" "" +.TH "NPM-INSTALL-CI-TEST" "1" "January 2024" "" "" .SH "NAME" \fBnpm-install-ci-test\fR - Install a project with a clean slate and run tests .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1 index 3e6efb819bc6e2..9a7b69ff8a4571 100644 --- a/deps/npm/man/man1/npm-install-test.1 +++ b/deps/npm/man/man1/npm-install-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-TEST" "1" "November 2023" "" "" +.TH "NPM-INSTALL-TEST" "1" "January 2024" "" "" .SH "NAME" \fBnpm-install-test\fR - Install package(s) and run tests .SS "Synopsis" @@ -257,6 +257,16 @@ Type: null or String .P Override OS of native modules to install. Acceptable values are same as \fBos\fR field of package.json, which comes from \fBprocess.platform\fR. +.SS "\fBlibc\fR" +.RS 0 +.IP \(bu 4 +Default: null +.IP \(bu 4 +Type: null or String +.RE 0 + +.P +Override libc of native modules to install. Acceptable values are same as \fBlibc\fR field of package.json .SS "\fBworkspace\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1 index 0493f5dc9577f0..bd1c0ded701450 100644 --- a/deps/npm/man/man1/npm-install.1 +++ b/deps/npm/man/man1/npm-install.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL" "1" "November 2023" "" "" +.TH "NPM-INSTALL" "1" "January 2024" "" "" .SH "NAME" \fBnpm-install\fR - Install a package .SS "Synopsis" @@ -619,6 +619,16 @@ Type: null or String .P Override OS of native modules to install. Acceptable values are same as \fBos\fR field of package.json, which comes from \fBprocess.platform\fR. +.SS "\fBlibc\fR" +.RS 0 +.IP \(bu 4 +Default: null +.IP \(bu 4 +Type: null or String +.RE 0 + +.P +Override libc of native modules to install. 
Acceptable values are same as \fBlibc\fR field of package.json .SS "\fBworkspace\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1 index 491cc6e45f066d..3403d178525184 100644 --- a/deps/npm/man/man1/npm-link.1 +++ b/deps/npm/man/man1/npm-link.1 @@ -1,4 +1,4 @@ -.TH "NPM-LINK" "1" "November 2023" "" "" +.TH "NPM-LINK" "1" "January 2024" "" "" .SH "NAME" \fBnpm-link\fR - Symlink a package folder .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-login.1 b/deps/npm/man/man1/npm-login.1 index dff4c54b206299..f3d4934c85b20e 100644 --- a/deps/npm/man/man1/npm-login.1 +++ b/deps/npm/man/man1/npm-login.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGIN" "1" "November 2023" "" "" +.TH "NPM-LOGIN" "1" "January 2024" "" "" .SH "NAME" \fBnpm-login\fR - Login to a registry user account .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1 index f4e3525a9a43ad..b8f521280db4cb 100644 --- a/deps/npm/man/man1/npm-logout.1 +++ b/deps/npm/man/man1/npm-logout.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGOUT" "1" "November 2023" "" "" +.TH "NPM-LOGOUT" "1" "January 2024" "" "" .SH "NAME" \fBnpm-logout\fR - Log out of the registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 7b8ea0771e1a43..5e314fe40308b0 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -1,4 +1,4 @@ -.TH "NPM-LS" "1" "November 2023" "" "" +.TH "NPM-LS" "1" "January 2024" "" "" .SH "NAME" \fBnpm-ls\fR - List installed packages .SS "Synopsis" @@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit .P .RS 2 .nf -npm@10.2.4 /path/to/npm +npm@10.3.0 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 .fi diff --git a/deps/npm/man/man1/npm-org.1 b/deps/npm/man/man1/npm-org.1 index 182f84af28895f..c4666d623d5f69 100644 --- a/deps/npm/man/man1/npm-org.1 +++ b/deps/npm/man/man1/npm-org.1 @@ -1,4 +1,4 @@ -.TH "NPM-ORG" "1" "November 2023" "" "" +.TH "NPM-ORG" "1" "January 2024" "" "" .SH "NAME" \fBnpm-org\fR - Manage orgs .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1 index 84351aeb35d4b9..186dfaeade934f 100644 --- a/deps/npm/man/man1/npm-outdated.1 +++ b/deps/npm/man/man1/npm-outdated.1 @@ -1,4 +1,4 @@ -.TH "NPM-OUTDATED" "1" "November 2023" "" "" +.TH "NPM-OUTDATED" "1" "January 2024" "" "" .SH "NAME" \fBnpm-outdated\fR - Check for outdated packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1 index ceda18a6c31758..02afd6627f2c04 100644 --- a/deps/npm/man/man1/npm-owner.1 +++ b/deps/npm/man/man1/npm-owner.1 @@ -1,4 +1,4 @@ -.TH "NPM-OWNER" "1" "November 2023" "" "" +.TH "NPM-OWNER" "1" "January 2024" "" "" .SH "NAME" \fBnpm-owner\fR - Manage package owners .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1 index 050c208ffd64e8..075f61681ec84d 100644 --- a/deps/npm/man/man1/npm-pack.1 +++ b/deps/npm/man/man1/npm-pack.1 @@ -1,4 +1,4 @@ -.TH "NPM-PACK" "1" "November 2023" "" "" +.TH "NPM-PACK" "1" "January 2024" "" "" .SH "NAME" \fBnpm-pack\fR - Create a tarball from a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1 index 6c6e1e28281664..4abefd19837e32 100644 --- a/deps/npm/man/man1/npm-ping.1 +++ b/deps/npm/man/man1/npm-ping.1 @@ -1,4 +1,4 @@ -.TH "NPM-PING" "1" "November 2023" "" "" +.TH "NPM-PING" "1" "January 2024" "" "" .SH "NAME" \fBnpm-ping\fR - Ping npm registry .SS "Synopsis" diff --git 
a/deps/npm/man/man1/npm-pkg.1 b/deps/npm/man/man1/npm-pkg.1 index 40b0329fef13f1..c70c95fc27d1ab 100644 --- a/deps/npm/man/man1/npm-pkg.1 +++ b/deps/npm/man/man1/npm-pkg.1 @@ -1,4 +1,4 @@ -.TH "NPM-PKG" "1" "November 2023" "" "" +.TH "NPM-PKG" "1" "January 2024" "" "" .SH "NAME" \fBnpm-pkg\fR - Manages your package.json .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1 index a0ab1560881243..dabc60e7e6ef25 100644 --- a/deps/npm/man/man1/npm-prefix.1 +++ b/deps/npm/man/man1/npm-prefix.1 @@ -1,4 +1,4 @@ -.TH "NPM-PREFIX" "1" "November 2023" "" "" +.TH "NPM-PREFIX" "1" "January 2024" "" "" .SH "NAME" \fBnpm-prefix\fR - Display prefix .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-profile.1 b/deps/npm/man/man1/npm-profile.1 index 834c26428d94b8..2966cf66d1c3c0 100644 --- a/deps/npm/man/man1/npm-profile.1 +++ b/deps/npm/man/man1/npm-profile.1 @@ -1,4 +1,4 @@ -.TH "NPM-PROFILE" "1" "November 2023" "" "" +.TH "NPM-PROFILE" "1" "January 2024" "" "" .SH "NAME" \fBnpm-profile\fR - Change settings on your registry profile .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1 index 32f57580e3fb49..af09d9962f7c93 100644 --- a/deps/npm/man/man1/npm-prune.1 +++ b/deps/npm/man/man1/npm-prune.1 @@ -1,4 +1,4 @@ -.TH "NPM-PRUNE" "1" "November 2023" "" "" +.TH "NPM-PRUNE" "1" "January 2024" "" "" .SH "NAME" \fBnpm-prune\fR - Remove extraneous packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1 index 5d8444e2fa2d04..deaead3259a2e1 100644 --- a/deps/npm/man/man1/npm-publish.1 +++ b/deps/npm/man/man1/npm-publish.1 @@ -1,4 +1,4 @@ -.TH "NPM-PUBLISH" "1" "November 2023" "" "" +.TH "NPM-PUBLISH" "1" "January 2024" "" "" .SH "NAME" \fBnpm-publish\fR - Publish a package .SS "Synopsis" @@ -14,7 +14,7 @@ Publishes a package to the registry so that it can be installed by name. .P By default npm will publish to the public registry. This can be overridden by specifying a different default registry or using a npm help scope in the name, combined with a scope-configured registry (see \fB\fBpackage.json\fR\fR \fI\(la/configuring-npm/package-json\(ra\fR). 
.P -A \fBpackage\fR is interpreted the same way as other commands (like \fBnpm install\fR and can be: +A \fBpackage\fR is interpreted the same way as other commands (like \fBnpm install\fR) and can be: .RS 0 .IP \(bu 4 a) a folder containing a program described by a \fB\fBpackage.json\fR\fR \fI\(la/configuring-npm/package-json\(ra\fR file diff --git a/deps/npm/man/man1/npm-query.1 b/deps/npm/man/man1/npm-query.1 index 947a7b78397571..7730ecea472b6c 100644 --- a/deps/npm/man/man1/npm-query.1 +++ b/deps/npm/man/man1/npm-query.1 @@ -1,4 +1,4 @@ -.TH "NPM-QUERY" "1" "November 2023" "" "" +.TH "NPM-QUERY" "1" "January 2024" "" "" .SH "NAME" \fBnpm-query\fR - Dependency selector query .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1 index b00eb2816eb116..f3e0c3ff4c6f0e 100644 --- a/deps/npm/man/man1/npm-rebuild.1 +++ b/deps/npm/man/man1/npm-rebuild.1 @@ -1,4 +1,4 @@ -.TH "NPM-REBUILD" "1" "November 2023" "" "" +.TH "NPM-REBUILD" "1" "January 2024" "" "" .SH "NAME" \fBnpm-rebuild\fR - Rebuild a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1 index 1d89a9e85c7105..1da82d9b3b52f1 100644 --- a/deps/npm/man/man1/npm-repo.1 +++ b/deps/npm/man/man1/npm-repo.1 @@ -1,4 +1,4 @@ -.TH "NPM-REPO" "1" "November 2023" "" "" +.TH "NPM-REPO" "1" "January 2024" "" "" .SH "NAME" \fBnpm-repo\fR - Open package repository page in the browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1 index 5f85577a45ac12..459d99a14026c4 100644 --- a/deps/npm/man/man1/npm-restart.1 +++ b/deps/npm/man/man1/npm-restart.1 @@ -1,4 +1,4 @@ -.TH "NPM-RESTART" "1" "November 2023" "" "" +.TH "NPM-RESTART" "1" "January 2024" "" "" .SH "NAME" \fBnpm-restart\fR - Restart a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1 index 1dce718fc0f961..6761a6608a4f5b 100644 --- a/deps/npm/man/man1/npm-root.1 +++ b/deps/npm/man/man1/npm-root.1 @@ -1,4 +1,4 @@ -.TH "NPM-ROOT" "1" "November 2023" "" "" +.TH "NPM-ROOT" "1" "January 2024" "" "" .SH "NAME" \fBnpm-root\fR - Display npm root .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1 index 3718c9293d45a1..66fcf9fab78ba2 100644 --- a/deps/npm/man/man1/npm-run-script.1 +++ b/deps/npm/man/man1/npm-run-script.1 @@ -1,4 +1,4 @@ -.TH "NPM-RUN-SCRIPT" "1" "November 2023" "" "" +.TH "NPM-RUN-SCRIPT" "1" "January 2024" "" "" .SH "NAME" \fBnpm-run-script\fR - Run arbitrary package scripts .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-sbom.1 b/deps/npm/man/man1/npm-sbom.1 index 21a18ac91dc953..b89c3ff35dc049 100644 --- a/deps/npm/man/man1/npm-sbom.1 +++ b/deps/npm/man/man1/npm-sbom.1 @@ -1,4 +1,4 @@ -.TH "NPM-SBOM" "1" "November 2023" "" "" +.TH "NPM-SBOM" "1" "January 2024" "" "" .SH "NAME" \fBnpm-sbom\fR - Generate a Software Bill of Materials (SBOM) .SS "Synopsis" @@ -256,7 +256,7 @@ Type: "library", "application", or "framework" .RE 0 .P -The type of package described by the generated SBOM. For SPDX, this is the value for the \fBprimaryPackagePurpose\fR fieled. For CycloneDX, this is the value for the \fBtype\fR field. +The type of package described by the generated SBOM. For SPDX, this is the value for the \fBprimaryPackagePurpose\fR field. For CycloneDX, this is the value for the \fBtype\fR field. 
.SS "\fBworkspace\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1 index 5aaeabd24af938..d8ffc4bbe4e8e2 100644 --- a/deps/npm/man/man1/npm-search.1 +++ b/deps/npm/man/man1/npm-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-SEARCH" "1" "November 2023" "" "" +.TH "NPM-SEARCH" "1" "January 2024" "" "" .SH "NAME" \fBnpm-search\fR - Search for packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1 index 7e29d4151728c3..d7df87411550f5 100644 --- a/deps/npm/man/man1/npm-shrinkwrap.1 +++ b/deps/npm/man/man1/npm-shrinkwrap.1 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP" "1" "November 2023" "" "" +.TH "NPM-SHRINKWRAP" "1" "January 2024" "" "" .SH "NAME" \fBnpm-shrinkwrap\fR - Lock down dependency versions for publication .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1 index b6a3f7e8f174a9..610ec54213fe62 100644 --- a/deps/npm/man/man1/npm-star.1 +++ b/deps/npm/man/man1/npm-star.1 @@ -1,4 +1,4 @@ -.TH "NPM-STAR" "1" "November 2023" "" "" +.TH "NPM-STAR" "1" "January 2024" "" "" .SH "NAME" \fBnpm-star\fR - Mark your favorite packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1 index ad66d694917f68..eb2391b790d625 100644 --- a/deps/npm/man/man1/npm-stars.1 +++ b/deps/npm/man/man1/npm-stars.1 @@ -1,4 +1,4 @@ -.TH "NPM-STARS" "1" "November 2023" "" "" +.TH "NPM-STARS" "1" "January 2024" "" "" .SH "NAME" \fBnpm-stars\fR - View packages marked as favorites .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1 index f35690a145e58c..a4cf36fdcacb75 100644 --- a/deps/npm/man/man1/npm-start.1 +++ b/deps/npm/man/man1/npm-start.1 @@ -1,4 +1,4 @@ -.TH "NPM-START" "1" "November 2023" "" "" +.TH "NPM-START" "1" "January 2024" "" "" .SH "NAME" \fBnpm-start\fR - Start a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1 index 2913641d2845d3..8fe1df7fe1f190 100644 --- a/deps/npm/man/man1/npm-stop.1 +++ b/deps/npm/man/man1/npm-stop.1 @@ -1,4 +1,4 @@ -.TH "NPM-STOP" "1" "November 2023" "" "" +.TH "NPM-STOP" "1" "January 2024" "" "" .SH "NAME" \fBnpm-stop\fR - Stop a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1 index 729e9d3194dd49..c61904afc1cc82 100644 --- a/deps/npm/man/man1/npm-team.1 +++ b/deps/npm/man/man1/npm-team.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEAM" "1" "November 2023" "" "" +.TH "NPM-TEAM" "1" "January 2024" "" "" .SH "NAME" \fBnpm-team\fR - Manage organization teams and team memberships .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1 index bfecac60a9a113..865dfc5ea794ef 100644 --- a/deps/npm/man/man1/npm-test.1 +++ b/deps/npm/man/man1/npm-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEST" "1" "November 2023" "" "" +.TH "NPM-TEST" "1" "January 2024" "" "" .SH "NAME" \fBnpm-test\fR - Test a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1 index 84e0e8ea0220a3..0a42bd48c5fcbb 100644 --- a/deps/npm/man/man1/npm-token.1 +++ b/deps/npm/man/man1/npm-token.1 @@ -1,4 +1,4 @@ -.TH "NPM-TOKEN" "1" "November 2023" "" "" +.TH "NPM-TOKEN" "1" "January 2024" "" "" .SH "NAME" \fBnpm-token\fR - Manage your authentication tokens .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1 index 749ae7533a4f5f..13be7457cf72f5 100644 --- a/deps/npm/man/man1/npm-uninstall.1 +++ b/deps/npm/man/man1/npm-uninstall.1 @@ -1,4 +1,4 @@ 
-.TH "NPM-UNINSTALL" "1" "November 2023" "" "" +.TH "NPM-UNINSTALL" "1" "January 2024" "" "" .SH "NAME" \fBnpm-uninstall\fR - Remove a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1 index 0b059350966a00..12a2fd78133444 100644 --- a/deps/npm/man/man1/npm-unpublish.1 +++ b/deps/npm/man/man1/npm-unpublish.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNPUBLISH" "1" "November 2023" "" "" +.TH "NPM-UNPUBLISH" "1" "January 2024" "" "" .SH "NAME" \fBnpm-unpublish\fR - Remove a package from the registry .SS "Synopsis" @@ -19,7 +19,9 @@ This removes a package version from the registry, deleting its entry and removin .P The npm registry will return an error if you are not npm help "logged in". .P -If you do not specify a version or if you remove all of a package's versions then the registry will remove the root package entry entirely. +If you do not specify a package name at all, the name and version to be unpublished will be pulled from the project in the current directory. +.P +If you specify a package name but do not specify a version or if you remove all of a package's versions then the registry will remove the root package entry entirely. .P Even if you unpublish a package version, that specific name and version combination can never be reused. In order to publish the package again, you must use a new version number. If you unpublish the entire package, you may not publish any new versions of that package until 24 hours have passed. .SS "Configuration" diff --git a/deps/npm/man/man1/npm-unstar.1 b/deps/npm/man/man1/npm-unstar.1 index 0e44709c83da65..8fdc2941fb60f1 100644 --- a/deps/npm/man/man1/npm-unstar.1 +++ b/deps/npm/man/man1/npm-unstar.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNSTAR" "1" "November 2023" "" "" +.TH "NPM-UNSTAR" "1" "January 2024" "" "" .SH "NAME" \fBnpm-unstar\fR - Remove an item from your favorite packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1 index 0b247f80d8d9f8..742fbe7f7547af 100644 --- a/deps/npm/man/man1/npm-update.1 +++ b/deps/npm/man/man1/npm-update.1 @@ -1,4 +1,4 @@ -.TH "NPM-UPDATE" "1" "November 2023" "" "" +.TH "NPM-UPDATE" "1" "January 2024" "" "" .SH "NAME" \fBnpm-update\fR - Update packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1 index 07f8352de3c7ce..f4a976fafa39d9 100644 --- a/deps/npm/man/man1/npm-version.1 +++ b/deps/npm/man/man1/npm-version.1 @@ -1,4 +1,4 @@ -.TH "NPM-VERSION" "1" "November 2023" "" "" +.TH "NPM-VERSION" "1" "January 2024" "" "" .SH "NAME" \fBnpm-version\fR - Bump a package version .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1 index 70c435cdc929c6..7ce90e336e3154 100644 --- a/deps/npm/man/man1/npm-view.1 +++ b/deps/npm/man/man1/npm-view.1 @@ -1,4 +1,4 @@ -.TH "NPM-VIEW" "1" "November 2023" "" "" +.TH "NPM-VIEW" "1" "January 2024" "" "" .SH "NAME" \fBnpm-view\fR - View registry info .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1 index 6e182bbff873c2..bead7976024f53 100644 --- a/deps/npm/man/man1/npm-whoami.1 +++ b/deps/npm/man/man1/npm-whoami.1 @@ -1,4 +1,4 @@ -.TH "NPM-WHOAMI" "1" "November 2023" "" "" +.TH "NPM-WHOAMI" "1" "January 2024" "" "" .SH "NAME" \fBnpm-whoami\fR - Display npm username .SS "Synopsis" diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index 05d06e9f7758a4..a1844237f29f23 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -1,4 +1,4 @@ -.TH "NPM" "1" "November 
2023" "" "" +.TH "NPM" "1" "January 2024" "" "" .SH "NAME" \fBnpm\fR - javascript package manager .SS "Synopsis" @@ -12,7 +12,7 @@ npm Note: This command is unaware of workspaces. .SS "Version" .P -10.2.4 +10.3.0 .SS "Description" .P npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently. diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1 index d318fa2a6ea6c6..44833d0c7a575c 100644 --- a/deps/npm/man/man1/npx.1 +++ b/deps/npm/man/man1/npx.1 @@ -1,4 +1,4 @@ -.TH "NPX" "1" "November 2023" "" "" +.TH "NPX" "1" "January 2024" "" "" .SH "NAME" \fBnpx\fR - Run a command from a local or remote npm package .SS "Synopsis" @@ -128,7 +128,7 @@ The \fB--ignore-existing\fR option is removed. Locally installed bins are always .IP \(bu 4 The \fB--npm\fR option is removed. \fBnpx\fR will always use the \fBnpm\fR it ships with. .IP \(bu 4 -The \fB--node-arg\fR and \fB-n\fR options are removed. +The \fB--node-arg\fR and \fB-n\fR options have been removed. Use \fB\fBNODE_OPTIONS\fR\fR \fI\(lahttps://nodejs.org/api/cli.html#node_optionsoptions\(ra\fR instead: e.g., \fBNODE_OPTIONS="--trace-warnings --trace-exit" npx foo --random=true\fR .IP \(bu 4 The \fB--always-spawn\fR option is redundant, and thus removed. .IP \(bu 4 diff --git a/deps/npm/man/man5/folders.5 b/deps/npm/man/man5/folders.5 index f6248235a2e9bd..73d1862bab4fb1 100644 --- a/deps/npm/man/man5/folders.5 +++ b/deps/npm/man/man5/folders.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "November 2023" "" "" +.TH "FOLDERS" "5" "January 2024" "" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5 index 5b32048710b9e0..0e3d7170254218 100644 --- a/deps/npm/man/man5/install.5 +++ b/deps/npm/man/man5/install.5 @@ -1,4 +1,4 @@ -.TH "INSTALL" "5" "November 2023" "" "" +.TH "INSTALL" "5" "January 2024" "" "" .SH "NAME" \fBinstall\fR - Download and install node and npm .SS "Description" diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5 index f6248235a2e9bd..73d1862bab4fb1 100644 --- a/deps/npm/man/man5/npm-global.5 +++ b/deps/npm/man/man5/npm-global.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "November 2023" "" "" +.TH "FOLDERS" "5" "January 2024" "" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5 index 8054048290285e..eb82f2539897a8 100644 --- a/deps/npm/man/man5/npm-json.5 +++ b/deps/npm/man/man5/npm-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "November 2023" "" "" +.TH "PACKAGE.JSON" "5" "January 2024" "" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" @@ -263,40 +263,63 @@ The file(s) in the "bin" field .P \fBREADME\fR & \fBLICENSE\fR can have any case and extension. 
.P -Conversely, some files are always ignored: +Some files are always ignored by default: .RS 0 .IP \(bu 4 -\fB.git\fR +\fB*.orig\fR .IP \(bu 4 -\fBCVS\fR +\fB.*.swp\fR .IP \(bu 4 -\fB.svn\fR +\fB.DS_Store\fR +.IP \(bu 4 +\fB._*\fR +.IP \(bu 4 +\fB.git\fR .IP \(bu 4 \fB.hg\fR .IP \(bu 4 \fB.lock-wscript\fR .IP \(bu 4 +\fB.npmrc\fR +.IP \(bu 4 +\fB.svn\fR +.IP \(bu 4 \fB.wafpickle-N\fR .IP \(bu 4 -\fB.*.swp\fR +\fBCVS\fR .IP \(bu 4 -\fB.DS_Store\fR +\fBconfig.gypi\fR .IP \(bu 4 -\fB._*\fR +\fBnode_modules\fR .IP \(bu 4 \fBnpm-debug.log\fR .IP \(bu 4 +\fBpackage-lock.json\fR (use \fB\fBnpm-shrinkwrap.json\fR\fR \fI\(la/configuring-npm/npm-shrinkwrap-json\(ra\fR if you wish it to be published) +.IP \(bu 4 +\fBpnpm-lock.yaml\fR +.IP \(bu 4 +\fByarn.lock\fR +.RE 0 + +.P +Most of these ignored files can be included specifically if included in the \fBfiles\fR globs. Exceptions to this are: +.RS 0 +.IP \(bu 4 +\fB.git\fR +.IP \(bu 4 \fB.npmrc\fR .IP \(bu 4 \fBnode_modules\fR .IP \(bu 4 -\fBconfig.gypi\fR +\fBpackage-lock.json\fR .IP \(bu 4 -\fB*.orig\fR +\fBpnpm-lock.yaml\fR .IP \(bu 4 -\fBpackage-lock.json\fR (use \fB\fBnpm-shrinkwrap.json\fR\fR \fI\(la/configuring-npm/npm-shrinkwrap-json\(ra\fR if you wish it to be published) +\fByarn.lock\fR .RE 0 +.P +These can not be included. .SS "main" .P The main field is a module ID that is the primary entry point to your program. That is, if your package is named \fBfoo\fR, and a user installs it, and then does \fBrequire("foo")\fR, then your main module's exports object will be returned. diff --git a/deps/npm/man/man5/npm-shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5 index d1f8b923c2c0de..5a18abb999e0ff 100644 --- a/deps/npm/man/man5/npm-shrinkwrap-json.5 +++ b/deps/npm/man/man5/npm-shrinkwrap-json.5 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP.JSON" "5" "November 2023" "" "" +.TH "NPM-SHRINKWRAP.JSON" "5" "January 2024" "" "" .SH "NAME" \fBnpm-shrinkwrap.json\fR - A publishable lockfile .SS "Description" diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5 index 2fe1d3eeba7cc7..4af73a99fea2cd 100644 --- a/deps/npm/man/man5/npmrc.5 +++ b/deps/npm/man/man5/npmrc.5 @@ -1,4 +1,4 @@ -.TH "NPMRC" "5" "November 2023" "" "" +.TH "NPMRC" "5" "January 2024" "" "" .SH "NAME" \fBnpmrc\fR - The npm config files .SS "Description" @@ -13,13 +13,13 @@ For a list of available configuration options, see npm help config. The four relevant files are: .RS 0 .IP \(bu 4 -per-project config file (/path/to/my/project/.npmrc) +per-project config file (\fB/path/to/my/project/.npmrc\fR) .IP \(bu 4 -per-user config file (~/.npmrc) +per-user config file (\fB~/.npmrc\fR) .IP \(bu 4 -global config file ($PREFIX/etc/npmrc) +global config file (\fB$PREFIX/etc/npmrc\fR) .IP \(bu 4 -npm builtin config file (/path/to/npm/npmrc) +npm builtin config file (\fB/path/to/npm/npmrc\fR) .RE 0 .P diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5 index 8054048290285e..eb82f2539897a8 100644 --- a/deps/npm/man/man5/package-json.5 +++ b/deps/npm/man/man5/package-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "November 2023" "" "" +.TH "PACKAGE.JSON" "5" "January 2024" "" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" @@ -263,40 +263,63 @@ The file(s) in the "bin" field .P \fBREADME\fR & \fBLICENSE\fR can have any case and extension. 
.P -Conversely, some files are always ignored: +Some files are always ignored by default: .RS 0 .IP \(bu 4 -\fB.git\fR +\fB*.orig\fR .IP \(bu 4 -\fBCVS\fR +\fB.*.swp\fR .IP \(bu 4 -\fB.svn\fR +\fB.DS_Store\fR +.IP \(bu 4 +\fB._*\fR +.IP \(bu 4 +\fB.git\fR .IP \(bu 4 \fB.hg\fR .IP \(bu 4 \fB.lock-wscript\fR .IP \(bu 4 +\fB.npmrc\fR +.IP \(bu 4 +\fB.svn\fR +.IP \(bu 4 \fB.wafpickle-N\fR .IP \(bu 4 -\fB.*.swp\fR +\fBCVS\fR .IP \(bu 4 -\fB.DS_Store\fR +\fBconfig.gypi\fR .IP \(bu 4 -\fB._*\fR +\fBnode_modules\fR .IP \(bu 4 \fBnpm-debug.log\fR .IP \(bu 4 +\fBpackage-lock.json\fR (use \fB\fBnpm-shrinkwrap.json\fR\fR \fI\(la/configuring-npm/npm-shrinkwrap-json\(ra\fR if you wish it to be published) +.IP \(bu 4 +\fBpnpm-lock.yaml\fR +.IP \(bu 4 +\fByarn.lock\fR +.RE 0 + +.P +Most of these ignored files can be included specifically if included in the \fBfiles\fR globs. Exceptions to this are: +.RS 0 +.IP \(bu 4 +\fB.git\fR +.IP \(bu 4 \fB.npmrc\fR .IP \(bu 4 \fBnode_modules\fR .IP \(bu 4 -\fBconfig.gypi\fR +\fBpackage-lock.json\fR .IP \(bu 4 -\fB*.orig\fR +\fBpnpm-lock.yaml\fR .IP \(bu 4 -\fBpackage-lock.json\fR (use \fB\fBnpm-shrinkwrap.json\fR\fR \fI\(la/configuring-npm/npm-shrinkwrap-json\(ra\fR if you wish it to be published) +\fByarn.lock\fR .RE 0 +.P +These can not be included. .SS "main" .P The main field is a module ID that is the primary entry point to your program. That is, if your package is named \fBfoo\fR, and a user installs it, and then does \fBrequire("foo")\fR, then your main module's exports object will be returned. diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5 index 2f303d610ff127..6172a8ba0e8340 100644 --- a/deps/npm/man/man5/package-lock-json.5 +++ b/deps/npm/man/man5/package-lock-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE-LOCK.JSON" "5" "November 2023" "" "" +.TH "PACKAGE-LOCK.JSON" "5" "January 2024" "" "" .SH "NAME" \fBpackage-lock.json\fR - A manifestation of the manifest .SS "Description" diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7 index 73552f74e29746..308e1632b43215 100644 --- a/deps/npm/man/man7/config.7 +++ b/deps/npm/man/man7/config.7 @@ -1,4 +1,4 @@ -.TH "CONFIG" "7" "November 2023" "" "" +.TH "CONFIG" "7" "January 2024" "" "" .SH "NAME" \fBconfig\fR - More than you probably want to know about npm configuration .SS "Description" @@ -865,6 +865,16 @@ If a package cannot be installed because of overly strict \fBpeerDependencies\fR This differs from \fB--omit=peer\fR, in that \fB--omit=peer\fR will avoid unpacking \fBpeerDependencies\fR on disk, but will still design a tree such that \fBpeerDependencies\fR \fIcould\fR be unpacked in a correct place. .P Use of \fBlegacy-peer-deps\fR is not recommended, as it will not enforce the \fBpeerDependencies\fR contract that meta-dependencies may rely on. +.SS "\fBlibc\fR" +.RS 0 +.IP \(bu 4 +Default: null +.IP \(bu 4 +Type: null or String +.RE 0 + +.P +Override libc of native modules to install. Acceptable values are same as \fBlibc\fR field of package.json .SS "\fBlink\fR" .RS 0 .IP \(bu 4 @@ -1374,7 +1384,7 @@ Type: "library", "application", or "framework" .RE 0 .P -The type of package described by the generated SBOM. For SPDX, this is the value for the \fBprimaryPackagePurpose\fR fieled. For CycloneDX, this is the value for the \fBtype\fR field. +The type of package described by the generated SBOM. For SPDX, this is the value for the \fBprimaryPackagePurpose\fR field. For CycloneDX, this is the value for the \fBtype\fR field. 
.SS "\fBscope\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man7/dependency-selectors.7 b/deps/npm/man/man7/dependency-selectors.7 index 78aaa5a18a9fff..489f33f3f38a3b 100644 --- a/deps/npm/man/man7/dependency-selectors.7 +++ b/deps/npm/man/man7/dependency-selectors.7 @@ -1,4 +1,4 @@ -.TH "QUERYING" "7" "November 2023" "" "" +.TH "QUERYING" "7" "January 2024" "" "" .SH "NAME" \fBQuerying\fR - Dependency Selector Syntax & Querying .SS "Description" diff --git a/deps/npm/man/man7/developers.7 b/deps/npm/man/man7/developers.7 index 9516ee1030c22a..b259049ab72e5c 100644 --- a/deps/npm/man/man7/developers.7 +++ b/deps/npm/man/man7/developers.7 @@ -1,4 +1,4 @@ -.TH "DEVELOPERS" "7" "November 2023" "" "" +.TH "DEVELOPERS" "7" "January 2024" "" "" .SH "NAME" \fBdevelopers\fR - Developer Guide .SS "Description" diff --git a/deps/npm/man/man7/logging.7 b/deps/npm/man/man7/logging.7 index a6fb699796ea5d..c0340e820fa731 100644 --- a/deps/npm/man/man7/logging.7 +++ b/deps/npm/man/man7/logging.7 @@ -1,4 +1,4 @@ -.TH "LOGGING" "7" "November 2023" "" "" +.TH "LOGGING" "7" "January 2024" "" "" .SH "NAME" \fBLogging\fR - Why, What & How We Log .SS "Description" diff --git a/deps/npm/man/man7/orgs.7 b/deps/npm/man/man7/orgs.7 index 2f282bc29165de..b37092693719f6 100644 --- a/deps/npm/man/man7/orgs.7 +++ b/deps/npm/man/man7/orgs.7 @@ -1,4 +1,4 @@ -.TH "ORGS" "7" "November 2023" "" "" +.TH "ORGS" "7" "January 2024" "" "" .SH "NAME" \fBorgs\fR - Working with Teams & Orgs .SS "Description" diff --git a/deps/npm/man/man7/package-spec.7 b/deps/npm/man/man7/package-spec.7 index 6c38ccdcbf8434..d189b6273127d6 100644 --- a/deps/npm/man/man7/package-spec.7 +++ b/deps/npm/man/man7/package-spec.7 @@ -1,4 +1,4 @@ -.TH "PACKAGE-SPEC" "7" "November 2023" "" "" +.TH "PACKAGE-SPEC" "7" "January 2024" "" "" .SH "NAME" \fBpackage-spec\fR - Package name specifier .SS "Description" diff --git a/deps/npm/man/man7/registry.7 b/deps/npm/man/man7/registry.7 index a1dc9352fc706b..08a7778a18d54d 100644 --- a/deps/npm/man/man7/registry.7 +++ b/deps/npm/man/man7/registry.7 @@ -1,4 +1,4 @@ -.TH "REGISTRY" "7" "November 2023" "" "" +.TH "REGISTRY" "7" "January 2024" "" "" .SH "NAME" \fBregistry\fR - The JavaScript Package Registry .SS "Description" diff --git a/deps/npm/man/man7/removal.7 b/deps/npm/man/man7/removal.7 index 0047ac99fd5832..af0e2320303c8d 100644 --- a/deps/npm/man/man7/removal.7 +++ b/deps/npm/man/man7/removal.7 @@ -1,4 +1,4 @@ -.TH "REMOVAL" "7" "November 2023" "" "" +.TH "REMOVAL" "7" "January 2024" "" "" .SH "NAME" \fBremoval\fR - Cleaning the Slate .SS "Synopsis" diff --git a/deps/npm/man/man7/scope.7 b/deps/npm/man/man7/scope.7 index b7c6d0104ac3b8..7a753f73aec1dc 100644 --- a/deps/npm/man/man7/scope.7 +++ b/deps/npm/man/man7/scope.7 @@ -1,4 +1,4 @@ -.TH "SCOPE" "7" "November 2023" "" "" +.TH "SCOPE" "7" "January 2024" "" "" .SH "NAME" \fBscope\fR - Scoped packages .SS "Description" diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7 index 881e8202e086f3..60f252bd137b62 100644 --- a/deps/npm/man/man7/scripts.7 +++ b/deps/npm/man/man7/scripts.7 @@ -1,4 +1,4 @@ -.TH "SCRIPTS" "7" "November 2023" "" "" +.TH "SCRIPTS" "7" "January 2024" "" "" .SH "NAME" \fBscripts\fR - How npm handles the "scripts" field .SS "Description" diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7 index a81dd84a928fb0..4d261469a1835f 100644 --- a/deps/npm/man/man7/workspaces.7 +++ b/deps/npm/man/man7/workspaces.7 @@ -1,4 +1,4 @@ -.TH "WORKSPACES" "7" "November 2023" "" "" +.TH "WORKSPACES" "7" "January 
2024" "" "" .SH "NAME" \fBworkspaces\fR - Working with workspaces .SS "Description" diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js index 8c4e148464d33a..def00dc74f039c 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js @@ -333,7 +333,7 @@ module.exports = cls => class ActualLoader extends cls { async #loadFSTree (node) { const did = this.#actualTreeLoaded - if (!did.has(node.target.realpath)) { + if (!node.isLink && !did.has(node.target.realpath)) { did.add(node.target.realpath) await this.#loadFSChildren(node.target) return Promise.all( diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js index 0981afdae6ece7..7ce3bc2a9db1dd 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js @@ -628,7 +628,7 @@ module.exports = cls => class Reifier extends cls { process.emit('time', timer) this.addTracker('reify', node.name, node.location) - const { npmVersion, nodeVersion, cpu, os } = this.options + const { npmVersion, nodeVersion, cpu, os, libc } = this.options const p = Promise.resolve().then(async () => { // when we reify an optional node, check the engine and platform // first. be sure to ignore the --force and --engine-strict flags, @@ -638,7 +638,7 @@ module.exports = cls => class Reifier extends cls { // eslint-disable-next-line promise/always-return if (node.optional) { checkEngine(node.package, npmVersion, nodeVersion, false) - checkPlatform(node.package, false, { cpu, os }) + checkPlatform(node.package, false, { cpu, os, libc }) } await this[_checkBins](node) await this[_extractOrLink](node) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js index a0fda5a4b567a9..e6525ffe67b65d 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js @@ -48,7 +48,7 @@ const { resolve, basename, relative } = require('path') const specFromLock = require('./spec-from-lock.js') const versionFromTgz = require('./version-from-tgz.js') const npa = require('npm-package-arg') -const rpj = require('read-package-json-fast') +const pkgJson = require('@npmcli/package-json') const parseJSON = require('parse-conflict-json') const stringify = require('json-stringify-nice') @@ -81,28 +81,6 @@ const relpath = require('./relpath.js') const consistentResolve = require('./consistent-resolve.js') const { overrideResolves } = require('./override-resolves.js') -const maybeReadFile = file => { - return readFile(file, 'utf8').then(d => d, er => { - /* istanbul ignore else - can't test without breaking module itself */ - if (er.code === 'ENOENT') { - return '' - } else { - throw er - } - }) -} - -const maybeStatFile = file => { - return stat(file).then(st => st.isFile(), er => { - /* istanbul ignore else - can't test without breaking module itself */ - if (er.code === 'ENOENT') { - return null - } else { - throw er - } - }) -} - const pkgMetaKeys = [ // note: name is included if necessary, for alias packages 'version', @@ -134,81 +112,72 @@ const nodeMetaKeys = [ const metaFieldFromPkg = (pkg, key) => { const val = pkg[key] - // get the license type, not an object - return (key === 'license' && val && typeof val === 
'object' && val.type) - ? val.type + if (val) { + // get only the license type, not the full object + if (key === 'license' && typeof val === 'object' && val.type) { + return val.type + } // skip empty objects and falsey values - : (val && !(typeof val === 'object' && !Object.keys(val).length)) ? val - : null + if (typeof val !== 'object' || Object.keys(val).length) { + return val + } + } + return null } -// check to make sure that there are no packages newer than the hidden lockfile -const assertNoNewer = async (path, data, lockTime, dir = path, seen = null) => { +// check to make sure that there are no packages newer than or missing from the hidden lockfile +const assertNoNewer = async (path, data, lockTime, dir, seen) => { const base = basename(dir) const isNM = dir !== path && base === 'node_modules' - const isScope = dir !== path && !isNM && base.charAt(0) === '@' - const isParent = dir === path || isNM || isScope + const isScope = dir !== path && base.startsWith('@') + const isParent = (dir === path) || isNM || isScope + const parent = isParent ? dir : resolve(dir, 'node_modules') const rel = relpath(path, dir) - if (dir !== path) { - const dirTime = (await stat(dir)).mtime + seen.add(rel) + let entries + if (dir === path) { + entries = [{ name: 'node_modules', isDirectory: () => true }] + } else { + const { mtime: dirTime } = await stat(dir) if (dirTime > lockTime) { - throw 'out of date, updated: ' + rel + throw new Error(`out of date, updated: ${rel}`) } if (!isScope && !isNM && !data.packages[rel]) { - throw 'missing from lockfile: ' + rel + throw new Error(`missing from lockfile: ${rel}`) } - seen.add(rel) - } else { - seen = new Set([rel]) + entries = await readdir(parent, { withFileTypes: true }).catch(() => []) } - const parent = isParent ? dir : resolve(dir, 'node_modules') - const children = dir === path - ? 
Promise.resolve([{ name: 'node_modules', isDirectory: () => true }]) - : readdir(parent, { withFileTypes: true }) - - const ents = await children.catch(() => []) - await Promise.all(ents.map(async ent => { - const child = resolve(parent, ent.name) - if (ent.isDirectory() && !/^\./.test(ent.name)) { + // TODO limit concurrency here, this is recursive + await Promise.all(entries.map(async dirent => { + const child = resolve(parent, dirent.name) + if (dirent.isDirectory() && !dirent.name.startsWith('.')) { await assertNoNewer(path, data, lockTime, child, seen) - } else if (ent.isSymbolicLink()) { + } else if (dirent.isSymbolicLink()) { const target = resolve(parent, await readlink(child)) const tstat = await stat(target).catch( /* istanbul ignore next - windows */ () => null) seen.add(relpath(path, child)) /* istanbul ignore next - windows cannot do this */ - if (tstat && tstat.isDirectory() && !seen.has(relpath(path, target))) { + if (tstat?.isDirectory() && !seen.has(relpath(path, target))) { await assertNoNewer(path, data, lockTime, target, seen) } } })) + if (dir !== path) { return } // assert that all the entries in the lockfile were seen - for (const loc of new Set(Object.keys(data.packages))) { + for (const loc in data.packages) { if (!seen.has(loc)) { - throw 'missing from node_modules: ' + loc + throw new Error(`missing from node_modules: ${loc}`) } } } -const _awaitingUpdate = Symbol('_awaitingUpdate') -const _updateWaitingNode = Symbol('_updateWaitingNode') -const _lockFromLoc = Symbol('_lockFromLoc') -const _pathToLoc = Symbol('_pathToLoc') -const _loadAll = Symbol('_loadAll') -const _metaFromLock = Symbol('_metaFromLock') -const _resolveMetaNode = Symbol('_resolveMetaNode') -const _fixDependencies = Symbol('_fixDependencies') -const _buildLegacyLockfile = Symbol('_buildLegacyLockfile') -const _filenameSet = Symbol('_filenameSet') -const _maybeRead = Symbol('_maybeRead') -const _maybeStat = Symbol('_maybeStat') - class Shrinkwrap { static get defaultLockfileVersion () { return defaultLockfileVersion @@ -228,13 +197,18 @@ class Shrinkwrap { const s = new Shrinkwrap(options) s.reset() - const [sw, lock] = await s[_maybeStat]() + const [sw, lock] = await s.resetFiles - s.filename = resolve(s.path, - (s.hiddenLockfile ? 'node_modules/.package-lock' - : s.shrinkwrapOnly || sw ? 'npm-shrinkwrap' - : 'package-lock') + '.json') + // XXX this is duplicated in this.load(), but using loadFiles instead of resetFiles + if (s.hiddenLockfile) { + s.filename = resolve(s.path, 'node_modules/.package-lock.json') + } else if (s.shrinkwrapOnly || sw) { + s.filename = resolve(s.path, 'npm-shrinkwrap.json') + } else { + s.filename = resolve(s.path, 'package-lock.json') + } s.loadedFromDisk = !!(sw || lock) + // TODO what uses this? s.type = basename(s.filename) return s @@ -249,12 +223,12 @@ class Shrinkwrap { } const meta = {} - pkgMetaKeys.forEach(key => { + for (const key of pkgMetaKeys) { const val = metaFieldFromPkg(node.package, key) if (val) { meta[key.replace(/^_/, '')] = val } - }) + } // we only include name if different from the node path name, and for the // root to help prevent churn based on the name of the directory the // project is in @@ -267,11 +241,11 @@ class Shrinkwrap { meta.devDependencies = node.package.devDependencies } - nodeMetaKeys.forEach(key => { + for (const key of nodeMetaKeys) { if (node[key]) { meta[key] = node[key] } - }) + } const resolved = consistentResolve(node.resolved, node.path, path, true) // hide resolved from registry dependencies. 
@@ -302,6 +276,8 @@ class Shrinkwrap { return meta } + #awaitingUpdate = new Map() + constructor (options = {}) { const { path, @@ -313,11 +289,14 @@ class Shrinkwrap { resolveOptions = {}, } = options - this.lockfileVersion = hiddenLockfile ? 3 - : lockfileVersion ? parseInt(lockfileVersion, 10) - : null + if (hiddenLockfile) { + this.lockfileVersion = 3 + } else if (lockfileVersion) { + this.lockfileVersion = parseInt(lockfileVersion, 10) + } else { + this.lockfileVersion = null + } - this[_awaitingUpdate] = new Map() this.tree = null this.path = resolve(path || '.') this.filename = null @@ -354,9 +333,12 @@ class Shrinkwrap { // don't use the simple version if the "registry" url is // something else entirely! const tgz = isReg && versionFromTgz(spec.name, resolved) || {} - const yspec = tgz.name === spec.name && tgz.version === version ? version - : isReg && tgz.name && tgz.version ? `npm:${tgz.name}@${tgz.version}` - : resolved + let yspec = resolved + if (tgz.name === spec.name && tgz.version === version) { + yspec = version + } else if (isReg && tgz.name && tgz.version) { + yspec = `npm:${tgz.name}@${tgz.version}` + } if (yspec) { options.resolved = resolved.replace(yarnRegRe, 'https://registry.npmjs.org/') options.integrity = integrity @@ -370,7 +352,7 @@ class Shrinkwrap { // still worth doing a load() first so we know which files to write. reset () { this.tree = null - this[_awaitingUpdate] = new Map() + this.#awaitingUpdate = new Map() const lockfileVersion = this.lockfileVersion || defaultLockfileVersion this.originalLockfileVersion = lockfileVersion @@ -382,58 +364,83 @@ class Shrinkwrap { } } - [_filenameSet] () { - return this.shrinkwrapOnly ? [ - this.path + '/npm-shrinkwrap.json', - ] : this.hiddenLockfile ? [ - null, - this.path + '/node_modules/.package-lock.json', - ] : [ - this.path + '/npm-shrinkwrap.json', - this.path + '/package-lock.json', - this.path + '/yarn.lock', + // files to potentially read from and write to, in order of priority + get #filenameSet () { + if (this.shrinkwrapOnly) { + return [`${this.path}/npm-shrinkwrap.json`] + } + if (this.hiddenLockfile) { + return [`${this.path}/node_modules/.package-lock.json`] + } + return [ + `${this.path}/npm-shrinkwrap.json`, + `${this.path}/package-lock.json`, + `${this.path}/yarn.lock`, ] } - [_maybeRead] () { - return Promise.all(this[_filenameSet]().map(fn => fn && maybeReadFile(fn))) + get loadFiles () { + return Promise.all( + this.#filenameSet.map(file => file && readFile(file, 'utf8').then(d => d, er => { + /* istanbul ignore else - can't test without breaking module itself */ + if (er.code === 'ENOENT') { + return '' + } else { + throw er + } + })) + ) } - [_maybeStat] () { - // throw away yarn, we only care about lock or shrinkwrap when checking + get resetFiles () { + // slice out yarn, we only care about lock or shrinkwrap when checking // this way, since we're not actually loading the full lock metadata - return Promise.all(this[_filenameSet]().slice(0, 2) - .map(fn => fn && maybeStatFile(fn))) + return Promise.all(this.#filenameSet.slice(0, 2) + .map(file => file && stat(file).then(st => st.isFile(), er => { + /* istanbul ignore else - can't test without breaking module itself */ + if (er.code === 'ENOENT') { + return null + } else { + throw er + } + }) + ) + ) } inferFormattingOptions (packageJSONData) { - // don't use detect-indent, just pick the first line. - // if the file starts with {" then we have an indent of '', ie, none - // which will default to 2 at save time. 
const { [Symbol.for('indent')]: indent, [Symbol.for('newline')]: newline, } = packageJSONData - this.indent = indent !== undefined ? indent : this.indent - this.newline = newline !== undefined ? newline : this.newline + if (indent !== undefined) { + this.indent = indent + } + if (newline !== undefined) { + this.newline = newline + } } async load () { // we don't need to load package-lock.json except for top of tree nodes, // only npm-shrinkwrap.json. - return this[_maybeRead]().then(([sw, lock, yarn]) => { - const data = sw || lock || '' + let data + try { + const [sw, lock, yarn] = await this.loadFiles + data = sw || lock || '{}' // use shrinkwrap only for deps, otherwise prefer package-lock // and ignore npm-shrinkwrap if both are present. // TODO: emit a warning here or something if both are present. - this.filename = resolve(this.path, - (this.hiddenLockfile ? 'node_modules/.package-lock' - : this.shrinkwrapOnly || sw ? 'npm-shrinkwrap' - : 'package-lock') + '.json') - + if (this.hiddenLockfile) { + this.filename = resolve(this.path, 'node_modules/.package-lock.json') + } else if (this.shrinkwrapOnly || sw) { + this.filename = resolve(this.path, 'npm-shrinkwrap.json') + } else { + this.filename = resolve(this.path, 'package-lock.json') + } this.type = basename(this.filename) - this.loadedFromDisk = !!data + this.loadedFromDisk = Boolean(sw || lock) if (yarn) { this.yarnLock = new YarnLock() @@ -445,85 +452,84 @@ class Shrinkwrap { } } - return data ? parseJSON(data) : {} - }).then(async data => { + data = parseJSON(data) this.inferFormattingOptions(data) - if (!this.hiddenLockfile || !data.packages) { - return data + if (this.hiddenLockfile && data.packages) { + // add a few ms just to account for jitter + const lockTime = +(await stat(this.filename)).mtime + 10 + await assertNoNewer(this.path, data, lockTime, this.path, new Set()) } - // add a few ms just to account for jitter - const lockTime = +(await stat(this.filename)).mtime + 10 - await assertNoNewer(this.path, data, lockTime) - // all good! hidden lockfile is the newest thing in here. - return data - }).catch(er => { + } catch (er) { /* istanbul ignore else */ if (typeof this.filename === 'string') { const rel = relpath(this.path, this.filename) - log.verbose('shrinkwrap', `failed to load ${rel}`, er) + log.verbose('shrinkwrap', `failed to load ${rel}`, er.message) } else { - log.verbose('shrinkwrap', `failed to load ${this.path}`, er) + log.verbose('shrinkwrap', `failed to load ${this.path}`, er.message) } this.loadingError = er this.loadedFromDisk = false this.ancientLockfile = false - return {} - }).then(lock => { - // auto convert v1 lockfiles to v3 - // leave v2 in place unless configured - // v3 by default - const lockfileVersion = - this.lockfileVersion ? this.lockfileVersion - : lock.lockfileVersion === 1 ? 
defaultLockfileVersion - : lock.lockfileVersion || defaultLockfileVersion - - this.data = { - ...lock, - lockfileVersion: lockfileVersion, - requires: true, - packages: lock.packages || {}, - dependencies: lock.dependencies || {}, - } + data = {} + } + // auto convert v1 lockfiles to v3 + // leave v2 in place unless configured + // v3 by default + let lockfileVersion = defaultLockfileVersion + if (this.lockfileVersion) { + lockfileVersion = this.lockfileVersion + } else if (data.lockfileVersion && data.lockfileVersion !== 1) { + lockfileVersion = data.lockfileVersion + } + + this.data = { + ...data, + lockfileVersion, + requires: true, + packages: data.packages || {}, + dependencies: data.dependencies || {}, + } - this.originalLockfileVersion = lock.lockfileVersion + this.originalLockfileVersion = data.lockfileVersion - // use default if it wasn't explicitly set, and the current file is - // less than our default. otherwise, keep whatever is in the file, - // unless we had an explicit setting already. - if (!this.lockfileVersion) { - this.lockfileVersion = this.data.lockfileVersion = lockfileVersion - } - this.ancientLockfile = this.loadedFromDisk && - !(lock.lockfileVersion >= 2) && !lock.requires - - // load old lockfile deps into the packages listing - // eslint-disable-next-line promise/always-return - if (lock.dependencies && !lock.packages) { - return rpj(this.path + '/package.json').then(pkg => pkg, er => ({})) - // eslint-disable-next-line promise/always-return - .then(pkg => { - this[_loadAll]('', null, this.data) - this[_fixDependencies](pkg) - }) + // use default if it wasn't explicitly set, and the current file is + // less than our default. otherwise, keep whatever is in the file, + // unless we had an explicit setting already. + if (!this.lockfileVersion) { + this.lockfileVersion = this.data.lockfileVersion = lockfileVersion + } + this.ancientLockfile = this.loadedFromDisk && + !(data.lockfileVersion >= 2) && !data.requires + + // load old lockfile deps into the packages listing + if (data.dependencies && !data.packages) { + let pkg + try { + pkg = await pkgJson.normalize(this.path) + pkg = pkg.content + } catch { + pkg = {} } - }) - .then(() => this) + this.#loadAll('', null, this.data) + this.#fixDependencies(pkg) + } + return this } - [_loadAll] (location, name, lock) { + #loadAll (location, name, lock) { // migrate a v1 package lock to the new format. - const meta = this[_metaFromLock](location, name, lock) + const meta = this.#metaFromLock(location, name, lock) // dependencies nested under a link are actually under the link target if (meta.link) { location = meta.resolved } if (lock.dependencies) { - for (const [name, dep] of Object.entries(lock.dependencies)) { + for (const name in lock.dependencies) { const loc = location + (location ? '/' : '') + 'node_modules/' + name - this[_loadAll](loc, name, dep) + this.#loadAll(loc, name, lock.dependencies[name]) } } } @@ -531,20 +537,20 @@ class Shrinkwrap { // v1 lockfiles track the optional/dev flags, but they don't tell us // which thing had what kind of dep on what other thing, so we need // to correct that now, or every link will be considered prod - [_fixDependencies] (pkg) { + #fixDependencies (pkg) { // we need the root package.json because legacy shrinkwraps just // have requires:true at the root level, which is even less useful // than merging all dep types into one object. 
const root = this.data.packages[''] - pkgMetaKeys.forEach(key => { + for (const key of pkgMetaKeys) { const val = metaFieldFromPkg(pkg, key) - const k = key.replace(/^_/, '') if (val) { - root[k] = val + root[key.replace(/^_/, '')] = val } - }) + } - for (const [loc, meta] of Object.entries(this.data.packages)) { + for (const loc in this.data.packages) { + const meta = this.data.packages[loc] if (!meta.requires || !loc) { continue } @@ -555,25 +561,30 @@ class Shrinkwrap { // This isn't perfect, but it's a pretty good approximation, and at // least gets us out of having all 'prod' edges, which throws off the // buildIdealTree process - for (const [name, spec] of Object.entries(meta.requires)) { - const dep = this[_resolveMetaNode](loc, name) + for (const name in meta.requires) { + const dep = this.#resolveMetaNode(loc, name) // this overwrites the false value set above - const depType = dep && dep.optional && !meta.optional - ? 'optionalDependencies' - : /* istanbul ignore next - dev deps are only for the root level */ - dep && dep.dev && !meta.dev ? 'devDependencies' - // also land here if the dep just isn't in the tree, which maybe - // should be an error, since it means that the shrinkwrap is - // invalid, but we can't do much better without any info. - : 'dependencies' - meta[depType] = meta[depType] || {} - meta[depType][name] = spec + // default to dependencies if the dep just isn't in the tree, which + // maybe should be an error, since it means that the shrinkwrap is + // invalid, but we can't do much better without any info. + let depType = 'dependencies' + /* istanbul ignore else - dev deps are only for the root level */ + if (dep?.optional && !meta.optional) { + depType = 'optionalDependencies' + } else if (dep?.dev && !meta.dev) { + // XXX is this even reachable? + depType = 'devDependencies' + } + if (!meta[depType]) { + meta[depType] = {} + } + meta[depType][name] = meta.requires[name] } delete meta.requires } } - [_resolveMetaNode] (loc, name) { + #resolveMetaNode (loc, name) { for (let path = loc; true; path = path.replace(/(^|\/)[^/]*$/, '')) { const check = `${path}${path ? '/' : ''}node_modules/${name}` if (this.data.packages[check]) { @@ -587,7 +598,7 @@ class Shrinkwrap { return null } - [_lockFromLoc] (lock, path, i = 0) { + #lockFromLoc (lock, path, i = 0) { if (!lock) { return null } @@ -604,12 +615,12 @@ class Shrinkwrap { return null } - return this[_lockFromLoc](lock.dependencies[path[i]], path, i + 1) + return this.#lockFromLoc(lock.dependencies[path[i]], path, i + 1) } // pass in a path relative to the root path, or an absolute path, // get back a /-normalized location based on root path. 
- [_pathToLoc] (path) { + #pathToLoc (path) { return relpath(this.path, resolve(this.path, path)) } @@ -617,13 +628,13 @@ class Shrinkwrap { if (!this.data) { throw new Error('run load() before getting or setting data') } - const location = this[_pathToLoc](nodePath) - this[_awaitingUpdate].delete(location) + const location = this.#pathToLoc(nodePath) + this.#awaitingUpdate.delete(location) delete this.data.packages[location] const path = location.split(/(?:^|\/)node_modules\//) const name = path.pop() - const pLock = this[_lockFromLoc](this.data, path) + const pLock = this.#lockFromLoc(this.data, path) if (pLock && pLock.dependencies) { delete pLock.dependencies[name] } @@ -634,9 +645,9 @@ class Shrinkwrap { throw new Error('run load() before getting or setting data') } - const location = this[_pathToLoc](nodePath) - if (this[_awaitingUpdate].has(location)) { - this[_updateWaitingNode](location) + const location = this.#pathToLoc(nodePath) + if (this.#awaitingUpdate.has(location)) { + this.#updateWaitingNode(location) } // first try to get from the newer spot, which we know has @@ -649,12 +660,12 @@ class Shrinkwrap { // get the node in the shrinkwrap corresponding to this spot const path = location.split(/(?:^|\/)node_modules\//) const name = path[path.length - 1] - const lock = this[_lockFromLoc](this.data, path) + const lock = this.#lockFromLoc(this.data, path) - return this[_metaFromLock](location, name, lock) + return this.#metaFromLock(location, name, lock) } - [_metaFromLock] (location, name, lock) { + #metaFromLock (location, name, lock) { // This function tries as hard as it can to figure out the metadata // from a lockfile which may be outdated or incomplete. Since v1 // lockfiles used the "version" field to contain a variety of @@ -679,7 +690,7 @@ class Shrinkwrap { // also save the link target, omitting version since we don't know // what it is, but we know it isn't a link to itself! if (!this.data.packages[target]) { - this[_metaFromLock](target, name, { ...lock, version: null }) + this.#metaFromLock(target, name, { ...lock, version: null }) } return this.data.packages[location] } @@ -799,10 +810,14 @@ class Shrinkwrap { version, } = this.get(node.path) - const pathFixed = !resolved ? null - : !/^file:/.test(resolved) ? resolved - // resolve onto the metadata path - : `file:${resolve(this.path, resolved.slice(5)).replace(/#/g, '%23')}` + let pathFixed = null + if (resolved) { + if (!/^file:/.test(resolved)) { + pathFixed = resolved + } else { + pathFixed = `file:${resolve(this.path, resolved.slice(5)).replace(/#/g, '%23')}` + } + } // if we have one, only set the other if it matches // otherwise it could be for a completely different thing. @@ -831,7 +846,7 @@ class Shrinkwrap { node.hasShrinkwrap = node.hasShrinkwrap || hasShrinkwrap || false } } - this[_awaitingUpdate].set(loc, node) + this.#awaitingUpdate.set(loc, node) } addEdge (edge) { @@ -852,10 +867,15 @@ class Shrinkwrap { } // we relativize the path here because that's how it shows up in the lock - // XXX how is this different from pathFixed above?? - const pathFixed = !node.resolved ? null - : !/file:/.test(node.resolved) ? node.resolved - : consistentResolve(node.resolved, node.path, this.path, true) + // XXX why is this different from pathFixed in this.add?? 
+ let pathFixed = null + if (node.resolved) { + if (!/file:/.test(node.resolved)) { + pathFixed = node.resolved + } else { + pathFixed = consistentResolve(node.resolved, node.path, this.path, true) + } + } const spec = npa(`${node.name}@${edge.spec}`) const entry = this.yarnLock.entries.get(`${node.name}@${edge.spec}`) @@ -875,12 +895,12 @@ class Shrinkwrap { node.resolved = node.resolved || consistentResolve(entry.resolved, this.path, node.path) || null - this[_awaitingUpdate].set(relpath(this.path, node.path), node) + this.#awaitingUpdate.set(relpath(this.path, node.path), node) } - [_updateWaitingNode] (loc) { - const node = this[_awaitingUpdate].get(loc) - this[_awaitingUpdate].delete(loc) + #updateWaitingNode (loc) { + const node = this.#awaitingUpdate.get(loc) + this.#awaitingUpdate.delete(loc) this.data.packages[loc] = Shrinkwrap.metaFromNode( node, this.path, @@ -911,9 +931,9 @@ class Shrinkwrap { this.path, this.resolveOptions) } - } else if (this[_awaitingUpdate].size > 0) { - for (const loc of this[_awaitingUpdate].keys()) { - this[_updateWaitingNode](loc) + } else if (this.#awaitingUpdate.size > 0) { + for (const loc of this.#awaitingUpdate.keys()) { + this.#updateWaitingNode(loc) } } @@ -928,7 +948,7 @@ class Shrinkwrap { delete this.data.packages[''] delete this.data.dependencies } else if (this.tree && this.lockfileVersion <= 3) { - this[_buildLegacyLockfile](this.tree, this.data) + this.#buildLegacyLockfile(this.tree, this.data) } // lf version 1 = dependencies only @@ -945,7 +965,7 @@ class Shrinkwrap { } } - [_buildLegacyLockfile] (node, lock, path = []) { + #buildLegacyLockfile (node, lock, path = []) { if (node === this.tree) { // the root node lock.name = node.packageName || node.name @@ -966,9 +986,13 @@ class Shrinkwrap { const aloc = a.from.location.split('node_modules') const bloc = b.from.location.split('node_modules') /* istanbul ignore next - sort calling order is indeterminate */ - return aloc.length > bloc.length ? 1 - : bloc.length > aloc.length ? -1 - : localeCompare(aloc[aloc.length - 1], bloc[bloc.length - 1]) + if (aloc.length > bloc.length) { + return 1 + } + if (bloc.length > aloc.length) { + return -1 + } + return localeCompare(aloc[aloc.length - 1], bloc[bloc.length - 1]) })[0] const res = consistentResolve(node.resolved, this.path, this.path, true) @@ -979,8 +1003,10 @@ class Shrinkwrap { // if we don't have either, just an empty object so nothing matches below. // This will effectively just save the version and resolved, as if it's // a standard version/range dep, which is a reasonable default. - const spec = !edge ? 
rSpec - : npa.resolve(node.name, edge.spec, edge.from.realpath) + let spec = rSpec + if (edge) { + spec = npa.resolve(node.name, edge.spec, edge.from.realpath) + } if (node.isLink) { lock.version = `file:${relpath(this.path, node.realpath).replace(/#/g, '%23')}` @@ -1086,7 +1112,7 @@ class Shrinkwrap { if (path.includes(kid.realpath)) { continue } - dependencies[name] = this[_buildLegacyLockfile](kid, {}, kidPath) + dependencies[name] = this.#buildLegacyLockfile(kid, {}, kidPath) found = true } if (found) { diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/tree-check.js b/deps/npm/node_modules/@npmcli/arborist/lib/tree-check.js index 44b5484c68240c..62a50bc75bdb58 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/tree-check.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/tree-check.js @@ -90,7 +90,7 @@ const checkTree = (tree, checkUnreachable = true) => { }) } - if (node.path === tree.root.path && node !== tree.root) { + if (node.path === tree.root.path && node !== tree.root && !tree.root.isLink) { throw Object.assign(new Error('node with same path as root'), { node: node.path, tree: tree.path, diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js b/deps/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js index 887d776f85d04e..d5693a3eff943a 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js @@ -341,10 +341,10 @@ class YarnLock { } } -const _specs = Symbol('_specs') class YarnLockEntry { + #specs constructor (specs) { - this[_specs] = new Set(specs) + this.#specs = new Set(specs) this.resolved = null this.version = null this.integrity = null @@ -354,7 +354,7 @@ class YarnLockEntry { toString () { // sort objects to the bottom, then alphabetical - return ([...this[_specs]] + return ([...this.#specs] .sort(localeCompare) .map(quoteIfNeeded).join(', ') + ':\n' + @@ -370,7 +370,7 @@ class YarnLockEntry { } addSpec (spec) { - this[_specs].add(spec) + this.#specs.add(spec) } } diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json index a4d47d5627031c..1ba9c92e3fdc01 100644 --- a/deps/npm/node_modules/@npmcli/arborist/package.json +++ b/deps/npm/node_modules/@npmcli/arborist/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/arborist", - "version": "7.2.1", + "version": "7.3.0", "description": "Manage node_modules trees", "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", @@ -39,7 +39,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "benchmark": "^2.1.4", "minify-registry-metadata": "^3.0.0", "nock": "^13.3.3", @@ -49,11 +49,11 @@ }, "scripts": { "test": "tap", - "posttest": "node ../.. run lint", + "posttest": "npm run lint", "snap": "tap", "test-proxy": "ARBORIST_TEST_PROXY=1 tap --snapshot", - "lint": "eslint \"**/*.js\"", - "lintfix": "node ../.. run lint -- --fix", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", "benchmark": "node scripts/benchmark.js", "benchclean": "rm -rf scripts/benchmark/*/", "postlint": "template-oss-check", @@ -90,7 +90,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "content": "../../scripts/template-oss/index.js" } } diff --git a/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js b/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js index c5b8d4f779b92b..6f8760fce1d3e7 100644 --- a/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js +++ b/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js @@ -494,6 +494,16 @@ define('os', { flatten, }) +define('libc', { + default: null, + type: [null, String], + description: ` + Override libc of native modules to install. + Acceptable values are same as \`libc\` field of package.json + `, + flatten, +}) + define('depth', { default: null, defaultDescription: ` @@ -1234,7 +1244,7 @@ define('sbom-type', { ], description: ` The type of package described by the generated SBOM. For SPDX, this is the - value for the \`primaryPackagePurpose\` fieled. For CycloneDX, this is the + value for the \`primaryPackagePurpose\` field. For CycloneDX, this is the value for the \`type\` field. `, flatten, diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json index c4eabca7d1b8c1..80eb210b19e166 100644 --- a/deps/npm/node_modules/@npmcli/config/package.json +++ b/deps/npm/node_modules/@npmcli/config/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/config", - "version": "8.0.2", + "version": "8.1.0", "files": [ "bin/", "lib/" @@ -17,7 +17,7 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "postlint": "template-oss-check", "lintfix": "npm run lint -- --fix", "posttest": "npm run lint", @@ -32,7 +32,7 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/mock-globals": "^1.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "tap": "^16.3.8" }, "dependencies": { @@ -50,8 +50,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.19.0", - "content": "../../scripts/template-oss/index.js", - "npm": "npm" + "version": "4.21.3", + "content": "../../scripts/template-oss/index.js" } } diff --git a/deps/npm/node_modules/@npmcli/git/lib/spawn.js b/deps/npm/node_modules/@npmcli/git/lib/spawn.js index 7098d7b8729427..5e96eb5542b5a6 100644 --- a/deps/npm/node_modules/@npmcli/git/lib/spawn.js +++ b/deps/npm/node_modules/@npmcli/git/lib/spawn.js @@ -2,10 +2,10 @@ const spawn = require('@npmcli/promise-spawn') const promiseRetry = require('promise-retry') const log = require('proc-log') const makeError = require('./make-error.js') -const whichGit = require('./which.js') const makeOpts = require('./opts.js') module.exports = (gitArgs, opts = {}) => { + const whichGit = require('./which.js') const gitPath = whichGit(opts) if (gitPath instanceof Error) { diff --git a/deps/npm/node_modules/@npmcli/git/package.json b/deps/npm/node_modules/@npmcli/git/package.json index 6ab037d841cc34..485c1f43dddb90 100644 --- a/deps/npm/node_modules/@npmcli/git/package.json +++ b/deps/npm/node_modules/@npmcli/git/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/git", - "version": "5.0.3", + "version": "5.0.4", "main": "lib/index.js", "files": [ "bin/", @@ -14,7 +14,7 @@ "author": "GitHub Inc.", "license": "ISC", "scripts": { - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "snap": "tap", "test": "tap", "posttest": "npm run lint", @@ -31,7 +31,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.18.0", + "@npmcli/template-oss": "4.21.3", "npm-package-arg": "^11.0.0", "slash": "^3.0.0", "tap": "^16.0.1" @@ -51,13 +51,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.18.0", - "publish": true, - "ciVersions": [ - "16.14.0", - "16.x", - "18.0.0", - "18.x" - ] + "version": "4.21.3", + "publish": true } } diff --git a/deps/npm/node_modules/@npmcli/promise-spawn/lib/index.js b/deps/npm/node_modules/@npmcli/promise-spawn/lib/index.js index 571ff6b9169c9b..b31395ebb5bcd4 100644 --- a/deps/npm/node_modules/@npmcli/promise-spawn/lib/index.js +++ b/deps/npm/node_modules/@npmcli/promise-spawn/lib/index.js @@ -100,8 +100,8 @@ const spawnWithShell = (cmd, args, opts, extra) => { let pathToInitial try { pathToInitial = which.sync(initialCmd, { - path: (options.env && options.env.PATH) || process.env.PATH, - pathext: (options.env && options.env.PATHEXT) || process.env.PATHEXT, + path: (options.env && findInObject(options.env, 'PATH')) || process.env.PATH, + pathext: (options.env && findInObject(options.env, 'PATHEXT')) || process.env.PATHEXT, }).toLowerCase() } catch (err) { pathToInitial = initialCmd.toLowerCase() @@ -192,4 +192,14 @@ const stdioResult = (stdout, stderr, { stdioString = true, stdio }) => { return result } +// case insensitive lookup in an object +const findInObject = (obj, key) => { + key = key.toLowerCase() + for (const objKey of Object.keys(obj).sort()) { + if (objKey.toLowerCase() === key) { + return obj[objKey] + } + } +} + module.exports = promiseSpawn diff --git a/deps/npm/node_modules/@npmcli/promise-spawn/package.json b/deps/npm/node_modules/@npmcli/promise-spawn/package.json index ffd89f1083341c..6e161b7404b858 100644 --- a/deps/npm/node_modules/@npmcli/promise-spawn/package.json +++ b/deps/npm/node_modules/@npmcli/promise-spawn/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/promise-spawn", - "version": "7.0.0", + "version": "7.0.1", "files": [ "bin/", "lib/" @@ -16,7 +16,7 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", @@ -32,7 +32,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.18.0", + "@npmcli/template-oss": "4.21.3", "spawk": "^1.7.1", "tap": "^16.0.1" }, @@ -41,13 +41,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "ciVersions": [ - "16.14.0", - "16.x", - "18.0.0", - "18.x" - ], - "version": "4.18.0", + "version": "4.21.3", "publish": true }, "dependencies": { diff --git a/deps/npm/node_modules/@npmcli/run-script/lib/signal-manager.js b/deps/npm/node_modules/@npmcli/run-script/lib/signal-manager.js index efc00b488063ff..a099a4af2b9be3 100644 --- a/deps/npm/node_modules/@npmcli/run-script/lib/signal-manager.js +++ b/deps/npm/node_modules/@npmcli/run-script/lib/signal-manager.js @@ -1,9 +1,6 @@ const runningProcs = new Set() let handlersInstalled = false -// NOTE: these signals aren't actually forwarded anywhere. they're trapped and -// ignored until all child processes have exited. in our next breaking change -// we should rename this const forwardedSignals = [ 'SIGINT', 'SIGTERM', @@ -12,8 +9,12 @@ const forwardedSignals = [ // no-op, this is so receiving the signal doesn't cause us to exit immediately // instead, we exit after all children have exited when we re-send the signal // to ourselves. 
see the catch handler at the bottom of run-script-pkg.js -// istanbul ignore next - this function does nothing -const handleSignal = () => {} +const handleSignal = signal => { + for (const proc of runningProcs) { + proc.kill(signal) + } +} + const setupListeners = () => { for (const signal of forwardedSignals) { process.on(signal, handleSignal) diff --git a/deps/npm/node_modules/@npmcli/run-script/package.json b/deps/npm/node_modules/@npmcli/run-script/package.json index 21f00c7f1cbfbb..c090e52cf11278 100644 --- a/deps/npm/node_modules/@npmcli/run-script/package.json +++ b/deps/npm/node_modules/@npmcli/run-script/package.json @@ -1,13 +1,13 @@ { "name": "@npmcli/run-script", - "version": "7.0.2", + "version": "7.0.3", "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)", "author": "GitHub Inc.", "license": "ISC", "scripts": { "test": "tap", "eslint": "eslint", - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "snap": "tap", @@ -16,7 +16,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "require-inject": "^1.4.4", "tap": "^16.0.1" }, @@ -41,7 +41,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/abort-controller/LICENSE b/deps/npm/node_modules/abort-controller/LICENSE deleted file mode 100644 index c914149a6f845c..00000000000000 --- a/deps/npm/node_modules/abort-controller/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Toru Nagashima - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/deps/npm/node_modules/abort-controller/browser.js b/deps/npm/node_modules/abort-controller/browser.js deleted file mode 100644 index b0c5ec37d9b76c..00000000000000 --- a/deps/npm/node_modules/abort-controller/browser.js +++ /dev/null @@ -1,13 +0,0 @@ -/*globals self, window */ -"use strict" - -/*eslint-disable @mysticatea/prettier */ -const { AbortController, AbortSignal } = - typeof self !== "undefined" ? self : - typeof window !== "undefined" ? 
window : - /* otherwise */ undefined -/*eslint-enable @mysticatea/prettier */ - -module.exports = AbortController -module.exports.AbortSignal = AbortSignal -module.exports.default = AbortController diff --git a/deps/npm/node_modules/abort-controller/browser.mjs b/deps/npm/node_modules/abort-controller/browser.mjs deleted file mode 100644 index a8f321afed6755..00000000000000 --- a/deps/npm/node_modules/abort-controller/browser.mjs +++ /dev/null @@ -1,11 +0,0 @@ -/*globals self, window */ - -/*eslint-disable @mysticatea/prettier */ -const { AbortController, AbortSignal } = - typeof self !== "undefined" ? self : - typeof window !== "undefined" ? window : - /* otherwise */ undefined -/*eslint-enable @mysticatea/prettier */ - -export default AbortController -export { AbortController, AbortSignal } diff --git a/deps/npm/node_modules/abort-controller/dist/abort-controller.js b/deps/npm/node_modules/abort-controller/dist/abort-controller.js deleted file mode 100644 index 49af73955859f7..00000000000000 --- a/deps/npm/node_modules/abort-controller/dist/abort-controller.js +++ /dev/null @@ -1,127 +0,0 @@ -/** - * @author Toru Nagashima - * See LICENSE file in root directory for full license. - */ -'use strict'; - -Object.defineProperty(exports, '__esModule', { value: true }); - -var eventTargetShim = require('event-target-shim'); - -/** - * The signal class. - * @see https://dom.spec.whatwg.org/#abortsignal - */ -class AbortSignal extends eventTargetShim.EventTarget { - /** - * AbortSignal cannot be constructed directly. - */ - constructor() { - super(); - throw new TypeError("AbortSignal cannot be constructed directly"); - } - /** - * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. - */ - get aborted() { - const aborted = abortedFlags.get(this); - if (typeof aborted !== "boolean") { - throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); - } - return aborted; - } -} -eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); -/** - * Create an AbortSignal object. - */ -function createAbortSignal() { - const signal = Object.create(AbortSignal.prototype); - eventTargetShim.EventTarget.call(signal); - abortedFlags.set(signal, false); - return signal; -} -/** - * Abort a given signal. - */ -function abortSignal(signal) { - if (abortedFlags.get(signal) !== false) { - return; - } - abortedFlags.set(signal, true); - signal.dispatchEvent({ type: "abort" }); -} -/** - * Aborted flag for each instances. - */ -const abortedFlags = new WeakMap(); -// Properties should be enumerable. -Object.defineProperties(AbortSignal.prototype, { - aborted: { enumerable: true }, -}); -// `toString()` should return `"[object AbortSignal]"` -if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { - configurable: true, - value: "AbortSignal", - }); -} - -/** - * The AbortController. - * @see https://dom.spec.whatwg.org/#abortcontroller - */ -class AbortController { - /** - * Initialize this controller. - */ - constructor() { - signals.set(this, createAbortSignal()); - } - /** - * Returns the `AbortSignal` object associated with this object. - */ - get signal() { - return getSignal(this); - } - /** - * Abort and signal to any observers that the associated activity is to be aborted. - */ - abort() { - abortSignal(getSignal(this)); - } -} -/** - * Associated signals. 
- */ -const signals = new WeakMap(); -/** - * Get the associated signal of a given controller. - */ -function getSignal(controller) { - const signal = signals.get(controller); - if (signal == null) { - throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); - } - return signal; -} -// Properties should be enumerable. -Object.defineProperties(AbortController.prototype, { - signal: { enumerable: true }, - abort: { enumerable: true }, -}); -if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { - configurable: true, - value: "AbortController", - }); -} - -exports.AbortController = AbortController; -exports.AbortSignal = AbortSignal; -exports.default = AbortController; - -module.exports = AbortController -module.exports.AbortController = module.exports["default"] = AbortController -module.exports.AbortSignal = AbortSignal -//# sourceMappingURL=abort-controller.js.map diff --git a/deps/npm/node_modules/abort-controller/dist/abort-controller.mjs b/deps/npm/node_modules/abort-controller/dist/abort-controller.mjs deleted file mode 100644 index 88ba22d5574edc..00000000000000 --- a/deps/npm/node_modules/abort-controller/dist/abort-controller.mjs +++ /dev/null @@ -1,118 +0,0 @@ -/** - * @author Toru Nagashima - * See LICENSE file in root directory for full license. - */ -import { EventTarget, defineEventAttribute } from 'event-target-shim'; - -/** - * The signal class. - * @see https://dom.spec.whatwg.org/#abortsignal - */ -class AbortSignal extends EventTarget { - /** - * AbortSignal cannot be constructed directly. - */ - constructor() { - super(); - throw new TypeError("AbortSignal cannot be constructed directly"); - } - /** - * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. - */ - get aborted() { - const aborted = abortedFlags.get(this); - if (typeof aborted !== "boolean") { - throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); - } - return aborted; - } -} -defineEventAttribute(AbortSignal.prototype, "abort"); -/** - * Create an AbortSignal object. - */ -function createAbortSignal() { - const signal = Object.create(AbortSignal.prototype); - EventTarget.call(signal); - abortedFlags.set(signal, false); - return signal; -} -/** - * Abort a given signal. - */ -function abortSignal(signal) { - if (abortedFlags.get(signal) !== false) { - return; - } - abortedFlags.set(signal, true); - signal.dispatchEvent({ type: "abort" }); -} -/** - * Aborted flag for each instances. - */ -const abortedFlags = new WeakMap(); -// Properties should be enumerable. -Object.defineProperties(AbortSignal.prototype, { - aborted: { enumerable: true }, -}); -// `toString()` should return `"[object AbortSignal]"` -if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { - configurable: true, - value: "AbortSignal", - }); -} - -/** - * The AbortController. - * @see https://dom.spec.whatwg.org/#abortcontroller - */ -class AbortController { - /** - * Initialize this controller. - */ - constructor() { - signals.set(this, createAbortSignal()); - } - /** - * Returns the `AbortSignal` object associated with this object. 
- */ - get signal() { - return getSignal(this); - } - /** - * Abort and signal to any observers that the associated activity is to be aborted. - */ - abort() { - abortSignal(getSignal(this)); - } -} -/** - * Associated signals. - */ -const signals = new WeakMap(); -/** - * Get the associated signal of a given controller. - */ -function getSignal(controller) { - const signal = signals.get(controller); - if (signal == null) { - throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); - } - return signal; -} -// Properties should be enumerable. -Object.defineProperties(AbortController.prototype, { - signal: { enumerable: true }, - abort: { enumerable: true }, -}); -if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { - configurable: true, - value: "AbortController", - }); -} - -export default AbortController; -export { AbortController, AbortSignal }; -//# sourceMappingURL=abort-controller.mjs.map diff --git a/deps/npm/node_modules/abort-controller/dist/abort-controller.umd.js b/deps/npm/node_modules/abort-controller/dist/abort-controller.umd.js deleted file mode 100644 index f643cfd6b67110..00000000000000 --- a/deps/npm/node_modules/abort-controller/dist/abort-controller.umd.js +++ /dev/null @@ -1,5 +0,0 @@ -/** - * @author Toru Nagashima - * See LICENSE file in root directory for full license. - */(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.AbortControllerShim={}))})(this,function(a){'use strict';function b(a){return b="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},b(a)}function c(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")}function d(a,b){for(var c,d=0;d=6.5" - }, - "dependencies": { - "event-target-shim": "^5.0.0" - }, - "browser": "./browser.js", - "devDependencies": { - "@babel/core": "^7.2.2", - "@babel/plugin-transform-modules-commonjs": "^7.2.0", - "@babel/preset-env": "^7.3.0", - "@babel/register": "^7.0.0", - "@mysticatea/eslint-plugin": "^8.0.1", - "@mysticatea/spy": "^0.1.2", - "@types/mocha": "^5.2.5", - "@types/node": "^10.12.18", - "assert": "^1.4.1", - "codecov": "^3.1.0", - "dts-bundle-generator": "^2.0.0", - "eslint": "^5.12.1", - "karma": "^3.1.4", - "karma-chrome-launcher": "^2.2.0", - "karma-coverage": "^1.1.2", - "karma-firefox-launcher": "^1.1.0", - "karma-growl-reporter": "^1.0.0", - "karma-ie-launcher": "^1.0.0", - "karma-mocha": "^1.3.0", - "karma-rollup-preprocessor": "^7.0.0-rc.2", - "mocha": "^5.2.0", - "npm-run-all": "^4.1.5", - "nyc": "^13.1.0", - "opener": "^1.5.1", - "rimraf": "^2.6.3", - "rollup": "^1.1.2", - "rollup-plugin-babel": "^4.3.2", - "rollup-plugin-babel-minify": "^7.0.0", - "rollup-plugin-commonjs": "^9.2.0", - "rollup-plugin-node-resolve": "^4.0.0", - "rollup-plugin-sourcemaps": "^0.4.2", - "rollup-plugin-typescript": "^1.0.0", - "rollup-watch": "^4.3.1", - "ts-node": "^8.0.1", - "type-tester": "^1.0.0", - "typescript": "^3.2.4" - }, - "scripts": { - "preversion": "npm test", - "version": "npm run -s build && git add dist/*", - "postversion": "git push && git push --tags", - "clean": "rimraf .nyc_output coverage", - "coverage": "opener coverage/lcov-report/index.html", - 
"lint": "eslint . --ext .ts", - "build": "run-s -s build:*", - "build:rollup": "rollup -c", - "build:dts": "dts-bundle-generator -o dist/abort-controller.d.ts src/abort-controller.ts && ts-node scripts/fix-dts", - "test": "run-s -s lint test:*", - "test:mocha": "nyc mocha test/*.ts", - "test:karma": "karma start --single-run", - "watch": "run-p -s watch:*", - "watch:mocha": "mocha test/*.ts --require ts-node/register --watch-extensions ts --watch --growl", - "watch:karma": "karma start --watch", - "codecov": "codecov" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/mysticatea/abort-controller.git" - }, - "keywords": [ - "w3c", - "whatwg", - "event", - "events", - "abort", - "cancel", - "abortcontroller", - "abortsignal", - "controller", - "signal", - "shim" - ], - "author": "Toru Nagashima (https://github.com/mysticatea)", - "license": "MIT", - "bugs": { - "url": "https://github.com/mysticatea/abort-controller/issues" - }, - "homepage": "https://github.com/mysticatea/abort-controller#readme" -} diff --git a/deps/npm/node_modules/abort-controller/polyfill.js b/deps/npm/node_modules/abort-controller/polyfill.js deleted file mode 100644 index 3ca892330b1e51..00000000000000 --- a/deps/npm/node_modules/abort-controller/polyfill.js +++ /dev/null @@ -1,21 +0,0 @@ -/*globals require, self, window */ -"use strict" - -const ac = require("./dist/abort-controller") - -/*eslint-disable @mysticatea/prettier */ -const g = - typeof self !== "undefined" ? self : - typeof window !== "undefined" ? window : - typeof global !== "undefined" ? global : - /* otherwise */ undefined -/*eslint-enable @mysticatea/prettier */ - -if (g) { - if (typeof g.AbortController === "undefined") { - g.AbortController = ac.AbortController - } - if (typeof g.AbortSignal === "undefined") { - g.AbortSignal = ac.AbortSignal - } -} diff --git a/deps/npm/node_modules/abort-controller/polyfill.mjs b/deps/npm/node_modules/abort-controller/polyfill.mjs deleted file mode 100644 index 0602a64dddfd2f..00000000000000 --- a/deps/npm/node_modules/abort-controller/polyfill.mjs +++ /dev/null @@ -1,19 +0,0 @@ -/*globals self, window */ -import * as ac from "./dist/abort-controller" - -/*eslint-disable @mysticatea/prettier */ -const g = - typeof self !== "undefined" ? self : - typeof window !== "undefined" ? window : - typeof global !== "undefined" ? 
global : - /* otherwise */ undefined -/*eslint-enable @mysticatea/prettier */ - -if (g) { - if (typeof g.AbortController === "undefined") { - g.AbortController = ac.AbortController - } - if (typeof g.AbortSignal === "undefined") { - g.AbortSignal = ac.AbortSignal - } -} diff --git a/deps/npm/node_modules/are-we-there-yet/lib/tracker-stream.js b/deps/npm/node_modules/are-we-there-yet/lib/tracker-stream.js index 4b111b6bae8a81..75e44df309150f 100644 --- a/deps/npm/node_modules/are-we-there-yet/lib/tracker-stream.js +++ b/deps/npm/node_modules/are-we-there-yet/lib/tracker-stream.js @@ -1,6 +1,5 @@ 'use strict' -const stream = require('readable-stream') -const delegate = require('delegates') +const stream = require('stream') const Tracker = require('./tracker.js') class TrackerStream extends stream.Transform { @@ -9,7 +8,11 @@ class TrackerStream extends stream.Transform { this.tracker = new Tracker(name, size) this.name = name this.id = this.tracker.id - this.tracker.on('change', delegateChange(this)) + this.tracker.on('change', this.trackerChange.bind(this)) + } + + trackerChange (name, completion) { + this.emit('change', name, completion, this) } _transform (data, encoding, cb) { @@ -22,17 +25,18 @@ class TrackerStream extends stream.Transform { this.tracker.finish() cb() } -} -function delegateChange (trackerStream) { - return function (name, completion, tracker) { - trackerStream.emit('change', name, completion, trackerStream) + completed () { + return this.tracker.completed() } -} -delegate(TrackerStream.prototype, 'tracker') - .method('completed') - .method('addWork') - .method('finish') + addWork (work) { + return this.tracker.addWork(work) + } + + finish () { + return this.tracker.finish() + } +} module.exports = TrackerStream diff --git a/deps/npm/node_modules/are-we-there-yet/package.json b/deps/npm/node_modules/are-we-there-yet/package.json index e238c6581df667..f072a21abb444b 100644 --- a/deps/npm/node_modules/are-we-there-yet/package.json +++ b/deps/npm/node_modules/are-we-there-yet/package.json @@ -1,11 +1,11 @@ { "name": "are-we-there-yet", - "version": "4.0.1", + "version": "4.0.2", "description": "Keep track of the overall completion of many disparate processes", "main": "lib/index.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", @@ -25,13 +25,9 @@ "homepage": "https://github.com/npm/are-we-there-yet", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.17.0", + "@npmcli/template-oss": "4.21.3", "tap": "^16.0.1" }, - "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^4.1.0" - }, "files": [ "bin/", "lib/" @@ -51,7 +47,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.17.0", + "version": "4.21.3", "publish": true } } diff --git a/deps/npm/node_modules/base64-js/LICENSE b/deps/npm/node_modules/base64-js/LICENSE deleted file mode 100644 index 6d52b8acfbe771..00000000000000 --- a/deps/npm/node_modules/base64-js/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Jameson Little - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/deps/npm/node_modules/base64-js/base64js.min.js b/deps/npm/node_modules/base64-js/base64js.min.js deleted file mode 100644 index 908ac83fd12400..00000000000000 --- a/deps/npm/node_modules/base64-js/base64js.min.js +++ /dev/null @@ -1 +0,0 @@ -(function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"==typeof window?"undefined"==typeof global?"undefined"==typeof self?this:self:global:window,b.base64js=a()}})(function(){return function(){function b(d,e,g){function a(j,i){if(!e[j]){if(!d[j]){var f="function"==typeof require&&require;if(!i&&f)return f(j,!0);if(h)return h(j,!0);var c=new Error("Cannot find module '"+j+"'");throw c.code="MODULE_NOT_FOUND",c}var k=e[j]={exports:{}};d[j][0].call(k.exports,function(b){var c=d[j][1][b];return a(c||b)},k,k.exports,b,d,e,g)}return e[j].exports}for(var h="function"==typeof require&&require,c=0;c>16,j[k++]=255&b>>8,j[k++]=255&b;return 2===h&&(b=l[a.charCodeAt(c)]<<2|l[a.charCodeAt(c+1)]>>4,j[k++]=255&b),1===h&&(b=l[a.charCodeAt(c)]<<10|l[a.charCodeAt(c+1)]<<4|l[a.charCodeAt(c+2)]>>2,j[k++]=255&b>>8,j[k++]=255&b),j}function g(a){return k[63&a>>18]+k[63&a>>12]+k[63&a>>6]+k[63&a]}function h(a,b,c){for(var d,e=[],f=b;fj?j:g+f));return 1===d?(b=a[c-1],e.push(k[b>>2]+k[63&b<<4]+"==")):2===d&&(b=(a[c-2]<<8)+a[c-1],e.push(k[b>>10]+k[63&b>>4]+k[63&b<<2]+"=")),e.join("")}c.byteLength=function(a){var b=d(a),c=b[0],e=b[1];return 3*(c+e)/4-e},c.toByteArray=f,c.fromByteArray=j;for(var k=[],l=[],m="undefined"==typeof Uint8Array?Array:Uint8Array,n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",o=0,p=n.length;o 0) { - throw new Error('Invalid string. Length must be a multiple of 4') - } - - // Trim off extra bytes after placeholder bytes are found - // See: https://github.com/beatgammit/base64-js/issues/42 - var validLen = b64.indexOf('=') - if (validLen === -1) validLen = len - - var placeHoldersLen = validLen === len - ? 
0 - : 4 - (validLen % 4) - - return [validLen, placeHoldersLen] -} - -// base64 is 4/3 + up to two characters of the original data -function byteLength (b64) { - var lens = getLens(b64) - var validLen = lens[0] - var placeHoldersLen = lens[1] - return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen -} - -function _byteLength (b64, validLen, placeHoldersLen) { - return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen -} - -function toByteArray (b64) { - var tmp - var lens = getLens(b64) - var validLen = lens[0] - var placeHoldersLen = lens[1] - - var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) - - var curByte = 0 - - // if there are placeholders, only get up to the last complete 4 chars - var len = placeHoldersLen > 0 - ? validLen - 4 - : validLen - - var i - for (i = 0; i < len; i += 4) { - tmp = - (revLookup[b64.charCodeAt(i)] << 18) | - (revLookup[b64.charCodeAt(i + 1)] << 12) | - (revLookup[b64.charCodeAt(i + 2)] << 6) | - revLookup[b64.charCodeAt(i + 3)] - arr[curByte++] = (tmp >> 16) & 0xFF - arr[curByte++] = (tmp >> 8) & 0xFF - arr[curByte++] = tmp & 0xFF - } - - if (placeHoldersLen === 2) { - tmp = - (revLookup[b64.charCodeAt(i)] << 2) | - (revLookup[b64.charCodeAt(i + 1)] >> 4) - arr[curByte++] = tmp & 0xFF - } - - if (placeHoldersLen === 1) { - tmp = - (revLookup[b64.charCodeAt(i)] << 10) | - (revLookup[b64.charCodeAt(i + 1)] << 4) | - (revLookup[b64.charCodeAt(i + 2)] >> 2) - arr[curByte++] = (tmp >> 8) & 0xFF - arr[curByte++] = tmp & 0xFF - } - - return arr -} - -function tripletToBase64 (num) { - return lookup[num >> 18 & 0x3F] + - lookup[num >> 12 & 0x3F] + - lookup[num >> 6 & 0x3F] + - lookup[num & 0x3F] -} - -function encodeChunk (uint8, start, end) { - var tmp - var output = [] - for (var i = start; i < end; i += 3) { - tmp = - ((uint8[i] << 16) & 0xFF0000) + - ((uint8[i + 1] << 8) & 0xFF00) + - (uint8[i + 2] & 0xFF) - output.push(tripletToBase64(tmp)) - } - return output.join('') -} - -function fromByteArray (uint8) { - var tmp - var len = uint8.length - var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes - var parts = [] - var maxChunkLength = 16383 // must be multiple of 3 - - // go through the array every three bytes, we'll deal with trailing stuff later - for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { - parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) - } - - // pad the end with zeros, but make sure to not forget the extra bytes - if (extraBytes === 1) { - tmp = uint8[len - 1] - parts.push( - lookup[tmp >> 2] + - lookup[(tmp << 4) & 0x3F] + - '==' - ) - } else if (extraBytes === 2) { - tmp = (uint8[len - 2] << 8) + uint8[len - 1] - parts.push( - lookup[tmp >> 10] + - lookup[(tmp >> 4) & 0x3F] + - lookup[(tmp << 2) & 0x3F] + - '=' - ) - } - - return parts.join('') -} diff --git a/deps/npm/node_modules/base64-js/package.json b/deps/npm/node_modules/base64-js/package.json deleted file mode 100644 index c3972e39f2be5d..00000000000000 --- a/deps/npm/node_modules/base64-js/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "base64-js", - "description": "Base64 encoding/decoding in pure JS", - "version": "1.5.1", - "author": "T. 
Jameson Little ", - "typings": "index.d.ts", - "bugs": { - "url": "https://github.com/beatgammit/base64-js/issues" - }, - "devDependencies": { - "babel-minify": "^0.5.1", - "benchmark": "^2.1.4", - "browserify": "^16.3.0", - "standard": "*", - "tape": "4.x" - }, - "homepage": "https://github.com/beatgammit/base64-js", - "keywords": [ - "base64" - ], - "license": "MIT", - "main": "index.js", - "repository": { - "type": "git", - "url": "git://github.com/beatgammit/base64-js.git" - }, - "scripts": { - "build": "browserify -s base64js -r ./ | minify > base64js.min.js", - "lint": "standard", - "test": "npm run lint && npm run unit", - "unit": "tape test/*.js" - }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] -} diff --git a/deps/npm/node_modules/buffer/AUTHORS.md b/deps/npm/node_modules/buffer/AUTHORS.md deleted file mode 100644 index 468aa1908c3796..00000000000000 --- a/deps/npm/node_modules/buffer/AUTHORS.md +++ /dev/null @@ -1,73 +0,0 @@ -# Authors - -#### Ordered by first contribution. - -- Romain Beauxis (toots@rastageeks.org) -- Tobias Koppers (tobias.koppers@googlemail.com) -- Janus (ysangkok@gmail.com) -- Rainer Dreyer (rdrey1@gmail.com) -- Tõnis Tiigi (tonistiigi@gmail.com) -- James Halliday (mail@substack.net) -- Michael Williamson (mike@zwobble.org) -- elliottcable (github@elliottcable.name) -- rafael (rvalle@livelens.net) -- Andrew Kelley (superjoe30@gmail.com) -- Andreas Madsen (amwebdk@gmail.com) -- Mike Brevoort (mike.brevoort@pearson.com) -- Brian White (mscdex@mscdex.net) -- Feross Aboukhadijeh (feross@feross.org) -- Ruben Verborgh (ruben@verborgh.org) -- eliang (eliang.cs@gmail.com) -- Jesse Tane (jesse.tane@gmail.com) -- Alfonso Boza (alfonso@cloud.com) -- Mathias Buus (mathiasbuus@gmail.com) -- Devon Govett (devongovett@gmail.com) -- Daniel Cousens (github@dcousens.com) -- Joseph Dykstra (josephdykstra@gmail.com) -- Parsha Pourkhomami (parshap+git@gmail.com) -- Damjan Košir (damjan.kosir@gmail.com) -- daverayment (dave.rayment@gmail.com) -- kawanet (u-suke@kawa.net) -- Linus Unnebäck (linus@folkdatorn.se) -- Nolan Lawson (nolan.lawson@gmail.com) -- Calvin Metcalf (calvin.metcalf@gmail.com) -- Koki Takahashi (hakatasiloving@gmail.com) -- Guy Bedford (guybedford@gmail.com) -- Jan Schär (jscissr@gmail.com) -- RaulTsc (tomescu.raul@gmail.com) -- Matthieu Monsch (monsch@alum.mit.edu) -- Dan Ehrenberg (littledan@chromium.org) -- Kirill Fomichev (fanatid@ya.ru) -- Yusuke Kawasaki (u-suke@kawa.net) -- DC (dcposch@dcpos.ch) -- John-David Dalton (john.david.dalton@gmail.com) -- adventure-yunfei (adventure030@gmail.com) -- Emil Bay (github@tixz.dk) -- Sam Sudar (sudar.sam@gmail.com) -- Volker Mische (volker.mische@gmail.com) -- David Walton (support@geekstocks.com) -- Сковорода Никита Андреевич (chalkerx@gmail.com) -- greenkeeper[bot] (greenkeeper[bot]@users.noreply.github.com) -- ukstv (sergey.ukustov@machinomy.com) -- Renée Kooi (renee@kooi.me) -- ranbochen (ranbochen@qq.com) -- Vladimir Borovik (bobahbdb@gmail.com) -- greenkeeper[bot] (23040076+greenkeeper[bot]@users.noreply.github.com) -- kumavis (aaron@kumavis.me) -- Sergey Ukustov (sergey.ukustov@machinomy.com) -- Fei Liu (liu.feiwood@gmail.com) -- Blaine Bublitz (blaine.bublitz@gmail.com) -- clement (clement@seald.io) -- Koushik Dutta (koushd@gmail.com) -- Jordan Harband (ljharb@gmail.com) -- Niklas Mischkulnig 
(mischnic@users.noreply.github.com) -- Nikolai Vavilov (vvnicholas@gmail.com) -- Fedor Nezhivoi (gyzerok@users.noreply.github.com) -- shuse2 (shus.toda@gmail.com) -- Peter Newman (peternewman@users.noreply.github.com) -- mathmakgakpak (44949126+mathmakgakpak@users.noreply.github.com) -- jkkang (jkkang@smartauth.kr) -- Deklan Webster (deklanw@gmail.com) -- Martin Heidegger (martin.heidegger@gmail.com) - -#### Generated by bin/update-authors.sh. diff --git a/deps/npm/node_modules/buffer/LICENSE b/deps/npm/node_modules/buffer/LICENSE deleted file mode 100644 index d6bf75dcf1f6f7..00000000000000 --- a/deps/npm/node_modules/buffer/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Feross Aboukhadijeh, and other contributors. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/deps/npm/node_modules/buffer/index.js b/deps/npm/node_modules/buffer/index.js deleted file mode 100644 index 7a0e9c2a123bc9..00000000000000 --- a/deps/npm/node_modules/buffer/index.js +++ /dev/null @@ -1,2106 +0,0 @@ -/*! - * The buffer module from node.js, for the browser. - * - * @author Feross Aboukhadijeh - * @license MIT - */ -/* eslint-disable no-proto */ - -'use strict' - -const base64 = require('base64-js') -const ieee754 = require('ieee754') -const customInspectSymbol = - (typeof Symbol === 'function' && typeof Symbol['for'] === 'function') // eslint-disable-line dot-notation - ? Symbol['for']('nodejs.util.inspect.custom') // eslint-disable-line dot-notation - : null - -exports.Buffer = Buffer -exports.SlowBuffer = SlowBuffer -exports.INSPECT_MAX_BYTES = 50 - -const K_MAX_LENGTH = 0x7fffffff -exports.kMaxLength = K_MAX_LENGTH - -/** - * If `Buffer.TYPED_ARRAY_SUPPORT`: - * === true Use Uint8Array implementation (fastest) - * === false Print warning and recommend using `buffer` v4.x which has an Object - * implementation (most compatible, even IE6) - * - * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+, - * Opera 11.6+, iOS 4.2+. - * - * We report that the browser does not support typed arrays if the are not subclassable - * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array` - * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support - * for __proto__ and has a buggy typed array implementation. 
- */ -Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport() - -if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' && - typeof console.error === 'function') { - console.error( - 'This browser lacks typed array (Uint8Array) support which is required by ' + - '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.' - ) -} - -function typedArraySupport () { - // Can typed array instances can be augmented? - try { - const arr = new Uint8Array(1) - const proto = { foo: function () { return 42 } } - Object.setPrototypeOf(proto, Uint8Array.prototype) - Object.setPrototypeOf(arr, proto) - return arr.foo() === 42 - } catch (e) { - return false - } -} - -Object.defineProperty(Buffer.prototype, 'parent', { - enumerable: true, - get: function () { - if (!Buffer.isBuffer(this)) return undefined - return this.buffer - } -}) - -Object.defineProperty(Buffer.prototype, 'offset', { - enumerable: true, - get: function () { - if (!Buffer.isBuffer(this)) return undefined - return this.byteOffset - } -}) - -function createBuffer (length) { - if (length > K_MAX_LENGTH) { - throw new RangeError('The value "' + length + '" is invalid for option "size"') - } - // Return an augmented `Uint8Array` instance - const buf = new Uint8Array(length) - Object.setPrototypeOf(buf, Buffer.prototype) - return buf -} - -/** - * The Buffer constructor returns instances of `Uint8Array` that have their - * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of - * `Uint8Array`, so the returned instances will have all the node `Buffer` methods - * and the `Uint8Array` methods. Square bracket notation works as expected -- it - * returns a single octet. - * - * The `Uint8Array` prototype remains unmodified. - */ - -function Buffer (arg, encodingOrOffset, length) { - // Common case. - if (typeof arg === 'number') { - if (typeof encodingOrOffset === 'string') { - throw new TypeError( - 'The "string" argument must be of type string. Received type number' - ) - } - return allocUnsafe(arg) - } - return from(arg, encodingOrOffset, length) -} - -Buffer.poolSize = 8192 // not used by this implementation - -function from (value, encodingOrOffset, length) { - if (typeof value === 'string') { - return fromString(value, encodingOrOffset) - } - - if (ArrayBuffer.isView(value)) { - return fromArrayView(value) - } - - if (value == null) { - throw new TypeError( - 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + - 'or Array-like Object. Received type ' + (typeof value) - ) - } - - if (isInstance(value, ArrayBuffer) || - (value && isInstance(value.buffer, ArrayBuffer))) { - return fromArrayBuffer(value, encodingOrOffset, length) - } - - if (typeof SharedArrayBuffer !== 'undefined' && - (isInstance(value, SharedArrayBuffer) || - (value && isInstance(value.buffer, SharedArrayBuffer)))) { - return fromArrayBuffer(value, encodingOrOffset, length) - } - - if (typeof value === 'number') { - throw new TypeError( - 'The "value" argument must not be of type number. 
Received type number' - ) - } - - const valueOf = value.valueOf && value.valueOf() - if (valueOf != null && valueOf !== value) { - return Buffer.from(valueOf, encodingOrOffset, length) - } - - const b = fromObject(value) - if (b) return b - - if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null && - typeof value[Symbol.toPrimitive] === 'function') { - return Buffer.from(value[Symbol.toPrimitive]('string'), encodingOrOffset, length) - } - - throw new TypeError( - 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + - 'or Array-like Object. Received type ' + (typeof value) - ) -} - -/** - * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError - * if value is a number. - * Buffer.from(str[, encoding]) - * Buffer.from(array) - * Buffer.from(buffer) - * Buffer.from(arrayBuffer[, byteOffset[, length]]) - **/ -Buffer.from = function (value, encodingOrOffset, length) { - return from(value, encodingOrOffset, length) -} - -// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug: -// https://github.com/feross/buffer/pull/148 -Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype) -Object.setPrototypeOf(Buffer, Uint8Array) - -function assertSize (size) { - if (typeof size !== 'number') { - throw new TypeError('"size" argument must be of type number') - } else if (size < 0) { - throw new RangeError('The value "' + size + '" is invalid for option "size"') - } -} - -function alloc (size, fill, encoding) { - assertSize(size) - if (size <= 0) { - return createBuffer(size) - } - if (fill !== undefined) { - // Only pay attention to encoding if it's a string. This - // prevents accidentally sending in a number that would - // be interpreted as a start offset. - return typeof encoding === 'string' - ? createBuffer(size).fill(fill, encoding) - : createBuffer(size).fill(fill) - } - return createBuffer(size) -} - -/** - * Creates a new filled Buffer instance. - * alloc(size[, fill[, encoding]]) - **/ -Buffer.alloc = function (size, fill, encoding) { - return alloc(size, fill, encoding) -} - -function allocUnsafe (size) { - assertSize(size) - return createBuffer(size < 0 ? 0 : checked(size) | 0) -} - -/** - * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance. - * */ -Buffer.allocUnsafe = function (size) { - return allocUnsafe(size) -} -/** - * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance. - */ -Buffer.allocUnsafeSlow = function (size) { - return allocUnsafe(size) -} - -function fromString (string, encoding) { - if (typeof encoding !== 'string' || encoding === '') { - encoding = 'utf8' - } - - if (!Buffer.isEncoding(encoding)) { - throw new TypeError('Unknown encoding: ' + encoding) - } - - const length = byteLength(string, encoding) | 0 - let buf = createBuffer(length) - - const actual = buf.write(string, encoding) - - if (actual !== length) { - // Writing a hex string, for example, that contains invalid characters will - // cause everything after the first invalid character to be ignored. (e.g. - // 'abxxcd' will be treated as 'ab') - buf = buf.slice(0, actual) - } - - return buf -} - -function fromArrayLike (array) { - const length = array.length < 0 ? 
0 : checked(array.length) | 0 - const buf = createBuffer(length) - for (let i = 0; i < length; i += 1) { - buf[i] = array[i] & 255 - } - return buf -} - -function fromArrayView (arrayView) { - if (isInstance(arrayView, Uint8Array)) { - const copy = new Uint8Array(arrayView) - return fromArrayBuffer(copy.buffer, copy.byteOffset, copy.byteLength) - } - return fromArrayLike(arrayView) -} - -function fromArrayBuffer (array, byteOffset, length) { - if (byteOffset < 0 || array.byteLength < byteOffset) { - throw new RangeError('"offset" is outside of buffer bounds') - } - - if (array.byteLength < byteOffset + (length || 0)) { - throw new RangeError('"length" is outside of buffer bounds') - } - - let buf - if (byteOffset === undefined && length === undefined) { - buf = new Uint8Array(array) - } else if (length === undefined) { - buf = new Uint8Array(array, byteOffset) - } else { - buf = new Uint8Array(array, byteOffset, length) - } - - // Return an augmented `Uint8Array` instance - Object.setPrototypeOf(buf, Buffer.prototype) - - return buf -} - -function fromObject (obj) { - if (Buffer.isBuffer(obj)) { - const len = checked(obj.length) | 0 - const buf = createBuffer(len) - - if (buf.length === 0) { - return buf - } - - obj.copy(buf, 0, 0, len) - return buf - } - - if (obj.length !== undefined) { - if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) { - return createBuffer(0) - } - return fromArrayLike(obj) - } - - if (obj.type === 'Buffer' && Array.isArray(obj.data)) { - return fromArrayLike(obj.data) - } -} - -function checked (length) { - // Note: cannot use `length < K_MAX_LENGTH` here because that fails when - // length is NaN (which is otherwise coerced to zero.) - if (length >= K_MAX_LENGTH) { - throw new RangeError('Attempt to allocate Buffer larger than maximum ' + - 'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes') - } - return length | 0 -} - -function SlowBuffer (length) { - if (+length != length) { // eslint-disable-line eqeqeq - length = 0 - } - return Buffer.alloc(+length) -} - -Buffer.isBuffer = function isBuffer (b) { - return b != null && b._isBuffer === true && - b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false -} - -Buffer.compare = function compare (a, b) { - if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength) - if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength) - if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { - throw new TypeError( - 'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array' - ) - } - - if (a === b) return 0 - - let x = a.length - let y = b.length - - for (let i = 0, len = Math.min(x, y); i < len; ++i) { - if (a[i] !== b[i]) { - x = a[i] - y = b[i] - break - } - } - - if (x < y) return -1 - if (y < x) return 1 - return 0 -} - -Buffer.isEncoding = function isEncoding (encoding) { - switch (String(encoding).toLowerCase()) { - case 'hex': - case 'utf8': - case 'utf-8': - case 'ascii': - case 'latin1': - case 'binary': - case 'base64': - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return true - default: - return false - } -} - -Buffer.concat = function concat (list, length) { - if (!Array.isArray(list)) { - throw new TypeError('"list" argument must be an Array of Buffers') - } - - if (list.length === 0) { - return Buffer.alloc(0) - } - - let i - if (length === undefined) { - length = 0 - for (i = 0; i < list.length; ++i) { - length += list[i].length - } - } - - const buffer = Buffer.allocUnsafe(length) - let pos = 0 - for (i = 0; i < 
list.length; ++i) { - let buf = list[i] - if (isInstance(buf, Uint8Array)) { - if (pos + buf.length > buffer.length) { - if (!Buffer.isBuffer(buf)) buf = Buffer.from(buf) - buf.copy(buffer, pos) - } else { - Uint8Array.prototype.set.call( - buffer, - buf, - pos - ) - } - } else if (!Buffer.isBuffer(buf)) { - throw new TypeError('"list" argument must be an Array of Buffers') - } else { - buf.copy(buffer, pos) - } - pos += buf.length - } - return buffer -} - -function byteLength (string, encoding) { - if (Buffer.isBuffer(string)) { - return string.length - } - if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) { - return string.byteLength - } - if (typeof string !== 'string') { - throw new TypeError( - 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' + - 'Received type ' + typeof string - ) - } - - const len = string.length - const mustMatch = (arguments.length > 2 && arguments[2] === true) - if (!mustMatch && len === 0) return 0 - - // Use a for loop to avoid recursion - let loweredCase = false - for (;;) { - switch (encoding) { - case 'ascii': - case 'latin1': - case 'binary': - return len - case 'utf8': - case 'utf-8': - return utf8ToBytes(string).length - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return len * 2 - case 'hex': - return len >>> 1 - case 'base64': - return base64ToBytes(string).length - default: - if (loweredCase) { - return mustMatch ? -1 : utf8ToBytes(string).length // assume utf8 - } - encoding = ('' + encoding).toLowerCase() - loweredCase = true - } - } -} -Buffer.byteLength = byteLength - -function slowToString (encoding, start, end) { - let loweredCase = false - - // No need to verify that "this.length <= MAX_UINT32" since it's a read-only - // property of a typed array. - - // This behaves neither like String nor Uint8Array in that we set start/end - // to their upper/lower bounds if the value passed is out of range. - // undefined is handled specially as per ECMA-262 6th Edition, - // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization. - if (start === undefined || start < 0) { - start = 0 - } - // Return early if start > this.length. Done here to prevent potential uint32 - // coercion fail below. - if (start > this.length) { - return '' - } - - if (end === undefined || end > this.length) { - end = this.length - } - - if (end <= 0) { - return '' - } - - // Force coercion to uint32. This will also coerce falsey/NaN values to 0. - end >>>= 0 - start >>>= 0 - - if (end <= start) { - return '' - } - - if (!encoding) encoding = 'utf8' - - while (true) { - switch (encoding) { - case 'hex': - return hexSlice(this, start, end) - - case 'utf8': - case 'utf-8': - return utf8Slice(this, start, end) - - case 'ascii': - return asciiSlice(this, start, end) - - case 'latin1': - case 'binary': - return latin1Slice(this, start, end) - - case 'base64': - return base64Slice(this, start, end) - - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return utf16leSlice(this, start, end) - - default: - if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) - encoding = (encoding + '').toLowerCase() - loweredCase = true - } - } -} - -// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package) -// to detect a Buffer instance. It's not possible to use `instanceof Buffer` -// reliably in a browserify context because there could be multiple different -// copies of the 'buffer' package in use. 
This method works even for Buffer -// instances that were created from another copy of the `buffer` package. -// See: https://github.com/feross/buffer/issues/154 -Buffer.prototype._isBuffer = true - -function swap (b, n, m) { - const i = b[n] - b[n] = b[m] - b[m] = i -} - -Buffer.prototype.swap16 = function swap16 () { - const len = this.length - if (len % 2 !== 0) { - throw new RangeError('Buffer size must be a multiple of 16-bits') - } - for (let i = 0; i < len; i += 2) { - swap(this, i, i + 1) - } - return this -} - -Buffer.prototype.swap32 = function swap32 () { - const len = this.length - if (len % 4 !== 0) { - throw new RangeError('Buffer size must be a multiple of 32-bits') - } - for (let i = 0; i < len; i += 4) { - swap(this, i, i + 3) - swap(this, i + 1, i + 2) - } - return this -} - -Buffer.prototype.swap64 = function swap64 () { - const len = this.length - if (len % 8 !== 0) { - throw new RangeError('Buffer size must be a multiple of 64-bits') - } - for (let i = 0; i < len; i += 8) { - swap(this, i, i + 7) - swap(this, i + 1, i + 6) - swap(this, i + 2, i + 5) - swap(this, i + 3, i + 4) - } - return this -} - -Buffer.prototype.toString = function toString () { - const length = this.length - if (length === 0) return '' - if (arguments.length === 0) return utf8Slice(this, 0, length) - return slowToString.apply(this, arguments) -} - -Buffer.prototype.toLocaleString = Buffer.prototype.toString - -Buffer.prototype.equals = function equals (b) { - if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer') - if (this === b) return true - return Buffer.compare(this, b) === 0 -} - -Buffer.prototype.inspect = function inspect () { - let str = '' - const max = exports.INSPECT_MAX_BYTES - str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim() - if (this.length > max) str += ' ... ' - return '' -} -if (customInspectSymbol) { - Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect -} - -Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) { - if (isInstance(target, Uint8Array)) { - target = Buffer.from(target, target.offset, target.byteLength) - } - if (!Buffer.isBuffer(target)) { - throw new TypeError( - 'The "target" argument must be one of type Buffer or Uint8Array. ' + - 'Received type ' + (typeof target) - ) - } - - if (start === undefined) { - start = 0 - } - if (end === undefined) { - end = target ? target.length : 0 - } - if (thisStart === undefined) { - thisStart = 0 - } - if (thisEnd === undefined) { - thisEnd = this.length - } - - if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) { - throw new RangeError('out of range index') - } - - if (thisStart >= thisEnd && start >= end) { - return 0 - } - if (thisStart >= thisEnd) { - return -1 - } - if (start >= end) { - return 1 - } - - start >>>= 0 - end >>>= 0 - thisStart >>>= 0 - thisEnd >>>= 0 - - if (this === target) return 0 - - let x = thisEnd - thisStart - let y = end - start - const len = Math.min(x, y) - - const thisCopy = this.slice(thisStart, thisEnd) - const targetCopy = target.slice(start, end) - - for (let i = 0; i < len; ++i) { - if (thisCopy[i] !== targetCopy[i]) { - x = thisCopy[i] - y = targetCopy[i] - break - } - } - - if (x < y) return -1 - if (y < x) return 1 - return 0 -} - -// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`, -// OR the last index of `val` in `buffer` at offset <= `byteOffset`. 
-// -// Arguments: -// - buffer - a Buffer to search -// - val - a string, Buffer, or number -// - byteOffset - an index into `buffer`; will be clamped to an int32 -// - encoding - an optional encoding, relevant is val is a string -// - dir - true for indexOf, false for lastIndexOf -function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) { - // Empty buffer means no match - if (buffer.length === 0) return -1 - - // Normalize byteOffset - if (typeof byteOffset === 'string') { - encoding = byteOffset - byteOffset = 0 - } else if (byteOffset > 0x7fffffff) { - byteOffset = 0x7fffffff - } else if (byteOffset < -0x80000000) { - byteOffset = -0x80000000 - } - byteOffset = +byteOffset // Coerce to Number. - if (numberIsNaN(byteOffset)) { - // byteOffset: it it's undefined, null, NaN, "foo", etc, search whole buffer - byteOffset = dir ? 0 : (buffer.length - 1) - } - - // Normalize byteOffset: negative offsets start from the end of the buffer - if (byteOffset < 0) byteOffset = buffer.length + byteOffset - if (byteOffset >= buffer.length) { - if (dir) return -1 - else byteOffset = buffer.length - 1 - } else if (byteOffset < 0) { - if (dir) byteOffset = 0 - else return -1 - } - - // Normalize val - if (typeof val === 'string') { - val = Buffer.from(val, encoding) - } - - // Finally, search either indexOf (if dir is true) or lastIndexOf - if (Buffer.isBuffer(val)) { - // Special case: looking for empty string/buffer always fails - if (val.length === 0) { - return -1 - } - return arrayIndexOf(buffer, val, byteOffset, encoding, dir) - } else if (typeof val === 'number') { - val = val & 0xFF // Search for a byte value [0-255] - if (typeof Uint8Array.prototype.indexOf === 'function') { - if (dir) { - return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset) - } else { - return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset) - } - } - return arrayIndexOf(buffer, [val], byteOffset, encoding, dir) - } - - throw new TypeError('val must be string, number or Buffer') -} - -function arrayIndexOf (arr, val, byteOffset, encoding, dir) { - let indexSize = 1 - let arrLength = arr.length - let valLength = val.length - - if (encoding !== undefined) { - encoding = String(encoding).toLowerCase() - if (encoding === 'ucs2' || encoding === 'ucs-2' || - encoding === 'utf16le' || encoding === 'utf-16le') { - if (arr.length < 2 || val.length < 2) { - return -1 - } - indexSize = 2 - arrLength /= 2 - valLength /= 2 - byteOffset /= 2 - } - } - - function read (buf, i) { - if (indexSize === 1) { - return buf[i] - } else { - return buf.readUInt16BE(i * indexSize) - } - } - - let i - if (dir) { - let foundIndex = -1 - for (i = byteOffset; i < arrLength; i++) { - if (read(arr, i) === read(val, foundIndex === -1 ? 
0 : i - foundIndex)) { - if (foundIndex === -1) foundIndex = i - if (i - foundIndex + 1 === valLength) return foundIndex * indexSize - } else { - if (foundIndex !== -1) i -= i - foundIndex - foundIndex = -1 - } - } - } else { - if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength - for (i = byteOffset; i >= 0; i--) { - let found = true - for (let j = 0; j < valLength; j++) { - if (read(arr, i + j) !== read(val, j)) { - found = false - break - } - } - if (found) return i - } - } - - return -1 -} - -Buffer.prototype.includes = function includes (val, byteOffset, encoding) { - return this.indexOf(val, byteOffset, encoding) !== -1 -} - -Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) { - return bidirectionalIndexOf(this, val, byteOffset, encoding, true) -} - -Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) { - return bidirectionalIndexOf(this, val, byteOffset, encoding, false) -} - -function hexWrite (buf, string, offset, length) { - offset = Number(offset) || 0 - const remaining = buf.length - offset - if (!length) { - length = remaining - } else { - length = Number(length) - if (length > remaining) { - length = remaining - } - } - - const strLen = string.length - - if (length > strLen / 2) { - length = strLen / 2 - } - let i - for (i = 0; i < length; ++i) { - const parsed = parseInt(string.substr(i * 2, 2), 16) - if (numberIsNaN(parsed)) return i - buf[offset + i] = parsed - } - return i -} - -function utf8Write (buf, string, offset, length) { - return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length) -} - -function asciiWrite (buf, string, offset, length) { - return blitBuffer(asciiToBytes(string), buf, offset, length) -} - -function base64Write (buf, string, offset, length) { - return blitBuffer(base64ToBytes(string), buf, offset, length) -} - -function ucs2Write (buf, string, offset, length) { - return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length) -} - -Buffer.prototype.write = function write (string, offset, length, encoding) { - // Buffer#write(string) - if (offset === undefined) { - encoding = 'utf8' - length = this.length - offset = 0 - // Buffer#write(string, encoding) - } else if (length === undefined && typeof offset === 'string') { - encoding = offset - length = this.length - offset = 0 - // Buffer#write(string, offset[, length][, encoding]) - } else if (isFinite(offset)) { - offset = offset >>> 0 - if (isFinite(length)) { - length = length >>> 0 - if (encoding === undefined) encoding = 'utf8' - } else { - encoding = length - length = undefined - } - } else { - throw new Error( - 'Buffer.write(string, encoding, offset[, length]) is no longer supported' - ) - } - - const remaining = this.length - offset - if (length === undefined || length > remaining) length = remaining - - if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) { - throw new RangeError('Attempt to write outside buffer bounds') - } - - if (!encoding) encoding = 'utf8' - - let loweredCase = false - for (;;) { - switch (encoding) { - case 'hex': - return hexWrite(this, string, offset, length) - - case 'utf8': - case 'utf-8': - return utf8Write(this, string, offset, length) - - case 'ascii': - case 'latin1': - case 'binary': - return asciiWrite(this, string, offset, length) - - case 'base64': - // Warning: maxLength not taken into account in base64Write - return base64Write(this, string, offset, length) - - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 
'utf-16le': - return ucs2Write(this, string, offset, length) - - default: - if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) - encoding = ('' + encoding).toLowerCase() - loweredCase = true - } - } -} - -Buffer.prototype.toJSON = function toJSON () { - return { - type: 'Buffer', - data: Array.prototype.slice.call(this._arr || this, 0) - } -} - -function base64Slice (buf, start, end) { - if (start === 0 && end === buf.length) { - return base64.fromByteArray(buf) - } else { - return base64.fromByteArray(buf.slice(start, end)) - } -} - -function utf8Slice (buf, start, end) { - end = Math.min(buf.length, end) - const res = [] - - let i = start - while (i < end) { - const firstByte = buf[i] - let codePoint = null - let bytesPerSequence = (firstByte > 0xEF) - ? 4 - : (firstByte > 0xDF) - ? 3 - : (firstByte > 0xBF) - ? 2 - : 1 - - if (i + bytesPerSequence <= end) { - let secondByte, thirdByte, fourthByte, tempCodePoint - - switch (bytesPerSequence) { - case 1: - if (firstByte < 0x80) { - codePoint = firstByte - } - break - case 2: - secondByte = buf[i + 1] - if ((secondByte & 0xC0) === 0x80) { - tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F) - if (tempCodePoint > 0x7F) { - codePoint = tempCodePoint - } - } - break - case 3: - secondByte = buf[i + 1] - thirdByte = buf[i + 2] - if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) { - tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F) - if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) { - codePoint = tempCodePoint - } - } - break - case 4: - secondByte = buf[i + 1] - thirdByte = buf[i + 2] - fourthByte = buf[i + 3] - if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) { - tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F) - if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) { - codePoint = tempCodePoint - } - } - } - } - - if (codePoint === null) { - // we did not generate a valid codePoint so insert a - // replacement char (U+FFFD) and advance only 1 byte - codePoint = 0xFFFD - bytesPerSequence = 1 - } else if (codePoint > 0xFFFF) { - // encode to utf16 (surrogate pair dance) - codePoint -= 0x10000 - res.push(codePoint >>> 10 & 0x3FF | 0xD800) - codePoint = 0xDC00 | codePoint & 0x3FF - } - - res.push(codePoint) - i += bytesPerSequence - } - - return decodeCodePointsArray(res) -} - -// Based on http://stackoverflow.com/a/22747272/680742, the browser with -// the lowest limit is Chrome, with 0x10000 args. -// We go 1 magnitude less, for safety -const MAX_ARGUMENTS_LENGTH = 0x1000 - -function decodeCodePointsArray (codePoints) { - const len = codePoints.length - if (len <= MAX_ARGUMENTS_LENGTH) { - return String.fromCharCode.apply(String, codePoints) // avoid extra slice() - } - - // Decode in chunks to avoid "call stack size exceeded". 
- let res = '' - let i = 0 - while (i < len) { - res += String.fromCharCode.apply( - String, - codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH) - ) - } - return res -} - -function asciiSlice (buf, start, end) { - let ret = '' - end = Math.min(buf.length, end) - - for (let i = start; i < end; ++i) { - ret += String.fromCharCode(buf[i] & 0x7F) - } - return ret -} - -function latin1Slice (buf, start, end) { - let ret = '' - end = Math.min(buf.length, end) - - for (let i = start; i < end; ++i) { - ret += String.fromCharCode(buf[i]) - } - return ret -} - -function hexSlice (buf, start, end) { - const len = buf.length - - if (!start || start < 0) start = 0 - if (!end || end < 0 || end > len) end = len - - let out = '' - for (let i = start; i < end; ++i) { - out += hexSliceLookupTable[buf[i]] - } - return out -} - -function utf16leSlice (buf, start, end) { - const bytes = buf.slice(start, end) - let res = '' - // If bytes.length is odd, the last 8 bits must be ignored (same as node.js) - for (let i = 0; i < bytes.length - 1; i += 2) { - res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256)) - } - return res -} - -Buffer.prototype.slice = function slice (start, end) { - const len = this.length - start = ~~start - end = end === undefined ? len : ~~end - - if (start < 0) { - start += len - if (start < 0) start = 0 - } else if (start > len) { - start = len - } - - if (end < 0) { - end += len - if (end < 0) end = 0 - } else if (end > len) { - end = len - } - - if (end < start) end = start - - const newBuf = this.subarray(start, end) - // Return an augmented `Uint8Array` instance - Object.setPrototypeOf(newBuf, Buffer.prototype) - - return newBuf -} - -/* - * Need to make sure that buffer isn't trying to write out of bounds. - */ -function checkOffset (offset, ext, length) { - if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint') - if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length') -} - -Buffer.prototype.readUintLE = -Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) { - offset = offset >>> 0 - byteLength = byteLength >>> 0 - if (!noAssert) checkOffset(offset, byteLength, this.length) - - let val = this[offset] - let mul = 1 - let i = 0 - while (++i < byteLength && (mul *= 0x100)) { - val += this[offset + i] * mul - } - - return val -} - -Buffer.prototype.readUintBE = -Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) { - offset = offset >>> 0 - byteLength = byteLength >>> 0 - if (!noAssert) { - checkOffset(offset, byteLength, this.length) - } - - let val = this[offset + --byteLength] - let mul = 1 - while (byteLength > 0 && (mul *= 0x100)) { - val += this[offset + --byteLength] * mul - } - - return val -} - -Buffer.prototype.readUint8 = -Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 1, this.length) - return this[offset] -} - -Buffer.prototype.readUint16LE = -Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 2, this.length) - return this[offset] | (this[offset + 1] << 8) -} - -Buffer.prototype.readUint16BE = -Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 2, this.length) - return (this[offset] << 8) | this[offset + 1] -} - -Buffer.prototype.readUint32LE = -Buffer.prototype.readUInt32LE = function readUInt32LE (offset, 
noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 4, this.length) - - return ((this[offset]) | - (this[offset + 1] << 8) | - (this[offset + 2] << 16)) + - (this[offset + 3] * 0x1000000) -} - -Buffer.prototype.readUint32BE = -Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 4, this.length) - - return (this[offset] * 0x1000000) + - ((this[offset + 1] << 16) | - (this[offset + 2] << 8) | - this[offset + 3]) -} - -Buffer.prototype.readBigUInt64LE = defineBigIntMethod(function readBigUInt64LE (offset) { - offset = offset >>> 0 - validateNumber(offset, 'offset') - const first = this[offset] - const last = this[offset + 7] - if (first === undefined || last === undefined) { - boundsError(offset, this.length - 8) - } - - const lo = first + - this[++offset] * 2 ** 8 + - this[++offset] * 2 ** 16 + - this[++offset] * 2 ** 24 - - const hi = this[++offset] + - this[++offset] * 2 ** 8 + - this[++offset] * 2 ** 16 + - last * 2 ** 24 - - return BigInt(lo) + (BigInt(hi) << BigInt(32)) -}) - -Buffer.prototype.readBigUInt64BE = defineBigIntMethod(function readBigUInt64BE (offset) { - offset = offset >>> 0 - validateNumber(offset, 'offset') - const first = this[offset] - const last = this[offset + 7] - if (first === undefined || last === undefined) { - boundsError(offset, this.length - 8) - } - - const hi = first * 2 ** 24 + - this[++offset] * 2 ** 16 + - this[++offset] * 2 ** 8 + - this[++offset] - - const lo = this[++offset] * 2 ** 24 + - this[++offset] * 2 ** 16 + - this[++offset] * 2 ** 8 + - last - - return (BigInt(hi) << BigInt(32)) + BigInt(lo) -}) - -Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) { - offset = offset >>> 0 - byteLength = byteLength >>> 0 - if (!noAssert) checkOffset(offset, byteLength, this.length) - - let val = this[offset] - let mul = 1 - let i = 0 - while (++i < byteLength && (mul *= 0x100)) { - val += this[offset + i] * mul - } - mul *= 0x80 - - if (val >= mul) val -= Math.pow(2, 8 * byteLength) - - return val -} - -Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) { - offset = offset >>> 0 - byteLength = byteLength >>> 0 - if (!noAssert) checkOffset(offset, byteLength, this.length) - - let i = byteLength - let mul = 1 - let val = this[offset + --i] - while (i > 0 && (mul *= 0x100)) { - val += this[offset + --i] * mul - } - mul *= 0x80 - - if (val >= mul) val -= Math.pow(2, 8 * byteLength) - - return val -} - -Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 1, this.length) - if (!(this[offset] & 0x80)) return (this[offset]) - return ((0xff - this[offset] + 1) * -1) -} - -Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 2, this.length) - const val = this[offset] | (this[offset + 1] << 8) - return (val & 0x8000) ? val | 0xFFFF0000 : val -} - -Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 2, this.length) - const val = this[offset + 1] | (this[offset] << 8) - return (val & 0x8000) ? 
val | 0xFFFF0000 : val -} - -Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 4, this.length) - - return (this[offset]) | - (this[offset + 1] << 8) | - (this[offset + 2] << 16) | - (this[offset + 3] << 24) -} - -Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 4, this.length) - - return (this[offset] << 24) | - (this[offset + 1] << 16) | - (this[offset + 2] << 8) | - (this[offset + 3]) -} - -Buffer.prototype.readBigInt64LE = defineBigIntMethod(function readBigInt64LE (offset) { - offset = offset >>> 0 - validateNumber(offset, 'offset') - const first = this[offset] - const last = this[offset + 7] - if (first === undefined || last === undefined) { - boundsError(offset, this.length - 8) - } - - const val = this[offset + 4] + - this[offset + 5] * 2 ** 8 + - this[offset + 6] * 2 ** 16 + - (last << 24) // Overflow - - return (BigInt(val) << BigInt(32)) + - BigInt(first + - this[++offset] * 2 ** 8 + - this[++offset] * 2 ** 16 + - this[++offset] * 2 ** 24) -}) - -Buffer.prototype.readBigInt64BE = defineBigIntMethod(function readBigInt64BE (offset) { - offset = offset >>> 0 - validateNumber(offset, 'offset') - const first = this[offset] - const last = this[offset + 7] - if (first === undefined || last === undefined) { - boundsError(offset, this.length - 8) - } - - const val = (first << 24) + // Overflow - this[++offset] * 2 ** 16 + - this[++offset] * 2 ** 8 + - this[++offset] - - return (BigInt(val) << BigInt(32)) + - BigInt(this[++offset] * 2 ** 24 + - this[++offset] * 2 ** 16 + - this[++offset] * 2 ** 8 + - last) -}) - -Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 4, this.length) - return ieee754.read(this, offset, true, 23, 4) -} - -Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 4, this.length) - return ieee754.read(this, offset, false, 23, 4) -} - -Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 8, this.length) - return ieee754.read(this, offset, true, 52, 8) -} - -Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) { - offset = offset >>> 0 - if (!noAssert) checkOffset(offset, 8, this.length) - return ieee754.read(this, offset, false, 52, 8) -} - -function checkInt (buf, value, offset, ext, max, min) { - if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance') - if (value > max || value < min) throw new RangeError('"value" argument is out of bounds') - if (offset + ext > buf.length) throw new RangeError('Index out of range') -} - -Buffer.prototype.writeUintLE = -Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) { - value = +value - offset = offset >>> 0 - byteLength = byteLength >>> 0 - if (!noAssert) { - const maxBytes = Math.pow(2, 8 * byteLength) - 1 - checkInt(this, value, offset, byteLength, maxBytes, 0) - } - - let mul = 1 - let i = 0 - this[offset] = value & 0xFF - while (++i < byteLength && (mul *= 0x100)) { - this[offset + i] = (value / mul) & 0xFF - } - - return offset + byteLength -} - -Buffer.prototype.writeUintBE = -Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) { - value = +value - offset = offset >>> 0 - byteLength 
= byteLength >>> 0 - if (!noAssert) { - const maxBytes = Math.pow(2, 8 * byteLength) - 1 - checkInt(this, value, offset, byteLength, maxBytes, 0) - } - - let i = byteLength - 1 - let mul = 1 - this[offset + i] = value & 0xFF - while (--i >= 0 && (mul *= 0x100)) { - this[offset + i] = (value / mul) & 0xFF - } - - return offset + byteLength -} - -Buffer.prototype.writeUint8 = -Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0) - this[offset] = (value & 0xff) - return offset + 1 -} - -Buffer.prototype.writeUint16LE = -Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) - this[offset] = (value & 0xff) - this[offset + 1] = (value >>> 8) - return offset + 2 -} - -Buffer.prototype.writeUint16BE = -Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) - this[offset] = (value >>> 8) - this[offset + 1] = (value & 0xff) - return offset + 2 -} - -Buffer.prototype.writeUint32LE = -Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) - this[offset + 3] = (value >>> 24) - this[offset + 2] = (value >>> 16) - this[offset + 1] = (value >>> 8) - this[offset] = (value & 0xff) - return offset + 4 -} - -Buffer.prototype.writeUint32BE = -Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) - this[offset] = (value >>> 24) - this[offset + 1] = (value >>> 16) - this[offset + 2] = (value >>> 8) - this[offset + 3] = (value & 0xff) - return offset + 4 -} - -function wrtBigUInt64LE (buf, value, offset, min, max) { - checkIntBI(value, min, max, buf, offset, 7) - - let lo = Number(value & BigInt(0xffffffff)) - buf[offset++] = lo - lo = lo >> 8 - buf[offset++] = lo - lo = lo >> 8 - buf[offset++] = lo - lo = lo >> 8 - buf[offset++] = lo - let hi = Number(value >> BigInt(32) & BigInt(0xffffffff)) - buf[offset++] = hi - hi = hi >> 8 - buf[offset++] = hi - hi = hi >> 8 - buf[offset++] = hi - hi = hi >> 8 - buf[offset++] = hi - return offset -} - -function wrtBigUInt64BE (buf, value, offset, min, max) { - checkIntBI(value, min, max, buf, offset, 7) - - let lo = Number(value & BigInt(0xffffffff)) - buf[offset + 7] = lo - lo = lo >> 8 - buf[offset + 6] = lo - lo = lo >> 8 - buf[offset + 5] = lo - lo = lo >> 8 - buf[offset + 4] = lo - let hi = Number(value >> BigInt(32) & BigInt(0xffffffff)) - buf[offset + 3] = hi - hi = hi >> 8 - buf[offset + 2] = hi - hi = hi >> 8 - buf[offset + 1] = hi - hi = hi >> 8 - buf[offset] = hi - return offset + 8 -} - -Buffer.prototype.writeBigUInt64LE = defineBigIntMethod(function writeBigUInt64LE (value, offset = 0) { - return wrtBigUInt64LE(this, value, offset, BigInt(0), BigInt('0xffffffffffffffff')) -}) - -Buffer.prototype.writeBigUInt64BE = defineBigIntMethod(function writeBigUInt64BE (value, offset = 0) { - return wrtBigUInt64BE(this, value, offset, BigInt(0), BigInt('0xffffffffffffffff')) -}) - -Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) { - value = +value - offset = offset >>> 0 - if 
(!noAssert) { - const limit = Math.pow(2, (8 * byteLength) - 1) - - checkInt(this, value, offset, byteLength, limit - 1, -limit) - } - - let i = 0 - let mul = 1 - let sub = 0 - this[offset] = value & 0xFF - while (++i < byteLength && (mul *= 0x100)) { - if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) { - sub = 1 - } - this[offset + i] = ((value / mul) >> 0) - sub & 0xFF - } - - return offset + byteLength -} - -Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) { - const limit = Math.pow(2, (8 * byteLength) - 1) - - checkInt(this, value, offset, byteLength, limit - 1, -limit) - } - - let i = byteLength - 1 - let mul = 1 - let sub = 0 - this[offset + i] = value & 0xFF - while (--i >= 0 && (mul *= 0x100)) { - if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) { - sub = 1 - } - this[offset + i] = ((value / mul) >> 0) - sub & 0xFF - } - - return offset + byteLength -} - -Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80) - if (value < 0) value = 0xff + value + 1 - this[offset] = (value & 0xff) - return offset + 1 -} - -Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) - this[offset] = (value & 0xff) - this[offset + 1] = (value >>> 8) - return offset + 2 -} - -Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) - this[offset] = (value >>> 8) - this[offset + 1] = (value & 0xff) - return offset + 2 -} - -Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) - this[offset] = (value & 0xff) - this[offset + 1] = (value >>> 8) - this[offset + 2] = (value >>> 16) - this[offset + 3] = (value >>> 24) - return offset + 4 -} - -Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) - if (value < 0) value = 0xffffffff + value + 1 - this[offset] = (value >>> 24) - this[offset + 1] = (value >>> 16) - this[offset + 2] = (value >>> 8) - this[offset + 3] = (value & 0xff) - return offset + 4 -} - -Buffer.prototype.writeBigInt64LE = defineBigIntMethod(function writeBigInt64LE (value, offset = 0) { - return wrtBigUInt64LE(this, value, offset, -BigInt('0x8000000000000000'), BigInt('0x7fffffffffffffff')) -}) - -Buffer.prototype.writeBigInt64BE = defineBigIntMethod(function writeBigInt64BE (value, offset = 0) { - return wrtBigUInt64BE(this, value, offset, -BigInt('0x8000000000000000'), BigInt('0x7fffffffffffffff')) -}) - -function checkIEEE754 (buf, value, offset, ext, max, min) { - if (offset + ext > buf.length) throw new RangeError('Index out of range') - if (offset < 0) throw new RangeError('Index out of range') -} - -function writeFloat (buf, value, offset, littleEndian, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) { - checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38) - } - ieee754.write(buf, value, offset, littleEndian, 23, 4) - return offset + 4 -} - 
-Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) { - return writeFloat(this, value, offset, true, noAssert) -} - -Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) { - return writeFloat(this, value, offset, false, noAssert) -} - -function writeDouble (buf, value, offset, littleEndian, noAssert) { - value = +value - offset = offset >>> 0 - if (!noAssert) { - checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308) - } - ieee754.write(buf, value, offset, littleEndian, 52, 8) - return offset + 8 -} - -Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) { - return writeDouble(this, value, offset, true, noAssert) -} - -Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) { - return writeDouble(this, value, offset, false, noAssert) -} - -// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length) -Buffer.prototype.copy = function copy (target, targetStart, start, end) { - if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer') - if (!start) start = 0 - if (!end && end !== 0) end = this.length - if (targetStart >= target.length) targetStart = target.length - if (!targetStart) targetStart = 0 - if (end > 0 && end < start) end = start - - // Copy 0 bytes; we're done - if (end === start) return 0 - if (target.length === 0 || this.length === 0) return 0 - - // Fatal error conditions - if (targetStart < 0) { - throw new RangeError('targetStart out of bounds') - } - if (start < 0 || start >= this.length) throw new RangeError('Index out of range') - if (end < 0) throw new RangeError('sourceEnd out of bounds') - - // Are we oob? - if (end > this.length) end = this.length - if (target.length - targetStart < end - start) { - end = target.length - targetStart + start - } - - const len = end - start - - if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') { - // Use built-in when available, missing from IE11 - this.copyWithin(targetStart, start, end) - } else { - Uint8Array.prototype.set.call( - target, - this.subarray(start, end), - targetStart - ) - } - - return len -} - -// Usage: -// buffer.fill(number[, offset[, end]]) -// buffer.fill(buffer[, offset[, end]]) -// buffer.fill(string[, offset[, end]][, encoding]) -Buffer.prototype.fill = function fill (val, start, end, encoding) { - // Handle string cases: - if (typeof val === 'string') { - if (typeof start === 'string') { - encoding = start - start = 0 - end = this.length - } else if (typeof end === 'string') { - encoding = end - end = this.length - } - if (encoding !== undefined && typeof encoding !== 'string') { - throw new TypeError('encoding must be a string') - } - if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) { - throw new TypeError('Unknown encoding: ' + encoding) - } - if (val.length === 1) { - const code = val.charCodeAt(0) - if ((encoding === 'utf8' && code < 128) || - encoding === 'latin1') { - // Fast path: If `val` fits into a single byte, use that numeric value. - val = code - } - } - } else if (typeof val === 'number') { - val = val & 255 - } else if (typeof val === 'boolean') { - val = Number(val) - } - - // Invalid ranges are not set to a default, so can range check early. - if (start < 0 || this.length < start || this.length < end) { - throw new RangeError('Out of range index') - } - - if (end <= start) { - return this - } - - start = start >>> 0 - end = end === undefined ? 
this.length : end >>> 0 - - if (!val) val = 0 - - let i - if (typeof val === 'number') { - for (i = start; i < end; ++i) { - this[i] = val - } - } else { - const bytes = Buffer.isBuffer(val) - ? val - : Buffer.from(val, encoding) - const len = bytes.length - if (len === 0) { - throw new TypeError('The value "' + val + - '" is invalid for argument "value"') - } - for (i = 0; i < end - start; ++i) { - this[i + start] = bytes[i % len] - } - } - - return this -} - -// CUSTOM ERRORS -// ============= - -// Simplified versions from Node, changed for Buffer-only usage -const errors = {} -function E (sym, getMessage, Base) { - errors[sym] = class NodeError extends Base { - constructor () { - super() - - Object.defineProperty(this, 'message', { - value: getMessage.apply(this, arguments), - writable: true, - configurable: true - }) - - // Add the error code to the name to include it in the stack trace. - this.name = `${this.name} [${sym}]` - // Access the stack to generate the error message including the error code - // from the name. - this.stack // eslint-disable-line no-unused-expressions - // Reset the name to the actual name. - delete this.name - } - - get code () { - return sym - } - - set code (value) { - Object.defineProperty(this, 'code', { - configurable: true, - enumerable: true, - value, - writable: true - }) - } - - toString () { - return `${this.name} [${sym}]: ${this.message}` - } - } -} - -E('ERR_BUFFER_OUT_OF_BOUNDS', - function (name) { - if (name) { - return `${name} is outside of buffer bounds` - } - - return 'Attempt to access memory outside buffer bounds' - }, RangeError) -E('ERR_INVALID_ARG_TYPE', - function (name, actual) { - return `The "${name}" argument must be of type number. Received type ${typeof actual}` - }, TypeError) -E('ERR_OUT_OF_RANGE', - function (str, range, input) { - let msg = `The value of "${str}" is out of range.` - let received = input - if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) { - received = addNumericalSeparator(String(input)) - } else if (typeof input === 'bigint') { - received = String(input) - if (input > BigInt(2) ** BigInt(32) || input < -(BigInt(2) ** BigInt(32))) { - received = addNumericalSeparator(received) - } - received += 'n' - } - msg += ` It must be ${range}. Received ${received}` - return msg - }, RangeError) - -function addNumericalSeparator (val) { - let res = '' - let i = val.length - const start = val[0] === '-' ? 1 : 0 - for (; i >= start + 4; i -= 3) { - res = `_${val.slice(i - 3, i)}${res}` - } - return `${val.slice(0, i)}${res}` -} - -// CHECK FUNCTIONS -// =============== - -function checkBounds (buf, offset, byteLength) { - validateNumber(offset, 'offset') - if (buf[offset] === undefined || buf[offset + byteLength] === undefined) { - boundsError(offset, buf.length - (byteLength + 1)) - } -} - -function checkIntBI (value, min, max, buf, offset, byteLength) { - if (value > max || value < min) { - const n = typeof min === 'bigint' ? 
'n' : '' - let range - if (byteLength > 3) { - if (min === 0 || min === BigInt(0)) { - range = `>= 0${n} and < 2${n} ** ${(byteLength + 1) * 8}${n}` - } else { - range = `>= -(2${n} ** ${(byteLength + 1) * 8 - 1}${n}) and < 2 ** ` + - `${(byteLength + 1) * 8 - 1}${n}` - } - } else { - range = `>= ${min}${n} and <= ${max}${n}` - } - throw new errors.ERR_OUT_OF_RANGE('value', range, value) - } - checkBounds(buf, offset, byteLength) -} - -function validateNumber (value, name) { - if (typeof value !== 'number') { - throw new errors.ERR_INVALID_ARG_TYPE(name, 'number', value) - } -} - -function boundsError (value, length, type) { - if (Math.floor(value) !== value) { - validateNumber(value, type) - throw new errors.ERR_OUT_OF_RANGE(type || 'offset', 'an integer', value) - } - - if (length < 0) { - throw new errors.ERR_BUFFER_OUT_OF_BOUNDS() - } - - throw new errors.ERR_OUT_OF_RANGE(type || 'offset', - `>= ${type ? 1 : 0} and <= ${length}`, - value) -} - -// HELPER FUNCTIONS -// ================ - -const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g - -function base64clean (str) { - // Node takes equal signs as end of the Base64 encoding - str = str.split('=')[0] - // Node strips out invalid characters like \n and \t from the string, base64-js does not - str = str.trim().replace(INVALID_BASE64_RE, '') - // Node converts strings with length < 2 to '' - if (str.length < 2) return '' - // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not - while (str.length % 4 !== 0) { - str = str + '=' - } - return str -} - -function utf8ToBytes (string, units) { - units = units || Infinity - let codePoint - const length = string.length - let leadSurrogate = null - const bytes = [] - - for (let i = 0; i < length; ++i) { - codePoint = string.charCodeAt(i) - - // is surrogate component - if (codePoint > 0xD7FF && codePoint < 0xE000) { - // last char was a lead - if (!leadSurrogate) { - // no lead yet - if (codePoint > 0xDBFF) { - // unexpected trail - if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) - continue - } else if (i + 1 === length) { - // unpaired lead - if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) - continue - } - - // valid lead - leadSurrogate = codePoint - - continue - } - - // 2 leads in a row - if (codePoint < 0xDC00) { - if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) - leadSurrogate = codePoint - continue - } - - // valid surrogate pair - codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000 - } else if (leadSurrogate) { - // valid bmp char, but last char was a lead - if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) - } - - leadSurrogate = null - - // encode utf8 - if (codePoint < 0x80) { - if ((units -= 1) < 0) break - bytes.push(codePoint) - } else if (codePoint < 0x800) { - if ((units -= 2) < 0) break - bytes.push( - codePoint >> 0x6 | 0xC0, - codePoint & 0x3F | 0x80 - ) - } else if (codePoint < 0x10000) { - if ((units -= 3) < 0) break - bytes.push( - codePoint >> 0xC | 0xE0, - codePoint >> 0x6 & 0x3F | 0x80, - codePoint & 0x3F | 0x80 - ) - } else if (codePoint < 0x110000) { - if ((units -= 4) < 0) break - bytes.push( - codePoint >> 0x12 | 0xF0, - codePoint >> 0xC & 0x3F | 0x80, - codePoint >> 0x6 & 0x3F | 0x80, - codePoint & 0x3F | 0x80 - ) - } else { - throw new Error('Invalid code point') - } - } - - return bytes -} - -function asciiToBytes (str) { - const byteArray = [] - for (let i = 0; i < str.length; ++i) { - // Node's code seems to be doing this and not & 0x7F.. 
- byteArray.push(str.charCodeAt(i) & 0xFF) - } - return byteArray -} - -function utf16leToBytes (str, units) { - let c, hi, lo - const byteArray = [] - for (let i = 0; i < str.length; ++i) { - if ((units -= 2) < 0) break - - c = str.charCodeAt(i) - hi = c >> 8 - lo = c % 256 - byteArray.push(lo) - byteArray.push(hi) - } - - return byteArray -} - -function base64ToBytes (str) { - return base64.toByteArray(base64clean(str)) -} - -function blitBuffer (src, dst, offset, length) { - let i - for (i = 0; i < length; ++i) { - if ((i + offset >= dst.length) || (i >= src.length)) break - dst[i + offset] = src[i] - } - return i -} - -// ArrayBuffer or Uint8Array objects from other contexts (i.e. iframes) do not pass -// the `instanceof` check but they should be treated as of that type. -// See: https://github.com/feross/buffer/issues/166 -function isInstance (obj, type) { - return obj instanceof type || - (obj != null && obj.constructor != null && obj.constructor.name != null && - obj.constructor.name === type.name) -} -function numberIsNaN (obj) { - // For IE11 support - return obj !== obj // eslint-disable-line no-self-compare -} - -// Create lookup table for `toString('hex')` -// See: https://github.com/feross/buffer/issues/219 -const hexSliceLookupTable = (function () { - const alphabet = '0123456789abcdef' - const table = new Array(256) - for (let i = 0; i < 16; ++i) { - const i16 = i * 16 - for (let j = 0; j < 16; ++j) { - table[i16 + j] = alphabet[i] + alphabet[j] - } - } - return table -})() - -// Return not function with Error if BigInt not supported -function defineBigIntMethod (fn) { - return typeof BigInt === 'undefined' ? BufferBigIntNotDefined : fn -} - -function BufferBigIntNotDefined () { - throw new Error('BigInt not supported') -} diff --git a/deps/npm/node_modules/buffer/package.json b/deps/npm/node_modules/buffer/package.json deleted file mode 100644 index ca1ad9a7078842..00000000000000 --- a/deps/npm/node_modules/buffer/package.json +++ /dev/null @@ -1,93 +0,0 @@ -{ - "name": "buffer", - "description": "Node.js Buffer API, for the browser", - "version": "6.0.3", - "author": { - "name": "Feross Aboukhadijeh", - "email": "feross@feross.org", - "url": "https://feross.org" - }, - "bugs": { - "url": "https://github.com/feross/buffer/issues" - }, - "contributors": [ - "Romain Beauxis ", - "James Halliday " - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - }, - "devDependencies": { - "airtap": "^3.0.0", - "benchmark": "^2.1.4", - "browserify": "^17.0.0", - "concat-stream": "^2.0.0", - "hyperquest": "^2.1.3", - "is-buffer": "^2.0.5", - "is-nan": "^1.3.0", - "split": "^1.0.1", - "standard": "*", - "tape": "^5.0.1", - "through2": "^4.0.2", - "uglify-js": "^3.11.5" - }, - "homepage": "https://github.com/feross/buffer", - "jspm": { - "map": { - "./index.js": { - "node": "@node/buffer" - } - } - }, - "keywords": [ - "arraybuffer", - "browser", - "browserify", - "buffer", - "compatible", - "dataview", - "uint8array" - ], - "license": "MIT", - "main": "index.js", - "types": "index.d.ts", - "repository": { - "type": "git", - "url": "git://github.com/feross/buffer.git" - }, - "scripts": { - "perf": "browserify --debug perf/bracket-notation.js > perf/bundle.js && open perf/index.html", - "perf-node": "node perf/bracket-notation.js && node perf/concat.js && node perf/copy-big.js && node perf/copy.js && node perf/new-big.js && node perf/new.js && node perf/readDoubleBE.js && node perf/readFloatBE.js && node perf/readUInt32LE.js && node perf/slice.js && node 
perf/writeFloatBE.js", - "size": "browserify -r ./ | uglifyjs -c -m | gzip | wc -c", - "test": "standard && node ./bin/test.js", - "test-browser-old": "airtap -- test/*.js", - "test-browser-old-local": "airtap --local -- test/*.js", - "test-browser-new": "airtap -- test/*.js test/node/*.js", - "test-browser-new-local": "airtap --local -- test/*.js test/node/*.js", - "test-node": "tape test/*.js test/node/*.js", - "update-authors": "./bin/update-authors.sh" - }, - "standard": { - "ignore": [ - "test/node/**/*.js", - "test/common.js", - "test/_polyfill.js", - "perf/**/*.js" - ] - }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] -} diff --git a/deps/npm/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/cacache/lib/content/read.js index f41b539df65dce..a1fa8a08cc0f93 100644 --- a/deps/npm/node_modules/cacache/lib/content/read.js +++ b/deps/npm/node_modules/cacache/lib/content/read.js @@ -13,18 +13,20 @@ async function read (cache, integrity, opts = {}) { const { size } = opts const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { // get size - const stat = await fs.stat(cpath) + const stat = size ? { size } : await fs.stat(cpath) return { stat, cpath, sri } }) - if (typeof size === 'number' && stat.size !== size) { - throw sizeError(size, stat.size) - } if (stat.size > MAX_SINGLE_READ_SIZE) { return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() } const data = await fs.readFile(cpath, { encoding: null }) + + if (stat.size !== data.length) { + throw sizeError(stat.size, data.length) + } + if (!ssri.checkData(data, sri)) { throw integrityError(sri, cpath) } @@ -55,13 +57,10 @@ function readStream (cache, integrity, opts = {}) { // Set all this up to run on the stream and then just return the stream Promise.resolve().then(async () => { const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // just stat to ensure it exists - const stat = await fs.stat(cpath) + // get size + const stat = size ? 
{ size } : await fs.stat(cpath) return { stat, cpath, sri } }) - if (typeof size === 'number' && size !== stat.size) { - return stream.emit('error', sizeError(size, stat.size)) - } return readPipeline(cpath, stat.size, sri, stream) }).catch(err => stream.emit('error', err)) diff --git a/deps/npm/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/cacache/lib/content/write.js index 71461465812878..09ca4e4e5a4d3f 100644 --- a/deps/npm/node_modules/cacache/lib/content/write.js +++ b/deps/npm/node_modules/cacache/lib/content/write.js @@ -67,6 +67,7 @@ class CacacheWriteStream extends Flush { this.cache, this.opts ) + this.handleContentP.catch(error => this.emit('error', error)) } return this.inputStream.write(chunk, encoding, cb) } diff --git a/deps/npm/node_modules/cacache/package.json b/deps/npm/node_modules/cacache/package.json index 1b14bf4bd14904..3f87af3e7dbcee 100644 --- a/deps/npm/node_modules/cacache/package.json +++ b/deps/npm/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "18.0.0", + "version": "18.0.2", "cache-version": { "content": "2", "index": "5" @@ -16,7 +16,7 @@ "snap": "tap", "coverage": "tap", "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "npmclilint": "npmcli-lint", "lintfix": "npm run lint -- --fix", "postsnap": "npm run lintfix --", @@ -50,7 +50,7 @@ "glob": "^10.2.2", "lru-cache": "^10.0.1", "minipass": "^7.0.3", - "minipass-collect": "^1.0.2", + "minipass-collect": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "p-map": "^4.0.0", @@ -60,7 +60,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.18.0", + "@npmcli/template-oss": "4.21.3", "tap": "^16.0.0" }, "engines": { @@ -69,14 +69,8 @@ "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "windowsCI": false, - "version": "4.18.0", - "publish": "true", - "ciVersions": [ - "16.14.0", - "16.x", - "18.0.0", - "18.x" - ] + "version": "4.21.3", + "publish": "true" }, "author": "GitHub Inc.", "tap": { diff --git a/deps/npm/node_modules/delegates/History.md b/deps/npm/node_modules/delegates/History.md deleted file mode 100644 index 25959eab67b840..00000000000000 --- a/deps/npm/node_modules/delegates/History.md +++ /dev/null @@ -1,22 +0,0 @@ - -1.0.0 / 2015-12-14 -================== - - * Merge pull request #12 from kasicka/master - * Add license text - -0.1.0 / 2014-10-17 -================== - - * adds `.fluent()` to api - -0.0.3 / 2014-01-13 -================== - - * fix receiver for .method() - -0.0.2 / 2014-01-13 -================== - - * Object.defineProperty() sucks - * Initial commit diff --git a/deps/npm/node_modules/delegates/License b/deps/npm/node_modules/delegates/License deleted file mode 100644 index 60de60addbe7e9..00000000000000 --- a/deps/npm/node_modules/delegates/License +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2015 TJ Holowaychuk - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/delegates/Makefile b/deps/npm/node_modules/delegates/Makefile deleted file mode 100644 index a9dcfd50dbdb22..00000000000000 --- a/deps/npm/node_modules/delegates/Makefile +++ /dev/null @@ -1,8 +0,0 @@ - -test: - @./node_modules/.bin/mocha \ - --require should \ - --reporter spec \ - --bail - -.PHONY: test \ No newline at end of file diff --git a/deps/npm/node_modules/delegates/index.js b/deps/npm/node_modules/delegates/index.js deleted file mode 100644 index 17c222d52935c6..00000000000000 --- a/deps/npm/node_modules/delegates/index.js +++ /dev/null @@ -1,121 +0,0 @@ - -/** - * Expose `Delegator`. - */ - -module.exports = Delegator; - -/** - * Initialize a delegator. - * - * @param {Object} proto - * @param {String} target - * @api public - */ - -function Delegator(proto, target) { - if (!(this instanceof Delegator)) return new Delegator(proto, target); - this.proto = proto; - this.target = target; - this.methods = []; - this.getters = []; - this.setters = []; - this.fluents = []; -} - -/** - * Delegate method `name`. 
- * - * @param {String} name - * @return {Delegator} self - * @api public - */ - -Delegator.prototype.method = function(name){ - var proto = this.proto; - var target = this.target; - this.methods.push(name); - - proto[name] = function(){ - return this[target][name].apply(this[target], arguments); - }; - - return this; -}; - -/** - * Delegator accessor `name`. - * - * @param {String} name - * @return {Delegator} self - * @api public - */ - -Delegator.prototype.access = function(name){ - return this.getter(name).setter(name); -}; - -/** - * Delegator getter `name`. - * - * @param {String} name - * @return {Delegator} self - * @api public - */ - -Delegator.prototype.getter = function(name){ - var proto = this.proto; - var target = this.target; - this.getters.push(name); - - proto.__defineGetter__(name, function(){ - return this[target][name]; - }); - - return this; -}; - -/** - * Delegator setter `name`. - * - * @param {String} name - * @return {Delegator} self - * @api public - */ - -Delegator.prototype.setter = function(name){ - var proto = this.proto; - var target = this.target; - this.setters.push(name); - - proto.__defineSetter__(name, function(val){ - return this[target][name] = val; - }); - - return this; -}; - -/** - * Delegator fluent accessor - * - * @param {String} name - * @return {Delegator} self - * @api public - */ - -Delegator.prototype.fluent = function (name) { - var proto = this.proto; - var target = this.target; - this.fluents.push(name); - - proto[name] = function(val){ - if ('undefined' != typeof val) { - this[target][name] = val; - return this; - } else { - return this[target][name]; - } - }; - - return this; -}; diff --git a/deps/npm/node_modules/delegates/package.json b/deps/npm/node_modules/delegates/package.json deleted file mode 100644 index 17240384fd43b4..00000000000000 --- a/deps/npm/node_modules/delegates/package.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "delegates", - "version": "1.0.0", - "repository": "visionmedia/node-delegates", - "description": "delegate methods and accessors to another property", - "keywords": ["delegate", "delegation"], - "dependencies": {}, - "devDependencies": { - "mocha": "*", - "should": "*" - }, - "license": "MIT" -} diff --git a/deps/npm/node_modules/delegates/test/index.js b/deps/npm/node_modules/delegates/test/index.js deleted file mode 100644 index 7b6e3d4df19d90..00000000000000 --- a/deps/npm/node_modules/delegates/test/index.js +++ /dev/null @@ -1,94 +0,0 @@ - -var assert = require('assert'); -var delegate = require('..'); - -describe('.method(name)', function(){ - it('should delegate methods', function(){ - var obj = {}; - - obj.request = { - foo: function(bar){ - assert(this == obj.request); - return bar; - } - }; - - delegate(obj, 'request').method('foo'); - - obj.foo('something').should.equal('something'); - }) -}) - -describe('.getter(name)', function(){ - it('should delegate getters', function(){ - var obj = {}; - - obj.request = { - get type() { - return 'text/html'; - } - } - - delegate(obj, 'request').getter('type'); - - obj.type.should.equal('text/html'); - }) -}) - -describe('.setter(name)', function(){ - it('should delegate setters', function(){ - var obj = {}; - - obj.request = { - get type() { - return this._type.toUpperCase(); - }, - - set type(val) { - this._type = val; - } - } - - delegate(obj, 'request').setter('type'); - - obj.type = 'hey'; - obj.request.type.should.equal('HEY'); - }) -}) - -describe('.access(name)', function(){ - it('should delegate getters and setters', function(){ - var obj = {}; 
- - obj.request = { - get type() { - return this._type.toUpperCase(); - }, - - set type(val) { - this._type = val; - } - } - - delegate(obj, 'request').access('type'); - - obj.type = 'hey'; - obj.type.should.equal('HEY'); - }) -}) - -describe('.fluent(name)', function () { - it('should delegate in a fluent fashion', function () { - var obj = { - settings: { - env: 'development' - } - }; - - delegate(obj, 'settings').fluent('env'); - - obj.env().should.equal('development'); - obj.env('production').should.equal(obj); - obj.settings.env.should.equal('production'); - }) -}) diff --git a/deps/npm/node_modules/event-target-shim/LICENSE b/deps/npm/node_modules/event-target-shim/LICENSE deleted file mode 100644 index b71bf4e29d62c5..00000000000000 --- a/deps/npm/node_modules/event-target-shim/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Toru Nagashima - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/deps/npm/node_modules/event-target-shim/dist/event-target-shim.js b/deps/npm/node_modules/event-target-shim/dist/event-target-shim.js deleted file mode 100644 index 53ce22036e35ef..00000000000000 --- a/deps/npm/node_modules/event-target-shim/dist/event-target-shim.js +++ /dev/null @@ -1,871 +0,0 @@ -/** - * @author Toru Nagashima - * @copyright 2015 Toru Nagashima. All rights reserved. - * See LICENSE file in root directory for full license. - */ -'use strict'; - -Object.defineProperty(exports, '__esModule', { value: true }); - -/** - * @typedef {object} PrivateData - * @property {EventTarget} eventTarget The event target. - * @property {{type:string}} event The original event object. - * @property {number} eventPhase The current event phase. - * @property {EventTarget|null} currentTarget The current event target. - * @property {boolean} canceled The flag to prevent default. - * @property {boolean} stopped The flag to stop propagation. - * @property {boolean} immediateStopped The flag to stop propagation immediately. - * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. - * @property {number} timeStamp The unix time. - * @private - */ - -/** - * Private data for event wrappers. - * @type {WeakMap} - * @private - */ -const privateData = new WeakMap(); - -/** - * Cache for wrapper classes. - * @type {WeakMap} - * @private - */ -const wrappers = new WeakMap(); - -/** - * Get private data. - * @param {Event} event The event object to get private data. - * @returns {PrivateData} The private data of the event. 
- * @private - */ -function pd(event) { - const retv = privateData.get(event); - console.assert( - retv != null, - "'this' is expected an Event object, but got", - event - ); - return retv -} - -/** - * https://dom.spec.whatwg.org/#set-the-canceled-flag - * @param data {PrivateData} private data. - */ -function setCancelFlag(data) { - if (data.passiveListener != null) { - if ( - typeof console !== "undefined" && - typeof console.error === "function" - ) { - console.error( - "Unable to preventDefault inside passive event listener invocation.", - data.passiveListener - ); - } - return - } - if (!data.event.cancelable) { - return - } - - data.canceled = true; - if (typeof data.event.preventDefault === "function") { - data.event.preventDefault(); - } -} - -/** - * @see https://dom.spec.whatwg.org/#interface-event - * @private - */ -/** - * The event wrapper. - * @constructor - * @param {EventTarget} eventTarget The event target of this dispatching. - * @param {Event|{type:string}} event The original event to wrap. - */ -function Event(eventTarget, event) { - privateData.set(this, { - eventTarget, - event, - eventPhase: 2, - currentTarget: eventTarget, - canceled: false, - stopped: false, - immediateStopped: false, - passiveListener: null, - timeStamp: event.timeStamp || Date.now(), - }); - - // https://heycam.github.io/webidl/#Unforgeable - Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); - - // Define accessors - const keys = Object.keys(event); - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; - if (!(key in this)) { - Object.defineProperty(this, key, defineRedirectDescriptor(key)); - } - } -} - -// Should be enumerable, but class methods are not enumerable. -Event.prototype = { - /** - * The type of this event. - * @type {string} - */ - get type() { - return pd(this).event.type - }, - - /** - * The target of this event. - * @type {EventTarget} - */ - get target() { - return pd(this).eventTarget - }, - - /** - * The target of this event. - * @type {EventTarget} - */ - get currentTarget() { - return pd(this).currentTarget - }, - - /** - * @returns {EventTarget[]} The composed path of this event. - */ - composedPath() { - const currentTarget = pd(this).currentTarget; - if (currentTarget == null) { - return [] - } - return [currentTarget] - }, - - /** - * Constant of NONE. - * @type {number} - */ - get NONE() { - return 0 - }, - - /** - * Constant of CAPTURING_PHASE. - * @type {number} - */ - get CAPTURING_PHASE() { - return 1 - }, - - /** - * Constant of AT_TARGET. - * @type {number} - */ - get AT_TARGET() { - return 2 - }, - - /** - * Constant of BUBBLING_PHASE. - * @type {number} - */ - get BUBBLING_PHASE() { - return 3 - }, - - /** - * The target of this event. - * @type {number} - */ - get eventPhase() { - return pd(this).eventPhase - }, - - /** - * Stop event bubbling. - * @returns {void} - */ - stopPropagation() { - const data = pd(this); - - data.stopped = true; - if (typeof data.event.stopPropagation === "function") { - data.event.stopPropagation(); - } - }, - - /** - * Stop event bubbling. - * @returns {void} - */ - stopImmediatePropagation() { - const data = pd(this); - - data.stopped = true; - data.immediateStopped = true; - if (typeof data.event.stopImmediatePropagation === "function") { - data.event.stopImmediatePropagation(); - } - }, - - /** - * The flag to be bubbling. - * @type {boolean} - */ - get bubbles() { - return Boolean(pd(this).event.bubbles) - }, - - /** - * The flag to be cancelable. 
- * @type {boolean} - */ - get cancelable() { - return Boolean(pd(this).event.cancelable) - }, - - /** - * Cancel this event. - * @returns {void} - */ - preventDefault() { - setCancelFlag(pd(this)); - }, - - /** - * The flag to indicate cancellation state. - * @type {boolean} - */ - get defaultPrevented() { - return pd(this).canceled - }, - - /** - * The flag to be composed. - * @type {boolean} - */ - get composed() { - return Boolean(pd(this).event.composed) - }, - - /** - * The unix time of this event. - * @type {number} - */ - get timeStamp() { - return pd(this).timeStamp - }, - - /** - * The target of this event. - * @type {EventTarget} - * @deprecated - */ - get srcElement() { - return pd(this).eventTarget - }, - - /** - * The flag to stop event bubbling. - * @type {boolean} - * @deprecated - */ - get cancelBubble() { - return pd(this).stopped - }, - set cancelBubble(value) { - if (!value) { - return - } - const data = pd(this); - - data.stopped = true; - if (typeof data.event.cancelBubble === "boolean") { - data.event.cancelBubble = true; - } - }, - - /** - * The flag to indicate cancellation state. - * @type {boolean} - * @deprecated - */ - get returnValue() { - return !pd(this).canceled - }, - set returnValue(value) { - if (!value) { - setCancelFlag(pd(this)); - } - }, - - /** - * Initialize this event object. But do nothing under event dispatching. - * @param {string} type The event type. - * @param {boolean} [bubbles=false] The flag to be possible to bubble up. - * @param {boolean} [cancelable=false] The flag to be possible to cancel. - * @deprecated - */ - initEvent() { - // Do nothing. - }, -}; - -// `constructor` is not enumerable. -Object.defineProperty(Event.prototype, "constructor", { - value: Event, - configurable: true, - writable: true, -}); - -// Ensure `event instanceof window.Event` is `true`. -if (typeof window !== "undefined" && typeof window.Event !== "undefined") { - Object.setPrototypeOf(Event.prototype, window.Event.prototype); - - // Make association for wrappers. - wrappers.set(window.Event.prototype, Event); -} - -/** - * Get the property descriptor to redirect a given property. - * @param {string} key Property name to define property descriptor. - * @returns {PropertyDescriptor} The property descriptor to redirect the property. - * @private - */ -function defineRedirectDescriptor(key) { - return { - get() { - return pd(this).event[key] - }, - set(value) { - pd(this).event[key] = value; - }, - configurable: true, - enumerable: true, - } -} - -/** - * Get the property descriptor to call a given method property. - * @param {string} key Property name to define property descriptor. - * @returns {PropertyDescriptor} The property descriptor to call the method property. - * @private - */ -function defineCallDescriptor(key) { - return { - value() { - const event = pd(this).event; - return event[key].apply(event, arguments) - }, - configurable: true, - enumerable: true, - } -} - -/** - * Define new wrapper class. - * @param {Function} BaseEvent The base wrapper class. - * @param {Object} proto The prototype of the original event. - * @returns {Function} The defined wrapper class. 
- * @private - */ -function defineWrapper(BaseEvent, proto) { - const keys = Object.keys(proto); - if (keys.length === 0) { - return BaseEvent - } - - /** CustomEvent */ - function CustomEvent(eventTarget, event) { - BaseEvent.call(this, eventTarget, event); - } - - CustomEvent.prototype = Object.create(BaseEvent.prototype, { - constructor: { value: CustomEvent, configurable: true, writable: true }, - }); - - // Define accessors. - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; - if (!(key in BaseEvent.prototype)) { - const descriptor = Object.getOwnPropertyDescriptor(proto, key); - const isFunc = typeof descriptor.value === "function"; - Object.defineProperty( - CustomEvent.prototype, - key, - isFunc - ? defineCallDescriptor(key) - : defineRedirectDescriptor(key) - ); - } - } - - return CustomEvent -} - -/** - * Get the wrapper class of a given prototype. - * @param {Object} proto The prototype of the original event to get its wrapper. - * @returns {Function} The wrapper class. - * @private - */ -function getWrapper(proto) { - if (proto == null || proto === Object.prototype) { - return Event - } - - let wrapper = wrappers.get(proto); - if (wrapper == null) { - wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); - wrappers.set(proto, wrapper); - } - return wrapper -} - -/** - * Wrap a given event to management a dispatching. - * @param {EventTarget} eventTarget The event target of this dispatching. - * @param {Object} event The event to wrap. - * @returns {Event} The wrapper instance. - * @private - */ -function wrapEvent(eventTarget, event) { - const Wrapper = getWrapper(Object.getPrototypeOf(event)); - return new Wrapper(eventTarget, event) -} - -/** - * Get the immediateStopped flag of a given event. - * @param {Event} event The event to get. - * @returns {boolean} The flag to stop propagation immediately. - * @private - */ -function isStopped(event) { - return pd(event).immediateStopped -} - -/** - * Set the current event phase of a given event. - * @param {Event} event The event to set current target. - * @param {number} eventPhase New event phase. - * @returns {void} - * @private - */ -function setEventPhase(event, eventPhase) { - pd(event).eventPhase = eventPhase; -} - -/** - * Set the current target of a given event. - * @param {Event} event The event to set current target. - * @param {EventTarget|null} currentTarget New current target. - * @returns {void} - * @private - */ -function setCurrentTarget(event, currentTarget) { - pd(event).currentTarget = currentTarget; -} - -/** - * Set a passive listener of a given event. - * @param {Event} event The event to set current target. - * @param {Function|null} passiveListener New passive listener. - * @returns {void} - * @private - */ -function setPassiveListener(event, passiveListener) { - pd(event).passiveListener = passiveListener; -} - -/** - * @typedef {object} ListenerNode - * @property {Function} listener - * @property {1|2|3} listenerType - * @property {boolean} passive - * @property {boolean} once - * @property {ListenerNode|null} next - * @private - */ - -/** - * @type {WeakMap>} - * @private - */ -const listenersMap = new WeakMap(); - -// Listener types -const CAPTURE = 1; -const BUBBLE = 2; -const ATTRIBUTE = 3; - -/** - * Check whether a given value is an object or not. - * @param {any} x The value to check. - * @returns {boolean} `true` if the value is an object. 
- */ -function isObject(x) { - return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax -} - -/** - * Get listeners. - * @param {EventTarget} eventTarget The event target to get. - * @returns {Map} The listeners. - * @private - */ -function getListeners(eventTarget) { - const listeners = listenersMap.get(eventTarget); - if (listeners == null) { - throw new TypeError( - "'this' is expected an EventTarget object, but got another value." - ) - } - return listeners -} - -/** - * Get the property descriptor for the event attribute of a given event. - * @param {string} eventName The event name to get property descriptor. - * @returns {PropertyDescriptor} The property descriptor. - * @private - */ -function defineEventAttributeDescriptor(eventName) { - return { - get() { - const listeners = getListeners(this); - let node = listeners.get(eventName); - while (node != null) { - if (node.listenerType === ATTRIBUTE) { - return node.listener - } - node = node.next; - } - return null - }, - - set(listener) { - if (typeof listener !== "function" && !isObject(listener)) { - listener = null; // eslint-disable-line no-param-reassign - } - const listeners = getListeners(this); - - // Traverse to the tail while removing old value. - let prev = null; - let node = listeners.get(eventName); - while (node != null) { - if (node.listenerType === ATTRIBUTE) { - // Remove old value. - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - } else { - prev = node; - } - - node = node.next; - } - - // Add new value. - if (listener !== null) { - const newNode = { - listener, - listenerType: ATTRIBUTE, - passive: false, - once: false, - next: null, - }; - if (prev === null) { - listeners.set(eventName, newNode); - } else { - prev.next = newNode; - } - } - }, - configurable: true, - enumerable: true, - } -} - -/** - * Define an event attribute (e.g. `eventTarget.onclick`). - * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. - * @param {string} eventName The event name to define. - * @returns {void} - */ -function defineEventAttribute(eventTargetPrototype, eventName) { - Object.defineProperty( - eventTargetPrototype, - `on${eventName}`, - defineEventAttributeDescriptor(eventName) - ); -} - -/** - * Define a custom EventTarget with event attributes. - * @param {string[]} eventNames Event names for event attributes. - * @returns {EventTarget} The custom EventTarget. - * @private - */ -function defineCustomEventTarget(eventNames) { - /** CustomEventTarget */ - function CustomEventTarget() { - EventTarget.call(this); - } - - CustomEventTarget.prototype = Object.create(EventTarget.prototype, { - constructor: { - value: CustomEventTarget, - configurable: true, - writable: true, - }, - }); - - for (let i = 0; i < eventNames.length; ++i) { - defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); - } - - return CustomEventTarget -} - -/** - * EventTarget. - * - * - This is constructor if no arguments. - * - This is a function which returns a CustomEventTarget constructor if there are arguments. 
- * - * For example: - * - * class A extends EventTarget {} - * class B extends EventTarget("message") {} - * class C extends EventTarget("message", "error") {} - * class D extends EventTarget(["message", "error"]) {} - */ -function EventTarget() { - /*eslint-disable consistent-return */ - if (this instanceof EventTarget) { - listenersMap.set(this, new Map()); - return - } - if (arguments.length === 1 && Array.isArray(arguments[0])) { - return defineCustomEventTarget(arguments[0]) - } - if (arguments.length > 0) { - const types = new Array(arguments.length); - for (let i = 0; i < arguments.length; ++i) { - types[i] = arguments[i]; - } - return defineCustomEventTarget(types) - } - throw new TypeError("Cannot call a class as a function") - /*eslint-enable consistent-return */ -} - -// Should be enumerable, but class methods are not enumerable. -EventTarget.prototype = { - /** - * Add a given listener to this event target. - * @param {string} eventName The event name to add. - * @param {Function} listener The listener to add. - * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. - * @returns {void} - */ - addEventListener(eventName, listener, options) { - if (listener == null) { - return - } - if (typeof listener !== "function" && !isObject(listener)) { - throw new TypeError("'listener' should be a function or an object.") - } - - const listeners = getListeners(this); - const optionsIsObj = isObject(options); - const capture = optionsIsObj - ? Boolean(options.capture) - : Boolean(options); - const listenerType = capture ? CAPTURE : BUBBLE; - const newNode = { - listener, - listenerType, - passive: optionsIsObj && Boolean(options.passive), - once: optionsIsObj && Boolean(options.once), - next: null, - }; - - // Set it as the first node if the first node is null. - let node = listeners.get(eventName); - if (node === undefined) { - listeners.set(eventName, newNode); - return - } - - // Traverse to the tail while checking duplication.. - let prev = null; - while (node != null) { - if ( - node.listener === listener && - node.listenerType === listenerType - ) { - // Should ignore duplication. - return - } - prev = node; - node = node.next; - } - - // Add it. - prev.next = newNode; - }, - - /** - * Remove a given listener from this event target. - * @param {string} eventName The event name to remove. - * @param {Function} listener The listener to remove. - * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. - * @returns {void} - */ - removeEventListener(eventName, listener, options) { - if (listener == null) { - return - } - - const listeners = getListeners(this); - const capture = isObject(options) - ? Boolean(options.capture) - : Boolean(options); - const listenerType = capture ? CAPTURE : BUBBLE; - - let prev = null; - let node = listeners.get(eventName); - while (node != null) { - if ( - node.listener === listener && - node.listenerType === listenerType - ) { - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - return - } - - prev = node; - node = node.next; - } - }, - - /** - * Dispatch a given event. - * @param {Event|{type:string}} event The event to dispatch. - * @returns {boolean} `false` if canceled. 
- */ - dispatchEvent(event) { - if (event == null || typeof event.type !== "string") { - throw new TypeError('"event.type" should be a string.') - } - - // If listeners aren't registered, terminate. - const listeners = getListeners(this); - const eventName = event.type; - let node = listeners.get(eventName); - if (node == null) { - return true - } - - // Since we cannot rewrite several properties, so wrap object. - const wrappedEvent = wrapEvent(this, event); - - // This doesn't process capturing phase and bubbling phase. - // This isn't participating in a tree. - let prev = null; - while (node != null) { - // Remove this listener if it's once - if (node.once) { - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - } else { - prev = node; - } - - // Call this listener - setPassiveListener( - wrappedEvent, - node.passive ? node.listener : null - ); - if (typeof node.listener === "function") { - try { - node.listener.call(this, wrappedEvent); - } catch (err) { - if ( - typeof console !== "undefined" && - typeof console.error === "function" - ) { - console.error(err); - } - } - } else if ( - node.listenerType !== ATTRIBUTE && - typeof node.listener.handleEvent === "function" - ) { - node.listener.handleEvent(wrappedEvent); - } - - // Break if `event.stopImmediatePropagation` was called. - if (isStopped(wrappedEvent)) { - break - } - - node = node.next; - } - setPassiveListener(wrappedEvent, null); - setEventPhase(wrappedEvent, 0); - setCurrentTarget(wrappedEvent, null); - - return !wrappedEvent.defaultPrevented - }, -}; - -// `constructor` is not enumerable. -Object.defineProperty(EventTarget.prototype, "constructor", { - value: EventTarget, - configurable: true, - writable: true, -}); - -// Ensure `eventTarget instanceof window.EventTarget` is `true`. -if ( - typeof window !== "undefined" && - typeof window.EventTarget !== "undefined" -) { - Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); -} - -exports.defineEventAttribute = defineEventAttribute; -exports.EventTarget = EventTarget; -exports.default = EventTarget; - -module.exports = EventTarget -module.exports.EventTarget = module.exports["default"] = EventTarget -module.exports.defineEventAttribute = defineEventAttribute -//# sourceMappingURL=event-target-shim.js.map diff --git a/deps/npm/node_modules/event-target-shim/dist/event-target-shim.mjs b/deps/npm/node_modules/event-target-shim/dist/event-target-shim.mjs deleted file mode 100644 index 114f3a1711059d..00000000000000 --- a/deps/npm/node_modules/event-target-shim/dist/event-target-shim.mjs +++ /dev/null @@ -1,862 +0,0 @@ -/** - * @author Toru Nagashima - * @copyright 2015 Toru Nagashima. All rights reserved. - * See LICENSE file in root directory for full license. - */ -/** - * @typedef {object} PrivateData - * @property {EventTarget} eventTarget The event target. - * @property {{type:string}} event The original event object. - * @property {number} eventPhase The current event phase. - * @property {EventTarget|null} currentTarget The current event target. - * @property {boolean} canceled The flag to prevent default. - * @property {boolean} stopped The flag to stop propagation. - * @property {boolean} immediateStopped The flag to stop propagation immediately. - * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. - * @property {number} timeStamp The unix time. 
- * @private - */ - -/** - * Private data for event wrappers. - * @type {WeakMap} - * @private - */ -const privateData = new WeakMap(); - -/** - * Cache for wrapper classes. - * @type {WeakMap} - * @private - */ -const wrappers = new WeakMap(); - -/** - * Get private data. - * @param {Event} event The event object to get private data. - * @returns {PrivateData} The private data of the event. - * @private - */ -function pd(event) { - const retv = privateData.get(event); - console.assert( - retv != null, - "'this' is expected an Event object, but got", - event - ); - return retv -} - -/** - * https://dom.spec.whatwg.org/#set-the-canceled-flag - * @param data {PrivateData} private data. - */ -function setCancelFlag(data) { - if (data.passiveListener != null) { - if ( - typeof console !== "undefined" && - typeof console.error === "function" - ) { - console.error( - "Unable to preventDefault inside passive event listener invocation.", - data.passiveListener - ); - } - return - } - if (!data.event.cancelable) { - return - } - - data.canceled = true; - if (typeof data.event.preventDefault === "function") { - data.event.preventDefault(); - } -} - -/** - * @see https://dom.spec.whatwg.org/#interface-event - * @private - */ -/** - * The event wrapper. - * @constructor - * @param {EventTarget} eventTarget The event target of this dispatching. - * @param {Event|{type:string}} event The original event to wrap. - */ -function Event(eventTarget, event) { - privateData.set(this, { - eventTarget, - event, - eventPhase: 2, - currentTarget: eventTarget, - canceled: false, - stopped: false, - immediateStopped: false, - passiveListener: null, - timeStamp: event.timeStamp || Date.now(), - }); - - // https://heycam.github.io/webidl/#Unforgeable - Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); - - // Define accessors - const keys = Object.keys(event); - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; - if (!(key in this)) { - Object.defineProperty(this, key, defineRedirectDescriptor(key)); - } - } -} - -// Should be enumerable, but class methods are not enumerable. -Event.prototype = { - /** - * The type of this event. - * @type {string} - */ - get type() { - return pd(this).event.type - }, - - /** - * The target of this event. - * @type {EventTarget} - */ - get target() { - return pd(this).eventTarget - }, - - /** - * The target of this event. - * @type {EventTarget} - */ - get currentTarget() { - return pd(this).currentTarget - }, - - /** - * @returns {EventTarget[]} The composed path of this event. - */ - composedPath() { - const currentTarget = pd(this).currentTarget; - if (currentTarget == null) { - return [] - } - return [currentTarget] - }, - - /** - * Constant of NONE. - * @type {number} - */ - get NONE() { - return 0 - }, - - /** - * Constant of CAPTURING_PHASE. - * @type {number} - */ - get CAPTURING_PHASE() { - return 1 - }, - - /** - * Constant of AT_TARGET. - * @type {number} - */ - get AT_TARGET() { - return 2 - }, - - /** - * Constant of BUBBLING_PHASE. - * @type {number} - */ - get BUBBLING_PHASE() { - return 3 - }, - - /** - * The target of this event. - * @type {number} - */ - get eventPhase() { - return pd(this).eventPhase - }, - - /** - * Stop event bubbling. - * @returns {void} - */ - stopPropagation() { - const data = pd(this); - - data.stopped = true; - if (typeof data.event.stopPropagation === "function") { - data.event.stopPropagation(); - } - }, - - /** - * Stop event bubbling. 
- * @returns {void} - */ - stopImmediatePropagation() { - const data = pd(this); - - data.stopped = true; - data.immediateStopped = true; - if (typeof data.event.stopImmediatePropagation === "function") { - data.event.stopImmediatePropagation(); - } - }, - - /** - * The flag to be bubbling. - * @type {boolean} - */ - get bubbles() { - return Boolean(pd(this).event.bubbles) - }, - - /** - * The flag to be cancelable. - * @type {boolean} - */ - get cancelable() { - return Boolean(pd(this).event.cancelable) - }, - - /** - * Cancel this event. - * @returns {void} - */ - preventDefault() { - setCancelFlag(pd(this)); - }, - - /** - * The flag to indicate cancellation state. - * @type {boolean} - */ - get defaultPrevented() { - return pd(this).canceled - }, - - /** - * The flag to be composed. - * @type {boolean} - */ - get composed() { - return Boolean(pd(this).event.composed) - }, - - /** - * The unix time of this event. - * @type {number} - */ - get timeStamp() { - return pd(this).timeStamp - }, - - /** - * The target of this event. - * @type {EventTarget} - * @deprecated - */ - get srcElement() { - return pd(this).eventTarget - }, - - /** - * The flag to stop event bubbling. - * @type {boolean} - * @deprecated - */ - get cancelBubble() { - return pd(this).stopped - }, - set cancelBubble(value) { - if (!value) { - return - } - const data = pd(this); - - data.stopped = true; - if (typeof data.event.cancelBubble === "boolean") { - data.event.cancelBubble = true; - } - }, - - /** - * The flag to indicate cancellation state. - * @type {boolean} - * @deprecated - */ - get returnValue() { - return !pd(this).canceled - }, - set returnValue(value) { - if (!value) { - setCancelFlag(pd(this)); - } - }, - - /** - * Initialize this event object. But do nothing under event dispatching. - * @param {string} type The event type. - * @param {boolean} [bubbles=false] The flag to be possible to bubble up. - * @param {boolean} [cancelable=false] The flag to be possible to cancel. - * @deprecated - */ - initEvent() { - // Do nothing. - }, -}; - -// `constructor` is not enumerable. -Object.defineProperty(Event.prototype, "constructor", { - value: Event, - configurable: true, - writable: true, -}); - -// Ensure `event instanceof window.Event` is `true`. -if (typeof window !== "undefined" && typeof window.Event !== "undefined") { - Object.setPrototypeOf(Event.prototype, window.Event.prototype); - - // Make association for wrappers. - wrappers.set(window.Event.prototype, Event); -} - -/** - * Get the property descriptor to redirect a given property. - * @param {string} key Property name to define property descriptor. - * @returns {PropertyDescriptor} The property descriptor to redirect the property. - * @private - */ -function defineRedirectDescriptor(key) { - return { - get() { - return pd(this).event[key] - }, - set(value) { - pd(this).event[key] = value; - }, - configurable: true, - enumerable: true, - } -} - -/** - * Get the property descriptor to call a given method property. - * @param {string} key Property name to define property descriptor. - * @returns {PropertyDescriptor} The property descriptor to call the method property. - * @private - */ -function defineCallDescriptor(key) { - return { - value() { - const event = pd(this).event; - return event[key].apply(event, arguments) - }, - configurable: true, - enumerable: true, - } -} - -/** - * Define new wrapper class. - * @param {Function} BaseEvent The base wrapper class. - * @param {Object} proto The prototype of the original event. 
- * @returns {Function} The defined wrapper class. - * @private - */ -function defineWrapper(BaseEvent, proto) { - const keys = Object.keys(proto); - if (keys.length === 0) { - return BaseEvent - } - - /** CustomEvent */ - function CustomEvent(eventTarget, event) { - BaseEvent.call(this, eventTarget, event); - } - - CustomEvent.prototype = Object.create(BaseEvent.prototype, { - constructor: { value: CustomEvent, configurable: true, writable: true }, - }); - - // Define accessors. - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; - if (!(key in BaseEvent.prototype)) { - const descriptor = Object.getOwnPropertyDescriptor(proto, key); - const isFunc = typeof descriptor.value === "function"; - Object.defineProperty( - CustomEvent.prototype, - key, - isFunc - ? defineCallDescriptor(key) - : defineRedirectDescriptor(key) - ); - } - } - - return CustomEvent -} - -/** - * Get the wrapper class of a given prototype. - * @param {Object} proto The prototype of the original event to get its wrapper. - * @returns {Function} The wrapper class. - * @private - */ -function getWrapper(proto) { - if (proto == null || proto === Object.prototype) { - return Event - } - - let wrapper = wrappers.get(proto); - if (wrapper == null) { - wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); - wrappers.set(proto, wrapper); - } - return wrapper -} - -/** - * Wrap a given event to management a dispatching. - * @param {EventTarget} eventTarget The event target of this dispatching. - * @param {Object} event The event to wrap. - * @returns {Event} The wrapper instance. - * @private - */ -function wrapEvent(eventTarget, event) { - const Wrapper = getWrapper(Object.getPrototypeOf(event)); - return new Wrapper(eventTarget, event) -} - -/** - * Get the immediateStopped flag of a given event. - * @param {Event} event The event to get. - * @returns {boolean} The flag to stop propagation immediately. - * @private - */ -function isStopped(event) { - return pd(event).immediateStopped -} - -/** - * Set the current event phase of a given event. - * @param {Event} event The event to set current target. - * @param {number} eventPhase New event phase. - * @returns {void} - * @private - */ -function setEventPhase(event, eventPhase) { - pd(event).eventPhase = eventPhase; -} - -/** - * Set the current target of a given event. - * @param {Event} event The event to set current target. - * @param {EventTarget|null} currentTarget New current target. - * @returns {void} - * @private - */ -function setCurrentTarget(event, currentTarget) { - pd(event).currentTarget = currentTarget; -} - -/** - * Set a passive listener of a given event. - * @param {Event} event The event to set current target. - * @param {Function|null} passiveListener New passive listener. - * @returns {void} - * @private - */ -function setPassiveListener(event, passiveListener) { - pd(event).passiveListener = passiveListener; -} - -/** - * @typedef {object} ListenerNode - * @property {Function} listener - * @property {1|2|3} listenerType - * @property {boolean} passive - * @property {boolean} once - * @property {ListenerNode|null} next - * @private - */ - -/** - * @type {WeakMap>} - * @private - */ -const listenersMap = new WeakMap(); - -// Listener types -const CAPTURE = 1; -const BUBBLE = 2; -const ATTRIBUTE = 3; - -/** - * Check whether a given value is an object or not. - * @param {any} x The value to check. - * @returns {boolean} `true` if the value is an object. 
- */ -function isObject(x) { - return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax -} - -/** - * Get listeners. - * @param {EventTarget} eventTarget The event target to get. - * @returns {Map} The listeners. - * @private - */ -function getListeners(eventTarget) { - const listeners = listenersMap.get(eventTarget); - if (listeners == null) { - throw new TypeError( - "'this' is expected an EventTarget object, but got another value." - ) - } - return listeners -} - -/** - * Get the property descriptor for the event attribute of a given event. - * @param {string} eventName The event name to get property descriptor. - * @returns {PropertyDescriptor} The property descriptor. - * @private - */ -function defineEventAttributeDescriptor(eventName) { - return { - get() { - const listeners = getListeners(this); - let node = listeners.get(eventName); - while (node != null) { - if (node.listenerType === ATTRIBUTE) { - return node.listener - } - node = node.next; - } - return null - }, - - set(listener) { - if (typeof listener !== "function" && !isObject(listener)) { - listener = null; // eslint-disable-line no-param-reassign - } - const listeners = getListeners(this); - - // Traverse to the tail while removing old value. - let prev = null; - let node = listeners.get(eventName); - while (node != null) { - if (node.listenerType === ATTRIBUTE) { - // Remove old value. - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - } else { - prev = node; - } - - node = node.next; - } - - // Add new value. - if (listener !== null) { - const newNode = { - listener, - listenerType: ATTRIBUTE, - passive: false, - once: false, - next: null, - }; - if (prev === null) { - listeners.set(eventName, newNode); - } else { - prev.next = newNode; - } - } - }, - configurable: true, - enumerable: true, - } -} - -/** - * Define an event attribute (e.g. `eventTarget.onclick`). - * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. - * @param {string} eventName The event name to define. - * @returns {void} - */ -function defineEventAttribute(eventTargetPrototype, eventName) { - Object.defineProperty( - eventTargetPrototype, - `on${eventName}`, - defineEventAttributeDescriptor(eventName) - ); -} - -/** - * Define a custom EventTarget with event attributes. - * @param {string[]} eventNames Event names for event attributes. - * @returns {EventTarget} The custom EventTarget. - * @private - */ -function defineCustomEventTarget(eventNames) { - /** CustomEventTarget */ - function CustomEventTarget() { - EventTarget.call(this); - } - - CustomEventTarget.prototype = Object.create(EventTarget.prototype, { - constructor: { - value: CustomEventTarget, - configurable: true, - writable: true, - }, - }); - - for (let i = 0; i < eventNames.length; ++i) { - defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); - } - - return CustomEventTarget -} - -/** - * EventTarget. - * - * - This is constructor if no arguments. - * - This is a function which returns a CustomEventTarget constructor if there are arguments. 
- * - * For example: - * - * class A extends EventTarget {} - * class B extends EventTarget("message") {} - * class C extends EventTarget("message", "error") {} - * class D extends EventTarget(["message", "error"]) {} - */ -function EventTarget() { - /*eslint-disable consistent-return */ - if (this instanceof EventTarget) { - listenersMap.set(this, new Map()); - return - } - if (arguments.length === 1 && Array.isArray(arguments[0])) { - return defineCustomEventTarget(arguments[0]) - } - if (arguments.length > 0) { - const types = new Array(arguments.length); - for (let i = 0; i < arguments.length; ++i) { - types[i] = arguments[i]; - } - return defineCustomEventTarget(types) - } - throw new TypeError("Cannot call a class as a function") - /*eslint-enable consistent-return */ -} - -// Should be enumerable, but class methods are not enumerable. -EventTarget.prototype = { - /** - * Add a given listener to this event target. - * @param {string} eventName The event name to add. - * @param {Function} listener The listener to add. - * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. - * @returns {void} - */ - addEventListener(eventName, listener, options) { - if (listener == null) { - return - } - if (typeof listener !== "function" && !isObject(listener)) { - throw new TypeError("'listener' should be a function or an object.") - } - - const listeners = getListeners(this); - const optionsIsObj = isObject(options); - const capture = optionsIsObj - ? Boolean(options.capture) - : Boolean(options); - const listenerType = capture ? CAPTURE : BUBBLE; - const newNode = { - listener, - listenerType, - passive: optionsIsObj && Boolean(options.passive), - once: optionsIsObj && Boolean(options.once), - next: null, - }; - - // Set it as the first node if the first node is null. - let node = listeners.get(eventName); - if (node === undefined) { - listeners.set(eventName, newNode); - return - } - - // Traverse to the tail while checking duplication.. - let prev = null; - while (node != null) { - if ( - node.listener === listener && - node.listenerType === listenerType - ) { - // Should ignore duplication. - return - } - prev = node; - node = node.next; - } - - // Add it. - prev.next = newNode; - }, - - /** - * Remove a given listener from this event target. - * @param {string} eventName The event name to remove. - * @param {Function} listener The listener to remove. - * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. - * @returns {void} - */ - removeEventListener(eventName, listener, options) { - if (listener == null) { - return - } - - const listeners = getListeners(this); - const capture = isObject(options) - ? Boolean(options.capture) - : Boolean(options); - const listenerType = capture ? CAPTURE : BUBBLE; - - let prev = null; - let node = listeners.get(eventName); - while (node != null) { - if ( - node.listener === listener && - node.listenerType === listenerType - ) { - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - return - } - - prev = node; - node = node.next; - } - }, - - /** - * Dispatch a given event. - * @param {Event|{type:string}} event The event to dispatch. - * @returns {boolean} `false` if canceled. 
- */ - dispatchEvent(event) { - if (event == null || typeof event.type !== "string") { - throw new TypeError('"event.type" should be a string.') - } - - // If listeners aren't registered, terminate. - const listeners = getListeners(this); - const eventName = event.type; - let node = listeners.get(eventName); - if (node == null) { - return true - } - - // Since we cannot rewrite several properties, so wrap object. - const wrappedEvent = wrapEvent(this, event); - - // This doesn't process capturing phase and bubbling phase. - // This isn't participating in a tree. - let prev = null; - while (node != null) { - // Remove this listener if it's once - if (node.once) { - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - } else { - prev = node; - } - - // Call this listener - setPassiveListener( - wrappedEvent, - node.passive ? node.listener : null - ); - if (typeof node.listener === "function") { - try { - node.listener.call(this, wrappedEvent); - } catch (err) { - if ( - typeof console !== "undefined" && - typeof console.error === "function" - ) { - console.error(err); - } - } - } else if ( - node.listenerType !== ATTRIBUTE && - typeof node.listener.handleEvent === "function" - ) { - node.listener.handleEvent(wrappedEvent); - } - - // Break if `event.stopImmediatePropagation` was called. - if (isStopped(wrappedEvent)) { - break - } - - node = node.next; - } - setPassiveListener(wrappedEvent, null); - setEventPhase(wrappedEvent, 0); - setCurrentTarget(wrappedEvent, null); - - return !wrappedEvent.defaultPrevented - }, -}; - -// `constructor` is not enumerable. -Object.defineProperty(EventTarget.prototype, "constructor", { - value: EventTarget, - configurable: true, - writable: true, -}); - -// Ensure `eventTarget instanceof window.EventTarget` is `true`. -if ( - typeof window !== "undefined" && - typeof window.EventTarget !== "undefined" -) { - Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); -} - -export default EventTarget; -export { defineEventAttribute, EventTarget }; -//# sourceMappingURL=event-target-shim.mjs.map diff --git a/deps/npm/node_modules/event-target-shim/dist/event-target-shim.umd.js b/deps/npm/node_modules/event-target-shim/dist/event-target-shim.umd.js deleted file mode 100644 index e7cf5d4d5885f9..00000000000000 --- a/deps/npm/node_modules/event-target-shim/dist/event-target-shim.umd.js +++ /dev/null @@ -1,6 +0,0 @@ -/** - * @author Toru Nagashima - * @copyright 2015 Toru Nagashima. All rights reserved. - * See LICENSE file in root directory for full license. 
- */(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.EventTargetShim={}))})(this,function(a){"use strict";function b(a){return b="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},b(a)}function c(a){var b=u.get(a);return console.assert(null!=b,"'this' is expected an Event object, but got",a),b}function d(a){return null==a.passiveListener?void(!a.event.cancelable||(a.canceled=!0,"function"==typeof a.event.preventDefault&&a.event.preventDefault())):void("undefined"!=typeof console&&"function"==typeof console.error&&console.error("Unable to preventDefault inside passive event listener invocation.",a.passiveListener))}function e(a,b){u.set(this,{eventTarget:a,event:b,eventPhase:2,currentTarget:a,canceled:!1,stopped:!1,immediateStopped:!1,passiveListener:null,timeStamp:b.timeStamp||Date.now()}),Object.defineProperty(this,"isTrusted",{value:!1,enumerable:!0});for(var c,d=Object.keys(b),e=0;e=6" - }, - "scripts": { - "preversion": "npm test", - "version": "npm run build && git add dist/*", - "postversion": "git push && git push --tags", - "clean": "rimraf .nyc_output coverage", - "coverage": "nyc report --reporter lcov && opener coverage/lcov-report/index.html", - "lint": "eslint src test scripts --ext .js,.mjs", - "build": "rollup -c scripts/rollup.config.js", - "pretest": "npm run lint", - "test": "run-s test:*", - "test:mocha": "nyc --require ./scripts/babel-register mocha test/*.mjs", - "test:karma": "karma start scripts/karma.conf.js --single-run", - "watch": "run-p watch:*", - "watch:mocha": "mocha test/*.mjs --require ./scripts/babel-register --watch --watch-extensions js,mjs --growl", - "watch:karma": "karma start scripts/karma.conf.js --watch", - "codecov": "codecov" - }, - "devDependencies": { - "@babel/core": "^7.2.2", - "@babel/plugin-transform-modules-commonjs": "^7.2.0", - "@babel/preset-env": "^7.2.3", - "@babel/register": "^7.0.0", - "@mysticatea/eslint-plugin": "^8.0.1", - "@mysticatea/spy": "^0.1.2", - "assert": "^1.4.1", - "codecov": "^3.1.0", - "eslint": "^5.12.1", - "karma": "^3.1.4", - "karma-chrome-launcher": "^2.2.0", - "karma-coverage": "^1.1.2", - "karma-firefox-launcher": "^1.0.0", - "karma-growl-reporter": "^1.0.0", - "karma-ie-launcher": "^1.0.0", - "karma-mocha": "^1.3.0", - "karma-rollup-preprocessor": "^7.0.0-rc.2", - "mocha": "^5.2.0", - "npm-run-all": "^4.1.5", - "nyc": "^13.1.0", - "opener": "^1.5.1", - "rimraf": "^2.6.3", - "rollup": "^1.1.1", - "rollup-plugin-babel": "^4.3.2", - "rollup-plugin-babel-minify": "^7.0.0", - "rollup-plugin-commonjs": "^9.2.0", - "rollup-plugin-json": "^3.1.0", - "rollup-plugin-node-resolve": "^4.0.0", - "rollup-watch": "^4.3.1", - "type-tester": "^1.0.0", - "typescript": "^3.2.4" - }, - "repository": { - "type": "git", - "url": "https://github.com/mysticatea/event-target-shim.git" - }, - "keywords": [ - "w3c", - "whatwg", - "eventtarget", - "event", - "events", - "shim" - ], - "author": "Toru Nagashima", - "license": "MIT", - "bugs": { - "url": "https://github.com/mysticatea/event-target-shim/issues" - }, - "homepage": "https://github.com/mysticatea/event-target-shim" -} diff --git a/deps/npm/node_modules/events/.airtap.yml b/deps/npm/node_modules/events/.airtap.yml deleted file mode 100644 index c7a8a87d5e99d1..00000000000000 --- a/deps/npm/node_modules/events/.airtap.yml +++ 
/dev/null @@ -1,15 +0,0 @@ -sauce_connect: true -loopback: airtap.local -browsers: - - name: chrome - version: latest - - name: firefox - version: latest - - name: safari - version: 9..latest - - name: iphone - version: latest - - name: ie - version: 9..latest - - name: microsoftedge - version: 13..latest diff --git a/deps/npm/node_modules/events/History.md b/deps/npm/node_modules/events/History.md deleted file mode 100644 index f48bf210da3ea2..00000000000000 --- a/deps/npm/node_modules/events/History.md +++ /dev/null @@ -1,118 +0,0 @@ -# 3.3.0 - - - Support EventTarget emitters in `events.once` from Node.js 12.11.0. - - Now you can use the `events.once` function with objects that implement the EventTarget interface. This interface is used widely in - the DOM and other web APIs. - - ```js - var events = require('events'); - var assert = require('assert'); - - async function connect() { - var ws = new WebSocket('wss://example.com'); - await events.once(ws, 'open'); - assert(ws.readyState === WebSocket.OPEN); - } - - async function onClick() { - await events.once(document.body, 'click'); - alert('you clicked the page!'); - } - ``` - -# 3.2.0 - - - Add `events.once` from Node.js 11.13.0. - - To use this function, Promises must be supported in the environment. Use a polyfill like `es6-promise` if you support older browsers. - -# 3.1.0 (2020-01-08) - -`events` now matches the Node.js 11.12.0 API. - - - pass through return value in wrapped `emitter.once()` listeners - - Now, this works: - ```js - emitter.once('myevent', function () { return 1; }); - var listener = emitter.rawListeners('myevent')[0] - assert(listener() === 1); - ``` - Previously, `listener()` would return undefined regardless of the implementation. - - Ported from https://github.com/nodejs/node/commit/acc506c2d2771dab8d7bba6d3452bc5180dff7cf - - - Reduce code duplication in listener type check ([#67](https://github.com/Gozala/events/pull/67) by [@friederbluemle](https://github.com/friederbluemle)). - - Improve `emitter.once()` performance in some engines - -# 3.0.0 (2018-05-25) - -**This version drops support for IE8.** `events` no longer includes polyfills -for ES5 features. If you need to support older environments, use an ES5 shim -like [es5-shim](https://npmjs.com/package/es5-shim). Both the shim and sham -versions of es5-shim are necessary. - - - Update to events code from Node.js 10.x - - (semver major) Adds `off()` method - - Port more tests from Node.js - - Switch browser tests to airtap, making things more reliable - -# 2.1.0 (2018-05-25) - - - add Emitter#rawListeners from Node.js v9.4 - -# 2.0.0 (2018-02-02) - - - Update to events code from node.js 8.x - - Adds `prependListener()` and `prependOnceListener()` - - Adds `eventNames()` method - - (semver major) Unwrap `once()` listeners in `listeners()` - - copy tests from node.js - -Note that this version doubles the gzipped size, jumping from 1.1KB to 2.1KB, -due to new methods and runtime performance improvements. Be aware of that when -upgrading. 
- -# 1.1.1 (2016-06-22) - - - add more context to errors if they are not instanceof Error - -# 1.1.0 (2015-09-29) - - - add Emitter#listerCount (to match node v4 api) - -# 1.0.2 (2014-08-28) - - - remove un-reachable code - - update devDeps - -## 1.0.1 / 2014-05-11 - - - check for console.trace before using it - -## 1.0.0 / 2013-12-10 - - - Update to latest events code from node.js 0.10 - - copy tests from node.js - -## 0.4.0 / 2011-07-03 ## - - - Switching to graphquire@0.8.0 - -## 0.3.0 / 2011-07-03 ## - - - Switching to URL based module require. - -## 0.2.0 / 2011-06-10 ## - - - Simplified package structure. - - Graphquire for dependency management. - -## 0.1.1 / 2011-05-16 ## - - - Unhandled errors are logged via console.error - -## 0.1.0 / 2011-04-22 ## - - - Initial release diff --git a/deps/npm/node_modules/events/LICENSE b/deps/npm/node_modules/events/LICENSE deleted file mode 100644 index 52ed3b0a63274d..00000000000000 --- a/deps/npm/node_modules/events/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -MIT - -Copyright Joyent, Inc. and other Node contributors. - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to permit -persons to whom the Software is furnished to do so, subject to the -following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/events/events.js b/deps/npm/node_modules/events/events.js deleted file mode 100644 index 34b69a0b4a6e12..00000000000000 --- a/deps/npm/node_modules/events/events.js +++ /dev/null @@ -1,497 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -'use strict'; - -var R = typeof Reflect === 'object' ? Reflect : null -var ReflectApply = R && typeof R.apply === 'function' - ? R.apply - : function ReflectApply(target, receiver, args) { - return Function.prototype.apply.call(target, receiver, args); - } - -var ReflectOwnKeys -if (R && typeof R.ownKeys === 'function') { - ReflectOwnKeys = R.ownKeys -} else if (Object.getOwnPropertySymbols) { - ReflectOwnKeys = function ReflectOwnKeys(target) { - return Object.getOwnPropertyNames(target) - .concat(Object.getOwnPropertySymbols(target)); - }; -} else { - ReflectOwnKeys = function ReflectOwnKeys(target) { - return Object.getOwnPropertyNames(target); - }; -} - -function ProcessEmitWarning(warning) { - if (console && console.warn) console.warn(warning); -} - -var NumberIsNaN = Number.isNaN || function NumberIsNaN(value) { - return value !== value; -} - -function EventEmitter() { - EventEmitter.init.call(this); -} -module.exports = EventEmitter; -module.exports.once = once; - -// Backwards-compat with node 0.10.x -EventEmitter.EventEmitter = EventEmitter; - -EventEmitter.prototype._events = undefined; -EventEmitter.prototype._eventsCount = 0; -EventEmitter.prototype._maxListeners = undefined; - -// By default EventEmitters will print a warning if more than 10 listeners are -// added to it. This is a useful default which helps finding memory leaks. -var defaultMaxListeners = 10; - -function checkListener(listener) { - if (typeof listener !== 'function') { - throw new TypeError('The "listener" argument must be of type Function. Received type ' + typeof listener); - } -} - -Object.defineProperty(EventEmitter, 'defaultMaxListeners', { - enumerable: true, - get: function() { - return defaultMaxListeners; - }, - set: function(arg) { - if (typeof arg !== 'number' || arg < 0 || NumberIsNaN(arg)) { - throw new RangeError('The value of "defaultMaxListeners" is out of range. It must be a non-negative number. Received ' + arg + '.'); - } - defaultMaxListeners = arg; - } -}); - -EventEmitter.init = function() { - - if (this._events === undefined || - this._events === Object.getPrototypeOf(this)._events) { - this._events = Object.create(null); - this._eventsCount = 0; - } - - this._maxListeners = this._maxListeners || undefined; -}; - -// Obviously not all Emitters should be limited to 10. This function allows -// that to be increased. Set to zero for unlimited. -EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) { - if (typeof n !== 'number' || n < 0 || NumberIsNaN(n)) { - throw new RangeError('The value of "n" is out of range. It must be a non-negative number. Received ' + n + '.'); - } - this._maxListeners = n; - return this; -}; - -function _getMaxListeners(that) { - if (that._maxListeners === undefined) - return EventEmitter.defaultMaxListeners; - return that._maxListeners; -} - -EventEmitter.prototype.getMaxListeners = function getMaxListeners() { - return _getMaxListeners(this); -}; - -EventEmitter.prototype.emit = function emit(type) { - var args = []; - for (var i = 1; i < arguments.length; i++) args.push(arguments[i]); - var doError = (type === 'error'); - - var events = this._events; - if (events !== undefined) - doError = (doError && events.error === undefined); - else if (!doError) - return false; - - // If there is no 'error' event listener then throw. 
- if (doError) { - var er; - if (args.length > 0) - er = args[0]; - if (er instanceof Error) { - // Note: The comments on the `throw` lines are intentional, they show - // up in Node's output if this results in an unhandled exception. - throw er; // Unhandled 'error' event - } - // At least give some kind of context to the user - var err = new Error('Unhandled error.' + (er ? ' (' + er.message + ')' : '')); - err.context = er; - throw err; // Unhandled 'error' event - } - - var handler = events[type]; - - if (handler === undefined) - return false; - - if (typeof handler === 'function') { - ReflectApply(handler, this, args); - } else { - var len = handler.length; - var listeners = arrayClone(handler, len); - for (var i = 0; i < len; ++i) - ReflectApply(listeners[i], this, args); - } - - return true; -}; - -function _addListener(target, type, listener, prepend) { - var m; - var events; - var existing; - - checkListener(listener); - - events = target._events; - if (events === undefined) { - events = target._events = Object.create(null); - target._eventsCount = 0; - } else { - // To avoid recursion in the case that type === "newListener"! Before - // adding it to the listeners, first emit "newListener". - if (events.newListener !== undefined) { - target.emit('newListener', type, - listener.listener ? listener.listener : listener); - - // Re-assign `events` because a newListener handler could have caused the - // this._events to be assigned to a new object - events = target._events; - } - existing = events[type]; - } - - if (existing === undefined) { - // Optimize the case of one listener. Don't need the extra array object. - existing = events[type] = listener; - ++target._eventsCount; - } else { - if (typeof existing === 'function') { - // Adding the second element, need to change to array. - existing = events[type] = - prepend ? [listener, existing] : [existing, listener]; - // If we've already got an array, just append. - } else if (prepend) { - existing.unshift(listener); - } else { - existing.push(listener); - } - - // Check for listener leak - m = _getMaxListeners(target); - if (m > 0 && existing.length > m && !existing.warned) { - existing.warned = true; - // No error code for this since it is a Warning - // eslint-disable-next-line no-restricted-syntax - var w = new Error('Possible EventEmitter memory leak detected. ' + - existing.length + ' ' + String(type) + ' listeners ' + - 'added. 
Use emitter.setMaxListeners() to ' + - 'increase limit'); - w.name = 'MaxListenersExceededWarning'; - w.emitter = target; - w.type = type; - w.count = existing.length; - ProcessEmitWarning(w); - } - } - - return target; -} - -EventEmitter.prototype.addListener = function addListener(type, listener) { - return _addListener(this, type, listener, false); -}; - -EventEmitter.prototype.on = EventEmitter.prototype.addListener; - -EventEmitter.prototype.prependListener = - function prependListener(type, listener) { - return _addListener(this, type, listener, true); - }; - -function onceWrapper() { - if (!this.fired) { - this.target.removeListener(this.type, this.wrapFn); - this.fired = true; - if (arguments.length === 0) - return this.listener.call(this.target); - return this.listener.apply(this.target, arguments); - } -} - -function _onceWrap(target, type, listener) { - var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener }; - var wrapped = onceWrapper.bind(state); - wrapped.listener = listener; - state.wrapFn = wrapped; - return wrapped; -} - -EventEmitter.prototype.once = function once(type, listener) { - checkListener(listener); - this.on(type, _onceWrap(this, type, listener)); - return this; -}; - -EventEmitter.prototype.prependOnceListener = - function prependOnceListener(type, listener) { - checkListener(listener); - this.prependListener(type, _onceWrap(this, type, listener)); - return this; - }; - -// Emits a 'removeListener' event if and only if the listener was removed. -EventEmitter.prototype.removeListener = - function removeListener(type, listener) { - var list, events, position, i, originalListener; - - checkListener(listener); - - events = this._events; - if (events === undefined) - return this; - - list = events[type]; - if (list === undefined) - return this; - - if (list === listener || list.listener === listener) { - if (--this._eventsCount === 0) - this._events = Object.create(null); - else { - delete events[type]; - if (events.removeListener) - this.emit('removeListener', type, list.listener || listener); - } - } else if (typeof list !== 'function') { - position = -1; - - for (i = list.length - 1; i >= 0; i--) { - if (list[i] === listener || list[i].listener === listener) { - originalListener = list[i].listener; - position = i; - break; - } - } - - if (position < 0) - return this; - - if (position === 0) - list.shift(); - else { - spliceOne(list, position); - } - - if (list.length === 1) - events[type] = list[0]; - - if (events.removeListener !== undefined) - this.emit('removeListener', type, originalListener || listener); - } - - return this; - }; - -EventEmitter.prototype.off = EventEmitter.prototype.removeListener; - -EventEmitter.prototype.removeAllListeners = - function removeAllListeners(type) { - var listeners, events, i; - - events = this._events; - if (events === undefined) - return this; - - // not listening for removeListener, no need to emit - if (events.removeListener === undefined) { - if (arguments.length === 0) { - this._events = Object.create(null); - this._eventsCount = 0; - } else if (events[type] !== undefined) { - if (--this._eventsCount === 0) - this._events = Object.create(null); - else - delete events[type]; - } - return this; - } - - // emit removeListener for all listeners on all events - if (arguments.length === 0) { - var keys = Object.keys(events); - var key; - for (i = 0; i < keys.length; ++i) { - key = keys[i]; - if (key === 'removeListener') continue; - this.removeAllListeners(key); - } - 
this.removeAllListeners('removeListener'); - this._events = Object.create(null); - this._eventsCount = 0; - return this; - } - - listeners = events[type]; - - if (typeof listeners === 'function') { - this.removeListener(type, listeners); - } else if (listeners !== undefined) { - // LIFO order - for (i = listeners.length - 1; i >= 0; i--) { - this.removeListener(type, listeners[i]); - } - } - - return this; - }; - -function _listeners(target, type, unwrap) { - var events = target._events; - - if (events === undefined) - return []; - - var evlistener = events[type]; - if (evlistener === undefined) - return []; - - if (typeof evlistener === 'function') - return unwrap ? [evlistener.listener || evlistener] : [evlistener]; - - return unwrap ? - unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length); -} - -EventEmitter.prototype.listeners = function listeners(type) { - return _listeners(this, type, true); -}; - -EventEmitter.prototype.rawListeners = function rawListeners(type) { - return _listeners(this, type, false); -}; - -EventEmitter.listenerCount = function(emitter, type) { - if (typeof emitter.listenerCount === 'function') { - return emitter.listenerCount(type); - } else { - return listenerCount.call(emitter, type); - } -}; - -EventEmitter.prototype.listenerCount = listenerCount; -function listenerCount(type) { - var events = this._events; - - if (events !== undefined) { - var evlistener = events[type]; - - if (typeof evlistener === 'function') { - return 1; - } else if (evlistener !== undefined) { - return evlistener.length; - } - } - - return 0; -} - -EventEmitter.prototype.eventNames = function eventNames() { - return this._eventsCount > 0 ? ReflectOwnKeys(this._events) : []; -}; - -function arrayClone(arr, n) { - var copy = new Array(n); - for (var i = 0; i < n; ++i) - copy[i] = arr[i]; - return copy; -} - -function spliceOne(list, index) { - for (; index + 1 < list.length; index++) - list[index] = list[index + 1]; - list.pop(); -} - -function unwrapListeners(arr) { - var ret = new Array(arr.length); - for (var i = 0; i < ret.length; ++i) { - ret[i] = arr[i].listener || arr[i]; - } - return ret; -} - -function once(emitter, name) { - return new Promise(function (resolve, reject) { - function errorListener(err) { - emitter.removeListener(name, resolver); - reject(err); - } - - function resolver() { - if (typeof emitter.removeListener === 'function') { - emitter.removeListener('error', errorListener); - } - resolve([].slice.call(arguments)); - }; - - eventTargetAgnosticAddListener(emitter, name, resolver, { once: true }); - if (name !== 'error') { - addErrorHandlerIfEventEmitter(emitter, errorListener, { once: true }); - } - }); -} - -function addErrorHandlerIfEventEmitter(emitter, handler, flags) { - if (typeof emitter.on === 'function') { - eventTargetAgnosticAddListener(emitter, 'error', handler, flags); - } -} - -function eventTargetAgnosticAddListener(emitter, name, listener, flags) { - if (typeof emitter.on === 'function') { - if (flags.once) { - emitter.once(name, listener); - } else { - emitter.on(name, listener); - } - } else if (typeof emitter.addEventListener === 'function') { - // EventTarget does not have `error` event semantics like Node - // EventEmitters, we do not listen for `error` events here. - emitter.addEventListener(name, function wrapListener(arg) { - // IE does not have builtin `{ once: true }` support so we - // have to do it manually. 
- if (flags.once) { - emitter.removeEventListener(name, wrapListener); - } - listener(arg); - }); - } else { - throw new TypeError('The "emitter" argument must be of type EventEmitter. Received type ' + typeof emitter); - } -} diff --git a/deps/npm/node_modules/events/package.json b/deps/npm/node_modules/events/package.json deleted file mode 100644 index b9580d88142d29..00000000000000 --- a/deps/npm/node_modules/events/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "events", - "version": "3.3.0", - "description": "Node's event emitter for all engines.", - "keywords": [ - "events", - "eventEmitter", - "eventDispatcher", - "listeners" - ], - "author": "Irakli Gozalishvili (http://jeditoolkit.com)", - "repository": { - "type": "git", - "url": "git://github.com/Gozala/events.git", - "web": "https://github.com/Gozala/events" - }, - "bugs": { - "url": "http://github.com/Gozala/events/issues/" - }, - "main": "./events.js", - "engines": { - "node": ">=0.8.x" - }, - "devDependencies": { - "airtap": "^1.0.0", - "functions-have-names": "^1.2.1", - "has": "^1.0.3", - "has-symbols": "^1.0.1", - "isarray": "^2.0.5", - "tape": "^5.0.0" - }, - "scripts": { - "test": "node tests/index.js", - "test:browsers": "airtap -- tests/index.js" - }, - "license": "MIT" -} diff --git a/deps/npm/node_modules/events/security.md b/deps/npm/node_modules/events/security.md deleted file mode 100644 index a14ace6a57db70..00000000000000 --- a/deps/npm/node_modules/events/security.md +++ /dev/null @@ -1,10 +0,0 @@ -# Security Policy - -## Supported Versions -Only the latest major version is supported at any given time. - -## Reporting a Vulnerability - -To report a security vulnerability, please use the -[Tidelift security contact](https://tidelift.com/security). -Tidelift will coordinate the fix and disclosure. diff --git a/deps/npm/node_modules/events/tests/add-listeners.js b/deps/npm/node_modules/events/tests/add-listeners.js deleted file mode 100644 index 9b578272ba889e..00000000000000 --- a/deps/npm/node_modules/events/tests/add-listeners.js +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -var common = require('./common'); -var assert = require('assert'); -var EventEmitter = require('../'); - -{ - var ee = new EventEmitter(); - var events_new_listener_emitted = []; - var listeners_new_listener_emitted = []; - - // Sanity check - assert.strictEqual(ee.addListener, ee.on); - - ee.on('newListener', function(event, listener) { - // Don't track newListener listeners. - if (event === 'newListener') - return; - - events_new_listener_emitted.push(event); - listeners_new_listener_emitted.push(listener); - }); - - var hello = common.mustCall(function(a, b) { - assert.strictEqual('a', a); - assert.strictEqual('b', b); - }); - - ee.once('newListener', function(name, listener) { - assert.strictEqual(name, 'hello'); - assert.strictEqual(listener, hello); - - var listeners = this.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); - }); - - ee.on('hello', hello); - ee.once('foo', assert.fail); - - assert.ok(Array.isArray(events_new_listener_emitted)); - assert.strictEqual(events_new_listener_emitted.length, 2); - assert.strictEqual(events_new_listener_emitted[0], 'hello'); - assert.strictEqual(events_new_listener_emitted[1], 'foo'); - - assert.ok(Array.isArray(listeners_new_listener_emitted)); - assert.strictEqual(listeners_new_listener_emitted.length, 2); - assert.strictEqual(listeners_new_listener_emitted[0], hello); - assert.strictEqual(listeners_new_listener_emitted[1], assert.fail); - - ee.emit('hello', 'a', 'b'); -} - -// just make sure that this doesn't throw: -{ - var f = new EventEmitter(); - - f.setMaxListeners(0); -} - -{ - var listen1 = function() {}; - var listen2 = function() {}; - var ee = new EventEmitter(); - - ee.once('newListener', function() { - var listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); - ee.once('newListener', function() { - var listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); - }); - ee.on('hello', listen2); - }); - ee.on('hello', listen1); - // The order of listeners on an event is not always the order in which the - // listeners were added. - var listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 2); - assert.strictEqual(listeners[0], listen2); - assert.strictEqual(listeners[1], listen1); -} - -// Verify that the listener must be a function -assert.throws(function() { - var ee = new EventEmitter(); - - ee.on('foo', null); -}, /^TypeError: The "listener" argument must be of type Function. Received type object$/); diff --git a/deps/npm/node_modules/events/tests/check-listener-leaks.js b/deps/npm/node_modules/events/tests/check-listener-leaks.js deleted file mode 100644 index 7fce48f37bf24c..00000000000000 --- a/deps/npm/node_modules/events/tests/check-listener-leaks.js +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -var common = require('./common'); -var assert = require('assert'); -var events = require('../'); - -// Redirect warning output to tape. -var consoleWarn = console.warn; -console.warn = common.test.comment; - -common.test.on('end', function () { - console.warn = consoleWarn; -}); - -// default -{ - var e = new events.EventEmitter(); - - for (var i = 0; i < 10; i++) { - e.on('default', common.mustNotCall()); - } - assert.ok(!e._events['default'].hasOwnProperty('warned')); - e.on('default', common.mustNotCall()); - assert.ok(e._events['default'].warned); - - // specific - e.setMaxListeners(5); - for (var i = 0; i < 5; i++) { - e.on('specific', common.mustNotCall()); - } - assert.ok(!e._events['specific'].hasOwnProperty('warned')); - e.on('specific', common.mustNotCall()); - assert.ok(e._events['specific'].warned); - - // only one - e.setMaxListeners(1); - e.on('only one', common.mustNotCall()); - assert.ok(!e._events['only one'].hasOwnProperty('warned')); - e.on('only one', common.mustNotCall()); - assert.ok(e._events['only one'].hasOwnProperty('warned')); - - // unlimited - e.setMaxListeners(0); - for (var i = 0; i < 1000; i++) { - e.on('unlimited', common.mustNotCall()); - } - assert.ok(!e._events['unlimited'].hasOwnProperty('warned')); -} - -// process-wide -{ - events.EventEmitter.defaultMaxListeners = 42; - var e = new events.EventEmitter(); - - for (var i = 0; i < 42; ++i) { - e.on('fortytwo', common.mustNotCall()); - } - assert.ok(!e._events['fortytwo'].hasOwnProperty('warned')); - e.on('fortytwo', common.mustNotCall()); - assert.ok(e._events['fortytwo'].hasOwnProperty('warned')); - delete e._events['fortytwo'].warned; - - events.EventEmitter.defaultMaxListeners = 44; - e.on('fortytwo', common.mustNotCall()); - assert.ok(!e._events['fortytwo'].hasOwnProperty('warned')); - e.on('fortytwo', common.mustNotCall()); - assert.ok(e._events['fortytwo'].hasOwnProperty('warned')); -} - -// but _maxListeners still has precedence over defaultMaxListeners -{ - events.EventEmitter.defaultMaxListeners = 42; - var e = new events.EventEmitter(); - e.setMaxListeners(1); - e.on('uno', common.mustNotCall()); - assert.ok(!e._events['uno'].hasOwnProperty('warned')); - e.on('uno', common.mustNotCall()); - assert.ok(e._events['uno'].hasOwnProperty('warned')); - - // chainable - assert.strictEqual(e, e.setMaxListeners(1)); -} diff --git a/deps/npm/node_modules/events/tests/common.js b/deps/npm/node_modules/events/tests/common.js 
deleted file mode 100644 index 49569b05f59d5a..00000000000000 --- a/deps/npm/node_modules/events/tests/common.js +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -var test = require('tape'); -var assert = require('assert'); - -var noop = function() {}; - -var mustCallChecks = []; - -function runCallChecks(exitCode) { - if (exitCode !== 0) return; - - var failed = filter(mustCallChecks, function(context) { - if ('minimum' in context) { - context.messageSegment = 'at least ' + context.minimum; - return context.actual < context.minimum; - } else { - context.messageSegment = 'exactly ' + context.exact; - return context.actual !== context.exact; - } - }); - - for (var i = 0; i < failed.length; i++) { - var context = failed[i]; - console.log('Mismatched %s function calls. 
Expected %s, actual %d.', - context.name, - context.messageSegment, - context.actual); - // IE8 has no .stack - if (context.stack) console.log(context.stack.split('\n').slice(2).join('\n')); - } - - assert.strictEqual(failed.length, 0); -} - -exports.mustCall = function(fn, exact) { - return _mustCallInner(fn, exact, 'exact'); -}; - -function _mustCallInner(fn, criteria, field) { - if (typeof criteria == 'undefined') criteria = 1; - - if (typeof fn === 'number') { - criteria = fn; - fn = noop; - } else if (fn === undefined) { - fn = noop; - } - - if (typeof criteria !== 'number') - throw new TypeError('Invalid ' + field + ' value: ' + criteria); - - var context = { - actual: 0, - stack: (new Error()).stack, - name: fn.name || '' - }; - - context[field] = criteria; - - // add the exit listener only once to avoid listener leak warnings - if (mustCallChecks.length === 0) test.onFinish(function() { runCallChecks(0); }); - - mustCallChecks.push(context); - - return function() { - context.actual++; - return fn.apply(this, arguments); - }; -} - -exports.mustNotCall = function(msg) { - return function mustNotCall() { - assert.fail(msg || 'function should not have been called'); - }; -}; - -function filter(arr, fn) { - if (arr.filter) return arr.filter(fn); - var filtered = []; - for (var i = 0; i < arr.length; i++) { - if (fn(arr[i], i, arr)) filtered.push(arr[i]); - } - return filtered -} diff --git a/deps/npm/node_modules/events/tests/errors.js b/deps/npm/node_modules/events/tests/errors.js deleted file mode 100644 index a23df437f05d39..00000000000000 --- a/deps/npm/node_modules/events/tests/errors.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict'; -var assert = require('assert'); -var EventEmitter = require('../'); - -var EE = new EventEmitter(); - -assert.throws(function () { - EE.emit('error', 'Accepts a string'); -}, 'Error: Unhandled error. (Accepts a string)'); - -assert.throws(function () { - EE.emit('error', { message: 'Error!' }); -}, 'Unhandled error. 
([object Object])'); diff --git a/deps/npm/node_modules/events/tests/events-list.js b/deps/npm/node_modules/events/tests/events-list.js deleted file mode 100644 index 08aa62177e2c29..00000000000000 --- a/deps/npm/node_modules/events/tests/events-list.js +++ /dev/null @@ -1,28 +0,0 @@ -'use strict'; - -var EventEmitter = require('../'); -var assert = require('assert'); - -var EE = new EventEmitter(); -var m = function() {}; -EE.on('foo', function() {}); -assert.equal(1, EE.eventNames().length); -assert.equal('foo', EE.eventNames()[0]); -EE.on('bar', m); -assert.equal(2, EE.eventNames().length); -assert.equal('foo', EE.eventNames()[0]); -assert.equal('bar', EE.eventNames()[1]); -EE.removeListener('bar', m); -assert.equal(1, EE.eventNames().length); -assert.equal('foo', EE.eventNames()[0]); - -if (typeof Symbol !== 'undefined') { - var s = Symbol('s'); - EE.on(s, m); - assert.equal(2, EE.eventNames().length); - assert.equal('foo', EE.eventNames()[0]); - assert.equal(s, EE.eventNames()[1]); - EE.removeListener(s, m); - assert.equal(1, EE.eventNames().length); - assert.equal('foo', EE.eventNames()[0]); -} diff --git a/deps/npm/node_modules/events/tests/events-once.js b/deps/npm/node_modules/events/tests/events-once.js deleted file mode 100644 index dae864963daae6..00000000000000 --- a/deps/npm/node_modules/events/tests/events-once.js +++ /dev/null @@ -1,234 +0,0 @@ -'use strict'; - -var common = require('./common'); -var EventEmitter = require('../').EventEmitter; -var once = require('../').once; -var has = require('has'); -var assert = require('assert'); - -function Event(type) { - this.type = type; -} - -function EventTargetMock() { - this.events = {}; - - this.addEventListener = common.mustCall(this.addEventListener); - this.removeEventListener = common.mustCall(this.removeEventListener); -} - -EventTargetMock.prototype.addEventListener = function addEventListener(name, listener, options) { - if (!(name in this.events)) { - this.events[name] = { listeners: [], options: options || {} } - } - this.events[name].listeners.push(listener); -}; - -EventTargetMock.prototype.removeEventListener = function removeEventListener(name, callback) { - if (!(name in this.events)) { - return; - } - var event = this.events[name]; - var stack = event.listeners; - - for (var i = 0, l = stack.length; i < l; i++) { - if (stack[i] === callback) { - stack.splice(i, 1); - if (stack.length === 0) { - delete this.events[name]; - } - return; - } - } -}; - -EventTargetMock.prototype.dispatchEvent = function dispatchEvent(arg) { - if (!(arg.type in this.events)) { - return true; - } - - var event = this.events[arg.type]; - var stack = event.listeners.slice(); - - for (var i = 0, l = stack.length; i < l; i++) { - stack[i].call(null, arg); - if (event.options.once) { - this.removeEventListener(arg.type, stack[i]); - } - } - return !arg.defaultPrevented; -}; - -function onceAnEvent() { - var ee = new EventEmitter(); - - process.nextTick(function () { - ee.emit('myevent', 42); - }); - - return once(ee, 'myevent').then(function (args) { - var value = args[0] - assert.strictEqual(value, 42); - assert.strictEqual(ee.listenerCount('error'), 0); - assert.strictEqual(ee.listenerCount('myevent'), 0); - }); -} - -function onceAnEventWithTwoArgs() { - var ee = new EventEmitter(); - - process.nextTick(function () { - ee.emit('myevent', 42, 24); - }); - - return once(ee, 'myevent').then(function (value) { - assert.strictEqual(value.length, 2); - assert.strictEqual(value[0], 42); - assert.strictEqual(value[1], 24); - }); -} - -function 
catchesErrors() { - var ee = new EventEmitter(); - - var expected = new Error('kaboom'); - var err; - process.nextTick(function () { - ee.emit('error', expected); - }); - - return once(ee, 'myevent').then(function () { - throw new Error('should reject') - }, function (err) { - assert.strictEqual(err, expected); - assert.strictEqual(ee.listenerCount('error'), 0); - assert.strictEqual(ee.listenerCount('myevent'), 0); - }); -} - -function stopListeningAfterCatchingError() { - var ee = new EventEmitter(); - - var expected = new Error('kaboom'); - var err; - process.nextTick(function () { - ee.emit('error', expected); - ee.emit('myevent', 42, 24); - }); - - // process.on('multipleResolves', common.mustNotCall()); - - return once(ee, 'myevent').then(common.mustNotCall, function (err) { - // process.removeAllListeners('multipleResolves'); - assert.strictEqual(err, expected); - assert.strictEqual(ee.listenerCount('error'), 0); - assert.strictEqual(ee.listenerCount('myevent'), 0); - }); -} - -function onceError() { - var ee = new EventEmitter(); - - var expected = new Error('kaboom'); - process.nextTick(function () { - ee.emit('error', expected); - }); - - var promise = once(ee, 'error'); - assert.strictEqual(ee.listenerCount('error'), 1); - return promise.then(function (args) { - var err = args[0] - assert.strictEqual(err, expected); - assert.strictEqual(ee.listenerCount('error'), 0); - assert.strictEqual(ee.listenerCount('myevent'), 0); - }); -} - -function onceWithEventTarget() { - var et = new EventTargetMock(); - var event = new Event('myevent'); - process.nextTick(function () { - et.dispatchEvent(event); - }); - return once(et, 'myevent').then(function (args) { - var value = args[0]; - assert.strictEqual(value, event); - assert.strictEqual(has(et.events, 'myevent'), false); - }); -} - -function onceWithEventTargetError() { - var et = new EventTargetMock(); - var error = new Event('error'); - process.nextTick(function () { - et.dispatchEvent(error); - }); - return once(et, 'error').then(function (args) { - var err = args[0]; - assert.strictEqual(err, error); - assert.strictEqual(has(et.events, 'error'), false); - }); -} - -function prioritizesEventEmitter() { - var ee = new EventEmitter(); - ee.addEventListener = assert.fail; - ee.removeAllListeners = assert.fail; - process.nextTick(function () { - ee.emit('foo'); - }); - return once(ee, 'foo'); -} - -var allTests = [ - onceAnEvent(), - onceAnEventWithTwoArgs(), - catchesErrors(), - stopListeningAfterCatchingError(), - onceError(), - onceWithEventTarget(), - onceWithEventTargetError(), - prioritizesEventEmitter() -]; - -var hasBrowserEventTarget = false; -try { - hasBrowserEventTarget = typeof (new window.EventTarget().addEventListener) === 'function' && - new window.Event('xyz').type === 'xyz'; -} catch (err) {} - -if (hasBrowserEventTarget) { - var onceWithBrowserEventTarget = function onceWithBrowserEventTarget() { - var et = new window.EventTarget(); - var event = new window.Event('myevent'); - process.nextTick(function () { - et.dispatchEvent(event); - }); - return once(et, 'myevent').then(function (args) { - var value = args[0]; - assert.strictEqual(value, event); - assert.strictEqual(has(et.events, 'myevent'), false); - }); - } - - var onceWithBrowserEventTargetError = function onceWithBrowserEventTargetError() { - var et = new window.EventTarget(); - var error = new window.Event('error'); - process.nextTick(function () { - et.dispatchEvent(error); - }); - return once(et, 'error').then(function (args) { - var err = args[0]; - 
assert.strictEqual(err, error); - assert.strictEqual(has(et.events, 'error'), false); - }); - } - - common.test.comment('Testing with browser built-in EventTarget'); - allTests.push([ - onceWithBrowserEventTarget(), - onceWithBrowserEventTargetError() - ]); -} - -module.exports = Promise.all(allTests); diff --git a/deps/npm/node_modules/events/tests/index.js b/deps/npm/node_modules/events/tests/index.js deleted file mode 100644 index 2d739e670ca028..00000000000000 --- a/deps/npm/node_modules/events/tests/index.js +++ /dev/null @@ -1,64 +0,0 @@ -var test = require('tape'); -var functionsHaveNames = require('functions-have-names'); -var hasSymbols = require('has-symbols'); - -require('./legacy-compat'); -var common = require('./common'); - -// we do this to easily wrap each file in a mocha test -// and also have browserify be able to statically analyze this file -var orig_require = require; -var require = function(file) { - test(file, function(t) { - // Store the tape object so tests can access it. - t.on('end', function () { delete common.test; }); - common.test = t; - - try { - var exp = orig_require(file); - if (exp && exp.then) { - exp.then(function () { t.end(); }, t.fail); - return; - } - } catch (err) { - t.fail(err); - } - t.end(); - }); -}; - -require('./add-listeners.js'); -require('./check-listener-leaks.js'); -require('./errors.js'); -require('./events-list.js'); -if (typeof Promise === 'function') { - require('./events-once.js'); -} else { - // Promise support is not available. - test('./events-once.js', { skip: true }, function () {}); -} -require('./listener-count.js'); -require('./listeners-side-effects.js'); -require('./listeners.js'); -require('./max-listeners.js'); -if (functionsHaveNames()) { - require('./method-names.js'); -} else { - // Function.name is not supported in IE - test('./method-names.js', { skip: true }, function () {}); -} -require('./modify-in-emit.js'); -require('./num-args.js'); -require('./once.js'); -require('./prepend.js'); -require('./set-max-listeners-side-effects.js'); -require('./special-event-names.js'); -require('./subclass.js'); -if (hasSymbols()) { - require('./symbols.js'); -} else { - // Symbol is not available. - test('./symbols.js', { skip: true }, function () {}); -} -require('./remove-all-listeners.js'); -require('./remove-listeners.js'); diff --git a/deps/npm/node_modules/events/tests/legacy-compat.js b/deps/npm/node_modules/events/tests/legacy-compat.js deleted file mode 100644 index a402be6e2f42d1..00000000000000 --- a/deps/npm/node_modules/events/tests/legacy-compat.js +++ /dev/null @@ -1,16 +0,0 @@ -// sigh... life is hard -if (!global.console) { - console = {} -} - -var fns = ['log', 'error', 'trace']; -for (var i=0 ; ifoo should not be emitted'); -} - -e.once('foo', remove); -e.removeListener('foo', remove); -e.emit('foo'); - -e.once('e', common.mustCall(function() { - e.emit('e'); -})); - -e.once('e', common.mustCall()); - -e.emit('e'); - -// Verify that the listener must be a function -assert.throws(function() { - var ee = new EventEmitter(); - - ee.once('foo', null); -}, /^TypeError: The "listener" argument must be of type Function. Received type object$/); - -{ - // once() has different code paths based on the number of arguments being - // emitted. Verify that all of the cases are covered. 
- var maxArgs = 4; - - for (var i = 0; i <= maxArgs; ++i) { - var ee = new EventEmitter(); - var args = ['foo']; - - for (var j = 0; j < i; ++j) - args.push(j); - - ee.once('foo', common.mustCall(function() { - var params = Array.prototype.slice.call(arguments); - var restArgs = args.slice(1); - assert.ok(Array.isArray(params)); - assert.strictEqual(params.length, restArgs.length); - for (var index = 0; index < params.length; index++) { - var param = params[index]; - assert.strictEqual(param, restArgs[index]); - } - })); - - EventEmitter.prototype.emit.apply(ee, args); - } -} diff --git a/deps/npm/node_modules/events/tests/prepend.js b/deps/npm/node_modules/events/tests/prepend.js deleted file mode 100644 index 79afde0bf3971c..00000000000000 --- a/deps/npm/node_modules/events/tests/prepend.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict'; - -var common = require('./common'); -var EventEmitter = require('../'); -var assert = require('assert'); - -var myEE = new EventEmitter(); -var m = 0; -// This one comes last. -myEE.on('foo', common.mustCall(function () { - assert.strictEqual(m, 2); -})); - -// This one comes second. -myEE.prependListener('foo', common.mustCall(function () { - assert.strictEqual(m++, 1); -})); - -// This one comes first. -myEE.prependOnceListener('foo', - common.mustCall(function () { - assert.strictEqual(m++, 0); - })); - -myEE.emit('foo'); - -// Verify that the listener must be a function -assert.throws(function () { - var ee = new EventEmitter(); - ee.prependOnceListener('foo', null); -}, 'TypeError: The "listener" argument must be of type Function. Received type object'); diff --git a/deps/npm/node_modules/events/tests/remove-all-listeners.js b/deps/npm/node_modules/events/tests/remove-all-listeners.js deleted file mode 100644 index 622941cfa604c0..00000000000000 --- a/deps/npm/node_modules/events/tests/remove-all-listeners.js +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -var common = require('./common'); -var assert = require('assert'); -var events = require('../'); -var test = require('tape'); - -function expect(expected) { - var actual = []; - test.onFinish(function() { - var sortedActual = actual.sort(); - var sortedExpected = expected.sort(); - assert.strictEqual(sortedActual.length, sortedExpected.length); - for (var index = 0; index < sortedActual.length; index++) { - var value = sortedActual[index]; - assert.strictEqual(value, sortedExpected[index]); - } - }); - function listener(name) { - actual.push(name); - } - return common.mustCall(listener, expected.length); -} - -{ - var ee = new events.EventEmitter(); - var noop = common.mustNotCall(); - ee.on('foo', noop); - ee.on('bar', noop); - ee.on('baz', noop); - ee.on('baz', noop); - var fooListeners = ee.listeners('foo'); - var barListeners = ee.listeners('bar'); - var bazListeners = ee.listeners('baz'); - ee.on('removeListener', expect(['bar', 'baz', 'baz'])); - ee.removeAllListeners('bar'); - ee.removeAllListeners('baz'); - - var listeners = ee.listeners('foo'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 1); - assert.strictEqual(listeners[0], noop); - - listeners = ee.listeners('bar'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); - listeners = ee.listeners('baz'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); - // After calling removeAllListeners(), - // the old listeners array should stay unchanged. - assert.strictEqual(fooListeners.length, 1); - assert.strictEqual(fooListeners[0], noop); - assert.strictEqual(barListeners.length, 1); - assert.strictEqual(barListeners[0], noop); - assert.strictEqual(bazListeners.length, 2); - assert.strictEqual(bazListeners[0], noop); - assert.strictEqual(bazListeners[1], noop); - // After calling removeAllListeners(), - // new listeners arrays is different from the old. - assert.notStrictEqual(ee.listeners('bar'), barListeners); - assert.notStrictEqual(ee.listeners('baz'), bazListeners); -} - -{ - var ee = new events.EventEmitter(); - ee.on('foo', common.mustNotCall()); - ee.on('bar', common.mustNotCall()); - // Expect LIFO order - ee.on('removeListener', expect(['foo', 'bar', 'removeListener'])); - ee.on('removeListener', expect(['foo', 'bar'])); - ee.removeAllListeners(); - - var listeners = ee.listeners('foo'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); - listeners = ee.listeners('bar'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); -} - -{ - var ee = new events.EventEmitter(); - ee.on('removeListener', common.mustNotCall()); - // Check for regression where removeAllListeners() throws when - // there exists a 'removeListener' listener, but there exists - // no listeners for the provided event type. 
- assert.doesNotThrow(function () { ee.removeAllListeners(ee, 'foo') }); -} - -{ - var ee = new events.EventEmitter(); - var expectLength = 2; - ee.on('removeListener', function() { - assert.strictEqual(expectLength--, this.listeners('baz').length); - }); - ee.on('baz', common.mustNotCall()); - ee.on('baz', common.mustNotCall()); - ee.on('baz', common.mustNotCall()); - assert.strictEqual(ee.listeners('baz').length, expectLength + 1); - ee.removeAllListeners('baz'); - assert.strictEqual(ee.listeners('baz').length, 0); -} - -{ - var ee = new events.EventEmitter(); - assert.strictEqual(ee, ee.removeAllListeners()); -} - -{ - var ee = new events.EventEmitter(); - ee._events = undefined; - assert.strictEqual(ee, ee.removeAllListeners()); -} diff --git a/deps/npm/node_modules/events/tests/remove-listeners.js b/deps/npm/node_modules/events/tests/remove-listeners.js deleted file mode 100644 index 18e4d1651fa254..00000000000000 --- a/deps/npm/node_modules/events/tests/remove-listeners.js +++ /dev/null @@ -1,212 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -var common = require('./common'); -var assert = require('assert'); -var EventEmitter = require('../'); - -var listener1 = function listener1() {}; -var listener2 = function listener2() {}; - -{ - var ee = new EventEmitter(); - ee.on('hello', listener1); - ee.on('removeListener', common.mustCall(function(name, cb) { - assert.strictEqual(name, 'hello'); - assert.strictEqual(cb, listener1); - })); - ee.removeListener('hello', listener1); - var listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); -} - -{ - var ee = new EventEmitter(); - ee.on('hello', listener1); - ee.on('removeListener', common.mustNotCall()); - ee.removeListener('hello', listener2); - - var listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 1); - assert.strictEqual(listeners[0], listener1); -} - -{ - var ee = new EventEmitter(); - ee.on('hello', listener1); - ee.on('hello', listener2); - - var listeners; - ee.once('removeListener', common.mustCall(function(name, cb) { - assert.strictEqual(name, 'hello'); - assert.strictEqual(cb, listener1); - listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 1); - assert.strictEqual(listeners[0], listener2); - })); - ee.removeListener('hello', listener1); - listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 1); - assert.strictEqual(listeners[0], listener2); - ee.once('removeListener', common.mustCall(function(name, cb) { - assert.strictEqual(name, 'hello'); - assert.strictEqual(cb, listener2); - listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); - })); - ee.removeListener('hello', listener2); - listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); -} - -{ - var ee = new EventEmitter(); - - function remove1() { - assert.fail('remove1 should not have been called'); - } - - function remove2() { - assert.fail('remove2 should not have been called'); - } - - ee.on('removeListener', common.mustCall(function(name, cb) { - if (cb !== remove1) return; - this.removeListener('quux', remove2); - this.emit('quux'); - }, 2)); - ee.on('quux', remove1); - ee.on('quux', remove2); - ee.removeListener('quux', remove1); -} - -{ - var ee = new EventEmitter(); - ee.on('hello', listener1); - ee.on('hello', listener2); - - var listeners; - ee.once('removeListener', common.mustCall(function(name, cb) { - assert.strictEqual(name, 'hello'); - assert.strictEqual(cb, listener1); - listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 1); - assert.strictEqual(listeners[0], listener2); - ee.once('removeListener', common.mustCall(function(name, cb) { - assert.strictEqual(name, 'hello'); - assert.strictEqual(cb, listener2); - listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); - })); - ee.removeListener('hello', listener2); - listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); - })); - ee.removeListener('hello', listener1); - listeners = ee.listeners('hello'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 0); -} - -{ - var ee = new EventEmitter(); - var listener3 = common.mustCall(function() { - ee.removeListener('hello', 
listener4); - }, 2); - var listener4 = common.mustCall(); - - ee.on('hello', listener3); - ee.on('hello', listener4); - - // listener4 will still be called although it is removed by listener 3. - ee.emit('hello'); - // This is so because the interal listener array at time of emit - // was [listener3,listener4] - - // Interal listener array [listener3] - ee.emit('hello'); -} - -{ - var ee = new EventEmitter(); - - ee.once('hello', listener1); - ee.on('removeListener', common.mustCall(function(eventName, listener) { - assert.strictEqual(eventName, 'hello'); - assert.strictEqual(listener, listener1); - })); - ee.emit('hello'); -} - -{ - var ee = new EventEmitter(); - - assert.strictEqual(ee, ee.removeListener('foo', function() {})); -} - -// Verify that the removed listener must be a function -assert.throws(function() { - var ee = new EventEmitter(); - - ee.removeListener('foo', null); -}, /^TypeError: The "listener" argument must be of type Function\. Received type object$/); - -{ - var ee = new EventEmitter(); - var listener = function() {}; - ee._events = undefined; - var e = ee.removeListener('foo', listener); - assert.strictEqual(e, ee); -} - -{ - var ee = new EventEmitter(); - - ee.on('foo', listener1); - ee.on('foo', listener2); - var listeners = ee.listeners('foo'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 2); - assert.strictEqual(listeners[0], listener1); - assert.strictEqual(listeners[1], listener2); - - ee.removeListener('foo', listener1); - assert.strictEqual(ee._events.foo, listener2); - - ee.on('foo', listener1); - listeners = ee.listeners('foo'); - assert.ok(Array.isArray(listeners)); - assert.strictEqual(listeners.length, 2); - assert.strictEqual(listeners[0], listener2); - assert.strictEqual(listeners[1], listener1); - - ee.removeListener('foo', listener1); - assert.strictEqual(ee._events.foo, listener2); -} diff --git a/deps/npm/node_modules/events/tests/set-max-listeners-side-effects.js b/deps/npm/node_modules/events/tests/set-max-listeners-side-effects.js deleted file mode 100644 index 13dbb671e90242..00000000000000 --- a/deps/npm/node_modules/events/tests/set-max-listeners-side-effects.js +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -require('./common'); -var assert = require('assert'); -var events = require('../'); - -var e = new events.EventEmitter(); - -if (Object.create) assert.ok(!(e._events instanceof Object)); -assert.strictEqual(Object.keys(e._events).length, 0); -e.setMaxListeners(5); -assert.strictEqual(Object.keys(e._events).length, 0); diff --git a/deps/npm/node_modules/events/tests/special-event-names.js b/deps/npm/node_modules/events/tests/special-event-names.js deleted file mode 100644 index a2f0b744a706c9..00000000000000 --- a/deps/npm/node_modules/events/tests/special-event-names.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict'; - -var common = require('./common'); -var EventEmitter = require('../'); -var assert = require('assert'); - -var ee = new EventEmitter(); -var handler = function() {}; - -assert.strictEqual(ee.eventNames().length, 0); - -assert.strictEqual(ee._events.hasOwnProperty, undefined); -assert.strictEqual(ee._events.toString, undefined); - -ee.on('__defineGetter__', handler); -ee.on('toString', handler); -ee.on('__proto__', handler); - -assert.strictEqual(ee.eventNames()[0], '__defineGetter__'); -assert.strictEqual(ee.eventNames()[1], 'toString'); - -assert.strictEqual(ee.listeners('__defineGetter__').length, 1); -assert.strictEqual(ee.listeners('__defineGetter__')[0], handler); -assert.strictEqual(ee.listeners('toString').length, 1); -assert.strictEqual(ee.listeners('toString')[0], handler); - -// Only run __proto__ tests if that property can actually be set -if ({ __proto__: 'ok' }.__proto__ === 'ok') { - assert.strictEqual(ee.eventNames().length, 3); - assert.strictEqual(ee.eventNames()[2], '__proto__'); - assert.strictEqual(ee.listeners('__proto__').length, 1); - assert.strictEqual(ee.listeners('__proto__')[0], handler); - - ee.on('__proto__', common.mustCall(function(val) { - assert.strictEqual(val, 1); - })); - ee.emit('__proto__', 1); - - process.on('__proto__', common.mustCall(function(val) { - assert.strictEqual(val, 1); - })); - process.emit('__proto__', 1); -} else { - console.log('# skipped __proto__') -} diff --git a/deps/npm/node_modules/events/tests/subclass.js b/deps/npm/node_modules/events/tests/subclass.js deleted file mode 100644 index bd033fff4d2669..00000000000000 --- a/deps/npm/node_modules/events/tests/subclass.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
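For context, the subclass.js test deleted next guards against listener state leaking between instances when EventEmitter is used as a base class (its final assertion checks that a listener added on one instance never shows up on another). A minimal sketch of that property, using modern class syntax instead of the util.inherits pattern in the deleted test; the MyEmitter name is illustrative only:

  'use strict';
  const { EventEmitter } = require('events');

  // Each subclass instance must keep its own listener table.
  class MyEmitter extends EventEmitter {}

  const a = new MyEmitter();
  const b = new MyEmitter();
  a.on('x', () => {});

  console.log(a.listenerCount('x')); // 1
  console.log(b.listenerCount('x')); // 0, no cross-instance leakage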
- -var common = require('./common'); -var test = require('tape'); -var assert = require('assert'); -var EventEmitter = require('../').EventEmitter; -var util = require('util'); - -util.inherits(MyEE, EventEmitter); - -function MyEE(cb) { - this.once(1, cb); - this.emit(1); - this.removeAllListeners(); - EventEmitter.call(this); -} - -var myee = new MyEE(common.mustCall()); - - -util.inherits(ErrorEE, EventEmitter); -function ErrorEE() { - this.emit('error', new Error('blerg')); -} - -assert.throws(function() { - new ErrorEE(); -}, /blerg/); - -test.onFinish(function() { - assert.ok(!(myee._events instanceof Object)); - assert.strictEqual(Object.keys(myee._events).length, 0); -}); - - -function MyEE2() { - EventEmitter.call(this); -} - -MyEE2.prototype = new EventEmitter(); - -var ee1 = new MyEE2(); -var ee2 = new MyEE2(); - -ee1.on('x', function() {}); - -assert.strictEqual(ee2.listenerCount('x'), 0); diff --git a/deps/npm/node_modules/events/tests/symbols.js b/deps/npm/node_modules/events/tests/symbols.js deleted file mode 100644 index 0721f0ec0b5d6e..00000000000000 --- a/deps/npm/node_modules/events/tests/symbols.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict'; - -var common = require('./common'); -var EventEmitter = require('../'); -var assert = require('assert'); - -var ee = new EventEmitter(); -var foo = Symbol('foo'); -var listener = common.mustCall(); - -ee.on(foo, listener); -assert.strictEqual(ee.listeners(foo).length, 1); -assert.strictEqual(ee.listeners(foo)[0], listener); - -ee.emit(foo); - -ee.removeAllListeners(); -assert.strictEqual(ee.listeners(foo).length, 0); - -ee.on(foo, listener); -assert.strictEqual(ee.listeners(foo).length, 1); -assert.strictEqual(ee.listeners(foo)[0], listener); - -ee.removeListener(foo, listener); -assert.strictEqual(ee.listeners(foo).length, 0); diff --git a/deps/npm/node_modules/ieee754/LICENSE b/deps/npm/node_modules/ieee754/LICENSE deleted file mode 100644 index 5aac82c78c2d99..00000000000000 --- a/deps/npm/node_modules/ieee754/LICENSE +++ /dev/null @@ -1,11 +0,0 @@ -Copyright 2008 Fair Oaks Labs, Inc. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
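The ieee754 helpers deleted next expose read(buffer, offset, isLE, mLen, nBytes) and write(buffer, value, offset, isLE, mLen, nBytes). A minimal round-trip sketch; the mLen/nBytes pair used here is the standard float64 layout (52 mantissa bits in 8 bytes) and the value is illustrative, neither comes from this change:

  'use strict';
  const ieee754 = require('ieee754');

  // Encode a little-endian double into an 8-byte buffer, then read it back.
  const buf = Buffer.alloc(8);
  ieee754.write(buf, 3.14159, 0, true, 52, 8);
  const value = ieee754.read(buf, 0, true, 52, 8);
  console.log(value); // 3.14159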
diff --git a/deps/npm/node_modules/ieee754/index.js b/deps/npm/node_modules/ieee754/index.js deleted file mode 100644 index 81d26c343c93dc..00000000000000 --- a/deps/npm/node_modules/ieee754/index.js +++ /dev/null @@ -1,85 +0,0 @@ -/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh */ -exports.read = function (buffer, offset, isLE, mLen, nBytes) { - var e, m - var eLen = (nBytes * 8) - mLen - 1 - var eMax = (1 << eLen) - 1 - var eBias = eMax >> 1 - var nBits = -7 - var i = isLE ? (nBytes - 1) : 0 - var d = isLE ? -1 : 1 - var s = buffer[offset + i] - - i += d - - e = s & ((1 << (-nBits)) - 1) - s >>= (-nBits) - nBits += eLen - for (; nBits > 0; e = (e * 256) + buffer[offset + i], i += d, nBits -= 8) {} - - m = e & ((1 << (-nBits)) - 1) - e >>= (-nBits) - nBits += mLen - for (; nBits > 0; m = (m * 256) + buffer[offset + i], i += d, nBits -= 8) {} - - if (e === 0) { - e = 1 - eBias - } else if (e === eMax) { - return m ? NaN : ((s ? -1 : 1) * Infinity) - } else { - m = m + Math.pow(2, mLen) - e = e - eBias - } - return (s ? -1 : 1) * m * Math.pow(2, e - mLen) -} - -exports.write = function (buffer, value, offset, isLE, mLen, nBytes) { - var e, m, c - var eLen = (nBytes * 8) - mLen - 1 - var eMax = (1 << eLen) - 1 - var eBias = eMax >> 1 - var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0) - var i = isLE ? 0 : (nBytes - 1) - var d = isLE ? 1 : -1 - var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0 - - value = Math.abs(value) - - if (isNaN(value) || value === Infinity) { - m = isNaN(value) ? 1 : 0 - e = eMax - } else { - e = Math.floor(Math.log(value) / Math.LN2) - if (value * (c = Math.pow(2, -e)) < 1) { - e-- - c *= 2 - } - if (e + eBias >= 1) { - value += rt / c - } else { - value += rt * Math.pow(2, 1 - eBias) - } - if (value * c >= 2) { - e++ - c /= 2 - } - - if (e + eBias >= eMax) { - m = 0 - e = eMax - } else if (e + eBias >= 1) { - m = ((value * c) - 1) * Math.pow(2, mLen) - e = e + eBias - } else { - m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen) - e = 0 - } - } - - for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {} - - e = (e << mLen) | m - eLen += mLen - for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {} - - buffer[offset + i - d] |= s * 128 -} diff --git a/deps/npm/node_modules/ieee754/package.json b/deps/npm/node_modules/ieee754/package.json deleted file mode 100644 index 7b23851384185c..00000000000000 --- a/deps/npm/node_modules/ieee754/package.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "name": "ieee754", - "description": "Read/write IEEE754 floating point numbers from/to a Buffer or array-like object", - "version": "1.2.1", - "author": { - "name": "Feross Aboukhadijeh", - "email": "feross@feross.org", - "url": "https://feross.org" - }, - "contributors": [ - "Romain Beauxis " - ], - "devDependencies": { - "airtap": "^3.0.0", - "standard": "*", - "tape": "^5.0.1" - }, - "keywords": [ - "IEEE 754", - "buffer", - "convert", - "floating point", - "ieee754" - ], - "license": "BSD-3-Clause", - "main": "index.js", - "types": "index.d.ts", - "repository": { - "type": "git", - "url": "git://github.com/feross/ieee754.git" - }, - "scripts": { - "test": "standard && npm run test-node && npm run test-browser", - "test-browser": "airtap -- test/*.js", - "test-browser-local": "airtap --local -- test/*.js", - "test-node": "tape test/*.js" - }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - 
{ - "type": "consulting", - "url": "https://feross.org/support" - } - ] -} diff --git a/deps/npm/node_modules/ignore-walk/lib/index.js b/deps/npm/node_modules/ignore-walk/lib/index.js index 40a0726c3257f1..ad3aecc7389c69 100644 --- a/deps/npm/node_modules/ignore-walk/lib/index.js +++ b/deps/npm/node_modules/ignore-walk/lib/index.js @@ -22,6 +22,7 @@ class Walker extends EE { this.result = this.parent ? this.parent.result : new Set() this.entries = null this.sawError = false + this.exact = opts.exact } sort (a, b) { @@ -164,7 +165,7 @@ class Walker extends EE { } else { // is a directory if (dir) { - this.walker(entry, { isSymbolicLink }, then) + this.walker(entry, { isSymbolicLink, exact: file || this.filterEntry(entry + '/') }, then) } else { then() } @@ -208,15 +209,19 @@ class Walker extends EE { new Walker(this.walkerOpt(entry, opts)).on('done', then).start() } - filterEntry (entry, partial) { + filterEntry (entry, partial, entryBasename) { let included = true // this = /a/b/c // entry = d // parent /a/b sees c/d if (this.parent && this.parent.filterEntry) { - var pt = this.basename + '/' + entry - included = this.parent.filterEntry(pt, partial) + const parentEntry = this.basename + '/' + entry + const parentBasename = entryBasename || entry + included = this.parent.filterEntry(parentEntry, partial, parentBasename) + if (!included && !this.exact) { + return false + } } this.ignoreFiles.forEach(f => { @@ -226,17 +231,28 @@ class Walker extends EE { // so if it's negated, and already included, no need to check // likewise if it's neither negated nor included if (rule.negate !== included) { + const isRelativeRule = entryBasename && rule.globParts.some(part => + part.length <= (part.slice(-1)[0] ? 1 : 2) + ) + // first, match against /foo/bar // then, against foo/bar // then, in the case of partials, match with a / + // then, if also the rule is relative, match against basename const match = rule.match('/' + entry) || rule.match(entry) || - (!!partial && ( + !!partial && ( rule.match('/' + entry + '/') || - rule.match(entry + '/'))) || - (!!partial && rule.negate && ( - rule.match('/' + entry, true) || - rule.match(entry, true))) + rule.match(entry + '/') || + rule.negate && ( + rule.match('/' + entry, true) || + rule.match(entry, true)) || + isRelativeRule && ( + rule.match('/' + entryBasename + '/') || + rule.match(entryBasename + '/') || + rule.negate && ( + rule.match('/' + entryBasename, true) || + rule.match(entryBasename, true)))) if (match) { included = rule.negate diff --git a/deps/npm/node_modules/ignore-walk/package.json b/deps/npm/node_modules/ignore-walk/package.json index 4d9752ae25127c..cebd4795f953af 100644 --- a/deps/npm/node_modules/ignore-walk/package.json +++ b/deps/npm/node_modules/ignore-walk/package.json @@ -1,11 +1,11 @@ { "name": "ignore-walk", - "version": "6.0.3", + "version": "6.0.4", "description": "Nested/recursive `.gitignore`/`.npmignore` parsing and filtering.", "main": "lib/index.js", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.19.0", "mutate-fs": "^2.1.1", "tap": "^16.0.1" }, @@ -56,7 +56,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.14.1", + "version": "4.19.0", "content": "scripts/template-oss", "publish": "true" } diff --git a/deps/npm/node_modules/json-parse-even-better-errors/lib/index.js b/deps/npm/node_modules/json-parse-even-better-errors/lib/index.js index 2b9f3c2bf1d602..c21dd645a778bd 100644 --- a/deps/npm/node_modules/json-parse-even-better-errors/lib/index.js +++ b/deps/npm/node_modules/json-parse-even-better-errors/lib/index.js @@ -1,55 +1,78 @@ 'use strict' -const hexify = char => { +const INDENT = Symbol.for('indent') +const NEWLINE = Symbol.for('newline') + +const DEFAULT_NEWLINE = '\n' +const DEFAULT_INDENT = ' ' +const BOM = /^\uFEFF/ + +// only respect indentation if we got a line break, otherwise squash it +// things other than objects and arrays aren't indented, so ignore those +// Important: in both of these regexps, the $1 capture group is the newline +// or undefined, and the $2 capture group is the indent, or undefined. +const FORMAT = /^\s*[{[]((?:\r?\n)+)([\s\t]*)/ +const EMPTY = /^(?:\{\}|\[\])((?:\r?\n)+)?$/ + +// Node 20 puts single quotes around the token and a comma after it +const UNEXPECTED_TOKEN = /^Unexpected token '?(.)'?(,)? /i + +const hexify = (char) => { const h = char.charCodeAt(0).toString(16).toUpperCase() - return '0x' + (h.length % 2 ? '0' : '') + h + return `0x${h.length % 2 ? '0' : ''}${h}` } -const parseError = (e, txt, context) => { +// Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) +// because the buffer-to-string conversion in `fs.readFileSync()` +// translates it to FEFF, the UTF-16 BOM. +const stripBOM = (txt) => String(txt).replace(BOM, '') + +const makeParsedError = (msg, parsing, position = 0) => ({ + message: `${msg} while parsing ${parsing}`, + position, +}) + +const parseError = (e, txt, context = 20) => { + let msg = e.message + if (!txt) { - return { - message: e.message + ' while parsing empty string', - position: 0, - } + return makeParsedError(msg, 'empty string') } - const badToken = e.message.match(/^Unexpected token (.) .*position\s+(\d+)/i) - const errIdx = badToken ? +badToken[2] - : e.message.match(/^Unexpected end of JSON.*/i) ? txt.length - 1 - : null - const msg = badToken ? e.message.replace(/^Unexpected token ./, `Unexpected token ${ - JSON.stringify(badToken[1]) - } (${hexify(badToken[1])})`) - : e.message + const badTokenMatch = msg.match(UNEXPECTED_TOKEN) + const badIndexMatch = msg.match(/ position\s+(\d+)/i) - if (errIdx !== null && errIdx !== undefined) { - const start = errIdx <= context ? 0 - : errIdx - context + if (badTokenMatch) { + msg = msg.replace( + UNEXPECTED_TOKEN, + `Unexpected token ${JSON.stringify(badTokenMatch[1])} (${hexify(badTokenMatch[1])})$2 ` + ) + } - const end = errIdx + context >= txt.length ? txt.length - : errIdx + context + let errIdx + if (badIndexMatch) { + errIdx = +badIndexMatch[1] + } else if (msg.match(/^Unexpected end of JSON.*/i)) { + errIdx = txt.length - 1 + } - const slice = (start === 0 ? '' : '...') + - txt.slice(start, end) + - (end === txt.length ? '' : '...') + if (errIdx == null) { + return makeParsedError(msg, `'${txt.slice(0, context * 2)}'`) + } - const near = txt === slice ? '' : 'near ' + const start = errIdx <= context ? 0 : errIdx - context + const end = errIdx + context >= txt.length ? txt.length : errIdx + context + const slice = `${start ? '...' : ''}${txt.slice(start, end)}${end === txt.length ? 
'' : '...'}` - return { - message: msg + ` while parsing ${near}${JSON.stringify(slice)}`, - position: errIdx, - } - } else { - return { - message: msg + ` while parsing '${txt.slice(0, context * 2)}'`, - position: 0, - } - } + return makeParsedError( + msg, + `${txt === slice ? '' : 'near '}${JSON.stringify(slice)}`, + errIdx + ) } class JSONParseError extends SyntaxError { constructor (er, txt, context, caller) { - context = context || 20 const metadata = parseError(er, txt, context) super(metadata.message) Object.assign(this, metadata) @@ -63,67 +86,50 @@ class JSONParseError extends SyntaxError { } set name (n) {} + get [Symbol.toStringTag] () { return this.constructor.name } } -const kIndent = Symbol.for('indent') -const kNewline = Symbol.for('newline') -// only respect indentation if we got a line break, otherwise squash it -// things other than objects and arrays aren't indented, so ignore those -// Important: in both of these regexps, the $1 capture group is the newline -// or undefined, and the $2 capture group is the indent, or undefined. -const formatRE = /^\s*[{[]((?:\r?\n)+)([\s\t]*)/ -const emptyRE = /^(?:\{\}|\[\])((?:\r?\n)+)?$/ - -const parseJson = (txt, reviver, context) => { - const parseText = stripBOM(txt) - context = context || 20 - try { +const parseJson = (txt, reviver) => { + const result = JSON.parse(txt, reviver) + if (result && typeof result === 'object') { // get the indentation so that we can save it back nicely // if the file starts with {" then we have an indent of '', ie, none - // otherwise, pick the indentation of the next line after the first \n - // If the pattern doesn't match, then it means no indentation. - // JSON.stringify ignores symbols, so this is reasonably safe. - // if the string is '{}' or '[]', then use the default 2-space indent. - const [, newline = '\n', indent = ' '] = parseText.match(emptyRE) || - parseText.match(formatRE) || - [null, '', ''] - - const result = JSON.parse(parseText, reviver) - if (result && typeof result === 'object') { - result[kNewline] = newline - result[kIndent] = indent - } - return result + // otherwise, pick the indentation of the next line after the first \n If the + // pattern doesn't match, then it means no indentation. JSON.stringify ignores + // symbols, so this is reasonably safe. if the string is '{}' or '[]', then + // use the default 2-space indent. + const match = txt.match(EMPTY) || txt.match(FORMAT) || [null, '', ''] + result[NEWLINE] = match[1] ?? DEFAULT_NEWLINE + result[INDENT] = match[2] ?? DEFAULT_INDENT + } + return result +} + +const parseJsonError = (raw, reviver, context) => { + const txt = stripBOM(raw) + try { + return parseJson(txt, reviver) } catch (e) { - if (typeof txt !== 'string' && !Buffer.isBuffer(txt)) { - const isEmptyArray = Array.isArray(txt) && txt.length === 0 - throw Object.assign(new TypeError( - `Cannot parse ${isEmptyArray ? 'an empty array' : String(txt)}` - ), { - code: 'EJSONPARSE', - systemError: e, - }) + if (typeof raw !== 'string' && !Buffer.isBuffer(raw)) { + const msg = Array.isArray(raw) && raw.length === 0 ? 'an empty array' : String(raw) + throw Object.assign( + new TypeError(`Cannot parse ${msg}`), + { code: 'EJSONPARSE', systemError: e } + ) } - - throw new JSONParseError(e, parseText, context, parseJson) + throw new JSONParseError(e, txt, context, parseJsonError) } } -// Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) -// because the buffer-to-string conversion in `fs.readFileSync()` -// translates it to FEFF, the UTF-16 BOM. 
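Before and after this json-parse-even-better-errors rewrite, the module's external behavior is the same: parsed objects carry their original formatting on Symbol.for('indent') and Symbol.for('newline'), and invalid input throws a JSONParseError (a SyntaxError subclass) carrying a numeric position. A small usage sketch with illustrative input strings:

  'use strict';
  const parseJson = require('json-parse-even-better-errors');

  // Formatting of the source text is preserved on well-known symbols.
  const data = parseJson('{\n  "name": "demo"\n}\n');
  console.log(data[Symbol.for('indent')] === '  ');   // true
  console.log(data[Symbol.for('newline')] === '\n');  // true

  // Malformed input raises a richer error than plain JSON.parse.
  try {
    parseJson('{"name": }');
  } catch (er) {
    console.log(er instanceof SyntaxError, typeof er.position); // true 'number'
  }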
-const stripBOM = txt => String(txt).replace(/^\uFEFF/, '') - -module.exports = parseJson -parseJson.JSONParseError = JSONParseError - -parseJson.noExceptions = (txt, reviver) => { +module.exports = parseJsonError +parseJsonError.JSONParseError = JSONParseError +parseJsonError.noExceptions = (raw, reviver) => { try { - return JSON.parse(stripBOM(txt), reviver) - } catch (e) { + return parseJson(stripBOM(raw), reviver) + } catch { // no exceptions } } diff --git a/deps/npm/node_modules/json-parse-even-better-errors/package.json b/deps/npm/node_modules/json-parse-even-better-errors/package.json index c496ecbde502e6..5d0a1d97d4d7ea 100644 --- a/deps/npm/node_modules/json-parse-even-better-errors/package.json +++ b/deps/npm/node_modules/json-parse-even-better-errors/package.json @@ -1,6 +1,6 @@ { "name": "json-parse-even-better-errors", - "version": "3.0.0", + "version": "3.0.1", "description": "JSON.parse with context information on error", "main": "lib/index.js", "files": [ @@ -10,7 +10,7 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", "lintfix": "npm run lint -- --fix", @@ -27,8 +27,8 @@ "author": "GitHub Inc.", "license": "MIT", "devDependencies": { - "@npmcli/eslint-config": "^3.1.0", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.20.0", "tap": "^16.3.0" }, "tap": { @@ -43,6 +43,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.5.1" + "version": "4.20.0", + "publish": true } } diff --git a/deps/npm/node_modules/libnpmaccess/package.json b/deps/npm/node_modules/libnpmaccess/package.json index 78b60c3d62fe6c..8b8459dcec251b 100644 --- a/deps/npm/node_modules/libnpmaccess/package.json +++ b/deps/npm/node_modules/libnpmaccess/package.json @@ -1,23 +1,23 @@ { "name": "libnpmaccess", - "version": "8.0.1", + "version": "8.0.2", "description": "programmatic library for `npm access` commands", "author": "GitHub Inc.", "license": "ISC", "main": "lib/index.js", "scripts": { - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "test": "tap", "postlint": "template-oss-check", - "lintfix": "node ../.. run lint -- --fix", + "lintfix": "npm run lint -- --fix", "snap": "tap", - "posttest": "node ../.. run lint", + "posttest": "npm run lint", "template-oss-apply": "template-oss-apply --force" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/mock-registry": "^1.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "nock": "^13.3.3", "tap": "^16.3.8" }, @@ -41,7 +41,7 @@ ], "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmdiff/package.json b/deps/npm/node_modules/libnpmdiff/package.json index 54a54ece0f026a..99f92bd242c219 100644 --- a/deps/npm/node_modules/libnpmdiff/package.json +++ b/deps/npm/node_modules/libnpmdiff/package.json @@ -1,6 +1,6 @@ { "name": "libnpmdiff", - "version": "6.0.3", + "version": "6.0.5", "description": "The registry diff", "repository": { "type": "git", @@ -32,17 +32,17 @@ ], "license": "ISC", "scripts": { - "lint": "eslint \"**/*.js\"", - "lintfix": "node ../.. 
run lint -- --fix", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", "test": "tap", - "posttest": "node ../.. run lint", + "posttest": "npm run lint", "snap": "tap", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "tap": "^16.3.8" }, "dependencies": { @@ -58,7 +58,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json index 31e6c7386b1117..bef7d0eda9820a 100644 --- a/deps/npm/node_modules/libnpmexec/package.json +++ b/deps/npm/node_modules/libnpmexec/package.json @@ -1,6 +1,6 @@ { "name": "libnpmexec", - "version": "7.0.4", + "version": "7.0.6", "files": [ "bin/", "lib/" @@ -33,12 +33,12 @@ ], "license": "ISC", "scripts": { - "lint": "eslint \"**/*.js\"", - "posttest": "node ../.. run lint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "posttest": "npm run lint", "test": "tap", "snap": "tap", "postlint": "template-oss-check", - "lintfix": "node ../.. run lint -- --fix", + "lintfix": "npm run lint -- --fix", "template-oss-apply": "template-oss-apply --force" }, "tap": { @@ -51,7 +51,7 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/mock-registry": "^1.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "bin-links": "^4.0.1", "chalk": "^5.2.0", "just-extend": "^6.2.0", @@ -73,7 +73,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "content": "../../scripts/template-oss/index.js" } } diff --git a/deps/npm/node_modules/libnpmfund/package.json b/deps/npm/node_modules/libnpmfund/package.json index 9bacac4cdaabf6..4a8bcb034c1bda 100644 --- a/deps/npm/node_modules/libnpmfund/package.json +++ b/deps/npm/node_modules/libnpmfund/package.json @@ -1,6 +1,6 @@ { "name": "libnpmfund", - "version": "5.0.1", + "version": "5.0.3", "main": "lib/index.js", "files": [ "bin/", @@ -31,7 +31,7 @@ ], "license": "ISC", "scripts": { - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "posttest": "npm run lint", "test": "tap", @@ -41,7 +41,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "tap": "^16.3.8" }, "dependencies": { @@ -52,9 +52,8 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.19.0", - "content": "../../scripts/template-oss/index.js", - "npm": "npm" + "version": "4.21.3", + "content": "../../scripts/template-oss/index.js" }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/libnpmhook/package.json b/deps/npm/node_modules/libnpmhook/package.json index b59524f57b695f..0bd822abba2c3f 100644 --- a/deps/npm/node_modules/libnpmhook/package.json +++ b/deps/npm/node_modules/libnpmhook/package.json @@ -1,6 +1,6 @@ { "name": "libnpmhook", - "version": "10.0.0", + "version": "10.0.1", "description": "programmatic API for managing npm registry hooks", "main": "lib/index.js", "files": [ @@ -9,11 +9,11 @@ ], "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "postlint": "template-oss-check", - "lintfix": "node ../.. run lint -- --fix", + "lintfix": "npm run lint -- --fix", "snap": "tap", - "posttest": "node ../.. run lint", + "posttest": "npm run lint", "template-oss-apply": "template-oss-apply --force" }, "repository": { @@ -35,7 +35,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "nock": "^13.3.3", "tap": "^16.3.8" }, @@ -44,7 +44,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmorg/package.json b/deps/npm/node_modules/libnpmorg/package.json index 596854acfc2a8c..a0aedb79b6084c 100644 --- a/deps/npm/node_modules/libnpmorg/package.json +++ b/deps/npm/node_modules/libnpmorg/package.json @@ -1,6 +1,6 @@ { "name": "libnpmorg", - "version": "6.0.1", + "version": "6.0.2", "description": "Programmatic api for `npm org` commands", "author": "GitHub Inc.", "main": "lib/index.js", @@ -14,11 +14,11 @@ ], "license": "ISC", "scripts": { - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "test": "tap", - "posttest": "node ../.. run lint", + "posttest": "npm run lint", "postlint": "template-oss-check", - "lintfix": "node ../.. run lint -- --fix", + "lintfix": "npm run lint -- --fix", "snap": "tap", "template-oss-apply": "template-oss-apply --force" }, @@ -28,7 +28,7 @@ ], "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "minipass": "^7.0.4", "nock": "^13.3.3", "tap": "^16.3.8" @@ -49,7 +49,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmpack/package.json b/deps/npm/node_modules/libnpmpack/package.json index fbaa3dc256bc54..89986434a1cb2e 100644 --- a/deps/npm/node_modules/libnpmpack/package.json +++ b/deps/npm/node_modules/libnpmpack/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpack", - "version": "6.0.3", + "version": "6.0.5", "description": "Programmatic API for the bits behind npm pack", "author": "GitHub Inc.", "main": "lib/index.js", @@ -13,17 +13,17 @@ ], "license": "ISC", "scripts": { - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "test": "tap", - "posttest": "node ../.. run lint", + "posttest": "npm run lint", "postlint": "template-oss-check", - "lintfix": "node ../.. 
run lint -- --fix", + "lintfix": "npm run lint -- --fix", "snap": "tap", "template-oss-apply": "template-oss-apply --force" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "nock": "^13.3.3", "spawk": "^1.7.1", "tap": "^16.3.8" @@ -46,7 +46,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json index 3d08280870ac72..ce2982450249c2 100644 --- a/deps/npm/node_modules/libnpmpublish/package.json +++ b/deps/npm/node_modules/libnpmpublish/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpublish", - "version": "9.0.2", + "version": "9.0.3", "description": "Programmatic API for the bits behind npm publish and unpublish", "author": "GitHub Inc.", "main": "lib/index.js", @@ -14,10 +14,10 @@ ], "license": "ISC", "scripts": { - "lint": "eslint \"**/*.js\"", - "lintfix": "node ../.. run lint -- --fix", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", "test": "tap", - "posttest": "node ../.. run lint", + "posttest": "npm run lint", "postlint": "template-oss-check", "snap": "tap", "template-oss-apply": "template-oss-apply --force" @@ -26,7 +26,7 @@ "@npmcli/eslint-config": "^4.0.0", "@npmcli/mock-globals": "^1.0.0", "@npmcli/mock-registry": "^1.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "nock": "^13.3.3", "tap": "^16.3.8" }, @@ -52,7 +52,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmsearch/package.json b/deps/npm/node_modules/libnpmsearch/package.json index 1a1e06bb569bae..42cb78839081cd 100644 --- a/deps/npm/node_modules/libnpmsearch/package.json +++ b/deps/npm/node_modules/libnpmsearch/package.json @@ -1,6 +1,6 @@ { "name": "libnpmsearch", - "version": "7.0.0", + "version": "7.0.1", "description": "Programmatic API for searching in npm and compatible registries.", "author": "GitHub Inc.", "main": "lib/index.js", @@ -16,17 +16,17 @@ ], "license": "ISC", "scripts": { - "posttest": "node ../.. run lint", + "posttest": "npm run lint", "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "postlint": "template-oss-check", - "lintfix": "node ../.. run lint -- --fix", + "lintfix": "npm run lint -- --fix", "snap": "tap", "template-oss-apply": "template-oss-apply --force" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "nock": "^13.3.3", "tap": "^16.3.8" }, @@ -45,7 +45,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmteam/package.json b/deps/npm/node_modules/libnpmteam/package.json index 3b9ab774711858..bafeeb3fcdc4c6 100644 --- a/deps/npm/node_modules/libnpmteam/package.json +++ b/deps/npm/node_modules/libnpmteam/package.json @@ -1,22 +1,22 @@ { "name": "libnpmteam", "description": "npm Team management APIs", - "version": "6.0.0", + "version": "6.0.1", "author": "GitHub Inc.", "license": "ISC", "main": "lib/index.js", "scripts": { - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "test": "tap", - "posttest": "node ../.. run lint", + "posttest": "npm run lint", "postlint": "template-oss-check", - "lintfix": "node ../.. run lint -- --fix", + "lintfix": "npm run lint -- --fix", "snap": "tap", "template-oss-apply": "template-oss-apply --force" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "nock": "^13.3.3", "tap": "^16.3.8" }, @@ -39,7 +39,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmversion/package.json b/deps/npm/node_modules/libnpmversion/package.json index 5baac0c437e8b6..782eeca7d2b795 100644 --- a/deps/npm/node_modules/libnpmversion/package.json +++ b/deps/npm/node_modules/libnpmversion/package.json @@ -1,6 +1,6 @@ { "name": "libnpmversion", - "version": "5.0.1", + "version": "5.0.2", "main": "lib/index.js", "files": [ "bin/", @@ -15,12 +15,12 @@ "author": "GitHub Inc.", "license": "ISC", "scripts": { - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "test": "tap", - "posttest": "node ../.. run lint", + "posttest": "npm run lint", "snap": "tap", "postlint": "template-oss-check", - "lintfix": "node ../.. run lint -- --fix", + "lintfix": "npm run lint -- --fix", "template-oss-apply": "template-oss-apply --force" }, "tap": { @@ -32,7 +32,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "require-inject": "^1.4.4", "tap": "^16.3.8" }, @@ -48,7 +48,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "content": "../../scripts/template-oss/index.js" } } diff --git a/deps/npm/node_modules/lru-cache/dist/commonjs/index.js b/deps/npm/node_modules/lru-cache/dist/commonjs/index.js index 3fec15958a5349..3e8efcdde5e76b 100644 --- a/deps/npm/node_modules/lru-cache/dist/commonjs/index.js +++ b/deps/npm/node_modules/lru-cache/dist/commonjs/index.js @@ -736,6 +736,37 @@ class LRUCache { } return deleted; } + /** + * Get the extended info about a given entry, to get its value, size, and + * TTL info simultaneously. Like {@link LRUCache#dump}, but just for a + * single key. Always returns stale values, if their info is found in the + * cache, so be sure to check for expired TTLs if relevant. + */ + info(key) { + const i = this.#keyMap.get(key); + if (i === undefined) + return undefined; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v; + if (value === undefined) + return undefined; + const entry = { value }; + if (this.#ttls && this.#starts) { + const ttl = this.#ttls[i]; + const start = this.#starts[i]; + if (ttl && start) { + const remain = ttl - (perf.now() - start); + entry.ttl = remain; + entry.start = Date.now(); + } + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + return entry; + } /** * Return an array of [key, {@link LRUCache.Entry}] tuples which can be * passed to cache.load() diff --git a/deps/npm/node_modules/lru-cache/dist/esm/index.js b/deps/npm/node_modules/lru-cache/dist/esm/index.js index 3c34d3de6c53cc..3f8c178a3af524 100644 --- a/deps/npm/node_modules/lru-cache/dist/esm/index.js +++ b/deps/npm/node_modules/lru-cache/dist/esm/index.js @@ -733,6 +733,37 @@ export class LRUCache { } return deleted; } + /** + * Get the extended info about a given entry, to get its value, size, and + * TTL info simultaneously. Like {@link LRUCache#dump}, but just for a + * single key. Always returns stale values, if their info is found in the + * cache, so be sure to check for expired TTLs if relevant. + */ + info(key) { + const i = this.#keyMap.get(key); + if (i === undefined) + return undefined; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + return undefined; + const entry = { value }; + if (this.#ttls && this.#starts) { + const ttl = this.#ttls[i]; + const start = this.#starts[i]; + if (ttl && start) { + const remain = ttl - (perf.now() - start); + entry.ttl = remain; + entry.start = Date.now(); + } + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + return entry; + } /** * Return an array of [key, {@link LRUCache.Entry}] tuples which can be * passed to cache.load() diff --git a/deps/npm/node_modules/lru-cache/package.json b/deps/npm/node_modules/lru-cache/package.json index e6cb6b32153b5b..3702e6ef425bca 100644 --- a/deps/npm/node_modules/lru-cache/package.json +++ b/deps/npm/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "10.0.2", + "version": "10.1.0", "author": "Isaac Z. Schlueter ", "keywords": [ "mru", @@ -45,7 +45,10 @@ } } }, - "repository": "git://github.com/isaacs/node-lru-cache.git", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-lru-cache.git" + }, "devDependencies": { "@tapjs/clock": "^1.1.16", "@types/node": "^20.2.5", @@ -111,8 +114,5 @@ } } }, - "type": "module", - "dependencies": { - "semver": "^7.3.5" - } + "type": "module" } diff --git a/deps/npm/node_modules/minipass-collect/LICENSE b/deps/npm/node_modules/minipass-collect/LICENSE index 19129e315fe593..8b8575ae6b7d39 100644 --- a/deps/npm/node_modules/minipass-collect/LICENSE +++ b/deps/npm/node_modules/minipass-collect/LICENSE @@ -1,6 +1,6 @@ The ISC License -Copyright (c) Isaac Z. Schlueter and Contributors +Copyright (c) 2019-2023 Isaac Z. 
Schlueter and Contributors Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above diff --git a/deps/npm/node_modules/minipass-collect/index.js b/deps/npm/node_modules/minipass-collect/index.js index 2fe68c0b5a5a9b..3497f55cbee191 100644 --- a/deps/npm/node_modules/minipass-collect/index.js +++ b/deps/npm/node_modules/minipass-collect/index.js @@ -1,4 +1,4 @@ -const Minipass = require('minipass') +const { Minipass } = require('minipass') const _data = Symbol('_data') const _length = Symbol('_length') class Collect extends Minipass { diff --git a/deps/npm/node_modules/minipass-collect/node_modules/minipass/LICENSE b/deps/npm/node_modules/minipass-collect/node_modules/minipass/LICENSE deleted file mode 100644 index bf1dece2e1f122..00000000000000 --- a/deps/npm/node_modules/minipass-collect/node_modules/minipass/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/minipass-collect/node_modules/minipass/index.js b/deps/npm/node_modules/minipass-collect/node_modules/minipass/index.js deleted file mode 100644 index e8797aab6cc276..00000000000000 --- a/deps/npm/node_modules/minipass-collect/node_modules/minipass/index.js +++ /dev/null @@ -1,649 +0,0 @@ -'use strict' -const proc = typeof process === 'object' && process ? 
process : { - stdout: null, - stderr: null, -} -const EE = require('events') -const Stream = require('stream') -const SD = require('string_decoder').StringDecoder - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -const DESTROYED = Symbol('destroyed') -const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = doIter && Symbol.asyncIterator - || Symbol('asyncIterator not implemented') -const ITERATOR = doIter && Symbol.iterator - || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => - ev === 'end' || - ev === 'finish' || - ev === 'prefinish' - -const isArrayBuffer = b => b instanceof ArrayBuffer || - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0 - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) - -class Pipe { - constructor (src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe () { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors () {} - end () { - this.unpipe() - if (this.opts.end) - this.dest.end() - } -} - -class PipeProxyErrors extends Pipe { - unpipe () { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() - } - constructor (src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) - } -} - -module.exports = class Minipass extends Stream { - constructor (options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this.pipes = [] - this.buffer = [] - this[OBJECTMODE] = options && options.objectMode || false - if (this[OBJECTMODE]) - this[ENCODING] = null - else - this[ENCODING] = options && options.encoding || null - if (this[ENCODING] === 'buffer') - this[ENCODING] = null - this[ASYNC] = options && !!options.async || false - this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - } - - get bufferLength () { return this[BUFFERLENGTH] } - - get encoding () { return this[ENCODING] } - set encoding (enc) { - if (this[OBJECTMODE]) - throw new Error('cannot set encoding in objectMode') - - if (this[ENCODING] && enc !== this[ENCODING] && - (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) - throw new Error('cannot change encoding') - - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this.buffer.length) - this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) - } - - this[ENCODING] = enc - } - - setEncoding (enc) { - this.encoding = enc - } - - get objectMode () { return this[OBJECTMODE] } - set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om } - - get ['async'] () { return this[ASYNC] } - set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a } - - write (chunk, encoding, cb) { - if (this[EOF]) - throw new Error('write after end') - - if (this[DESTROYED]) { - this.emit('error', Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - )) - return true - } - - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - - if (!encoding) - encoding = 'utf8' - - const fn = this[ASYNC] ? defer : f => f() - - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) - chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } - - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? 
*/ - if (this.flowing && this[BUFFERLENGTH] !== 0) - this[FLUSH](true) - - if (this.flowing) - this.emit('data', chunk) - else - this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - - if (cb) - fn(cb) - - return this.flowing - } - - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - if (cb) - fn(cb) - return this.flowing - } - - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { - chunk = Buffer.from(chunk, encoding) - } - - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) - - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) - this[FLUSH](true) - - if (this.flowing) - this.emit('data', chunk) - else - this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - - if (cb) - fn(cb) - - return this.flowing - } - - read (n) { - if (this[DESTROYED]) - return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } - - if (this[OBJECTMODE]) - n = null - - if (this.buffer.length > 1 && !this[OBJECTMODE]) { - if (this.encoding) - this.buffer = [this.buffer.join('')] - else - this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])] - } - - const ret = this[READ](n || null, this.buffer[0]) - this[MAYBE_EMIT_END]() - return ret - } - - [READ] (n, chunk) { - if (n === chunk.length || n === null) - this[BUFFERSHIFT]() - else { - this.buffer[0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } - - this.emit('data', chunk) - - if (!this.buffer.length && !this[EOF]) - this.emit('drain') - - return chunk - } - - end (chunk, encoding, cb) { - if (typeof chunk === 'function') - cb = chunk, chunk = null - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - if (chunk) - this.write(chunk, encoding) - if (cb) - this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
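The bundled minipass 3.x copy being deleted here is superseded by a regular dependency on minipass 7, which minipass-collect now imports as { Minipass } (see its index.js change above). For reference, a minimal sketch of how the Collect stream is typically used, based on the package's own description of collecting all data into a single chunk; the file name read here is an assumption for the example:

  'use strict';
  const fs = require('fs');
  const Collect = require('minipass-collect');

  // Collect withholds 'data' until the source ends, then emits everything
  // that was piped in as one concatenated Buffer.
  const collector = new Collect();
  collector.on('data', (all) => {
    console.log('collected %d bytes', all.length);
  });
  fs.createReadStream('package.json').pipe(collector);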
- if (this.flowing || !this[PAUSED]) - this[MAYBE_EMIT_END]() - return this - } - - // don't let the internal resume be overwritten - [RESUME] () { - if (this[DESTROYED]) - return - - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this.buffer.length) - this[FLUSH]() - else if (this[EOF]) - this[MAYBE_EMIT_END]() - else - this.emit('drain') - } - - resume () { - return this[RESUME]() - } - - pause () { - this[FLOWING] = false - this[PAUSED] = true - } - - get destroyed () { - return this[DESTROYED] - } - - get flowing () { - return this[FLOWING] - } - - get paused () { - return this[PAUSED] - } - - [BUFFERPUSH] (chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1 - else - this[BUFFERLENGTH] += chunk.length - this.buffer.push(chunk) - } - - [BUFFERSHIFT] () { - if (this.buffer.length) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1 - else - this[BUFFERLENGTH] -= this.buffer[0].length - } - return this.buffer.shift() - } - - [FLUSH] (noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) - - if (!noDrain && !this.buffer.length && !this[EOF]) - this.emit('drain') - } - - [FLUSHCHUNK] (chunk) { - return chunk ? (this.emit('data', chunk), this.flowing) : false - } - - pipe (dest, opts) { - if (this[DESTROYED]) - return - - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false - else - opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors - - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end() - } else { - this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)) - if (this[ASYNC]) - defer(() => this[RESUME]()) - else - this[RESUME]() - } - - return dest - } - - unpipe (dest) { - const p = this.pipes.find(p => p.dest === dest) - if (p) { - this.pipes.splice(this.pipes.indexOf(p), 1) - p.unpipe() - } - } - - addListener (ev, fn) { - return this.on(ev, fn) - } - - on (ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this.pipes.length && !this.flowing) - this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) - defer(() => fn.call(this, this[EMITTED_ERROR])) - else - fn.call(this, this[EMITTED_ERROR]) - } - return ret - } - - get emittedEnd () { - return this[EMITTED_END] - } - - [MAYBE_EMIT_END] () { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this.buffer.length === 0 && - this[EOF]) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) - this.emit('close') - this[EMITTING_END] = false - } - } - - emit (ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !data ? false - : this[ASYNC] ? 
defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - const ret = super.emit('error', data) - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret - } - - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITDATA] (data) { - for (const p of this.pipes) { - if (p.dest.write(data) === false) - this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITEND] () { - if (this[EMITTED_END]) - return - - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) - defer(() => this[EMITEND2]()) - else - this[EMITEND2]() - } - - [EMITEND2] () { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this.pipes) { - p.dest.write(data) - } - super.emit('data', data) - } - } - - for (const p of this.pipes) { - p.end() - } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret - } - - // const all = await stream.collect() - collect () { - const buf = [] - if (!this[OBJECTMODE]) - buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) - buf.dataLength += c.length - }) - return p.then(() => buf) - } - - // const data = await stream.concat() - concat () { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] ? 
buf.join('') : Buffer.concat(buf, buf.dataLength)) - } - - // stream.promise().then(() => done, er => emitted error) - promise () { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) - } - - // for await (let chunk of stream) - [ASYNCITERATOR] () { - const next = () => { - const res = this.read() - if (res !== null) - return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) - return Promise.resolve({ done: true }) - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } - - return { next } - } - - // for (let chunk of stream) - [ITERATOR] () { - const next = () => { - const value = this.read() - const done = value === null - return { value, done } - } - return { next } - } - - destroy (er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er) - else - this.emit(DESTROYED) - return this - } - - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this.buffer.length = 0 - this[BUFFERLENGTH] = 0 - - if (typeof this.close === 'function' && !this[CLOSED]) - this.close() - - if (er) - this.emit('error', er) - else // if no error to emit, still reject pending promises - this.emit(DESTROYED) - - return this - } - - static isStream (s) { - return !!s && (s instanceof Minipass || s instanceof Stream || - s instanceof EE && ( - typeof s.pipe === 'function' || // readable - (typeof s.write === 'function' && typeof s.end === 'function') // writable - )) - } -} diff --git a/deps/npm/node_modules/minipass-collect/node_modules/minipass/package.json b/deps/npm/node_modules/minipass-collect/node_modules/minipass/package.json deleted file mode 100644 index 548d03fa6d5d4b..00000000000000 --- a/deps/npm/node_modules/minipass-collect/node_modules/minipass/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "name": "minipass", - "version": "3.3.6", - "description": "minimal implementation of a PassThrough stream", - "main": "index.js", - "types": "index.d.ts", - "dependencies": { - "yallist": "^4.0.0" - }, - "devDependencies": { - "@types/node": "^17.0.41", - "end-of-stream": "^1.4.0", - "prettier": "^2.6.2", - "tap": "^16.2.0", - "through2": "^2.0.3", - "ts-node": "^10.8.1", - "typescript": "^4.7.3" - }, - "scripts": { - "test": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/minipass.git" - }, - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", - "license": "ISC", - "files": [ - "index.d.ts", - "index.js" - ], - "tap": { - "check-coverage": true - }, - "engines": { - "node": ">=8" - }, - "prettier": { - "semi": false, - "printWidth": 80, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - } -} diff --git a/deps/npm/node_modules/minipass-collect/package.json b/deps/npm/node_modules/minipass-collect/package.json index 54d87ac2e63b20..f9daa81bafbc16 100644 --- a/deps/npm/node_modules/minipass-collect/package.json +++ b/deps/npm/node_modules/minipass-collect/package.json @@ -1,6 +1,6 @@ { "name": "minipass-collect", - "version": "1.0.2", + "version": "2.0.1", "description": "A Minipass stream that collects all the data into a single chunk", "author": "Isaac Z. Schlueter (https://izs.me)", "license": "ISC", @@ -9,21 +9,22 @@ "snap": "tap", "preversion": "npm test", "postversion": "npm publish", - "postpublish": "git push origin --follow-tags" + "prepublishOnly": "git push origin --follow-tags" }, "tap": { "check-coverage": true }, "devDependencies": { - "tap": "^14.6.9" + "tap": "^16.3.8" }, "dependencies": { - "minipass": "^3.0.0" + "minipass": "^7.0.3" }, "files": [ "index.js" ], "engines": { - "node": ">= 8" - } + "node": ">=16 || 14 >=14.17" + }, + "repository": "https://github.com/isaacs/minipass-collect" } diff --git a/deps/npm/node_modules/npm-packlist/lib/index.js b/deps/npm/node_modules/npm-packlist/lib/index.js index 7577cba0b865d4..985f11ee3f7384 100644 --- a/deps/npm/node_modules/npm-packlist/lib/index.js +++ b/deps/npm/node_modules/npm-packlist/lib/index.js @@ -42,20 +42,6 @@ const strictDefaults = [ '/.git', ] -const allLevels = [ - // these are included by default but can be excluded by package.json files array - '!/readme{,.*[^~$]}', - '!/copying{,.*[^~$]}', - '!/license{,.*[^~$]}', - '!/licence{,.*[^~$]}', -] - -const rootOnly = [ - /^!.*readme/i, - /^!.*copying/i, - /^!.*licen[sc]e/i, -] - const normalizePath = (path) => path.split('\\').join('/') const readOutOfTreeIgnoreFiles = (root, rel, result = []) => { @@ -141,7 +127,6 @@ class PackWalker extends IgnoreWalker { // known required files for this directory this.injectRules(strictRules, [ ...strictDefaults, - ...allLevels, ...this.requiredFiles.map((file) => `!${file}`), ]) } @@ -294,10 +279,14 @@ class PackWalker extends IgnoreWalker { const ignores = [] const strict = [ ...strictDefaults, - ...allLevels, '!/package.json', + '!/readme{,.*[^~$]}', + '!/copying{,.*[^~$]}', + '!/license{,.*[^~$]}', + '!/licence{,.*[^~$]}', '/.git', '/node_modules', + '.npmrc', '/package-lock.json', '/yarn.lock', '/pnpm-lock.yaml', @@ -307,17 +296,13 @@ class PackWalker extends IgnoreWalker { if (files) { for (let file of files) { // invert the rule because these are things we want to include - if (file.startsWith('/')) { + if (file.startsWith('./')) { file = file.slice(1) - } else if (file.startsWith('./')) { - file = file.slice(2) - } else if (file.endsWith('/*')) { - file = file.slice(0, -2) + } + if (file.endsWith('/*')) { + file += '*' } const inverse = `!${file}` - - this.excludeNonRoot(file) - try { // if an entry in the files array is a specific file, then we need to include it as a // strict requirement for this package. 
if it's a directory or a pattern, it's a default @@ -326,7 +311,7 @@ class PackWalker extends IgnoreWalker { // if we have a file and we know that, it's strictly required if (stat.isFile()) { strict.unshift(inverse) - this.requiredFiles.push(file) + this.requiredFiles.push(file.startsWith('/') ? file.slice(1) : file) } else if (stat.isDirectory()) { // otherwise, it's a default ignore, and since we got here we know it's not a pattern // so we include the directory contents @@ -366,20 +351,6 @@ class PackWalker extends IgnoreWalker { this.injectRules(strictRules, strict, callback) } - // excludes non root files by checking if elements from the files array in - // package.json contain an ! and readme/license/licence/copying, and then - // removing readme/license/licence/copying accordingly from strict defaults - excludeNonRoot (file) { - // Find the pattern - const matchingPattern = rootOnly.find(regex => regex.test(file)) - - if (matchingPattern) { - // Find which index matches the pattern and remove it from allLevels - const indexToRemove = allLevels.findIndex(element => matchingPattern.test(element)) - allLevels.splice(indexToRemove, 1) - } - } - // custom method: after we've finished gathering the files for the root package, we call this // before emitting the 'done' event in order to gather all of the files for bundled deps async gatherBundles () { diff --git a/deps/npm/node_modules/npm-packlist/package.json b/deps/npm/node_modules/npm-packlist/package.json index 460ca7e30ad23f..8c3a16e741ad30 100644 --- a/deps/npm/node_modules/npm-packlist/package.json +++ b/deps/npm/node_modules/npm-packlist/package.json @@ -1,13 +1,13 @@ { "name": "npm-packlist", - "version": "8.0.0", + "version": "8.0.2", "description": "Get a list of the files to add from a folder into an npm package", "directories": { "test": "test" }, "main": "lib/index.js", "dependencies": { - "ignore-walk": "^6.0.0" + "ignore-walk": "^6.0.4" }, "author": "GitHub Inc.", "license": "ISC", @@ -18,7 +18,7 @@ "devDependencies": { "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0", "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.18.0", + "@npmcli/template-oss": "4.21.2", "mutate-fs": "^2.1.1", "tap": "^16.0.1" }, @@ -28,7 +28,7 @@ "snap": "tap", "postsnap": "npm run lintfix --", "eslint": "eslint", - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "npmclilint": "npmcli-lint", "postlint": "template-oss-check", @@ -55,7 +55,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.18.0", + "version": "4.21.2", "publish": true } } diff --git a/deps/npm/node_modules/pacote/lib/registry.js b/deps/npm/node_modules/pacote/lib/registry.js index 993fd3f08a6d91..0e83edf17519a8 100644 --- a/deps/npm/node_modules/pacote/lib/registry.js +++ b/deps/npm/node_modules/pacote/lib/registry.js @@ -14,6 +14,10 @@ const sigstore = require('sigstore') const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' const fullDoc = 'application/json' +// Some really old packages have no time field in their packument so we need a +// cutoff date. 
+const MISSING_TIME_CUTOFF = '2015-01-01T00:00:00.000Z' + const fetch = require('npm-registry-fetch') const _headers = Symbol('_headers') @@ -115,6 +119,13 @@ class RegistryFetcher extends Fetcher { return this.package } + // When verifying signatures, we need to fetch the full/uncompressed + // packument to get publish time as this is not included in the + // corgi/compressed packument. + if (this.opts.verifySignatures) { + this.fullMetadata = true + } + const packument = await this.packument() let mani = await pickManifest(packument, this.spec.fetchSpec, { ...this.opts, @@ -124,6 +135,12 @@ class RegistryFetcher extends Fetcher { mani = rpj.normalize(mani) /* XXX add ETARGET and E403 revalidation of cached packuments here */ + // add _time from packument if fetched with fullMetadata + const time = packument.time?.[mani.version] + if (time) { + mani._time = time + } + // add _resolved and _integrity from dist object const { dist } = mani if (dist) { @@ -171,8 +188,10 @@ class RegistryFetcher extends Fetcher { 'but no corresponding public key can be found' ), { code: 'EMISSINGSIGNATUREKEY' }) } - const validPublicKey = - !publicKey.expires || (Date.parse(publicKey.expires) > Date.now()) + + const publishedTime = Date.parse(mani._time || MISSING_TIME_CUTOFF) + const validPublicKey = !publicKey.expires || + publishedTime < Date.parse(publicKey.expires) if (!validPublicKey) { throw Object.assign(new Error( `${mani._id} has a registry signature with keyid: ${signature.keyid} ` + @@ -254,8 +273,13 @@ class RegistryFetcher extends Fetcher { ), { code: 'EMISSINGSIGNATUREKEY' }) } - const validPublicKey = - !publicKey.expires || (Date.parse(publicKey.expires) > Date.now()) + const integratedTime = new Date( + Number( + bundle.verificationMaterial.tlogEntries[0].integratedTime + ) * 1000 + ) + const validPublicKey = !publicKey.expires || + (integratedTime < Date.parse(publicKey.expires)) if (!validPublicKey) { throw Object.assign(new Error( `${mani._id} has attestations with keyid: ${keyid} ` + diff --git a/deps/npm/node_modules/pacote/package.json b/deps/npm/node_modules/pacote/package.json index 4654b03d988c32..88d479d182f5dc 100644 --- a/deps/npm/node_modules/pacote/package.json +++ b/deps/npm/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "17.0.4", + "version": "17.0.5", "description": "JavaScript package downloader", "author": "GitHub Inc.", "bin": { @@ -25,9 +25,9 @@ ] }, "devDependencies": { - "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0", + "@npmcli/arborist": "^7.1.0", "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.18.0", + "@npmcli/template-oss": "4.19.0", "hosted-git-info": "^7.0.0", "mutate-fs": "^2.1.1", "nock": "^13.2.4", @@ -72,13 +72,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "ciVersions": [ - "16.14.0", - "16.x", - "18.0.0", - "18.x" - ], - "version": "4.18.0", + "version": "4.19.0", "windowsCI": false, "publish": "true" } diff --git a/deps/npm/node_modules/postcss-selector-parser/package.json b/deps/npm/node_modules/postcss-selector-parser/package.json index dce071cdcb2b3a..d1f6be84cc5c77 100644 --- a/deps/npm/node_modules/postcss-selector-parser/package.json +++ b/deps/npm/node_modules/postcss-selector-parser/package.json @@ -1,6 +1,6 @@ { "name": "postcss-selector-parser", - "version": "6.0.13", + "version": "6.0.15", "devDependencies": { "@babel/cli": "^7.11.6", "@babel/core": "^7.11.6", @@ -18,7 +18,7 @@ "glob": "^8.0.3", "minimist": "^1.2.5", "nyc": "^15.1.0", - "postcss": "^8.0.0", + "postcss": "^8.4.31", "semver": "^7.3.2", "typescript": "^4.0.3" }, diff --git a/deps/npm/node_modules/process/LICENSE b/deps/npm/node_modules/process/LICENSE deleted file mode 100644 index b8c1246cf49cbd..00000000000000 --- a/deps/npm/node_modules/process/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -(The MIT License) - -Copyright (c) 2013 Roman Shtylman - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/process/browser.js b/deps/npm/node_modules/process/browser.js deleted file mode 100644 index d059362306586e..00000000000000 --- a/deps/npm/node_modules/process/browser.js +++ /dev/null @@ -1,184 +0,0 @@ -// shim for using process in browser -var process = module.exports = {}; - -// cached from whatever global is present so that test runners that stub it -// don't break things. But we need to wrap it in a try catch in case it is -// wrapped in strict mode code which doesn't define any globals. It's inside a -// function because try/catches deoptimize in certain engines. 
- -var cachedSetTimeout; -var cachedClearTimeout; - -function defaultSetTimout() { - throw new Error('setTimeout has not been defined'); -} -function defaultClearTimeout () { - throw new Error('clearTimeout has not been defined'); -} -(function () { - try { - if (typeof setTimeout === 'function') { - cachedSetTimeout = setTimeout; - } else { - cachedSetTimeout = defaultSetTimout; - } - } catch (e) { - cachedSetTimeout = defaultSetTimout; - } - try { - if (typeof clearTimeout === 'function') { - cachedClearTimeout = clearTimeout; - } else { - cachedClearTimeout = defaultClearTimeout; - } - } catch (e) { - cachedClearTimeout = defaultClearTimeout; - } -} ()) -function runTimeout(fun) { - if (cachedSetTimeout === setTimeout) { - //normal enviroments in sane situations - return setTimeout(fun, 0); - } - // if setTimeout wasn't available but was latter defined - if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { - cachedSetTimeout = setTimeout; - return setTimeout(fun, 0); - } - try { - // when when somebody has screwed with setTimeout but no I.E. maddness - return cachedSetTimeout(fun, 0); - } catch(e){ - try { - // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally - return cachedSetTimeout.call(null, fun, 0); - } catch(e){ - // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error - return cachedSetTimeout.call(this, fun, 0); - } - } - - -} -function runClearTimeout(marker) { - if (cachedClearTimeout === clearTimeout) { - //normal enviroments in sane situations - return clearTimeout(marker); - } - // if clearTimeout wasn't available but was latter defined - if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { - cachedClearTimeout = clearTimeout; - return clearTimeout(marker); - } - try { - // when when somebody has screwed with setTimeout but no I.E. maddness - return cachedClearTimeout(marker); - } catch (e){ - try { - // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally - return cachedClearTimeout.call(null, marker); - } catch (e){ - // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error. - // Some versions of I.E. 
have different rules for clearTimeout vs setTimeout - return cachedClearTimeout.call(this, marker); - } - } - - - -} -var queue = []; -var draining = false; -var currentQueue; -var queueIndex = -1; - -function cleanUpNextTick() { - if (!draining || !currentQueue) { - return; - } - draining = false; - if (currentQueue.length) { - queue = currentQueue.concat(queue); - } else { - queueIndex = -1; - } - if (queue.length) { - drainQueue(); - } -} - -function drainQueue() { - if (draining) { - return; - } - var timeout = runTimeout(cleanUpNextTick); - draining = true; - - var len = queue.length; - while(len) { - currentQueue = queue; - queue = []; - while (++queueIndex < len) { - if (currentQueue) { - currentQueue[queueIndex].run(); - } - } - queueIndex = -1; - len = queue.length; - } - currentQueue = null; - draining = false; - runClearTimeout(timeout); -} - -process.nextTick = function (fun) { - var args = new Array(arguments.length - 1); - if (arguments.length > 1) { - for (var i = 1; i < arguments.length; i++) { - args[i - 1] = arguments[i]; - } - } - queue.push(new Item(fun, args)); - if (queue.length === 1 && !draining) { - runTimeout(drainQueue); - } -}; - -// v8 likes predictible objects -function Item(fun, array) { - this.fun = fun; - this.array = array; -} -Item.prototype.run = function () { - this.fun.apply(null, this.array); -}; -process.title = 'browser'; -process.browser = true; -process.env = {}; -process.argv = []; -process.version = ''; // empty string to avoid regexp issues -process.versions = {}; - -function noop() {} - -process.on = noop; -process.addListener = noop; -process.once = noop; -process.off = noop; -process.removeListener = noop; -process.removeAllListeners = noop; -process.emit = noop; -process.prependListener = noop; -process.prependOnceListener = noop; - -process.listeners = function (name) { return [] } - -process.binding = function (name) { - throw new Error('process.binding is not supported'); -}; - -process.cwd = function () { return '/' }; -process.chdir = function (dir) { - throw new Error('process.chdir is not supported'); -}; -process.umask = function() { return 0; }; diff --git a/deps/npm/node_modules/process/index.js b/deps/npm/node_modules/process/index.js deleted file mode 100644 index 8d8ed7df45bb87..00000000000000 --- a/deps/npm/node_modules/process/index.js +++ /dev/null @@ -1,2 +0,0 @@ -// for now just expose the builtin process global from node.js -module.exports = global.process; diff --git a/deps/npm/node_modules/process/package.json b/deps/npm/node_modules/process/package.json deleted file mode 100644 index d2cfaade44177a..00000000000000 --- a/deps/npm/node_modules/process/package.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "author": "Roman Shtylman ", - "name": "process", - "description": "process information for node.js and browsers", - "keywords": [ - "process" - ], - "scripts": { - "test": "mocha test.js", - "browser": "zuul --no-coverage --ui mocha-bdd --local 8080 -- test.js" - }, - "version": "0.11.10", - "repository": { - "type": "git", - "url": "git://github.com/shtylman/node-process.git" - }, - "license": "MIT", - "browser": "./browser.js", - "main": "./index.js", - "engines": { - "node": ">= 0.6.0" - }, - "devDependencies": { - "mocha": "2.2.1", - "zuul": "^3.10.3" - } -} diff --git a/deps/npm/node_modules/process/test.js b/deps/npm/node_modules/process/test.js deleted file mode 100644 index 8ba579c0a20267..00000000000000 --- a/deps/npm/node_modules/process/test.js +++ /dev/null @@ -1,199 +0,0 @@ -var assert = require('assert'); -var 
ourProcess = require('./browser'); -describe('test against our process', function () { - test(ourProcess); -}); -if (!process.browser) { - describe('test against node', function () { - test(process); - }); - vmtest(); -} -function test (ourProcess) { - describe('test arguments', function () { - it ('works', function (done) { - var order = 0; - - - ourProcess.nextTick(function (num) { - assert.equal(num, order++, 'first one works'); - ourProcess.nextTick(function (num) { - assert.equal(num, order++, 'recursive one is 4th'); - }, 3); - }, 0); - ourProcess.nextTick(function (num) { - assert.equal(num, order++, 'second one starts'); - ourProcess.nextTick(function (num) { - assert.equal(num, order++, 'this is third'); - ourProcess.nextTick(function (num) { - assert.equal(num, order++, 'this is last'); - done(); - }, 5); - }, 4); - }, 1); - ourProcess.nextTick(function (num) { - - assert.equal(num, order++, '3rd schedualed happens after the error'); - }, 2); - }); - }); -if (!process.browser) { - describe('test errors', function (t) { - it ('works', function (done) { - var order = 0; - process.removeAllListeners('uncaughtException'); - process.once('uncaughtException', function(err) { - assert.equal(2, order++, 'error is third'); - ourProcess.nextTick(function () { - assert.equal(5, order++, 'schedualed in error is last'); - done(); - }); - }); - ourProcess.nextTick(function () { - assert.equal(0, order++, 'first one works'); - ourProcess.nextTick(function () { - assert.equal(4, order++, 'recursive one is 4th'); - }); - }); - ourProcess.nextTick(function () { - assert.equal(1, order++, 'second one starts'); - throw(new Error('an error is thrown')); - }); - ourProcess.nextTick(function () { - assert.equal(3, order++, '3rd schedualed happens after the error'); - }); - }); - }); -} - describe('rename globals', function (t) { - var oldTimeout = setTimeout; - var oldClear = clearTimeout; - - it('clearTimeout', function (done){ - - var ok = true; - clearTimeout = function () { - ok = false; - } - var ran = false; - function cleanup() { - clearTimeout = oldClear; - var err; - try { - assert.ok(ok, 'fake clearTimeout ran'); - assert.ok(ran, 'should have run'); - } catch (e) { - err = e; - } - done(err); - } - setTimeout(cleanup, 1000); - ourProcess.nextTick(function () { - ran = true; - }); - }); - it('just setTimeout', function (done){ - - - setTimeout = function () { - setTimeout = oldTimeout; - try { - assert.ok(false, 'fake setTimeout called') - } catch (e) { - done(e); - } - - } - - ourProcess.nextTick(function () { - setTimeout = oldTimeout; - done(); - }); - }); - }); -} -function vmtest() { - var vm = require('vm'); - var fs = require('fs'); - var process = fs.readFileSync('./browser.js', {encoding: 'utf8'}); - - - describe('should work in vm in strict mode with no globals', function () { - it('should parse', function (done) { - var str = '"use strict";var module = {exports:{}};'; - str += process; - str += 'this.works = process.browser;'; - var script = new vm.Script(str); - var context = { - works: false - }; - script.runInNewContext(context); - assert.ok(context.works); - done(); - }); - it('setTimeout throws error', function (done) { - var str = '"use strict";var module = {exports:{}};'; - str += process; - str += 'try {process.nextTick(function () {})} catch (e){this.works = e;}'; - var script = new vm.Script(str); - var context = { - works: false - }; - script.runInNewContext(context); - assert.ok(context.works); - done(); - }); - it('should generally work', function (done) { - var str = 
'"use strict";var module = {exports:{}};'; - str += process; - str += 'process.nextTick(function () {assert.ok(true);done();})'; - var script = new vm.Script(str); - var context = { - clearTimeout: clearTimeout, - setTimeout: setTimeout, - done: done, - assert: assert - }; - script.runInNewContext(context); - }); - it('late defs setTimeout', function (done) { - var str = '"use strict";var module = {exports:{}};'; - str += process; - str += 'var setTimeout = hiddenSetTimeout;process.nextTick(function () {assert.ok(true);done();})'; - var script = new vm.Script(str); - var context = { - clearTimeout: clearTimeout, - hiddenSetTimeout: setTimeout, - done: done, - assert: assert - }; - script.runInNewContext(context); - }); - it('late defs clearTimeout', function (done) { - var str = '"use strict";var module = {exports:{}};'; - str += process; - str += 'var clearTimeout = hiddenClearTimeout;process.nextTick(function () {assert.ok(true);done();})'; - var script = new vm.Script(str); - var context = { - hiddenClearTimeout: clearTimeout, - setTimeout: setTimeout, - done: done, - assert: assert - }; - script.runInNewContext(context); - }); - it('late defs setTimeout and then redefine', function (done) { - var str = '"use strict";var module = {exports:{}};'; - str += process; - str += 'var setTimeout = hiddenSetTimeout;process.nextTick(function () {setTimeout = function (){throw new Error("foo")};hiddenSetTimeout(function(){process.nextTick(function (){assert.ok(true);done();});});});'; - var script = new vm.Script(str); - var context = { - clearTimeout: clearTimeout, - hiddenSetTimeout: setTimeout, - done: done, - assert: assert - }; - script.runInNewContext(context); - }); - }); -} diff --git a/deps/npm/node_modules/readable-stream/LICENSE b/deps/npm/node_modules/readable-stream/LICENSE deleted file mode 100644 index 2873b3b2e59507..00000000000000 --- a/deps/npm/node_modules/readable-stream/LICENSE +++ /dev/null @@ -1,47 +0,0 @@ -Node.js is licensed for use as follows: - -""" -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" - -This license applies to parts of Node.js originating from the -https://github.com/joyent/node repository: - -""" -Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
-Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_duplex.js b/deps/npm/node_modules/readable-stream/lib/_stream_duplex.js deleted file mode 100644 index e03c6bf5ff87aa..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/_stream_duplex.js +++ /dev/null @@ -1,4 +0,0 @@ -'use strict' - -// Keep this file as an alias for the full stream module. -module.exports = require('./stream').Duplex diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_passthrough.js b/deps/npm/node_modules/readable-stream/lib/_stream_passthrough.js deleted file mode 100644 index 1206dc4555fe2d..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/_stream_passthrough.js +++ /dev/null @@ -1,4 +0,0 @@ -'use strict' - -// Keep this file as an alias for the full stream module. -module.exports = require('./stream').PassThrough diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/readable-stream/lib/_stream_readable.js deleted file mode 100644 index 49416586f20981..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/_stream_readable.js +++ /dev/null @@ -1,4 +0,0 @@ -'use strict' - -// Keep this file as an alias for the full stream module. -module.exports = require('./stream').Readable diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/readable-stream/lib/_stream_transform.js deleted file mode 100644 index ef227b12c57c3d..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/_stream_transform.js +++ /dev/null @@ -1,4 +0,0 @@ -'use strict' - -// Keep this file as an alias for the full stream module. -module.exports = require('./stream').Transform diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/readable-stream/lib/_stream_writable.js deleted file mode 100644 index 00c7b037ce7bff..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/_stream_writable.js +++ /dev/null @@ -1,4 +0,0 @@ -'use strict' - -// Keep this file as an alias for the full stream module. 
-module.exports = require('./stream').Writable diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js deleted file mode 100644 index 3a26a1d3e6d76d..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js +++ /dev/null @@ -1,49 +0,0 @@ -'use strict' - -const { AbortError, codes } = require('../../ours/errors') -const { isNodeStream, isWebStream, kControllerErrorFunction } = require('./utils') -const eos = require('./end-of-stream') -const { ERR_INVALID_ARG_TYPE } = codes - -// This method is inlined here for readable-stream -// It also does not allow for signal to not exist on the stream -// https://github.com/nodejs/node/pull/36061#discussion_r533718029 -const validateAbortSignal = (signal, name) => { - if (typeof signal !== 'object' || !('aborted' in signal)) { - throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal) - } -} -module.exports.addAbortSignal = function addAbortSignal(signal, stream) { - validateAbortSignal(signal, 'signal') - if (!isNodeStream(stream) && !isWebStream(stream)) { - throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream) - } - return module.exports.addAbortSignalNoValidate(signal, stream) -} -module.exports.addAbortSignalNoValidate = function (signal, stream) { - if (typeof signal !== 'object' || !('aborted' in signal)) { - return stream - } - const onAbort = isNodeStream(stream) - ? () => { - stream.destroy( - new AbortError(undefined, { - cause: signal.reason - }) - ) - } - : () => { - stream[kControllerErrorFunction]( - new AbortError(undefined, { - cause: signal.reason - }) - ) - } - if (signal.aborted) { - onAbort() - } else { - signal.addEventListener('abort', onAbort) - eos(stream, () => signal.removeEventListener('abort', onAbort)) - } - return stream -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/buffer_list.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/buffer_list.js deleted file mode 100644 index b55e35cf9a0f88..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/buffer_list.js +++ /dev/null @@ -1,157 +0,0 @@ -'use strict' - -const { StringPrototypeSlice, SymbolIterator, TypedArrayPrototypeSet, Uint8Array } = require('../../ours/primordials') -const { Buffer } = require('buffer') -const { inspect } = require('../../ours/util') -module.exports = class BufferList { - constructor() { - this.head = null - this.tail = null - this.length = 0 - } - push(v) { - const entry = { - data: v, - next: null - } - if (this.length > 0) this.tail.next = entry - else this.head = entry - this.tail = entry - ++this.length - } - unshift(v) { - const entry = { - data: v, - next: this.head - } - if (this.length === 0) this.tail = entry - this.head = entry - ++this.length - } - shift() { - if (this.length === 0) return - const ret = this.head.data - if (this.length === 1) this.head = this.tail = null - else this.head = this.head.next - --this.length - return ret - } - clear() { - this.head = this.tail = null - this.length = 0 - } - join(s) { - if (this.length === 0) return '' - let p = this.head - let ret = '' + p.data - while ((p = p.next) !== null) ret += s + p.data - return ret - } - concat(n) { - if (this.length === 0) return Buffer.alloc(0) - const ret = Buffer.allocUnsafe(n >>> 0) - let p = this.head - let i = 0 - while (p) { - TypedArrayPrototypeSet(ret, p.data, i) - i += p.data.length - p = 
p.next - } - return ret - } - - // Consumes a specified amount of bytes or characters from the buffered data. - consume(n, hasStrings) { - const data = this.head.data - if (n < data.length) { - // `slice` is the same for buffers and strings. - const slice = data.slice(0, n) - this.head.data = data.slice(n) - return slice - } - if (n === data.length) { - // First chunk is a perfect match. - return this.shift() - } - // Result spans more than one buffer. - return hasStrings ? this._getString(n) : this._getBuffer(n) - } - first() { - return this.head.data - } - *[SymbolIterator]() { - for (let p = this.head; p; p = p.next) { - yield p.data - } - } - - // Consumes a specified amount of characters from the buffered data. - _getString(n) { - let ret = '' - let p = this.head - let c = 0 - do { - const str = p.data - if (n > str.length) { - ret += str - n -= str.length - } else { - if (n === str.length) { - ret += str - ++c - if (p.next) this.head = p.next - else this.head = this.tail = null - } else { - ret += StringPrototypeSlice(str, 0, n) - this.head = p - p.data = StringPrototypeSlice(str, n) - } - break - } - ++c - } while ((p = p.next) !== null) - this.length -= c - return ret - } - - // Consumes a specified amount of bytes from the buffered data. - _getBuffer(n) { - const ret = Buffer.allocUnsafe(n) - const retLen = n - let p = this.head - let c = 0 - do { - const buf = p.data - if (n > buf.length) { - TypedArrayPrototypeSet(ret, buf, retLen - n) - n -= buf.length - } else { - if (n === buf.length) { - TypedArrayPrototypeSet(ret, buf, retLen - n) - ++c - if (p.next) this.head = p.next - else this.head = this.tail = null - } else { - TypedArrayPrototypeSet(ret, new Uint8Array(buf.buffer, buf.byteOffset, n), retLen - n) - this.head = p - p.data = buf.slice(n) - } - break - } - ++c - } while ((p = p.next) !== null) - this.length -= c - return ret - } - - // Make sure the linked list only shows the minimal necessary information. - [Symbol.for('nodejs.util.inspect.custom')](_, options) { - return inspect(this, { - ...options, - // Only inspect one level. - depth: 0, - // It should not recurse. - customInspect: false - }) - } -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/compose.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/compose.js deleted file mode 100644 index f565c12ef3620c..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/compose.js +++ /dev/null @@ -1,194 +0,0 @@ -'use strict' - -const { pipeline } = require('./pipeline') -const Duplex = require('./duplex') -const { destroyer } = require('./destroy') -const { - isNodeStream, - isReadable, - isWritable, - isWebStream, - isTransformStream, - isWritableStream, - isReadableStream -} = require('./utils') -const { - AbortError, - codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS } -} = require('../../ours/errors') -const eos = require('./end-of-stream') -module.exports = function compose(...streams) { - if (streams.length === 0) { - throw new ERR_MISSING_ARGS('streams') - } - if (streams.length === 1) { - return Duplex.from(streams[0]) - } - const orgStreams = [...streams] - if (typeof streams[0] === 'function') { - streams[0] = Duplex.from(streams[0]) - } - if (typeof streams[streams.length - 1] === 'function') { - const idx = streams.length - 1 - streams[idx] = Duplex.from(streams[idx]) - } - for (let n = 0; n < streams.length; ++n) { - if (!isNodeStream(streams[n]) && !isWebStream(streams[n])) { - // TODO(ronag): Add checks for non streams. 
- continue - } - if ( - n < streams.length - 1 && - !(isReadable(streams[n]) || isReadableStream(streams[n]) || isTransformStream(streams[n])) - ) { - throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be readable') - } - if (n > 0 && !(isWritable(streams[n]) || isWritableStream(streams[n]) || isTransformStream(streams[n]))) { - throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be writable') - } - } - let ondrain - let onfinish - let onreadable - let onclose - let d - function onfinished(err) { - const cb = onclose - onclose = null - if (cb) { - cb(err) - } else if (err) { - d.destroy(err) - } else if (!readable && !writable) { - d.destroy() - } - } - const head = streams[0] - const tail = pipeline(streams, onfinished) - const writable = !!(isWritable(head) || isWritableStream(head) || isTransformStream(head)) - const readable = !!(isReadable(tail) || isReadableStream(tail) || isTransformStream(tail)) - - // TODO(ronag): Avoid double buffering. - // Implement Writable/Readable/Duplex traits. - // See, https://github.com/nodejs/node/pull/33515. - d = new Duplex({ - // TODO (ronag): highWaterMark? - writableObjectMode: !!(head !== null && head !== undefined && head.writableObjectMode), - readableObjectMode: !!(tail !== null && tail !== undefined && tail.writableObjectMode), - writable, - readable - }) - if (writable) { - if (isNodeStream(head)) { - d._write = function (chunk, encoding, callback) { - if (head.write(chunk, encoding)) { - callback() - } else { - ondrain = callback - } - } - d._final = function (callback) { - head.end() - onfinish = callback - } - head.on('drain', function () { - if (ondrain) { - const cb = ondrain - ondrain = null - cb() - } - }) - } else if (isWebStream(head)) { - const writable = isTransformStream(head) ? head.writable : head - const writer = writable.getWriter() - d._write = async function (chunk, encoding, callback) { - try { - await writer.ready - writer.write(chunk).catch(() => {}) - callback() - } catch (err) { - callback(err) - } - } - d._final = async function (callback) { - try { - await writer.ready - writer.close().catch(() => {}) - onfinish = callback - } catch (err) { - callback(err) - } - } - } - const toRead = isTransformStream(tail) ? tail.readable : tail - eos(toRead, () => { - if (onfinish) { - const cb = onfinish - onfinish = null - cb() - } - }) - } - if (readable) { - if (isNodeStream(tail)) { - tail.on('readable', function () { - if (onreadable) { - const cb = onreadable - onreadable = null - cb() - } - }) - tail.on('end', function () { - d.push(null) - }) - d._read = function () { - while (true) { - const buf = tail.read() - if (buf === null) { - onreadable = d._read - return - } - if (!d.push(buf)) { - return - } - } - } - } else if (isWebStream(tail)) { - const readable = isTransformStream(tail) ? 
tail.readable : tail - const reader = readable.getReader() - d._read = async function () { - while (true) { - try { - const { value, done } = await reader.read() - if (!d.push(value)) { - return - } - if (done) { - d.push(null) - return - } - } catch { - return - } - } - } - } - } - d._destroy = function (err, callback) { - if (!err && onclose !== null) { - err = new AbortError() - } - onreadable = null - ondrain = null - onfinish = null - if (onclose === null) { - callback(err) - } else { - onclose = callback - if (isNodeStream(tail)) { - destroyer(tail, err) - } - } - } - return d -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/destroy.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/destroy.js deleted file mode 100644 index db76c29f94bab0..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/destroy.js +++ /dev/null @@ -1,290 +0,0 @@ -'use strict' - -/* replacement start */ - -const process = require('process/') - -/* replacement end */ - -const { - aggregateTwoErrors, - codes: { ERR_MULTIPLE_CALLBACK }, - AbortError -} = require('../../ours/errors') -const { Symbol } = require('../../ours/primordials') -const { kDestroyed, isDestroyed, isFinished, isServerRequest } = require('./utils') -const kDestroy = Symbol('kDestroy') -const kConstruct = Symbol('kConstruct') -function checkError(err, w, r) { - if (err) { - // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 - err.stack // eslint-disable-line no-unused-expressions - - if (w && !w.errored) { - w.errored = err - } - if (r && !r.errored) { - r.errored = err - } - } -} - -// Backwards compat. cb() is undocumented and unused in core but -// unfortunately might be used by modules. -function destroy(err, cb) { - const r = this._readableState - const w = this._writableState - // With duplex streams we use the writable side for state. - const s = w || r - if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) { - if (typeof cb === 'function') { - cb() - } - return this - } - - // We set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks - checkError(err, w, r) - if (w) { - w.destroyed = true - } - if (r) { - r.destroyed = true - } - - // If still constructing then defer calling _destroy. 
- if (!s.constructed) { - this.once(kDestroy, function (er) { - _destroy(this, aggregateTwoErrors(er, err), cb) - }) - } else { - _destroy(this, err, cb) - } - return this -} -function _destroy(self, err, cb) { - let called = false - function onDestroy(err) { - if (called) { - return - } - called = true - const r = self._readableState - const w = self._writableState - checkError(err, w, r) - if (w) { - w.closed = true - } - if (r) { - r.closed = true - } - if (typeof cb === 'function') { - cb(err) - } - if (err) { - process.nextTick(emitErrorCloseNT, self, err) - } else { - process.nextTick(emitCloseNT, self) - } - } - try { - self._destroy(err || null, onDestroy) - } catch (err) { - onDestroy(err) - } -} -function emitErrorCloseNT(self, err) { - emitErrorNT(self, err) - emitCloseNT(self) -} -function emitCloseNT(self) { - const r = self._readableState - const w = self._writableState - if (w) { - w.closeEmitted = true - } - if (r) { - r.closeEmitted = true - } - if ((w !== null && w !== undefined && w.emitClose) || (r !== null && r !== undefined && r.emitClose)) { - self.emit('close') - } -} -function emitErrorNT(self, err) { - const r = self._readableState - const w = self._writableState - if ((w !== null && w !== undefined && w.errorEmitted) || (r !== null && r !== undefined && r.errorEmitted)) { - return - } - if (w) { - w.errorEmitted = true - } - if (r) { - r.errorEmitted = true - } - self.emit('error', err) -} -function undestroy() { - const r = this._readableState - const w = this._writableState - if (r) { - r.constructed = true - r.closed = false - r.closeEmitted = false - r.destroyed = false - r.errored = null - r.errorEmitted = false - r.reading = false - r.ended = r.readable === false - r.endEmitted = r.readable === false - } - if (w) { - w.constructed = true - w.destroyed = false - w.closed = false - w.closeEmitted = false - w.errored = null - w.errorEmitted = false - w.finalCalled = false - w.prefinished = false - w.ended = w.writable === false - w.ending = w.writable === false - w.finished = w.writable === false - } -} -function errorOrDestroy(stream, err, sync) { - // We have tests that rely on errors being emitted - // in the same tick, so changing this is semver major. - // For now when you opt-in to autoDestroy we allow - // the error to be emitted nextTick. In a future - // semver major update we should change the default to this. 
- - const r = stream._readableState - const w = stream._writableState - if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) { - return this - } - if ((r !== null && r !== undefined && r.autoDestroy) || (w !== null && w !== undefined && w.autoDestroy)) - stream.destroy(err) - else if (err) { - // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 - err.stack // eslint-disable-line no-unused-expressions - - if (w && !w.errored) { - w.errored = err - } - if (r && !r.errored) { - r.errored = err - } - if (sync) { - process.nextTick(emitErrorNT, stream, err) - } else { - emitErrorNT(stream, err) - } - } -} -function construct(stream, cb) { - if (typeof stream._construct !== 'function') { - return - } - const r = stream._readableState - const w = stream._writableState - if (r) { - r.constructed = false - } - if (w) { - w.constructed = false - } - stream.once(kConstruct, cb) - if (stream.listenerCount(kConstruct) > 1) { - // Duplex - return - } - process.nextTick(constructNT, stream) -} -function constructNT(stream) { - let called = false - function onConstruct(err) { - if (called) { - errorOrDestroy(stream, err !== null && err !== undefined ? err : new ERR_MULTIPLE_CALLBACK()) - return - } - called = true - const r = stream._readableState - const w = stream._writableState - const s = w || r - if (r) { - r.constructed = true - } - if (w) { - w.constructed = true - } - if (s.destroyed) { - stream.emit(kDestroy, err) - } else if (err) { - errorOrDestroy(stream, err, true) - } else { - process.nextTick(emitConstructNT, stream) - } - } - try { - stream._construct((err) => { - process.nextTick(onConstruct, err) - }) - } catch (err) { - process.nextTick(onConstruct, err) - } -} -function emitConstructNT(stream) { - stream.emit(kConstruct) -} -function isRequest(stream) { - return (stream === null || stream === undefined ? undefined : stream.setHeader) && typeof stream.abort === 'function' -} -function emitCloseLegacy(stream) { - stream.emit('close') -} -function emitErrorCloseLegacy(stream, err) { - stream.emit('error', err) - process.nextTick(emitCloseLegacy, stream) -} - -// Normalize destroy for legacy. -function destroyer(stream, err) { - if (!stream || isDestroyed(stream)) { - return - } - if (!err && !isFinished(stream)) { - err = new AbortError() - } - - // TODO: Remove isRequest branches. - if (isServerRequest(stream)) { - stream.socket = null - stream.destroy(err) - } else if (isRequest(stream)) { - stream.abort() - } else if (isRequest(stream.req)) { - stream.req.abort() - } else if (typeof stream.destroy === 'function') { - stream.destroy(err) - } else if (typeof stream.close === 'function') { - // TODO: Don't lose err? - stream.close() - } else if (err) { - process.nextTick(emitErrorCloseLegacy, stream, err) - } else { - process.nextTick(emitCloseLegacy, stream) - } - if (!stream.destroyed) { - stream[kDestroyed] = true - } -} -module.exports = { - construct, - destroyer, - destroy, - undestroy, - errorOrDestroy -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/duplex.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/duplex.js deleted file mode 100644 index dd08396738baad..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/duplex.js +++ /dev/null @@ -1,143 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototype inheritance, this class -// prototypically inherits from Readable, and then parasitically from -// Writable. - -'use strict' - -const { - ObjectDefineProperties, - ObjectGetOwnPropertyDescriptor, - ObjectKeys, - ObjectSetPrototypeOf -} = require('../../ours/primordials') -module.exports = Duplex -const Readable = require('./readable') -const Writable = require('./writable') -ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype) -ObjectSetPrototypeOf(Duplex, Readable) -{ - const keys = ObjectKeys(Writable.prototype) - // Allow the keys array to be GC'ed. 
- for (let i = 0; i < keys.length; i++) { - const method = keys[i] - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method] - } -} -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options) - Readable.call(this, options) - Writable.call(this, options) - if (options) { - this.allowHalfOpen = options.allowHalfOpen !== false - if (options.readable === false) { - this._readableState.readable = false - this._readableState.ended = true - this._readableState.endEmitted = true - } - if (options.writable === false) { - this._writableState.writable = false - this._writableState.ending = true - this._writableState.ended = true - this._writableState.finished = true - } - } else { - this.allowHalfOpen = true - } -} -ObjectDefineProperties(Duplex.prototype, { - writable: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writable') - }, - writableHighWaterMark: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableHighWaterMark') - }, - writableObjectMode: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableObjectMode') - }, - writableBuffer: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableBuffer') - }, - writableLength: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableLength') - }, - writableFinished: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableFinished') - }, - writableCorked: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableCorked') - }, - writableEnded: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableEnded') - }, - writableNeedDrain: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableNeedDrain') - }, - destroyed: { - __proto__: null, - get() { - if (this._readableState === undefined || this._writableState === undefined) { - return false - } - return this._readableState.destroyed && this._writableState.destroyed - }, - set(value) { - // Backward compatibility, the user is explicitly - // managing destroyed. 
- if (this._readableState && this._writableState) { - this._readableState.destroyed = value - this._writableState.destroyed = value - } - } - } -}) -let webStreamsAdapters - -// Lazy to avoid circular references -function lazyWebStreams() { - if (webStreamsAdapters === undefined) webStreamsAdapters = {} - return webStreamsAdapters -} -Duplex.fromWeb = function (pair, options) { - return lazyWebStreams().newStreamDuplexFromReadableWritablePair(pair, options) -} -Duplex.toWeb = function (duplex) { - return lazyWebStreams().newReadableWritablePairFromDuplex(duplex) -} -let duplexify -Duplex.from = function (body) { - if (!duplexify) { - duplexify = require('./duplexify') - } - return duplexify(body, 'body') -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/duplexify.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/duplexify.js deleted file mode 100644 index 599fb47ab53c2e..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/duplexify.js +++ /dev/null @@ -1,379 +0,0 @@ -/* replacement start */ - -const process = require('process/') - -/* replacement end */ - -;('use strict') -const bufferModule = require('buffer') -const { - isReadable, - isWritable, - isIterable, - isNodeStream, - isReadableNodeStream, - isWritableNodeStream, - isDuplexNodeStream -} = require('./utils') -const eos = require('./end-of-stream') -const { - AbortError, - codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE } -} = require('../../ours/errors') -const { destroyer } = require('./destroy') -const Duplex = require('./duplex') -const Readable = require('./readable') -const { createDeferredPromise } = require('../../ours/util') -const from = require('./from') -const Blob = globalThis.Blob || bufferModule.Blob -const isBlob = - typeof Blob !== 'undefined' - ? function isBlob(b) { - return b instanceof Blob - } - : function isBlob(b) { - return false - } -const AbortController = globalThis.AbortController || require('abort-controller').AbortController -const { FunctionPrototypeCall } = require('../../ours/primordials') - -// This is needed for pre node 17. -class Duplexify extends Duplex { - constructor(options) { - super(options) - - // https://github.com/nodejs/node/pull/34385 - - if ((options === null || options === undefined ? undefined : options.readable) === false) { - this._readableState.readable = false - this._readableState.ended = true - this._readableState.endEmitted = true - } - if ((options === null || options === undefined ? undefined : options.writable) === false) { - this._writableState.writable = false - this._writableState.ending = true - this._writableState.ended = true - this._writableState.finished = true - } - } -} -module.exports = function duplexify(body, name) { - if (isDuplexNodeStream(body)) { - return body - } - if (isReadableNodeStream(body)) { - return _duplexify({ - readable: body - }) - } - if (isWritableNodeStream(body)) { - return _duplexify({ - writable: body - }) - } - if (isNodeStream(body)) { - return _duplexify({ - writable: false, - readable: false - }) - } - - // TODO: Webstreams - // if (isReadableStream(body)) { - // return _duplexify({ readable: Readable.fromWeb(body) }); - // } - - // TODO: Webstreams - // if (isWritableStream(body)) { - // return _duplexify({ writable: Writable.fromWeb(body) }); - // } - - if (typeof body === 'function') { - const { value, write, final, destroy } = fromAsyncGen(body) - if (isIterable(value)) { - return from(Duplexify, value, { - // TODO (ronag): highWaterMark? 
- objectMode: true, - write, - final, - destroy - }) - } - const then = value === null || value === undefined ? undefined : value.then - if (typeof then === 'function') { - let d - const promise = FunctionPrototypeCall( - then, - value, - (val) => { - if (val != null) { - throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val) - } - }, - (err) => { - destroyer(d, err) - } - ) - return (d = new Duplexify({ - // TODO (ronag): highWaterMark? - objectMode: true, - readable: false, - write, - final(cb) { - final(async () => { - try { - await promise - process.nextTick(cb, null) - } catch (err) { - process.nextTick(cb, err) - } - }) - }, - destroy - })) - } - throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or AsyncFunction', name, value) - } - if (isBlob(body)) { - return duplexify(body.arrayBuffer()) - } - if (isIterable(body)) { - return from(Duplexify, body, { - // TODO (ronag): highWaterMark? - objectMode: true, - writable: false - }) - } - - // TODO: Webstreams. - // if ( - // isReadableStream(body?.readable) && - // isWritableStream(body?.writable) - // ) { - // return Duplexify.fromWeb(body); - // } - - if ( - typeof (body === null || body === undefined ? undefined : body.writable) === 'object' || - typeof (body === null || body === undefined ? undefined : body.readable) === 'object' - ) { - const readable = - body !== null && body !== undefined && body.readable - ? isReadableNodeStream(body === null || body === undefined ? undefined : body.readable) - ? body === null || body === undefined - ? undefined - : body.readable - : duplexify(body.readable) - : undefined - const writable = - body !== null && body !== undefined && body.writable - ? isWritableNodeStream(body === null || body === undefined ? undefined : body.writable) - ? body === null || body === undefined - ? undefined - : body.writable - : duplexify(body.writable) - : undefined - return _duplexify({ - readable, - writable - }) - } - const then = body === null || body === undefined ? undefined : body.then - if (typeof then === 'function') { - let d - FunctionPrototypeCall( - then, - body, - (val) => { - if (val != null) { - d.push(val) - } - d.push(null) - }, - (err) => { - destroyer(d, err) - } - ) - return (d = new Duplexify({ - objectMode: true, - writable: false, - read() {} - })) - } - throw new ERR_INVALID_ARG_TYPE( - name, - [ - 'Blob', - 'ReadableStream', - 'WritableStream', - 'Stream', - 'Iterable', - 'AsyncIterable', - 'Function', - '{ readable, writable } pair', - 'Promise' - ], - body - ) -} -function fromAsyncGen(fn) { - let { promise, resolve } = createDeferredPromise() - const ac = new AbortController() - const signal = ac.signal - const value = fn( - (async function* () { - while (true) { - const _promise = promise - promise = null - const { chunk, done, cb } = await _promise - process.nextTick(cb) - if (done) return - if (signal.aborted) - throw new AbortError(undefined, { - cause: signal.reason - }) - ;({ promise, resolve } = createDeferredPromise()) - yield chunk - } - })(), - { - signal - } - ) - return { - value, - write(chunk, encoding, cb) { - const _resolve = resolve - resolve = null - _resolve({ - chunk, - done: false, - cb - }) - }, - final(cb) { - const _resolve = resolve - resolve = null - _resolve({ - done: true, - cb - }) - }, - destroy(err, cb) { - ac.abort() - cb(err) - } - } -} -function _duplexify(pair) { - const r = pair.readable && typeof pair.readable.read !== 'function' ? 
Readable.wrap(pair.readable) : pair.readable - const w = pair.writable - let readable = !!isReadable(r) - let writable = !!isWritable(w) - let ondrain - let onfinish - let onreadable - let onclose - let d - function onfinished(err) { - const cb = onclose - onclose = null - if (cb) { - cb(err) - } else if (err) { - d.destroy(err) - } - } - - // TODO(ronag): Avoid double buffering. - // Implement Writable/Readable/Duplex traits. - // See, https://github.com/nodejs/node/pull/33515. - d = new Duplexify({ - // TODO (ronag): highWaterMark? - readableObjectMode: !!(r !== null && r !== undefined && r.readableObjectMode), - writableObjectMode: !!(w !== null && w !== undefined && w.writableObjectMode), - readable, - writable - }) - if (writable) { - eos(w, (err) => { - writable = false - if (err) { - destroyer(r, err) - } - onfinished(err) - }) - d._write = function (chunk, encoding, callback) { - if (w.write(chunk, encoding)) { - callback() - } else { - ondrain = callback - } - } - d._final = function (callback) { - w.end() - onfinish = callback - } - w.on('drain', function () { - if (ondrain) { - const cb = ondrain - ondrain = null - cb() - } - }) - w.on('finish', function () { - if (onfinish) { - const cb = onfinish - onfinish = null - cb() - } - }) - } - if (readable) { - eos(r, (err) => { - readable = false - if (err) { - destroyer(r, err) - } - onfinished(err) - }) - r.on('readable', function () { - if (onreadable) { - const cb = onreadable - onreadable = null - cb() - } - }) - r.on('end', function () { - d.push(null) - }) - d._read = function () { - while (true) { - const buf = r.read() - if (buf === null) { - onreadable = d._read - return - } - if (!d.push(buf)) { - return - } - } - } - } - d._destroy = function (err, callback) { - if (!err && onclose !== null) { - err = new AbortError() - } - onreadable = null - ondrain = null - onfinish = null - if (onclose === null) { - callback(err) - } else { - onclose = callback - destroyer(w, err) - destroyer(r, err) - } - } - return d -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/end-of-stream.js deleted file mode 100644 index 043c9c4bdac518..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/end-of-stream.js +++ /dev/null @@ -1,281 +0,0 @@ -/* replacement start */ - -const process = require('process/') - -/* replacement end */ -// Ported from https://github.com/mafintosh/end-of-stream with -// permission from the author, Mathias Buus (@mafintosh). 
- -;('use strict') -const { AbortError, codes } = require('../../ours/errors') -const { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes -const { kEmptyObject, once } = require('../../ours/util') -const { validateAbortSignal, validateFunction, validateObject, validateBoolean } = require('../validators') -const { Promise, PromisePrototypeThen } = require('../../ours/primordials') -const { - isClosed, - isReadable, - isReadableNodeStream, - isReadableStream, - isReadableFinished, - isReadableErrored, - isWritable, - isWritableNodeStream, - isWritableStream, - isWritableFinished, - isWritableErrored, - isNodeStream, - willEmitClose: _willEmitClose, - kIsClosedPromise -} = require('./utils') -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function' -} -const nop = () => {} -function eos(stream, options, callback) { - var _options$readable, _options$writable - if (arguments.length === 2) { - callback = options - options = kEmptyObject - } else if (options == null) { - options = kEmptyObject - } else { - validateObject(options, 'options') - } - validateFunction(callback, 'callback') - validateAbortSignal(options.signal, 'options.signal') - callback = once(callback) - if (isReadableStream(stream) || isWritableStream(stream)) { - return eosWeb(stream, options, callback) - } - if (!isNodeStream(stream)) { - throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream) - } - const readable = - (_options$readable = options.readable) !== null && _options$readable !== undefined - ? _options$readable - : isReadableNodeStream(stream) - const writable = - (_options$writable = options.writable) !== null && _options$writable !== undefined - ? _options$writable - : isWritableNodeStream(stream) - const wState = stream._writableState - const rState = stream._readableState - const onlegacyfinish = () => { - if (!stream.writable) { - onfinish() - } - } - - // TODO (ronag): Improve soft detection to include core modules and - // common ecosystem modules that do properly emit 'close' but fail - // this generic check. - let willEmitClose = - _willEmitClose(stream) && isReadableNodeStream(stream) === readable && isWritableNodeStream(stream) === writable - let writableFinished = isWritableFinished(stream, false) - const onfinish = () => { - writableFinished = true - // Stream should not be destroyed here. If it is that - // means that user space is doing something differently and - // we cannot trust willEmitClose. - if (stream.destroyed) { - willEmitClose = false - } - if (willEmitClose && (!stream.readable || readable)) { - return - } - if (!readable || readableFinished) { - callback.call(stream) - } - } - let readableFinished = isReadableFinished(stream, false) - const onend = () => { - readableFinished = true - // Stream should not be destroyed here. If it is that - // means that user space is doing something differently and - // we cannot trust willEmitClose. 
- if (stream.destroyed) { - willEmitClose = false - } - if (willEmitClose && (!stream.writable || writable)) { - return - } - if (!writable || writableFinished) { - callback.call(stream) - } - } - const onerror = (err) => { - callback.call(stream, err) - } - let closed = isClosed(stream) - const onclose = () => { - closed = true - const errored = isWritableErrored(stream) || isReadableErrored(stream) - if (errored && typeof errored !== 'boolean') { - return callback.call(stream, errored) - } - if (readable && !readableFinished && isReadableNodeStream(stream, true)) { - if (!isReadableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()) - } - if (writable && !writableFinished) { - if (!isWritableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()) - } - callback.call(stream) - } - const onclosed = () => { - closed = true - const errored = isWritableErrored(stream) || isReadableErrored(stream) - if (errored && typeof errored !== 'boolean') { - return callback.call(stream, errored) - } - callback.call(stream) - } - const onrequest = () => { - stream.req.on('finish', onfinish) - } - if (isRequest(stream)) { - stream.on('complete', onfinish) - if (!willEmitClose) { - stream.on('abort', onclose) - } - if (stream.req) { - onrequest() - } else { - stream.on('request', onrequest) - } - } else if (writable && !wState) { - // legacy streams - stream.on('end', onlegacyfinish) - stream.on('close', onlegacyfinish) - } - - // Not all streams will emit 'close' after 'aborted'. - if (!willEmitClose && typeof stream.aborted === 'boolean') { - stream.on('aborted', onclose) - } - stream.on('end', onend) - stream.on('finish', onfinish) - if (options.error !== false) { - stream.on('error', onerror) - } - stream.on('close', onclose) - if (closed) { - process.nextTick(onclose) - } else if ( - (wState !== null && wState !== undefined && wState.errorEmitted) || - (rState !== null && rState !== undefined && rState.errorEmitted) - ) { - if (!willEmitClose) { - process.nextTick(onclosed) - } - } else if ( - !readable && - (!willEmitClose || isReadable(stream)) && - (writableFinished || isWritable(stream) === false) - ) { - process.nextTick(onclosed) - } else if ( - !writable && - (!willEmitClose || isWritable(stream)) && - (readableFinished || isReadable(stream) === false) - ) { - process.nextTick(onclosed) - } else if (rState && stream.req && stream.aborted) { - process.nextTick(onclosed) - } - const cleanup = () => { - callback = nop - stream.removeListener('aborted', onclose) - stream.removeListener('complete', onfinish) - stream.removeListener('abort', onclose) - stream.removeListener('request', onrequest) - if (stream.req) stream.req.removeListener('finish', onfinish) - stream.removeListener('end', onlegacyfinish) - stream.removeListener('close', onlegacyfinish) - stream.removeListener('finish', onfinish) - stream.removeListener('end', onend) - stream.removeListener('error', onerror) - stream.removeListener('close', onclose) - } - if (options.signal && !closed) { - const abort = () => { - // Keep it because cleanup removes it. 
- const endCallback = callback - cleanup() - endCallback.call( - stream, - new AbortError(undefined, { - cause: options.signal.reason - }) - ) - } - if (options.signal.aborted) { - process.nextTick(abort) - } else { - const originalCallback = callback - callback = once((...args) => { - options.signal.removeEventListener('abort', abort) - originalCallback.apply(stream, args) - }) - options.signal.addEventListener('abort', abort) - } - } - return cleanup -} -function eosWeb(stream, options, callback) { - let isAborted = false - let abort = nop - if (options.signal) { - abort = () => { - isAborted = true - callback.call( - stream, - new AbortError(undefined, { - cause: options.signal.reason - }) - ) - } - if (options.signal.aborted) { - process.nextTick(abort) - } else { - const originalCallback = callback - callback = once((...args) => { - options.signal.removeEventListener('abort', abort) - originalCallback.apply(stream, args) - }) - options.signal.addEventListener('abort', abort) - } - } - const resolverFn = (...args) => { - if (!isAborted) { - process.nextTick(() => callback.apply(stream, args)) - } - } - PromisePrototypeThen(stream[kIsClosedPromise].promise, resolverFn, resolverFn) - return nop -} -function finished(stream, opts) { - var _opts - let autoCleanup = false - if (opts === null) { - opts = kEmptyObject - } - if ((_opts = opts) !== null && _opts !== undefined && _opts.cleanup) { - validateBoolean(opts.cleanup, 'cleanup') - autoCleanup = opts.cleanup - } - return new Promise((resolve, reject) => { - const cleanup = eos(stream, opts, (err) => { - if (autoCleanup) { - cleanup() - } - if (err) { - reject(err) - } else { - resolve() - } - }) - }) -} -module.exports = eos -module.exports.finished = finished diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/from.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/from.js deleted file mode 100644 index c7e75314028794..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/from.js +++ /dev/null @@ -1,98 +0,0 @@ -'use strict' - -/* replacement start */ - -const process = require('process/') - -/* replacement end */ - -const { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = require('../../ours/primordials') -const { Buffer } = require('buffer') -const { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES } = require('../../ours/errors').codes -function from(Readable, iterable, opts) { - let iterator - if (typeof iterable === 'string' || iterable instanceof Buffer) { - return new Readable({ - objectMode: true, - ...opts, - read() { - this.push(iterable) - this.push(null) - } - }) - } - let isAsync - if (iterable && iterable[SymbolAsyncIterator]) { - isAsync = true - iterator = iterable[SymbolAsyncIterator]() - } else if (iterable && iterable[SymbolIterator]) { - isAsync = false - iterator = iterable[SymbolIterator]() - } else { - throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable) - } - const readable = new Readable({ - objectMode: true, - highWaterMark: 1, - // TODO(ronag): What options should be allowed? - ...opts - }) - - // Flag to protect against _read - // being called before last iteration completion. 
- let reading = false - readable._read = function () { - if (!reading) { - reading = true - next() - } - } - readable._destroy = function (error, cb) { - PromisePrototypeThen( - close(error), - () => process.nextTick(cb, error), - // nextTick is here in case cb throws - (e) => process.nextTick(cb, e || error) - ) - } - async function close(error) { - const hadError = error !== undefined && error !== null - const hasThrow = typeof iterator.throw === 'function' - if (hadError && hasThrow) { - const { value, done } = await iterator.throw(error) - await value - if (done) { - return - } - } - if (typeof iterator.return === 'function') { - const { value } = await iterator.return() - await value - } - } - async function next() { - for (;;) { - try { - const { value, done } = isAsync ? await iterator.next() : iterator.next() - if (done) { - readable.push(null) - } else { - const res = value && typeof value.then === 'function' ? await value : value - if (res === null) { - reading = false - throw new ERR_STREAM_NULL_VALUES() - } else if (readable.push(res)) { - continue - } else { - reading = false - } - } - } catch (err) { - readable.destroy(err) - } - break - } - } - return readable -} -module.exports = from diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/lazy_transform.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/lazy_transform.js deleted file mode 100644 index 439461a1278392..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/lazy_transform.js +++ /dev/null @@ -1,51 +0,0 @@ -// LazyTransform is a special type of Transform stream that is lazily loaded. -// This is used for performance with bi-API-ship: when two APIs are available -// for the stream, one conventional and one non-conventional. 
-'use strict' - -const { ObjectDefineProperties, ObjectDefineProperty, ObjectSetPrototypeOf } = require('../../ours/primordials') -const stream = require('../../stream') -const { getDefaultEncoding } = require('../crypto/util') -module.exports = LazyTransform -function LazyTransform(options) { - this._options = options -} -ObjectSetPrototypeOf(LazyTransform.prototype, stream.Transform.prototype) -ObjectSetPrototypeOf(LazyTransform, stream.Transform) -function makeGetter(name) { - return function () { - stream.Transform.call(this, this._options) - this._writableState.decodeStrings = false - if (!this._options || !this._options.defaultEncoding) { - this._writableState.defaultEncoding = getDefaultEncoding() - } - return this[name] - } -} -function makeSetter(name) { - return function (val) { - ObjectDefineProperty(this, name, { - __proto__: null, - value: val, - enumerable: true, - configurable: true, - writable: true - }) - } -} -ObjectDefineProperties(LazyTransform.prototype, { - _readableState: { - __proto__: null, - get: makeGetter('_readableState'), - set: makeSetter('_readableState'), - configurable: true, - enumerable: true - }, - _writableState: { - __proto__: null, - get: makeGetter('_writableState'), - set: makeSetter('_writableState'), - configurable: true, - enumerable: true - } -}) diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/legacy.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/legacy.js deleted file mode 100644 index d492f7ff4e6b69..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/legacy.js +++ /dev/null @@ -1,89 +0,0 @@ -'use strict' - -const { ArrayIsArray, ObjectSetPrototypeOf } = require('../../ours/primordials') -const { EventEmitter: EE } = require('events') -function Stream(opts) { - EE.call(this, opts) -} -ObjectSetPrototypeOf(Stream.prototype, EE.prototype) -ObjectSetPrototypeOf(Stream, EE) -Stream.prototype.pipe = function (dest, options) { - const source = this - function ondata(chunk) { - if (dest.writable && dest.write(chunk) === false && source.pause) { - source.pause() - } - } - source.on('data', ondata) - function ondrain() { - if (source.readable && source.resume) { - source.resume() - } - } - dest.on('drain', ondrain) - - // If the 'end' option is not supplied, dest.end() will be called when - // source gets the 'end' or 'close' events. Only dest.end() once. - if (!dest._isStdio && (!options || options.end !== false)) { - source.on('end', onend) - source.on('close', onclose) - } - let didOnEnd = false - function onend() { - if (didOnEnd) return - didOnEnd = true - dest.end() - } - function onclose() { - if (didOnEnd) return - didOnEnd = true - if (typeof dest.destroy === 'function') dest.destroy() - } - - // Don't leave dangling pipes when there are errors. - function onerror(er) { - cleanup() - if (EE.listenerCount(this, 'error') === 0) { - this.emit('error', er) - } - } - prependListener(source, 'error', onerror) - prependListener(dest, 'error', onerror) - - // Remove all the event listeners that were added. 
- function cleanup() { - source.removeListener('data', ondata) - dest.removeListener('drain', ondrain) - source.removeListener('end', onend) - source.removeListener('close', onclose) - source.removeListener('error', onerror) - dest.removeListener('error', onerror) - source.removeListener('end', cleanup) - source.removeListener('close', cleanup) - dest.removeListener('close', cleanup) - } - source.on('end', cleanup) - source.on('close', cleanup) - dest.on('close', cleanup) - dest.emit('pipe', source) - - // Allow for unix-like usage: A.pipe(B).pipe(C) - return dest -} -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn) - - // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn) - else if (ArrayIsArray(emitter._events[event])) emitter._events[event].unshift(fn) - else emitter._events[event] = [fn, emitter._events[event]] -} -module.exports = { - Stream, - prependListener -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/operators.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/operators.js deleted file mode 100644 index 869cacb39faca9..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/operators.js +++ /dev/null @@ -1,457 +0,0 @@ -'use strict' - -const AbortController = globalThis.AbortController || require('abort-controller').AbortController -const { - codes: { ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE }, - AbortError -} = require('../../ours/errors') -const { validateAbortSignal, validateInteger, validateObject } = require('../validators') -const kWeakHandler = require('../../ours/primordials').Symbol('kWeak') -const { finished } = require('./end-of-stream') -const staticCompose = require('./compose') -const { addAbortSignalNoValidate } = require('./add-abort-signal') -const { isWritable, isNodeStream } = require('./utils') -const { - ArrayPrototypePush, - MathFloor, - Number, - NumberIsNaN, - Promise, - PromiseReject, - PromisePrototypeThen, - Symbol -} = require('../../ours/primordials') -const kEmpty = Symbol('kEmpty') -const kEof = Symbol('kEof') -function compose(stream, options) { - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - if (isNodeStream(stream) && !isWritable(stream)) { - throw new ERR_INVALID_ARG_VALUE('stream', stream, 'must be writable') - } - const composedStream = staticCompose(this, stream) - if (options !== null && options !== undefined && options.signal) { - // Not validating as we already validated before - addAbortSignalNoValidate(options.signal, composedStream) - } - return composedStream -} -function map(fn, options) { - if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) - } - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? 
undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - let concurrency = 1 - if ((options === null || options === undefined ? undefined : options.concurrency) != null) { - concurrency = MathFloor(options.concurrency) - } - validateInteger(concurrency, 'concurrency', 1) - return async function* map() { - var _options$signal, _options$signal2 - const ac = new AbortController() - const stream = this - const queue = [] - const signal = ac.signal - const signalOpt = { - signal - } - const abort = () => ac.abort() - if ( - options !== null && - options !== undefined && - (_options$signal = options.signal) !== null && - _options$signal !== undefined && - _options$signal.aborted - ) { - abort() - } - options === null || options === undefined - ? undefined - : (_options$signal2 = options.signal) === null || _options$signal2 === undefined - ? undefined - : _options$signal2.addEventListener('abort', abort) - let next - let resume - let done = false - function onDone() { - done = true - } - async function pump() { - try { - for await (let val of stream) { - var _val - if (done) { - return - } - if (signal.aborted) { - throw new AbortError() - } - try { - val = fn(val, signalOpt) - } catch (err) { - val = PromiseReject(err) - } - if (val === kEmpty) { - continue - } - if (typeof ((_val = val) === null || _val === undefined ? undefined : _val.catch) === 'function') { - val.catch(onDone) - } - queue.push(val) - if (next) { - next() - next = null - } - if (!done && queue.length && queue.length >= concurrency) { - await new Promise((resolve) => { - resume = resolve - }) - } - } - queue.push(kEof) - } catch (err) { - const val = PromiseReject(err) - PromisePrototypeThen(val, undefined, onDone) - queue.push(val) - } finally { - var _options$signal3 - done = true - if (next) { - next() - next = null - } - options === null || options === undefined - ? undefined - : (_options$signal3 = options.signal) === null || _options$signal3 === undefined - ? undefined - : _options$signal3.removeEventListener('abort', abort) - } - } - pump() - try { - while (true) { - while (queue.length > 0) { - const val = await queue[0] - if (val === kEof) { - return - } - if (signal.aborted) { - throw new AbortError() - } - if (val !== kEmpty) { - yield val - } - queue.shift() - if (resume) { - resume() - resume = null - } - } - await new Promise((resolve) => { - next = resolve - }) - } - } finally { - ac.abort() - done = true - if (resume) { - resume() - resume = null - } - } - }.call(this) -} -function asIndexedPairs(options = undefined) { - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? 
undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - return async function* asIndexedPairs() { - let index = 0 - for await (const val of this) { - var _options$signal4 - if ( - options !== null && - options !== undefined && - (_options$signal4 = options.signal) !== null && - _options$signal4 !== undefined && - _options$signal4.aborted - ) { - throw new AbortError({ - cause: options.signal.reason - }) - } - yield [index++, val] - } - }.call(this) -} -async function some(fn, options = undefined) { - for await (const unused of filter.call(this, fn, options)) { - return true - } - return false -} -async function every(fn, options = undefined) { - if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) - } - // https://en.wikipedia.org/wiki/De_Morgan%27s_laws - return !(await some.call( - this, - async (...args) => { - return !(await fn(...args)) - }, - options - )) -} -async function find(fn, options) { - for await (const result of filter.call(this, fn, options)) { - return result - } - return undefined -} -async function forEach(fn, options) { - if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) - } - async function forEachFn(value, options) { - await fn(value, options) - return kEmpty - } - // eslint-disable-next-line no-unused-vars - for await (const unused of map.call(this, forEachFn, options)); -} -function filter(fn, options) { - if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) - } - async function filterFn(value, options) { - if (await fn(value, options)) { - return value - } - return kEmpty - } - return map.call(this, filterFn, options) -} - -// Specific to provide better error to reduce since the argument is only -// missing if the stream has no items in it - but the code is still appropriate -class ReduceAwareErrMissingArgs extends ERR_MISSING_ARGS { - constructor() { - super('reduce') - this.message = 'Reduce of an empty stream requires an initial value' - } -} -async function reduce(reducer, initialValue, options) { - var _options$signal5 - if (typeof reducer !== 'function') { - throw new ERR_INVALID_ARG_TYPE('reducer', ['Function', 'AsyncFunction'], reducer) - } - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? 
undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - let hasInitialValue = arguments.length > 1 - if ( - options !== null && - options !== undefined && - (_options$signal5 = options.signal) !== null && - _options$signal5 !== undefined && - _options$signal5.aborted - ) { - const err = new AbortError(undefined, { - cause: options.signal.reason - }) - this.once('error', () => {}) // The error is already propagated - await finished(this.destroy(err)) - throw err - } - const ac = new AbortController() - const signal = ac.signal - if (options !== null && options !== undefined && options.signal) { - const opts = { - once: true, - [kWeakHandler]: this - } - options.signal.addEventListener('abort', () => ac.abort(), opts) - } - let gotAnyItemFromStream = false - try { - for await (const value of this) { - var _options$signal6 - gotAnyItemFromStream = true - if ( - options !== null && - options !== undefined && - (_options$signal6 = options.signal) !== null && - _options$signal6 !== undefined && - _options$signal6.aborted - ) { - throw new AbortError() - } - if (!hasInitialValue) { - initialValue = value - hasInitialValue = true - } else { - initialValue = await reducer(initialValue, value, { - signal - }) - } - } - if (!gotAnyItemFromStream && !hasInitialValue) { - throw new ReduceAwareErrMissingArgs() - } - } finally { - ac.abort() - } - return initialValue -} -async function toArray(options) { - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - const result = [] - for await (const val of this) { - var _options$signal7 - if ( - options !== null && - options !== undefined && - (_options$signal7 = options.signal) !== null && - _options$signal7 !== undefined && - _options$signal7.aborted - ) { - throw new AbortError(undefined, { - cause: options.signal.reason - }) - } - ArrayPrototypePush(result, val) - } - return result -} -function flatMap(fn, options) { - const values = map.call(this, fn, options) - return async function* flatMap() { - for await (const val of values) { - yield* val - } - }.call(this) -} -function toIntegerOrInfinity(number) { - // We coerce here to align with the spec - // https://github.com/tc39/proposal-iterator-helpers/issues/169 - number = Number(number) - if (NumberIsNaN(number)) { - return 0 - } - if (number < 0) { - throw new ERR_OUT_OF_RANGE('number', '>= 0', number) - } - return number -} -function drop(number, options = undefined) { - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? 
undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - number = toIntegerOrInfinity(number) - return async function* drop() { - var _options$signal8 - if ( - options !== null && - options !== undefined && - (_options$signal8 = options.signal) !== null && - _options$signal8 !== undefined && - _options$signal8.aborted - ) { - throw new AbortError() - } - for await (const val of this) { - var _options$signal9 - if ( - options !== null && - options !== undefined && - (_options$signal9 = options.signal) !== null && - _options$signal9 !== undefined && - _options$signal9.aborted - ) { - throw new AbortError() - } - if (number-- <= 0) { - yield val - } - } - }.call(this) -} -function take(number, options = undefined) { - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - number = toIntegerOrInfinity(number) - return async function* take() { - var _options$signal10 - if ( - options !== null && - options !== undefined && - (_options$signal10 = options.signal) !== null && - _options$signal10 !== undefined && - _options$signal10.aborted - ) { - throw new AbortError() - } - for await (const val of this) { - var _options$signal11 - if ( - options !== null && - options !== undefined && - (_options$signal11 = options.signal) !== null && - _options$signal11 !== undefined && - _options$signal11.aborted - ) { - throw new AbortError() - } - if (number-- > 0) { - yield val - } else { - return - } - } - }.call(this) -} -module.exports.streamReturningOperators = { - asIndexedPairs, - drop, - filter, - flatMap, - map, - take, - compose -} -module.exports.promiseReturningOperators = { - every, - forEach, - reduce, - toArray, - some, - find -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/passthrough.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/passthrough.js deleted file mode 100644 index ed4f486c3baa44..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/passthrough.js +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. 
- -'use strict' - -const { ObjectSetPrototypeOf } = require('../../ours/primordials') -module.exports = PassThrough -const Transform = require('./transform') -ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype) -ObjectSetPrototypeOf(PassThrough, Transform) -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options) - Transform.call(this, options) -} -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk) -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/pipeline.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/pipeline.js deleted file mode 100644 index 8393ba5146991b..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/pipeline.js +++ /dev/null @@ -1,465 +0,0 @@ -/* replacement start */ - -const process = require('process/') - -/* replacement end */ -// Ported from https://github.com/mafintosh/pump with -// permission from the author, Mathias Buus (@mafintosh). - -;('use strict') -const { ArrayIsArray, Promise, SymbolAsyncIterator } = require('../../ours/primordials') -const eos = require('./end-of-stream') -const { once } = require('../../ours/util') -const destroyImpl = require('./destroy') -const Duplex = require('./duplex') -const { - aggregateTwoErrors, - codes: { - ERR_INVALID_ARG_TYPE, - ERR_INVALID_RETURN_VALUE, - ERR_MISSING_ARGS, - ERR_STREAM_DESTROYED, - ERR_STREAM_PREMATURE_CLOSE - }, - AbortError -} = require('../../ours/errors') -const { validateFunction, validateAbortSignal } = require('../validators') -const { - isIterable, - isReadable, - isReadableNodeStream, - isNodeStream, - isTransformStream, - isWebStream, - isReadableStream, - isReadableEnded -} = require('./utils') -const AbortController = globalThis.AbortController || require('abort-controller').AbortController -let PassThrough -let Readable -function destroyer(stream, reading, writing) { - let finished = false - stream.on('close', () => { - finished = true - }) - const cleanup = eos( - stream, - { - readable: reading, - writable: writing - }, - (err) => { - finished = !err - } - ) - return { - destroy: (err) => { - if (finished) return - finished = true - destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe')) - }, - cleanup - } -} -function popCallback(streams) { - // Streams should never be an empty array. It should always contain at least - // a single stream. Therefore optimize for the average case instead of - // checking for length === 0 as well. - validateFunction(streams[streams.length - 1], 'streams[stream.length - 1]') - return streams.pop() -} -function makeAsyncIterable(val) { - if (isIterable(val)) { - return val - } else if (isReadableNodeStream(val)) { - // Legacy streams are not Iterable. 
- return fromReadable(val) - } - throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], val) -} -async function* fromReadable(val) { - if (!Readable) { - Readable = require('./readable') - } - yield* Readable.prototype[SymbolAsyncIterator].call(val) -} -async function pumpToNode(iterable, writable, finish, { end }) { - let error - let onresolve = null - const resume = (err) => { - if (err) { - error = err - } - if (onresolve) { - const callback = onresolve - onresolve = null - callback() - } - } - const wait = () => - new Promise((resolve, reject) => { - if (error) { - reject(error) - } else { - onresolve = () => { - if (error) { - reject(error) - } else { - resolve() - } - } - } - }) - writable.on('drain', resume) - const cleanup = eos( - writable, - { - readable: false - }, - resume - ) - try { - if (writable.writableNeedDrain) { - await wait() - } - for await (const chunk of iterable) { - if (!writable.write(chunk)) { - await wait() - } - } - if (end) { - writable.end() - } - await wait() - finish() - } catch (err) { - finish(error !== err ? aggregateTwoErrors(error, err) : err) - } finally { - cleanup() - writable.off('drain', resume) - } -} -async function pumpToWeb(readable, writable, finish, { end }) { - if (isTransformStream(writable)) { - writable = writable.writable - } - // https://streams.spec.whatwg.org/#example-manual-write-with-backpressure - const writer = writable.getWriter() - try { - for await (const chunk of readable) { - await writer.ready - writer.write(chunk).catch(() => {}) - } - await writer.ready - if (end) { - await writer.close() - } - finish() - } catch (err) { - try { - await writer.abort(err) - finish(err) - } catch (err) { - finish(err) - } - } -} -function pipeline(...streams) { - return pipelineImpl(streams, once(popCallback(streams))) -} -function pipelineImpl(streams, callback, opts) { - if (streams.length === 1 && ArrayIsArray(streams[0])) { - streams = streams[0] - } - if (streams.length < 2) { - throw new ERR_MISSING_ARGS('streams') - } - const ac = new AbortController() - const signal = ac.signal - const outerSignal = opts === null || opts === undefined ? undefined : opts.signal - - // Need to cleanup event listeners if last stream is readable - // https://github.com/nodejs/node/issues/35452 - const lastStreamCleanup = [] - validateAbortSignal(outerSignal, 'options.signal') - function abort() { - finishImpl(new AbortError()) - } - outerSignal === null || outerSignal === undefined ? undefined : outerSignal.addEventListener('abort', abort) - let error - let value - const destroys = [] - let finishCount = 0 - function finish(err) { - finishImpl(err, --finishCount === 0) - } - function finishImpl(err, final) { - if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) { - error = err - } - if (!error && !final) { - return - } - while (destroys.length) { - destroys.shift()(error) - } - outerSignal === null || outerSignal === undefined ? undefined : outerSignal.removeEventListener('abort', abort) - ac.abort() - if (final) { - if (!error) { - lastStreamCleanup.forEach((fn) => fn()) - } - process.nextTick(callback, error, value) - } - } - let ret - for (let i = 0; i < streams.length; i++) { - const stream = streams[i] - const reading = i < streams.length - 1 - const writing = i > 0 - const end = reading || (opts === null || opts === undefined ? 
undefined : opts.end) !== false - const isLastStream = i === streams.length - 1 - if (isNodeStream(stream)) { - if (end) { - const { destroy, cleanup } = destroyer(stream, reading, writing) - destroys.push(destroy) - if (isReadable(stream) && isLastStream) { - lastStreamCleanup.push(cleanup) - } - } - - // Catch stream errors that occur after pipe/pump has completed. - function onError(err) { - if (err && err.name !== 'AbortError' && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { - finish(err) - } - } - stream.on('error', onError) - if (isReadable(stream) && isLastStream) { - lastStreamCleanup.push(() => { - stream.removeListener('error', onError) - }) - } - } - if (i === 0) { - if (typeof stream === 'function') { - ret = stream({ - signal - }) - if (!isIterable(ret)) { - throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or Stream', 'source', ret) - } - } else if (isIterable(stream) || isReadableNodeStream(stream) || isTransformStream(stream)) { - ret = stream - } else { - ret = Duplex.from(stream) - } - } else if (typeof stream === 'function') { - if (isTransformStream(ret)) { - var _ret - ret = makeAsyncIterable((_ret = ret) === null || _ret === undefined ? undefined : _ret.readable) - } else { - ret = makeAsyncIterable(ret) - } - ret = stream(ret, { - signal - }) - if (reading) { - if (!isIterable(ret, true)) { - throw new ERR_INVALID_RETURN_VALUE('AsyncIterable', `transform[${i - 1}]`, ret) - } - } else { - var _ret2 - if (!PassThrough) { - PassThrough = require('./passthrough') - } - - // If the last argument to pipeline is not a stream - // we must create a proxy stream so that pipeline(...) - // always returns a stream which can be further - // composed through `.pipe(stream)`. - - const pt = new PassThrough({ - objectMode: true - }) - - // Handle Promises/A+ spec, `then` could be a getter that throws on - // second use. - const then = (_ret2 = ret) === null || _ret2 === undefined ? 
undefined : _ret2.then - if (typeof then === 'function') { - finishCount++ - then.call( - ret, - (val) => { - value = val - if (val != null) { - pt.write(val) - } - if (end) { - pt.end() - } - process.nextTick(finish) - }, - (err) => { - pt.destroy(err) - process.nextTick(finish, err) - } - ) - } else if (isIterable(ret, true)) { - finishCount++ - pumpToNode(ret, pt, finish, { - end - }) - } else if (isReadableStream(ret) || isTransformStream(ret)) { - const toRead = ret.readable || ret - finishCount++ - pumpToNode(toRead, pt, finish, { - end - }) - } else { - throw new ERR_INVALID_RETURN_VALUE('AsyncIterable or Promise', 'destination', ret) - } - ret = pt - const { destroy, cleanup } = destroyer(ret, false, true) - destroys.push(destroy) - if (isLastStream) { - lastStreamCleanup.push(cleanup) - } - } - } else if (isNodeStream(stream)) { - if (isReadableNodeStream(ret)) { - finishCount += 2 - const cleanup = pipe(ret, stream, finish, { - end - }) - if (isReadable(stream) && isLastStream) { - lastStreamCleanup.push(cleanup) - } - } else if (isTransformStream(ret) || isReadableStream(ret)) { - const toRead = ret.readable || ret - finishCount++ - pumpToNode(toRead, stream, finish, { - end - }) - } else if (isIterable(ret)) { - finishCount++ - pumpToNode(ret, stream, finish, { - end - }) - } else { - throw new ERR_INVALID_ARG_TYPE( - 'val', - ['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'], - ret - ) - } - ret = stream - } else if (isWebStream(stream)) { - if (isReadableNodeStream(ret)) { - finishCount++ - pumpToWeb(makeAsyncIterable(ret), stream, finish, { - end - }) - } else if (isReadableStream(ret) || isIterable(ret)) { - finishCount++ - pumpToWeb(ret, stream, finish, { - end - }) - } else if (isTransformStream(ret)) { - finishCount++ - pumpToWeb(ret.readable, stream, finish, { - end - }) - } else { - throw new ERR_INVALID_ARG_TYPE( - 'val', - ['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'], - ret - ) - } - ret = stream - } else { - ret = Duplex.from(stream) - } - } - if ( - (signal !== null && signal !== undefined && signal.aborted) || - (outerSignal !== null && outerSignal !== undefined && outerSignal.aborted) - ) { - process.nextTick(abort) - } - return ret -} -function pipe(src, dst, finish, { end }) { - let ended = false - dst.on('close', () => { - if (!ended) { - // Finish if the destination closes before the source has completed. - finish(new ERR_STREAM_PREMATURE_CLOSE()) - } - }) - src.pipe(dst, { - end: false - }) // If end is true we already will have a listener to end dst. - - if (end) { - // Compat. Before node v10.12.0 stdio used to throw an error so - // pipe() did/does not end() stdio destinations. - // Now they allow it but "secretly" don't close the underlying fd. - - function endFn() { - ended = true - dst.end() - } - if (isReadableEnded(src)) { - // End the destination if the source has already ended. - process.nextTick(endFn) - } else { - src.once('end', endFn) - } - } else { - finish() - } - eos( - src, - { - readable: true, - writable: false - }, - (err) => { - const rState = src._readableState - if ( - err && - err.code === 'ERR_STREAM_PREMATURE_CLOSE' && - rState && - rState.ended && - !rState.errored && - !rState.errorEmitted - ) { - // Some readable streams will emit 'close' before 'end'. However, since - // this is on the readable side 'end' should still be emitted if the - // stream has been ended and no error emitted. This should be allowed in - // favor of backwards compatibility. 
Since the stream is piped to a - // destination this should not result in any observable difference. - // We don't need to check if this is a writable premature close since - // eos will only fail with premature close on the reading side for - // duplex streams. - src.once('end', finish).once('error', finish) - } else { - finish(err) - } - } - ) - return eos( - dst, - { - readable: false, - writable: true - }, - finish - ) -} -module.exports = { - pipelineImpl, - pipeline -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/readable.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/readable.js deleted file mode 100644 index 3fc01d1f880932..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/readable.js +++ /dev/null @@ -1,1247 +0,0 @@ -/* replacement start */ - -const process = require('process/') - -/* replacement end */ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -;('use strict') -const { - ArrayPrototypeIndexOf, - NumberIsInteger, - NumberIsNaN, - NumberParseInt, - ObjectDefineProperties, - ObjectKeys, - ObjectSetPrototypeOf, - Promise, - SafeSet, - SymbolAsyncIterator, - Symbol -} = require('../../ours/primordials') -module.exports = Readable -Readable.ReadableState = ReadableState -const { EventEmitter: EE } = require('events') -const { Stream, prependListener } = require('./legacy') -const { Buffer } = require('buffer') -const { addAbortSignal } = require('./add-abort-signal') -const eos = require('./end-of-stream') -let debug = require('../../ours/util').debuglog('stream', (fn) => { - debug = fn -}) -const BufferList = require('./buffer_list') -const destroyImpl = require('./destroy') -const { getHighWaterMark, getDefaultHighWaterMark } = require('./state') -const { - aggregateTwoErrors, - codes: { - ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED, - ERR_OUT_OF_RANGE, - ERR_STREAM_PUSH_AFTER_EOF, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT - } -} = require('../../ours/errors') -const { validateObject } = require('../validators') -const kPaused = Symbol('kPaused') -const { StringDecoder } = require('string_decoder') -const from = require('./from') -ObjectSetPrototypeOf(Readable.prototype, Stream.prototype) -ObjectSetPrototypeOf(Readable, Stream) -const nop = () => {} -const { errorOrDestroy } = destroyImpl -function ReadableState(options, stream, isDuplex) { - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof require('./duplex') - - // Object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away. - this.objectMode = !!(options && options.objectMode) - if (isDuplex) this.objectMode = this.objectMode || !!(options && options.readableObjectMode) - - // The point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - this.highWaterMark = options - ? getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex) - : getDefaultHighWaterMark(false) - - // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift(). - this.buffer = new BufferList() - this.length = 0 - this.pipes = [] - this.flowing = null - this.ended = false - this.endEmitted = false - this.reading = false - - // Stream is still being constructed and cannot be - // destroyed until construction finished or failed. - // Async construction is opt in, therefore we start as - // constructed. - this.constructed = true - - // A flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - this.sync = true - - // Whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false - this.emittedReadable = false - this.readableListening = false - this.resumeScheduled = false - this[kPaused] = null - - // True if the error was already emitted and should not be thrown again. 
- this.errorEmitted = false - - // Should close be emitted on destroy. Defaults to true. - this.emitClose = !options || options.emitClose !== false - - // Should .destroy() be called after 'end' (and potentially 'finish'). - this.autoDestroy = !options || options.autoDestroy !== false - - // Has it been destroyed. - this.destroyed = false - - // Indicates whether the stream has errored. When true no further - // _read calls, 'data' or 'readable' events should occur. This is needed - // since when autoDestroy is disabled we need a way to tell whether the - // stream has failed. - this.errored = null - - // Indicates whether the stream has finished destroying. - this.closed = false - - // True if close has been emitted or would have been emitted - // depending on emitClose. - this.closeEmitted = false - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = (options && options.defaultEncoding) || 'utf8' - - // Ref the piped dest which we need a drain event on it - // type: null | Writable | Set. - this.awaitDrainWriters = null - this.multiAwaitDrain = false - - // If true, a maybeReadMore has been scheduled. - this.readingMore = false - this.dataEmitted = false - this.decoder = null - this.encoding = null - if (options && options.encoding) { - this.decoder = new StringDecoder(options.encoding) - this.encoding = options.encoding - } -} -function Readable(options) { - if (!(this instanceof Readable)) return new Readable(options) - - // Checking for a Stream.Duplex instance is faster here instead of inside - // the ReadableState constructor, at least with V8 6.5. - const isDuplex = this instanceof require('./duplex') - this._readableState = new ReadableState(options, this, isDuplex) - if (options) { - if (typeof options.read === 'function') this._read = options.read - if (typeof options.destroy === 'function') this._destroy = options.destroy - if (typeof options.construct === 'function') this._construct = options.construct - if (options.signal && !isDuplex) addAbortSignal(options.signal, this) - } - Stream.call(this, options) - destroyImpl.construct(this, () => { - if (this._readableState.needReadable) { - maybeReadMore(this, this._readableState) - } - }) -} -Readable.prototype.destroy = destroyImpl.destroy -Readable.prototype._undestroy = destroyImpl.undestroy -Readable.prototype._destroy = function (err, cb) { - cb(err) -} -Readable.prototype[EE.captureRejectionSymbol] = function (err) { - this.destroy(err) -} - -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - return readableAddChunk(this, chunk, encoding, false) -} - -// Unshift should *always* be something directly out of read(). 
-Readable.prototype.unshift = function (chunk, encoding) { - return readableAddChunk(this, chunk, encoding, true) -} -function readableAddChunk(stream, chunk, encoding, addToFront) { - debug('readableAddChunk', chunk) - const state = stream._readableState - let err - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding - if (state.encoding !== encoding) { - if (addToFront && state.encoding) { - // When unshifting, if state.encoding is set, we have to save - // the string in the BufferList with the state encoding. - chunk = Buffer.from(chunk, encoding).toString(state.encoding) - } else { - chunk = Buffer.from(chunk, encoding) - encoding = '' - } - } - } else if (chunk instanceof Buffer) { - encoding = '' - } else if (Stream._isUint8Array(chunk)) { - chunk = Stream._uint8ArrayToBuffer(chunk) - encoding = '' - } else if (chunk != null) { - err = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk) - } - } - if (err) { - errorOrDestroy(stream, err) - } else if (chunk === null) { - state.reading = false - onEofChunk(stream, state) - } else if (state.objectMode || (chunk && chunk.length > 0)) { - if (addToFront) { - if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT()) - else if (state.destroyed || state.errored) return false - else addChunk(stream, state, chunk, true) - } else if (state.ended) { - errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()) - } else if (state.destroyed || state.errored) { - return false - } else { - state.reading = false - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk) - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false) - else maybeReadMore(stream, state) - } else { - addChunk(stream, state, chunk, false) - } - } - } else if (!addToFront) { - state.reading = false - maybeReadMore(stream, state) - } - - // We can push more data if we are below the highWaterMark. - // Also, if we have no data yet, we can stand some more bytes. - // This is to work around cases where hwm=0, such as the repl. - return !state.ended && (state.length < state.highWaterMark || state.length === 0) -} -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync && stream.listenerCount('data') > 0) { - // Use the guard to avoid creating `Set()` repeatedly - // when we have multiple pipes. - if (state.multiAwaitDrain) { - state.awaitDrainWriters.clear() - } else { - state.awaitDrainWriters = null - } - state.dataEmitted = true - stream.emit('data', chunk) - } else { - // Update the buffer info. - state.length += state.objectMode ? 1 : chunk.length - if (addToFront) state.buffer.unshift(chunk) - else state.buffer.push(chunk) - if (state.needReadable) emitReadable(stream) - } - maybeReadMore(stream, state) -} -Readable.prototype.isPaused = function () { - const state = this._readableState - return state[kPaused] === true || state.flowing === false -} - -// Backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - const decoder = new StringDecoder(enc) - this._readableState.decoder = decoder - // If setEncoding(null), decoder.encoding equals utf8. 
- this._readableState.encoding = this._readableState.decoder.encoding - const buffer = this._readableState.buffer - // Iterate over current buffer to convert already stored Buffers: - let content = '' - for (const data of buffer) { - content += decoder.write(data) - } - buffer.clear() - if (content !== '') buffer.push(content) - this._readableState.length = content.length - return this -} - -// Don't raise the hwm > 1GB. -const MAX_HWM = 0x40000000 -function computeNewHighWaterMark(n) { - if (n > MAX_HWM) { - throw new ERR_OUT_OF_RANGE('size', '<= 1GiB', n) - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts. - n-- - n |= n >>> 1 - n |= n >>> 2 - n |= n >>> 4 - n |= n >>> 8 - n |= n >>> 16 - n++ - } - return n -} - -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function howMuchToRead(n, state) { - if (n <= 0 || (state.length === 0 && state.ended)) return 0 - if (state.objectMode) return 1 - if (NumberIsNaN(n)) { - // Only flow one buffer at a time. - if (state.flowing && state.length) return state.buffer.first().length - return state.length - } - if (n <= state.length) return n - return state.ended ? state.length : 0 -} - -// You can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n) - // Same as parseInt(undefined, 10), however V8 7.3 performance regressed - // in this scenario, so we are doing it manually. - if (n === undefined) { - n = NaN - } else if (!NumberIsInteger(n)) { - n = NumberParseInt(n, 10) - } - const state = this._readableState - const nOrig = n - - // If we're asking for more than the current hwm, then raise the hwm. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n) - if (n !== 0) state.emittedReadable = false - - // If we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if ( - n === 0 && - state.needReadable && - ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended) - ) { - debug('read: emitReadable', state.length, state.ended) - if (state.length === 0 && state.ended) endReadable(this) - else emitReadable(this) - return null - } - n = howMuchToRead(n, state) - - // If we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this) - return null - } - - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. 
Actually pull the requested chunks out of the buffer and return. - - // if we need a readable event, then we need to do some reading. - let doRead = state.needReadable - debug('need readable', doRead) - - // If we currently have less than the highWaterMark, then also read some. - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true - debug('length less than watermark', doRead) - } - - // However, if we've ended, then there's no point, if we're already - // reading, then it's unnecessary, if we're constructing we have to wait, - // and if we're destroyed or errored, then it's not allowed, - if (state.ended || state.reading || state.destroyed || state.errored || !state.constructed) { - doRead = false - debug('reading, ended or constructing', doRead) - } else if (doRead) { - debug('do read') - state.reading = true - state.sync = true - // If the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true - - // Call internal read method - try { - this._read(state.highWaterMark) - } catch (err) { - errorOrDestroy(this, err) - } - state.sync = false - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (!state.reading) n = howMuchToRead(nOrig, state) - } - let ret - if (n > 0) ret = fromList(n, state) - else ret = null - if (ret === null) { - state.needReadable = state.length <= state.highWaterMark - n = 0 - } else { - state.length -= n - if (state.multiAwaitDrain) { - state.awaitDrainWriters.clear() - } else { - state.awaitDrainWriters = null - } - } - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true - - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended) endReadable(this) - } - if (ret !== null && !state.errorEmitted && !state.closeEmitted) { - state.dataEmitted = true - this.emit('data', ret) - } - return ret -} -function onEofChunk(stream, state) { - debug('onEofChunk') - if (state.ended) return - if (state.decoder) { - const chunk = state.decoder.end() - if (chunk && chunk.length) { - state.buffer.push(chunk) - state.length += state.objectMode ? 1 : chunk.length - } - } - state.ended = true - if (state.sync) { - // If we are sync, wait until next tick to emit the data. - // Otherwise we risk emitting data in the flow() - // the readable code triggers during a read() call. - emitReadable(stream) - } else { - // Emit 'readable' now to make sure it gets picked up. - state.needReadable = false - state.emittedReadable = true - // We have to emit readable now that we are EOF. Modules - // in the ecosystem (e.g. dicer) rely on this event being sync. - emitReadable_(stream) - } -} - -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. 
-function emitReadable(stream) { - const state = stream._readableState - debug('emitReadable', state.needReadable, state.emittedReadable) - state.needReadable = false - if (!state.emittedReadable) { - debug('emitReadable', state.flowing) - state.emittedReadable = true - process.nextTick(emitReadable_, stream) - } -} -function emitReadable_(stream) { - const state = stream._readableState - debug('emitReadable_', state.destroyed, state.length, state.ended) - if (!state.destroyed && !state.errored && (state.length || state.ended)) { - stream.emit('readable') - state.emittedReadable = false - } - - // The stream needs another readable event if: - // 1. It is not flowing, as the flow mechanism will take - // care of it. - // 2. It is not ended. - // 3. It is below the highWaterMark, so we can schedule - // another readable later. - state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark - flow(stream) -} - -// At this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. -function maybeReadMore(stream, state) { - if (!state.readingMore && state.constructed) { - state.readingMore = true - process.nextTick(maybeReadMore_, stream, state) - } -} -function maybeReadMore_(stream, state) { - // Attempt to read more data if we should. - // - // The conditions for reading more data are (one of): - // - Not enough data buffered (state.length < state.highWaterMark). The loop - // is responsible for filling the buffer with enough data if such data - // is available. If highWaterMark is 0 and we are not in the flowing mode - // we should _not_ attempt to buffer any extra data. We'll get more data - // when the stream consumer calls read() instead. - // - No data in the buffer, and the stream is in flowing mode. In this mode - // the loop below is responsible for ensuring read() is called. Failing to - // call read here would abort the flow and there's no other mechanism for - // continuing the flow if the stream consumer has just subscribed to the - // 'data' event. - // - // In addition to the above conditions to keep reading data, the following - // conditions prevent the data from being read: - // - The stream has ended (state.ended). - // - There is already a pending 'read' operation (state.reading). This is a - // case where the stream has called the implementation defined _read() - // method, but they are processing the call asynchronously and have _not_ - // called push() with new data. In this case we skip performing more - // read()s. The execution ends in this method again after the _read() ends - // up calling push() with more data. - while ( - !state.reading && - !state.ended && - (state.length < state.highWaterMark || (state.flowing && state.length === 0)) - ) { - const len = state.length - debug('maybeReadMore read 0') - stream.read(0) - if (len === state.length) - // Didn't get any data, stop spinning. - break - } - state.readingMore = false -} - -// Abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. 
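// Illustrative sketch (editor-added, not from the vendored file): a minimal concrete
// override of the abstract _read() described above — each call produces one chunk
// until a counter runs out, then EOF is signalled with push(null):
//
//   const { Readable } = require('node:stream')
//   class Counter extends Readable {
//     constructor(max) {
//       super()
//       this.current = 0
//       this.max = max
//     }
//     _read() {
//       this.current += 1
//       if (this.current > this.max) this.push(null)  // EOF
//       else this.push(String(this.current))          // one chunk per _read() call
//     }
//   }
//   new Counter(3).on('data', (c) => console.log(c.toString())) // 1, 2, 3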
-Readable.prototype._read = function (n) { - throw new ERR_METHOD_NOT_IMPLEMENTED('_read()') -} -Readable.prototype.pipe = function (dest, pipeOpts) { - const src = this - const state = this._readableState - if (state.pipes.length === 1) { - if (!state.multiAwaitDrain) { - state.multiAwaitDrain = true - state.awaitDrainWriters = new SafeSet(state.awaitDrainWriters ? [state.awaitDrainWriters] : []) - } - } - state.pipes.push(dest) - debug('pipe count=%d opts=%j', state.pipes.length, pipeOpts) - const doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr - const endFn = doEnd ? onend : unpipe - if (state.endEmitted) process.nextTick(endFn) - else src.once('end', endFn) - dest.on('unpipe', onunpipe) - function onunpipe(readable, unpipeInfo) { - debug('onunpipe') - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true - cleanup() - } - } - } - function onend() { - debug('onend') - dest.end() - } - let ondrain - let cleanedUp = false - function cleanup() { - debug('cleanup') - // Cleanup event handlers once the pipe is broken. - dest.removeListener('close', onclose) - dest.removeListener('finish', onfinish) - if (ondrain) { - dest.removeListener('drain', ondrain) - } - dest.removeListener('error', onerror) - dest.removeListener('unpipe', onunpipe) - src.removeListener('end', onend) - src.removeListener('end', unpipe) - src.removeListener('data', ondata) - cleanedUp = true - - // If the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (ondrain && state.awaitDrainWriters && (!dest._writableState || dest._writableState.needDrain)) ondrain() - } - function pause() { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if (!cleanedUp) { - if (state.pipes.length === 1 && state.pipes[0] === dest) { - debug('false write response, pause', 0) - state.awaitDrainWriters = dest - state.multiAwaitDrain = false - } else if (state.pipes.length > 1 && state.pipes.includes(dest)) { - debug('false write response, pause', state.awaitDrainWriters.size) - state.awaitDrainWriters.add(dest) - } - src.pause() - } - if (!ondrain) { - // When the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - ondrain = pipeOnDrain(src, dest) - dest.on('drain', ondrain) - } - } - src.on('data', ondata) - function ondata(chunk) { - debug('ondata') - const ret = dest.write(chunk) - debug('dest.write', ret) - if (ret === false) { - pause() - } - } - - // If the dest has an error, then stop piping into it. - // However, don't suppress the throwing behavior for this. - function onerror(er) { - debug('onerror', er) - unpipe() - dest.removeListener('error', onerror) - if (dest.listenerCount('error') === 0) { - const s = dest._writableState || dest._readableState - if (s && !s.errorEmitted) { - // User incorrectly emitted 'error' directly on the stream. - errorOrDestroy(dest, er) - } else { - dest.emit('error', er) - } - } - } - - // Make sure our error handler is attached before userland ones. 
- prependListener(dest, 'error', onerror) - - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish) - unpipe() - } - dest.once('close', onclose) - function onfinish() { - debug('onfinish') - dest.removeListener('close', onclose) - unpipe() - } - dest.once('finish', onfinish) - function unpipe() { - debug('unpipe') - src.unpipe(dest) - } - - // Tell the dest that it's being piped to. - dest.emit('pipe', src) - - // Start the flow if it hasn't been started already. - - if (dest.writableNeedDrain === true) { - if (state.flowing) { - pause() - } - } else if (!state.flowing) { - debug('pipe resume') - src.resume() - } - return dest -} -function pipeOnDrain(src, dest) { - return function pipeOnDrainFunctionResult() { - const state = src._readableState - - // `ondrain` will call directly, - // `this` maybe not a reference to dest, - // so we use the real dest here. - if (state.awaitDrainWriters === dest) { - debug('pipeOnDrain', 1) - state.awaitDrainWriters = null - } else if (state.multiAwaitDrain) { - debug('pipeOnDrain', state.awaitDrainWriters.size) - state.awaitDrainWriters.delete(dest) - } - if ((!state.awaitDrainWriters || state.awaitDrainWriters.size === 0) && src.listenerCount('data')) { - src.resume() - } - } -} -Readable.prototype.unpipe = function (dest) { - const state = this._readableState - const unpipeInfo = { - hasUnpiped: false - } - - // If we're not piping anywhere, then do nothing. - if (state.pipes.length === 0) return this - if (!dest) { - // remove all. - const dests = state.pipes - state.pipes = [] - this.pause() - for (let i = 0; i < dests.length; i++) - dests[i].emit('unpipe', this, { - hasUnpiped: false - }) - return this - } - - // Try to find the right one. - const index = ArrayPrototypeIndexOf(state.pipes, dest) - if (index === -1) return this - state.pipes.splice(index, 1) - if (state.pipes.length === 0) this.pause() - dest.emit('unpipe', this, unpipeInfo) - return this -} - -// Set up data events if they are asked for -// Ensure readable listeners eventually get something. -Readable.prototype.on = function (ev, fn) { - const res = Stream.prototype.on.call(this, ev, fn) - const state = this._readableState - if (ev === 'data') { - // Update readableListening so that resume() may be a no-op - // a few lines down. This is needed to support once('readable'). - state.readableListening = this.listenerCount('readable') > 0 - - // Try start flowing on next tick if stream isn't explicitly paused. - if (state.flowing !== false) this.resume() - } else if (ev === 'readable') { - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true - state.flowing = false - state.emittedReadable = false - debug('on readable', state.length, state.reading) - if (state.length) { - emitReadable(this) - } else if (!state.reading) { - process.nextTick(nReadingNextTick, this) - } - } - } - return res -} -Readable.prototype.addListener = Readable.prototype.on -Readable.prototype.removeListener = function (ev, fn) { - const res = Stream.prototype.removeListener.call(this, ev, fn) - if (ev === 'readable') { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. This means that calling - // resume within the same tick will have no - // effect. 
- process.nextTick(updateReadableListening, this) - } - return res -} -Readable.prototype.off = Readable.prototype.removeListener -Readable.prototype.removeAllListeners = function (ev) { - const res = Stream.prototype.removeAllListeners.apply(this, arguments) - if (ev === 'readable' || ev === undefined) { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this) - } - return res -} -function updateReadableListening(self) { - const state = self._readableState - state.readableListening = self.listenerCount('readable') > 0 - if (state.resumeScheduled && state[kPaused] === false) { - // Flowing needs to be set to true now, otherwise - // the upcoming resume will not flow. - state.flowing = true - - // Crude way to check if we should resume. - } else if (self.listenerCount('data') > 0) { - self.resume() - } else if (!state.readableListening) { - state.flowing = null - } -} -function nReadingNextTick(self) { - debug('readable nexttick read 0') - self.read(0) -} - -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - const state = this._readableState - if (!state.flowing) { - debug('resume') - // We flow only if there is no one listening - // for readable, but we still have to call - // resume(). - state.flowing = !state.readableListening - resume(this, state) - } - state[kPaused] = false - return this -} -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true - process.nextTick(resume_, stream, state) - } -} -function resume_(stream, state) { - debug('resume', state.reading) - if (!state.reading) { - stream.read(0) - } - state.resumeScheduled = false - stream.emit('resume') - flow(stream) - if (state.flowing && !state.reading) stream.read(0) -} -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing) - if (this._readableState.flowing !== false) { - debug('pause') - this._readableState.flowing = false - this.emit('pause') - } - this._readableState[kPaused] = true - return this -} -function flow(stream) { - const state = stream._readableState - debug('flow', state.flowing) - while (state.flowing && stream.read() !== null); -} - -// Wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - let paused = false - - // TODO (ronag): Should this.destroy(err) emit - // 'error' on the wrapped stream? Would require - // a static factory method, e.g. Readable.wrap(stream). - - stream.on('data', (chunk) => { - if (!this.push(chunk) && stream.pause) { - paused = true - stream.pause() - } - }) - stream.on('end', () => { - this.push(null) - }) - stream.on('error', (err) => { - errorOrDestroy(this, err) - }) - stream.on('close', () => { - this.destroy() - }) - stream.on('destroy', () => { - this.destroy() - }) - this._read = () => { - if (paused && stream.resume) { - paused = false - stream.resume() - } - } - - // Proxy all the other methods. Important when wrapping filters and duplexes. 
- const streamKeys = ObjectKeys(stream) - for (let j = 1; j < streamKeys.length; j++) { - const i = streamKeys[j] - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = stream[i].bind(stream) - } - } - return this -} -Readable.prototype[SymbolAsyncIterator] = function () { - return streamToAsyncIterator(this) -} -Readable.prototype.iterator = function (options) { - if (options !== undefined) { - validateObject(options, 'options') - } - return streamToAsyncIterator(this, options) -} -function streamToAsyncIterator(stream, options) { - if (typeof stream.read !== 'function') { - stream = Readable.wrap(stream, { - objectMode: true - }) - } - const iter = createAsyncIterator(stream, options) - iter.stream = stream - return iter -} -async function* createAsyncIterator(stream, options) { - let callback = nop - function next(resolve) { - if (this === stream) { - callback() - callback = nop - } else { - callback = resolve - } - } - stream.on('readable', next) - let error - const cleanup = eos( - stream, - { - writable: false - }, - (err) => { - error = err ? aggregateTwoErrors(error, err) : null - callback() - callback = nop - } - ) - try { - while (true) { - const chunk = stream.destroyed ? null : stream.read() - if (chunk !== null) { - yield chunk - } else if (error) { - throw error - } else if (error === null) { - return - } else { - await new Promise(next) - } - } - } catch (err) { - error = aggregateTwoErrors(error, err) - throw error - } finally { - if ( - (error || (options === null || options === undefined ? undefined : options.destroyOnReturn) !== false) && - (error === undefined || stream._readableState.autoDestroy) - ) { - destroyImpl.destroyer(stream, null) - } else { - stream.off('readable', next) - cleanup() - } - } -} - -// Making it explicit these properties are not enumerable -// because otherwise some prototype manipulation in -// userland will fail. -ObjectDefineProperties(Readable.prototype, { - readable: { - __proto__: null, - get() { - const r = this._readableState - // r.readable === false means that this is part of a Duplex stream - // where the readable side was disabled upon construction. - // Compat. The user might manually disable readable side through - // deprecated setter. - return !!r && r.readable !== false && !r.destroyed && !r.errorEmitted && !r.endEmitted - }, - set(val) { - // Backwards compat. 
- if (this._readableState) { - this._readableState.readable = !!val - } - } - }, - readableDidRead: { - __proto__: null, - enumerable: false, - get: function () { - return this._readableState.dataEmitted - } - }, - readableAborted: { - __proto__: null, - enumerable: false, - get: function () { - return !!( - this._readableState.readable !== false && - (this._readableState.destroyed || this._readableState.errored) && - !this._readableState.endEmitted - ) - } - }, - readableHighWaterMark: { - __proto__: null, - enumerable: false, - get: function () { - return this._readableState.highWaterMark - } - }, - readableBuffer: { - __proto__: null, - enumerable: false, - get: function () { - return this._readableState && this._readableState.buffer - } - }, - readableFlowing: { - __proto__: null, - enumerable: false, - get: function () { - return this._readableState.flowing - }, - set: function (state) { - if (this._readableState) { - this._readableState.flowing = state - } - } - }, - readableLength: { - __proto__: null, - enumerable: false, - get() { - return this._readableState.length - } - }, - readableObjectMode: { - __proto__: null, - enumerable: false, - get() { - return this._readableState ? this._readableState.objectMode : false - } - }, - readableEncoding: { - __proto__: null, - enumerable: false, - get() { - return this._readableState ? this._readableState.encoding : null - } - }, - errored: { - __proto__: null, - enumerable: false, - get() { - return this._readableState ? this._readableState.errored : null - } - }, - closed: { - __proto__: null, - get() { - return this._readableState ? this._readableState.closed : false - } - }, - destroyed: { - __proto__: null, - enumerable: false, - get() { - return this._readableState ? this._readableState.destroyed : false - }, - set(value) { - // We ignore the value if the stream - // has not been initialized yet. - if (!this._readableState) { - return - } - - // Backward compatibility, the user is explicitly - // managing destroyed. - this._readableState.destroyed = value - } - }, - readableEnded: { - __proto__: null, - enumerable: false, - get() { - return this._readableState ? this._readableState.endEmitted : false - } - } -}) -ObjectDefineProperties(ReadableState.prototype, { - // Legacy getter for `pipesCount`. - pipesCount: { - __proto__: null, - get() { - return this.pipes.length - } - }, - // Legacy property for `paused`. - paused: { - __proto__: null, - get() { - return this[kPaused] !== false - }, - set(value) { - this[kPaused] = !!value - } - } -}) - -// Exposed for testing purposes only. -Readable._fromList = fromList - -// Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromList(n, state) { - // nothing buffered. - if (state.length === 0) return null - let ret - if (state.objectMode) ret = state.buffer.shift() - else if (!n || n >= state.length) { - // Read it all, truncate the list. - if (state.decoder) ret = state.buffer.join('') - else if (state.buffer.length === 1) ret = state.buffer.first() - else ret = state.buffer.concat(state.length) - state.buffer.clear() - } else { - // read part of list. 
- ret = state.buffer.consume(n, state.decoder) - } - return ret -} -function endReadable(stream) { - const state = stream._readableState - debug('endReadable', state.endEmitted) - if (!state.endEmitted) { - state.ended = true - process.nextTick(endReadableNT, state, stream) - } -} -function endReadableNT(state, stream) { - debug('endReadableNT', state.endEmitted, state.length) - - // Check that we didn't get one last unshift. - if (!state.errored && !state.closeEmitted && !state.endEmitted && state.length === 0) { - state.endEmitted = true - stream.emit('end') - if (stream.writable && stream.allowHalfOpen === false) { - process.nextTick(endWritableNT, stream) - } else if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the writable side is ready for autoDestroy as well. - const wState = stream._writableState - const autoDestroy = - !wState || - (wState.autoDestroy && - // We don't expect the writable to ever 'finish' - // if writable is explicitly set to false. - (wState.finished || wState.writable === false)) - if (autoDestroy) { - stream.destroy() - } - } - } -} -function endWritableNT(stream) { - const writable = stream.writable && !stream.writableEnded && !stream.destroyed - if (writable) { - stream.end() - } -} -Readable.from = function (iterable, opts) { - return from(Readable, iterable, opts) -} -let webStreamsAdapters - -// Lazy to avoid circular references -function lazyWebStreams() { - if (webStreamsAdapters === undefined) webStreamsAdapters = {} - return webStreamsAdapters -} -Readable.fromWeb = function (readableStream, options) { - return lazyWebStreams().newStreamReadableFromReadableStream(readableStream, options) -} -Readable.toWeb = function (streamReadable, options) { - return lazyWebStreams().newReadableStreamFromStreamReadable(streamReadable, options) -} -Readable.wrap = function (src, options) { - var _ref, _src$readableObjectMo - return new Readable({ - objectMode: - (_ref = - (_src$readableObjectMo = src.readableObjectMode) !== null && _src$readableObjectMo !== undefined - ? _src$readableObjectMo - : src.objectMode) !== null && _ref !== undefined - ? _ref - : true, - ...options, - destroy(err, callback) { - destroyImpl.destroyer(src, err) - callback(err) - } - }).wrap(src) -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/state.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/state.js deleted file mode 100644 index 18c2d845ff0186..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/state.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict' - -const { MathFloor, NumberIsInteger } = require('../../ours/primordials') -const { ERR_INVALID_ARG_VALUE } = require('../../ours/errors').codes -function highWaterMarkFrom(options, isDuplex, duplexKey) { - return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null -} -function getDefaultHighWaterMark(objectMode) { - return objectMode ? 16 : 16 * 1024 -} -function getHighWaterMark(state, options, duplexKey, isDuplex) { - const hwm = highWaterMarkFrom(options, isDuplex, duplexKey) - if (hwm != null) { - if (!NumberIsInteger(hwm) || hwm < 0) { - const name = isDuplex ? 
`options.${duplexKey}` : 'options.highWaterMark' - throw new ERR_INVALID_ARG_VALUE(name, hwm) - } - return MathFloor(hwm) - } - - // Default value - return getDefaultHighWaterMark(state.objectMode) -} -module.exports = { - getHighWaterMark, - getDefaultHighWaterMark -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/transform.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/transform.js deleted file mode 100644 index fa9413a447463c..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/transform.js +++ /dev/null @@ -1,180 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. 
Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. - -'use strict' - -const { ObjectSetPrototypeOf, Symbol } = require('../../ours/primordials') -module.exports = Transform -const { ERR_METHOD_NOT_IMPLEMENTED } = require('../../ours/errors').codes -const Duplex = require('./duplex') -const { getHighWaterMark } = require('./state') -ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype) -ObjectSetPrototypeOf(Transform, Duplex) -const kCallback = Symbol('kCallback') -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options) - - // TODO (ronag): This should preferably always be - // applied but would be semver-major. Or even better; - // make Transform a Readable with the Writable interface. - const readableHighWaterMark = options ? getHighWaterMark(this, options, 'readableHighWaterMark', true) : null - if (readableHighWaterMark === 0) { - // A Duplex will buffer both on the writable and readable side while - // a Transform just wants to buffer hwm number of elements. To avoid - // buffering twice we disable buffering on the writable side. - options = { - ...options, - highWaterMark: null, - readableHighWaterMark, - // TODO (ronag): 0 is not optimal since we have - // a "bug" where we check needDrain before calling _write and not after. - // Refs: https://github.com/nodejs/node/pull/32887 - // Refs: https://github.com/nodejs/node/pull/35941 - writableHighWaterMark: options.writableHighWaterMark || 0 - } - } - Duplex.call(this, options) - - // We have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. - this._readableState.sync = false - this[kCallback] = null - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform - if (typeof options.flush === 'function') this._flush = options.flush - } - - // When the writable side finishes, then flush out anything remaining. - // Backwards compat. Some Transform streams incorrectly implement _final - // instead of or in addition to _flush. By using 'prefinish' instead of - // implementing _final we continue supporting this unfortunate use case. 
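// Illustrative sketch (editor-added, not from the vendored file): a Transform built
// from options, showing the `transform`/`flush` pair the surrounding comments refer
// to — `transform` runs per written chunk, `flush` runs once the writable side ends:
//
//   const { Transform } = require('node:stream')
//   let total = 0
//   const byteCounter = new Transform({
//     transform(chunk, encoding, callback) {
//       total += chunk.length
//       callback(null, chunk)            // pass the data through unchanged
//     },
//     flush(callback) {
//       this.push(`\n${total} bytes\n`)  // emit the summary before 'end'
//       callback()
//     }
//   })
//   byteCounter.on('data', (c) => process.stdout.write(c))
//   byteCounter.end('hello world')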
- this.on('prefinish', prefinish) -} -function final(cb) { - if (typeof this._flush === 'function' && !this.destroyed) { - this._flush((er, data) => { - if (er) { - if (cb) { - cb(er) - } else { - this.destroy(er) - } - return - } - if (data != null) { - this.push(data) - } - this.push(null) - if (cb) { - cb() - } - }) - } else { - this.push(null) - if (cb) { - cb() - } - } -} -function prefinish() { - if (this._final !== final) { - final.call(this) - } -} -Transform.prototype._final = final -Transform.prototype._transform = function (chunk, encoding, callback) { - throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()') -} -Transform.prototype._write = function (chunk, encoding, callback) { - const rState = this._readableState - const wState = this._writableState - const length = rState.length - this._transform(chunk, encoding, (err, val) => { - if (err) { - callback(err) - return - } - if (val != null) { - this.push(val) - } - if ( - wState.ended || - // Backwards compat. - length === rState.length || - // Backwards compat. - rState.length < rState.highWaterMark - ) { - callback() - } else { - this[kCallback] = callback - } - }) -} -Transform.prototype._read = function () { - if (this[kCallback]) { - const callback = this[kCallback] - this[kCallback] = null - callback() - } -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/utils.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/utils.js deleted file mode 100644 index e589ad96c6924e..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/utils.js +++ /dev/null @@ -1,321 +0,0 @@ -'use strict' - -const { Symbol, SymbolAsyncIterator, SymbolIterator, SymbolFor } = require('../../ours/primordials') -const kDestroyed = Symbol('kDestroyed') -const kIsErrored = Symbol('kIsErrored') -const kIsReadable = Symbol('kIsReadable') -const kIsDisturbed = Symbol('kIsDisturbed') -const kIsClosedPromise = SymbolFor('nodejs.webstream.isClosedPromise') -const kControllerErrorFunction = SymbolFor('nodejs.webstream.controllerErrorFunction') -function isReadableNodeStream(obj, strict = false) { - var _obj$_readableState - return !!( - ( - obj && - typeof obj.pipe === 'function' && - typeof obj.on === 'function' && - (!strict || (typeof obj.pause === 'function' && typeof obj.resume === 'function')) && - (!obj._writableState || - ((_obj$_readableState = obj._readableState) === null || _obj$_readableState === undefined - ? undefined - : _obj$_readableState.readable) !== false) && - // Duplex - (!obj._writableState || obj._readableState) - ) // Writable has .pipe. - ) -} - -function isWritableNodeStream(obj) { - var _obj$_writableState - return !!( - ( - obj && - typeof obj.write === 'function' && - typeof obj.on === 'function' && - (!obj._readableState || - ((_obj$_writableState = obj._writableState) === null || _obj$_writableState === undefined - ? 
undefined - : _obj$_writableState.writable) !== false) - ) // Duplex - ) -} - -function isDuplexNodeStream(obj) { - return !!( - obj && - typeof obj.pipe === 'function' && - obj._readableState && - typeof obj.on === 'function' && - typeof obj.write === 'function' - ) -} -function isNodeStream(obj) { - return ( - obj && - (obj._readableState || - obj._writableState || - (typeof obj.write === 'function' && typeof obj.on === 'function') || - (typeof obj.pipe === 'function' && typeof obj.on === 'function')) - ) -} -function isReadableStream(obj) { - return !!( - obj && - !isNodeStream(obj) && - typeof obj.pipeThrough === 'function' && - typeof obj.getReader === 'function' && - typeof obj.cancel === 'function' - ) -} -function isWritableStream(obj) { - return !!(obj && !isNodeStream(obj) && typeof obj.getWriter === 'function' && typeof obj.abort === 'function') -} -function isTransformStream(obj) { - return !!(obj && !isNodeStream(obj) && typeof obj.readable === 'object' && typeof obj.writable === 'object') -} -function isWebStream(obj) { - return isReadableStream(obj) || isWritableStream(obj) || isTransformStream(obj) -} -function isIterable(obj, isAsync) { - if (obj == null) return false - if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function' - if (isAsync === false) return typeof obj[SymbolIterator] === 'function' - return typeof obj[SymbolAsyncIterator] === 'function' || typeof obj[SymbolIterator] === 'function' -} -function isDestroyed(stream) { - if (!isNodeStream(stream)) return null - const wState = stream._writableState - const rState = stream._readableState - const state = wState || rState - return !!(stream.destroyed || stream[kDestroyed] || (state !== null && state !== undefined && state.destroyed)) -} - -// Have been end():d. -function isWritableEnded(stream) { - if (!isWritableNodeStream(stream)) return null - if (stream.writableEnded === true) return true - const wState = stream._writableState - if (wState !== null && wState !== undefined && wState.errored) return false - if (typeof (wState === null || wState === undefined ? undefined : wState.ended) !== 'boolean') return null - return wState.ended -} - -// Have emitted 'finish'. -function isWritableFinished(stream, strict) { - if (!isWritableNodeStream(stream)) return null - if (stream.writableFinished === true) return true - const wState = stream._writableState - if (wState !== null && wState !== undefined && wState.errored) return false - if (typeof (wState === null || wState === undefined ? undefined : wState.finished) !== 'boolean') return null - return !!(wState.finished || (strict === false && wState.ended === true && wState.length === 0)) -} - -// Have been push(null):d. -function isReadableEnded(stream) { - if (!isReadableNodeStream(stream)) return null - if (stream.readableEnded === true) return true - const rState = stream._readableState - if (!rState || rState.errored) return false - if (typeof (rState === null || rState === undefined ? undefined : rState.ended) !== 'boolean') return null - return rState.ended -} - -// Have emitted 'end'. -function isReadableFinished(stream, strict) { - if (!isReadableNodeStream(stream)) return null - const rState = stream._readableState - if (rState !== null && rState !== undefined && rState.errored) return false - if (typeof (rState === null || rState === undefined ? 
undefined : rState.endEmitted) !== 'boolean') return null - return !!(rState.endEmitted || (strict === false && rState.ended === true && rState.length === 0)) -} -function isReadable(stream) { - if (stream && stream[kIsReadable] != null) return stream[kIsReadable] - if (typeof (stream === null || stream === undefined ? undefined : stream.readable) !== 'boolean') return null - if (isDestroyed(stream)) return false - return isReadableNodeStream(stream) && stream.readable && !isReadableFinished(stream) -} -function isWritable(stream) { - if (typeof (stream === null || stream === undefined ? undefined : stream.writable) !== 'boolean') return null - if (isDestroyed(stream)) return false - return isWritableNodeStream(stream) && stream.writable && !isWritableEnded(stream) -} -function isFinished(stream, opts) { - if (!isNodeStream(stream)) { - return null - } - if (isDestroyed(stream)) { - return true - } - if ((opts === null || opts === undefined ? undefined : opts.readable) !== false && isReadable(stream)) { - return false - } - if ((opts === null || opts === undefined ? undefined : opts.writable) !== false && isWritable(stream)) { - return false - } - return true -} -function isWritableErrored(stream) { - var _stream$_writableStat, _stream$_writableStat2 - if (!isNodeStream(stream)) { - return null - } - if (stream.writableErrored) { - return stream.writableErrored - } - return (_stream$_writableStat = - (_stream$_writableStat2 = stream._writableState) === null || _stream$_writableStat2 === undefined - ? undefined - : _stream$_writableStat2.errored) !== null && _stream$_writableStat !== undefined - ? _stream$_writableStat - : null -} -function isReadableErrored(stream) { - var _stream$_readableStat, _stream$_readableStat2 - if (!isNodeStream(stream)) { - return null - } - if (stream.readableErrored) { - return stream.readableErrored - } - return (_stream$_readableStat = - (_stream$_readableStat2 = stream._readableState) === null || _stream$_readableStat2 === undefined - ? undefined - : _stream$_readableStat2.errored) !== null && _stream$_readableStat !== undefined - ? _stream$_readableStat - : null -} -function isClosed(stream) { - if (!isNodeStream(stream)) { - return null - } - if (typeof stream.closed === 'boolean') { - return stream.closed - } - const wState = stream._writableState - const rState = stream._readableState - if ( - typeof (wState === null || wState === undefined ? undefined : wState.closed) === 'boolean' || - typeof (rState === null || rState === undefined ? undefined : rState.closed) === 'boolean' - ) { - return ( - (wState === null || wState === undefined ? undefined : wState.closed) || - (rState === null || rState === undefined ? undefined : rState.closed) - ) - } - if (typeof stream._closed === 'boolean' && isOutgoingMessage(stream)) { - return stream._closed - } - return null -} -function isOutgoingMessage(stream) { - return ( - typeof stream._closed === 'boolean' && - typeof stream._defaultKeepAlive === 'boolean' && - typeof stream._removedConnection === 'boolean' && - typeof stream._removedContLen === 'boolean' - ) -} -function isServerResponse(stream) { - return typeof stream._sent100 === 'boolean' && isOutgoingMessage(stream) -} -function isServerRequest(stream) { - var _stream$req - return ( - typeof stream._consuming === 'boolean' && - typeof stream._dumped === 'boolean' && - ((_stream$req = stream.req) === null || _stream$req === undefined ? 
undefined : _stream$req.upgradeOrConnect) === - undefined - ) -} -function willEmitClose(stream) { - if (!isNodeStream(stream)) return null - const wState = stream._writableState - const rState = stream._readableState - const state = wState || rState - return ( - (!state && isServerResponse(stream)) || !!(state && state.autoDestroy && state.emitClose && state.closed === false) - ) -} -function isDisturbed(stream) { - var _stream$kIsDisturbed - return !!( - stream && - ((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && _stream$kIsDisturbed !== undefined - ? _stream$kIsDisturbed - : stream.readableDidRead || stream.readableAborted) - ) -} -function isErrored(stream) { - var _ref, - _ref2, - _ref3, - _ref4, - _ref5, - _stream$kIsErrored, - _stream$_readableStat3, - _stream$_writableStat3, - _stream$_readableStat4, - _stream$_writableStat4 - return !!( - stream && - ((_ref = - (_ref2 = - (_ref3 = - (_ref4 = - (_ref5 = - (_stream$kIsErrored = stream[kIsErrored]) !== null && _stream$kIsErrored !== undefined - ? _stream$kIsErrored - : stream.readableErrored) !== null && _ref5 !== undefined - ? _ref5 - : stream.writableErrored) !== null && _ref4 !== undefined - ? _ref4 - : (_stream$_readableStat3 = stream._readableState) === null || _stream$_readableStat3 === undefined - ? undefined - : _stream$_readableStat3.errorEmitted) !== null && _ref3 !== undefined - ? _ref3 - : (_stream$_writableStat3 = stream._writableState) === null || _stream$_writableStat3 === undefined - ? undefined - : _stream$_writableStat3.errorEmitted) !== null && _ref2 !== undefined - ? _ref2 - : (_stream$_readableStat4 = stream._readableState) === null || _stream$_readableStat4 === undefined - ? undefined - : _stream$_readableStat4.errored) !== null && _ref !== undefined - ? _ref - : (_stream$_writableStat4 = stream._writableState) === null || _stream$_writableStat4 === undefined - ? undefined - : _stream$_writableStat4.errored) - ) -} -module.exports = { - kDestroyed, - isDisturbed, - kIsDisturbed, - isErrored, - kIsErrored, - isReadable, - kIsReadable, - kIsClosedPromise, - kControllerErrorFunction, - isClosed, - isDestroyed, - isDuplexNodeStream, - isFinished, - isIterable, - isReadableNodeStream, - isReadableStream, - isReadableEnded, - isReadableFinished, - isReadableErrored, - isNodeStream, - isWebStream, - isWritable, - isWritableNodeStream, - isWritableStream, - isWritableEnded, - isWritableFinished, - isWritableErrored, - isServerRequest, - isServerResponse, - willEmitClose, - isTransformStream -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/writable.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/writable.js deleted file mode 100644 index 8a28003465766d..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/writable.js +++ /dev/null @@ -1,817 +0,0 @@ -/* replacement start */ - -const process = require('process/') - -/* replacement end */ -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. - -;('use strict') -const { - ArrayPrototypeSlice, - Error, - FunctionPrototypeSymbolHasInstance, - ObjectDefineProperty, - ObjectDefineProperties, - ObjectSetPrototypeOf, - StringPrototypeToLowerCase, - Symbol, - SymbolHasInstance -} = require('../../ours/primordials') -module.exports = Writable -Writable.WritableState = WritableState -const { EventEmitter: EE } = require('events') -const Stream = require('./legacy').Stream -const { Buffer } = require('buffer') -const destroyImpl = require('./destroy') -const { addAbortSignal } = require('./add-abort-signal') -const { getHighWaterMark, getDefaultHighWaterMark } = require('./state') -const { - ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK, - ERR_STREAM_CANNOT_PIPE, - ERR_STREAM_DESTROYED, - ERR_STREAM_ALREADY_FINISHED, - ERR_STREAM_NULL_VALUES, - ERR_STREAM_WRITE_AFTER_END, - ERR_UNKNOWN_ENCODING -} = require('../../ours/errors').codes -const { errorOrDestroy } = destroyImpl -ObjectSetPrototypeOf(Writable.prototype, Stream.prototype) -ObjectSetPrototypeOf(Writable, Stream) -function nop() {} -const kOnFinished = Symbol('kOnFinished') -function WritableState(options, stream, isDuplex) { - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream, - // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof require('./duplex') - - // Object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!(options && options.objectMode) - if (isDuplex) this.objectMode = this.objectMode || !!(options && options.writableObjectMode) - - // The point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write(). - this.highWaterMark = options - ? getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex) - : getDefaultHighWaterMark(false) - - // if _final has been called. - this.finalCalled = false - - // drain event flag. 
- this.needDrain = false - // At the start of calling end() - this.ending = false - // When end() has been called, and returned. - this.ended = false - // When 'finish' is emitted. - this.finished = false - - // Has it been destroyed - this.destroyed = false - - // Should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - const noDecode = !!(options && options.decodeStrings === false) - this.decodeStrings = !noDecode - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = (options && options.defaultEncoding) || 'utf8' - - // Not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0 - - // A flag to see when we're in the middle of a write. - this.writing = false - - // When true all writes will be buffered until .uncork() call. - this.corked = 0 - - // A flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true - - // A flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false - - // The callback that's passed to _write(chunk, cb). - this.onwrite = onwrite.bind(undefined, stream) - - // The callback that the user supplies to write(chunk, encoding, cb). - this.writecb = null - - // The amount that is being written when _write is called. - this.writelen = 0 - - // Storage for data passed to the afterWrite() callback in case of - // synchronous _write() completion. - this.afterWriteTickInfo = null - resetBuffer(this) - - // Number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted. - this.pendingcb = 0 - - // Stream is still being constructed and cannot be - // destroyed until construction finished or failed. - // Async construction is opt in, therefore we start as - // constructed. - this.constructed = true - - // Emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams. - this.prefinished = false - - // True if the error was already emitted and should not be thrown again. - this.errorEmitted = false - - // Should close be emitted on destroy. Defaults to true. - this.emitClose = !options || options.emitClose !== false - - // Should .destroy() be called after 'finish' (and potentially 'end'). - this.autoDestroy = !options || options.autoDestroy !== false - - // Indicates whether the stream has errored. When true all write() calls - // should return false. This is needed since when autoDestroy - // is disabled we need a way to tell whether the stream has failed. - this.errored = null - - // Indicates whether the stream has finished destroying. - this.closed = false - - // True if close has been emitted or would have been emitted - // depending on emitClose. 
- this.closeEmitted = false - this[kOnFinished] = [] -} -function resetBuffer(state) { - state.buffered = [] - state.bufferedIndex = 0 - state.allBuffers = true - state.allNoop = true -} -WritableState.prototype.getBuffer = function getBuffer() { - return ArrayPrototypeSlice(this.buffered, this.bufferedIndex) -} -ObjectDefineProperty(WritableState.prototype, 'bufferedRequestCount', { - __proto__: null, - get() { - return this.buffered.length - this.bufferedIndex - } -}) -function Writable(options) { - // Writable ctor is applied to Duplexes, too. - // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. - - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. - - // Checking for a Stream.Duplex instance is faster here instead of inside - // the WritableState constructor, at least with V8 6.5. - const isDuplex = this instanceof require('./duplex') - if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) return new Writable(options) - this._writableState = new WritableState(options, this, isDuplex) - if (options) { - if (typeof options.write === 'function') this._write = options.write - if (typeof options.writev === 'function') this._writev = options.writev - if (typeof options.destroy === 'function') this._destroy = options.destroy - if (typeof options.final === 'function') this._final = options.final - if (typeof options.construct === 'function') this._construct = options.construct - if (options.signal) addAbortSignal(options.signal, this) - } - Stream.call(this, options) - destroyImpl.construct(this, () => { - const state = this._writableState - if (!state.writing) { - clearBuffer(this, state) - } - finishMaybe(this, state) - }) -} -ObjectDefineProperty(Writable, SymbolHasInstance, { - __proto__: null, - value: function (object) { - if (FunctionPrototypeSymbolHasInstance(this, object)) return true - if (this !== Writable) return false - return object && object._writableState instanceof WritableState - } -}) - -// Otherwise people can pipe Writable streams, which is just wrong. 
-Writable.prototype.pipe = function () { - errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()) -} -function _write(stream, chunk, encoding, cb) { - const state = stream._writableState - if (typeof encoding === 'function') { - cb = encoding - encoding = state.defaultEncoding - } else { - if (!encoding) encoding = state.defaultEncoding - else if (encoding !== 'buffer' && !Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding) - if (typeof cb !== 'function') cb = nop - } - if (chunk === null) { - throw new ERR_STREAM_NULL_VALUES() - } else if (!state.objectMode) { - if (typeof chunk === 'string') { - if (state.decodeStrings !== false) { - chunk = Buffer.from(chunk, encoding) - encoding = 'buffer' - } - } else if (chunk instanceof Buffer) { - encoding = 'buffer' - } else if (Stream._isUint8Array(chunk)) { - chunk = Stream._uint8ArrayToBuffer(chunk) - encoding = 'buffer' - } else { - throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk) - } - } - let err - if (state.ending) { - err = new ERR_STREAM_WRITE_AFTER_END() - } else if (state.destroyed) { - err = new ERR_STREAM_DESTROYED('write') - } - if (err) { - process.nextTick(cb, err) - errorOrDestroy(stream, err, true) - return err - } - state.pendingcb++ - return writeOrBuffer(stream, state, chunk, encoding, cb) -} -Writable.prototype.write = function (chunk, encoding, cb) { - return _write(this, chunk, encoding, cb) === true -} -Writable.prototype.cork = function () { - this._writableState.corked++ -} -Writable.prototype.uncork = function () { - const state = this._writableState - if (state.corked) { - state.corked-- - if (!state.writing) clearBuffer(this, state) - } -} -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. - if (typeof encoding === 'string') encoding = StringPrototypeToLowerCase(encoding) - if (!Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding) - this._writableState.defaultEncoding = encoding - return this -} - -// If we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, chunk, encoding, callback) { - const len = state.objectMode ? 1 : chunk.length - state.length += len - - // stream._write resets state.length - const ret = state.length < state.highWaterMark - // We must ensure that previous needDrain will not be reset to false. - if (!ret) state.needDrain = true - if (state.writing || state.corked || state.errored || !state.constructed) { - state.buffered.push({ - chunk, - encoding, - callback - }) - if (state.allBuffers && encoding !== 'buffer') { - state.allBuffers = false - } - if (state.allNoop && callback !== nop) { - state.allNoop = false - } - } else { - state.writelen = len - state.writecb = callback - state.writing = true - state.sync = true - stream._write(chunk, encoding, state.onwrite) - state.sync = false - } - - // Return false if errored or destroyed in order to break - // any synchronous while(stream.write(data)) loops. 
- return ret && !state.errored && !state.destroyed -} -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len - state.writecb = cb - state.writing = true - state.sync = true - if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write')) - else if (writev) stream._writev(chunk, state.onwrite) - else stream._write(chunk, encoding, state.onwrite) - state.sync = false -} -function onwriteError(stream, state, er, cb) { - --state.pendingcb - cb(er) - // Ensure callbacks are invoked even when autoDestroy is - // not enabled. Passing `er` here doesn't make sense since - // it's related to one specific write, not to the buffered - // writes. - errorBuffer(state) - // This can emit error, but error must always follow cb. - errorOrDestroy(stream, er) -} -function onwrite(stream, er) { - const state = stream._writableState - const sync = state.sync - const cb = state.writecb - if (typeof cb !== 'function') { - errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK()) - return - } - state.writing = false - state.writecb = null - state.length -= state.writelen - state.writelen = 0 - if (er) { - // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 - er.stack // eslint-disable-line no-unused-expressions - - if (!state.errored) { - state.errored = er - } - - // In case of duplex streams we need to notify the readable side of the - // error. - if (stream._readableState && !stream._readableState.errored) { - stream._readableState.errored = er - } - if (sync) { - process.nextTick(onwriteError, stream, state, er, cb) - } else { - onwriteError(stream, state, er, cb) - } - } else { - if (state.buffered.length > state.bufferedIndex) { - clearBuffer(stream, state) - } - if (sync) { - // It is a common case that the callback passed to .write() is always - // the same. In that case, we do not schedule a new nextTick(), but - // rather just increase a counter, to improve performance and avoid - // memory allocations. - if (state.afterWriteTickInfo !== null && state.afterWriteTickInfo.cb === cb) { - state.afterWriteTickInfo.count++ - } else { - state.afterWriteTickInfo = { - count: 1, - cb, - stream, - state - } - process.nextTick(afterWriteTick, state.afterWriteTickInfo) - } - } else { - afterWrite(stream, state, 1, cb) - } - } -} -function afterWriteTick({ stream, state, count, cb }) { - state.afterWriteTickInfo = null - return afterWrite(stream, state, count, cb) -} -function afterWrite(stream, state, count, cb) { - const needDrain = !state.ending && !stream.destroyed && state.length === 0 && state.needDrain - if (needDrain) { - state.needDrain = false - stream.emit('drain') - } - while (count-- > 0) { - state.pendingcb-- - cb() - } - if (state.destroyed) { - errorBuffer(state) - } - finishMaybe(stream, state) -} - -// If there's something in the buffer waiting, then invoke callbacks. -function errorBuffer(state) { - if (state.writing) { - return - } - for (let n = state.bufferedIndex; n < state.buffered.length; ++n) { - var _state$errored - const { chunk, callback } = state.buffered[n] - const len = state.objectMode ? 1 : chunk.length - state.length -= len - callback( - (_state$errored = state.errored) !== null && _state$errored !== undefined - ? _state$errored - : new ERR_STREAM_DESTROYED('write') - ) - } - const onfinishCallbacks = state[kOnFinished].splice(0) - for (let i = 0; i < onfinishCallbacks.length; i++) { - var _state$errored2 - onfinishCallbacks[i]( - (_state$errored2 = state.errored) !== null && _state$errored2 !== undefined - ? 
_state$errored2 - : new ERR_STREAM_DESTROYED('end') - ) - } - resetBuffer(state) -} - -// If there's something in the buffer waiting, then process it. -function clearBuffer(stream, state) { - if (state.corked || state.bufferProcessing || state.destroyed || !state.constructed) { - return - } - const { buffered, bufferedIndex, objectMode } = state - const bufferedLength = buffered.length - bufferedIndex - if (!bufferedLength) { - return - } - let i = bufferedIndex - state.bufferProcessing = true - if (bufferedLength > 1 && stream._writev) { - state.pendingcb -= bufferedLength - 1 - const callback = state.allNoop - ? nop - : (err) => { - for (let n = i; n < buffered.length; ++n) { - buffered[n].callback(err) - } - } - // Make a copy of `buffered` if it's going to be used by `callback` above, - // since `doWrite` will mutate the array. - const chunks = state.allNoop && i === 0 ? buffered : ArrayPrototypeSlice(buffered, i) - chunks.allBuffers = state.allBuffers - doWrite(stream, state, true, state.length, chunks, '', callback) - resetBuffer(state) - } else { - do { - const { chunk, encoding, callback } = buffered[i] - buffered[i++] = null - const len = objectMode ? 1 : chunk.length - doWrite(stream, state, false, len, chunk, encoding, callback) - } while (i < buffered.length && !state.writing) - if (i === buffered.length) { - resetBuffer(state) - } else if (i > 256) { - buffered.splice(0, i) - state.bufferedIndex = 0 - } else { - state.bufferedIndex = i - } - } - state.bufferProcessing = false -} -Writable.prototype._write = function (chunk, encoding, cb) { - if (this._writev) { - this._writev( - [ - { - chunk, - encoding - } - ], - cb - ) - } else { - throw new ERR_METHOD_NOT_IMPLEMENTED('_write()') - } -} -Writable.prototype._writev = null -Writable.prototype.end = function (chunk, encoding, cb) { - const state = this._writableState - if (typeof chunk === 'function') { - cb = chunk - chunk = null - encoding = null - } else if (typeof encoding === 'function') { - cb = encoding - encoding = null - } - let err - if (chunk !== null && chunk !== undefined) { - const ret = _write(this, chunk, encoding) - if (ret instanceof Error) { - err = ret - } - } - - // .end() fully uncorks. - if (state.corked) { - state.corked = 1 - this.uncork() - } - if (err) { - // Do nothing... - } else if (!state.errored && !state.ending) { - // This is forgiving in terms of unnecessary calls to end() and can hide - // logic errors. However, usually such errors are harmless and causing a - // hard error can be disproportionately destructive. It is not always - // trivial for the user to determine whether end() needs to be called - // or not. - - state.ending = true - finishMaybe(this, state, true) - state.ended = true - } else if (state.finished) { - err = new ERR_STREAM_ALREADY_FINISHED('end') - } else if (state.destroyed) { - err = new ERR_STREAM_DESTROYED('end') - } - if (typeof cb === 'function') { - if (err || state.finished) { - process.nextTick(cb, err) - } else { - state[kOnFinished].push(cb) - } - } - return this -} -function needFinish(state) { - return ( - state.ending && - !state.destroyed && - state.constructed && - state.length === 0 && - !state.errored && - state.buffered.length === 0 && - !state.finished && - !state.writing && - !state.errorEmitted && - !state.closeEmitted - ) -} -function callFinal(stream, state) { - let called = false - function onFinish(err) { - if (called) { - errorOrDestroy(stream, err !== null && err !== undefined ? 
err : ERR_MULTIPLE_CALLBACK()) - return - } - called = true - state.pendingcb-- - if (err) { - const onfinishCallbacks = state[kOnFinished].splice(0) - for (let i = 0; i < onfinishCallbacks.length; i++) { - onfinishCallbacks[i](err) - } - errorOrDestroy(stream, err, state.sync) - } else if (needFinish(state)) { - state.prefinished = true - stream.emit('prefinish') - // Backwards compat. Don't check state.sync here. - // Some streams assume 'finish' will be emitted - // asynchronously relative to _final callback. - state.pendingcb++ - process.nextTick(finish, stream, state) - } - } - state.sync = true - state.pendingcb++ - try { - stream._final(onFinish) - } catch (err) { - onFinish(err) - } - state.sync = false -} -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function' && !state.destroyed) { - state.finalCalled = true - callFinal(stream, state) - } else { - state.prefinished = true - stream.emit('prefinish') - } - } -} -function finishMaybe(stream, state, sync) { - if (needFinish(state)) { - prefinish(stream, state) - if (state.pendingcb === 0) { - if (sync) { - state.pendingcb++ - process.nextTick( - (stream, state) => { - if (needFinish(state)) { - finish(stream, state) - } else { - state.pendingcb-- - } - }, - stream, - state - ) - } else if (needFinish(state)) { - state.pendingcb++ - finish(stream, state) - } - } - } -} -function finish(stream, state) { - state.pendingcb-- - state.finished = true - const onfinishCallbacks = state[kOnFinished].splice(0) - for (let i = 0; i < onfinishCallbacks.length; i++) { - onfinishCallbacks[i]() - } - stream.emit('finish') - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the readable side is ready for autoDestroy as well. - const rState = stream._readableState - const autoDestroy = - !rState || - (rState.autoDestroy && - // We don't expect the readable to ever 'end' - // if readable is explicitly set to false. - (rState.endEmitted || rState.readable === false)) - if (autoDestroy) { - stream.destroy() - } - } -} -ObjectDefineProperties(Writable.prototype, { - closed: { - __proto__: null, - get() { - return this._writableState ? this._writableState.closed : false - } - }, - destroyed: { - __proto__: null, - get() { - return this._writableState ? this._writableState.destroyed : false - }, - set(value) { - // Backward compatibility, the user is explicitly managing destroyed. - if (this._writableState) { - this._writableState.destroyed = value - } - } - }, - writable: { - __proto__: null, - get() { - const w = this._writableState - // w.writable === false means that this is part of a Duplex stream - // where the writable side was disabled upon construction. - // Compat. The user might manually disable writable side through - // deprecated setter. - return !!w && w.writable !== false && !w.destroyed && !w.errored && !w.ending && !w.ended - }, - set(val) { - // Backwards compatible. - if (this._writableState) { - this._writableState.writable = !!val - } - } - }, - writableFinished: { - __proto__: null, - get() { - return this._writableState ? this._writableState.finished : false - } - }, - writableObjectMode: { - __proto__: null, - get() { - return this._writableState ? this._writableState.objectMode : false - } - }, - writableBuffer: { - __proto__: null, - get() { - return this._writableState && this._writableState.getBuffer() - } - }, - writableEnded: { - __proto__: null, - get() { - return this._writableState ? 
this._writableState.ending : false - } - }, - writableNeedDrain: { - __proto__: null, - get() { - const wState = this._writableState - if (!wState) return false - return !wState.destroyed && !wState.ending && wState.needDrain - } - }, - writableHighWaterMark: { - __proto__: null, - get() { - return this._writableState && this._writableState.highWaterMark - } - }, - writableCorked: { - __proto__: null, - get() { - return this._writableState ? this._writableState.corked : 0 - } - }, - writableLength: { - __proto__: null, - get() { - return this._writableState && this._writableState.length - } - }, - errored: { - __proto__: null, - enumerable: false, - get() { - return this._writableState ? this._writableState.errored : null - } - }, - writableAborted: { - __proto__: null, - enumerable: false, - get: function () { - return !!( - this._writableState.writable !== false && - (this._writableState.destroyed || this._writableState.errored) && - !this._writableState.finished - ) - } - } -}) -const destroy = destroyImpl.destroy -Writable.prototype.destroy = function (err, cb) { - const state = this._writableState - - // Invoke pending callbacks. - if (!state.destroyed && (state.bufferedIndex < state.buffered.length || state[kOnFinished].length)) { - process.nextTick(errorBuffer, state) - } - destroy.call(this, err, cb) - return this -} -Writable.prototype._undestroy = destroyImpl.undestroy -Writable.prototype._destroy = function (err, cb) { - cb(err) -} -Writable.prototype[EE.captureRejectionSymbol] = function (err) { - this.destroy(err) -} -let webStreamsAdapters - -// Lazy to avoid circular references -function lazyWebStreams() { - if (webStreamsAdapters === undefined) webStreamsAdapters = {} - return webStreamsAdapters -} -Writable.fromWeb = function (writableStream, options) { - return lazyWebStreams().newStreamWritableFromWritableStream(writableStream, options) -} -Writable.toWeb = function (streamWritable) { - return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable) -} diff --git a/deps/npm/node_modules/readable-stream/lib/internal/validators.js b/deps/npm/node_modules/readable-stream/lib/internal/validators.js deleted file mode 100644 index 85b2e9cd593d9b..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/validators.js +++ /dev/null @@ -1,510 +0,0 @@ -/* eslint jsdoc/require-jsdoc: "error" */ - -'use strict' - -const { - ArrayIsArray, - ArrayPrototypeIncludes, - ArrayPrototypeJoin, - ArrayPrototypeMap, - NumberIsInteger, - NumberIsNaN, - NumberMAX_SAFE_INTEGER, - NumberMIN_SAFE_INTEGER, - NumberParseInt, - ObjectPrototypeHasOwnProperty, - RegExpPrototypeExec, - String, - StringPrototypeToUpperCase, - StringPrototypeTrim -} = require('../ours/primordials') -const { - hideStackFrames, - codes: { ERR_SOCKET_BAD_PORT, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL } -} = require('../ours/errors') -const { normalizeEncoding } = require('../ours/util') -const { isAsyncFunction, isArrayBufferView } = require('../ours/util').types -const signals = {} - -/** - * @param {*} value - * @returns {boolean} - */ -function isInt32(value) { - return value === (value | 0) -} - -/** - * @param {*} value - * @returns {boolean} - */ -function isUint32(value) { - return value === value >>> 0 -} -const octalReg = /^[0-7]+$/ -const modeDesc = 'must be a 32-bit unsigned integer or an octal string' - -/** - * Parse and validate values that will be converted into mode_t (the S_* - * constants). Only valid numbers and octal strings are allowed. 
They could be - * converted to 32-bit unsigned integers or non-negative signed integers in the - * C++ land, but any value higher than 0o777 will result in platform-specific - * behaviors. - * - * @param {*} value Values to be validated - * @param {string} name Name of the argument - * @param {number} [def] If specified, will be returned for invalid values - * @returns {number} - */ -function parseFileMode(value, name, def) { - if (typeof value === 'undefined') { - value = def - } - if (typeof value === 'string') { - if (RegExpPrototypeExec(octalReg, value) === null) { - throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc) - } - value = NumberParseInt(value, 8) - } - validateUint32(value, name) - return value -} - -/** - * @callback validateInteger - * @param {*} value - * @param {string} name - * @param {number} [min] - * @param {number} [max] - * @returns {asserts value is number} - */ - -/** @type {validateInteger} */ -const validateInteger = hideStackFrames((value, name, min = NumberMIN_SAFE_INTEGER, max = NumberMAX_SAFE_INTEGER) => { - if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value) - if (!NumberIsInteger(value)) throw new ERR_OUT_OF_RANGE(name, 'an integer', value) - if (value < min || value > max) throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value) -}) - -/** - * @callback validateInt32 - * @param {*} value - * @param {string} name - * @param {number} [min] - * @param {number} [max] - * @returns {asserts value is number} - */ - -/** @type {validateInt32} */ -const validateInt32 = hideStackFrames((value, name, min = -2147483648, max = 2147483647) => { - // The defaults for min and max correspond to the limits of 32-bit integers. - if (typeof value !== 'number') { - throw new ERR_INVALID_ARG_TYPE(name, 'number', value) - } - if (!NumberIsInteger(value)) { - throw new ERR_OUT_OF_RANGE(name, 'an integer', value) - } - if (value < min || value > max) { - throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value) - } -}) - -/** - * @callback validateUint32 - * @param {*} value - * @param {string} name - * @param {number|boolean} [positive=false] - * @returns {asserts value is number} - */ - -/** @type {validateUint32} */ -const validateUint32 = hideStackFrames((value, name, positive = false) => { - if (typeof value !== 'number') { - throw new ERR_INVALID_ARG_TYPE(name, 'number', value) - } - if (!NumberIsInteger(value)) { - throw new ERR_OUT_OF_RANGE(name, 'an integer', value) - } - const min = positive ? 1 : 0 - // 2 ** 32 === 4294967296 - const max = 4294967295 - if (value < min || value > max) { - throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value) - } -}) - -/** - * @callback validateString - * @param {*} value - * @param {string} name - * @returns {asserts value is string} - */ - -/** @type {validateString} */ -function validateString(value, name) { - if (typeof value !== 'string') throw new ERR_INVALID_ARG_TYPE(name, 'string', value) -} - -/** - * @callback validateNumber - * @param {*} value - * @param {string} name - * @param {number} [min] - * @param {number} [max] - * @returns {asserts value is number} - */ - -/** @type {validateNumber} */ -function validateNumber(value, name, min = undefined, max) { - if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value) - if ( - (min != null && value < min) || - (max != null && value > max) || - ((min != null || max != null) && NumberIsNaN(value)) - ) { - throw new ERR_OUT_OF_RANGE( - name, - `${min != null ? 
`>= ${min}` : ''}${min != null && max != null ? ' && ' : ''}${max != null ? `<= ${max}` : ''}`, - value - ) - } -} - -/** - * @callback validateOneOf - * @template T - * @param {T} value - * @param {string} name - * @param {T[]} oneOf - */ - -/** @type {validateOneOf} */ -const validateOneOf = hideStackFrames((value, name, oneOf) => { - if (!ArrayPrototypeIncludes(oneOf, value)) { - const allowed = ArrayPrototypeJoin( - ArrayPrototypeMap(oneOf, (v) => (typeof v === 'string' ? `'${v}'` : String(v))), - ', ' - ) - const reason = 'must be one of: ' + allowed - throw new ERR_INVALID_ARG_VALUE(name, value, reason) - } -}) - -/** - * @callback validateBoolean - * @param {*} value - * @param {string} name - * @returns {asserts value is boolean} - */ - -/** @type {validateBoolean} */ -function validateBoolean(value, name) { - if (typeof value !== 'boolean') throw new ERR_INVALID_ARG_TYPE(name, 'boolean', value) -} - -/** - * @param {any} options - * @param {string} key - * @param {boolean} defaultValue - * @returns {boolean} - */ -function getOwnPropertyValueOrDefault(options, key, defaultValue) { - return options == null || !ObjectPrototypeHasOwnProperty(options, key) ? defaultValue : options[key] -} - -/** - * @callback validateObject - * @param {*} value - * @param {string} name - * @param {{ - * allowArray?: boolean, - * allowFunction?: boolean, - * nullable?: boolean - * }} [options] - */ - -/** @type {validateObject} */ -const validateObject = hideStackFrames((value, name, options = null) => { - const allowArray = getOwnPropertyValueOrDefault(options, 'allowArray', false) - const allowFunction = getOwnPropertyValueOrDefault(options, 'allowFunction', false) - const nullable = getOwnPropertyValueOrDefault(options, 'nullable', false) - if ( - (!nullable && value === null) || - (!allowArray && ArrayIsArray(value)) || - (typeof value !== 'object' && (!allowFunction || typeof value !== 'function')) - ) { - throw new ERR_INVALID_ARG_TYPE(name, 'Object', value) - } -}) - -/** - * @callback validateDictionary - We are using the Web IDL Standard definition - * of "dictionary" here, which means any value - * whose Type is either Undefined, Null, or - * Object (which includes functions). 
- * @param {*} value - * @param {string} name - * @see https://webidl.spec.whatwg.org/#es-dictionary - * @see https://tc39.es/ecma262/#table-typeof-operator-results - */ - -/** @type {validateDictionary} */ -const validateDictionary = hideStackFrames((value, name) => { - if (value != null && typeof value !== 'object' && typeof value !== 'function') { - throw new ERR_INVALID_ARG_TYPE(name, 'a dictionary', value) - } -}) - -/** - * @callback validateArray - * @param {*} value - * @param {string} name - * @param {number} [minLength] - * @returns {asserts value is any[]} - */ - -/** @type {validateArray} */ -const validateArray = hideStackFrames((value, name, minLength = 0) => { - if (!ArrayIsArray(value)) { - throw new ERR_INVALID_ARG_TYPE(name, 'Array', value) - } - if (value.length < minLength) { - const reason = `must be longer than ${minLength}` - throw new ERR_INVALID_ARG_VALUE(name, value, reason) - } -}) - -/** - * @callback validateStringArray - * @param {*} value - * @param {string} name - * @returns {asserts value is string[]} - */ - -/** @type {validateStringArray} */ -function validateStringArray(value, name) { - validateArray(value, name) - for (let i = 0; i < value.length; i++) { - validateString(value[i], `${name}[${i}]`) - } -} - -/** - * @callback validateBooleanArray - * @param {*} value - * @param {string} name - * @returns {asserts value is boolean[]} - */ - -/** @type {validateBooleanArray} */ -function validateBooleanArray(value, name) { - validateArray(value, name) - for (let i = 0; i < value.length; i++) { - validateBoolean(value[i], `${name}[${i}]`) - } -} - -/** - * @param {*} signal - * @param {string} [name='signal'] - * @returns {asserts signal is keyof signals} - */ -function validateSignalName(signal, name = 'signal') { - validateString(signal, name) - if (signals[signal] === undefined) { - if (signals[StringPrototypeToUpperCase(signal)] !== undefined) { - throw new ERR_UNKNOWN_SIGNAL(signal + ' (signals must use all capital letters)') - } - throw new ERR_UNKNOWN_SIGNAL(signal) - } -} - -/** - * @callback validateBuffer - * @param {*} buffer - * @param {string} [name='buffer'] - * @returns {asserts buffer is ArrayBufferView} - */ - -/** @type {validateBuffer} */ -const validateBuffer = hideStackFrames((buffer, name = 'buffer') => { - if (!isArrayBufferView(buffer)) { - throw new ERR_INVALID_ARG_TYPE(name, ['Buffer', 'TypedArray', 'DataView'], buffer) - } -}) - -/** - * @param {string} data - * @param {string} encoding - */ -function validateEncoding(data, encoding) { - const normalizedEncoding = normalizeEncoding(encoding) - const length = data.length - if (normalizedEncoding === 'hex' && length % 2 !== 0) { - throw new ERR_INVALID_ARG_VALUE('encoding', encoding, `is invalid for data of length ${length}`) - } -} - -/** - * Check that the port number is not NaN when coerced to a number, - * is an integer and that it falls within the legal range of port numbers. 
- * @param {*} port - * @param {string} [name='Port'] - * @param {boolean} [allowZero=true] - * @returns {number} - */ -function validatePort(port, name = 'Port', allowZero = true) { - if ( - (typeof port !== 'number' && typeof port !== 'string') || - (typeof port === 'string' && StringPrototypeTrim(port).length === 0) || - +port !== +port >>> 0 || - port > 0xffff || - (port === 0 && !allowZero) - ) { - throw new ERR_SOCKET_BAD_PORT(name, port, allowZero) - } - return port | 0 -} - -/** - * @callback validateAbortSignal - * @param {*} signal - * @param {string} name - */ - -/** @type {validateAbortSignal} */ -const validateAbortSignal = hideStackFrames((signal, name) => { - if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) { - throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal) - } -}) - -/** - * @callback validateFunction - * @param {*} value - * @param {string} name - * @returns {asserts value is Function} - */ - -/** @type {validateFunction} */ -const validateFunction = hideStackFrames((value, name) => { - if (typeof value !== 'function') throw new ERR_INVALID_ARG_TYPE(name, 'Function', value) -}) - -/** - * @callback validatePlainFunction - * @param {*} value - * @param {string} name - * @returns {asserts value is Function} - */ - -/** @type {validatePlainFunction} */ -const validatePlainFunction = hideStackFrames((value, name) => { - if (typeof value !== 'function' || isAsyncFunction(value)) throw new ERR_INVALID_ARG_TYPE(name, 'Function', value) -}) - -/** - * @callback validateUndefined - * @param {*} value - * @param {string} name - * @returns {asserts value is undefined} - */ - -/** @type {validateUndefined} */ -const validateUndefined = hideStackFrames((value, name) => { - if (value !== undefined) throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value) -}) - -/** - * @template T - * @param {T} value - * @param {string} name - * @param {T[]} union - */ -function validateUnion(value, name, union) { - if (!ArrayPrototypeIncludes(union, value)) { - throw new ERR_INVALID_ARG_TYPE(name, `('${ArrayPrototypeJoin(union, '|')}')`, value) - } -} - -/* - The rules for the Link header field are described here: - https://www.rfc-editor.org/rfc/rfc8288.html#section-3 - - This regex validates any string surrounded by angle brackets - (not necessarily a valid URI reference) followed by zero or more - link-params separated by semicolons. 
-*/ -const linkValueRegExp = /^(?:<[^>]*>)(?:\s*;\s*[^;"\s]+(?:=(")?[^;"\s]*\1)?)*$/ - -/** - * @param {any} value - * @param {string} name - */ -function validateLinkHeaderFormat(value, name) { - if (typeof value === 'undefined' || !RegExpPrototypeExec(linkValueRegExp, value)) { - throw new ERR_INVALID_ARG_VALUE( - name, - value, - 'must be an array or string of format "</styles.css>; rel=preload; as=style"' - ) - } -} - -/** - * @param {any} hints - * @return {string} - */ -function validateLinkHeaderValue(hints) { - if (typeof hints === 'string') { - validateLinkHeaderFormat(hints, 'hints') - return hints - } else if (ArrayIsArray(hints)) { - const hintsLength = hints.length - let result = '' - if (hintsLength === 0) { - return result - } - for (let i = 0; i < hintsLength; i++) { - const link = hints[i] - validateLinkHeaderFormat(link, 'hints') - result += link - if (i !== hintsLength - 1) { - result += ', ' - } - } - return result - } - throw new ERR_INVALID_ARG_VALUE( - 'hints', - hints, - 'must be an array or string of format "</styles.css>; rel=preload; as=style"' - ) -} -module.exports = { - isInt32, - isUint32, - parseFileMode, - validateArray, - validateStringArray, - validateBooleanArray, - validateBoolean, - validateBuffer, - validateDictionary, - validateEncoding, - validateFunction, - validateInt32, - validateInteger, - validateNumber, - validateObject, - validateOneOf, - validatePlainFunction, - validatePort, - validateSignalName, - validateString, - validateUint32, - validateUndefined, - validateUnion, - validateAbortSignal, - validateLinkHeaderValue -} diff --git a/deps/npm/node_modules/readable-stream/lib/ours/browser.js b/deps/npm/node_modules/readable-stream/lib/ours/browser.js deleted file mode 100644 index 39acef3d7d9f69..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/ours/browser.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict' - -const CustomStream = require('../stream') -const promises = require('../stream/promises') -const originalDestroy = CustomStream.Readable.destroy -module.exports = CustomStream.Readable - -// Explicit export naming is needed for ESM -module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer -module.exports._isUint8Array = CustomStream._isUint8Array -module.exports.isDisturbed = CustomStream.isDisturbed -module.exports.isErrored = CustomStream.isErrored -module.exports.isReadable = CustomStream.isReadable -module.exports.Readable = CustomStream.Readable -module.exports.Writable = CustomStream.Writable -module.exports.Duplex = CustomStream.Duplex -module.exports.Transform = CustomStream.Transform -module.exports.PassThrough = CustomStream.PassThrough -module.exports.addAbortSignal = CustomStream.addAbortSignal -module.exports.finished = CustomStream.finished -module.exports.destroy = CustomStream.destroy -module.exports.destroy = originalDestroy -module.exports.pipeline = CustomStream.pipeline -module.exports.compose = CustomStream.compose -Object.defineProperty(CustomStream, 'promises', { - configurable: true, - enumerable: true, - get() { - return promises - } -}) -module.exports.Stream = CustomStream.Stream - -// Allow default importing -module.exports.default = module.exports diff --git a/deps/npm/node_modules/readable-stream/lib/ours/errors.js b/deps/npm/node_modules/readable-stream/lib/ours/errors.js deleted file mode 100644 index 97866d14f5351d..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/ours/errors.js +++ /dev/null @@ -1,341 +0,0 @@ -'use strict' - -const { format, inspect, AggregateError: CustomAggregateError } = 
require('./util') - -/* - This file is a reduced and adapted version of the main lib/internal/errors.js file defined at - - https://github.com/nodejs/node/blob/master/lib/internal/errors.js - - Don't try to replace with the original file and keep it up to date (starting from E(...) definitions) - with the upstream file. -*/ - -const AggregateError = globalThis.AggregateError || CustomAggregateError -const kIsNodeError = Symbol('kIsNodeError') -const kTypes = [ - 'string', - 'function', - 'number', - 'object', - // Accept 'Function' and 'Object' as alternative to the lower cased version. - 'Function', - 'Object', - 'boolean', - 'bigint', - 'symbol' -] -const classRegExp = /^([A-Z][a-z0-9]*)+$/ -const nodeInternalPrefix = '__node_internal_' -const codes = {} -function assert(value, message) { - if (!value) { - throw new codes.ERR_INTERNAL_ASSERTION(message) - } -} - -// Only use this for integers! Decimal numbers do not work with this function. -function addNumericalSeparator(val) { - let res = '' - let i = val.length - const start = val[0] === '-' ? 1 : 0 - for (; i >= start + 4; i -= 3) { - res = `_${val.slice(i - 3, i)}${res}` - } - return `${val.slice(0, i)}${res}` -} -function getMessage(key, msg, args) { - if (typeof msg === 'function') { - assert( - msg.length <= args.length, - // Default options do not count. - `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).` - ) - return msg(...args) - } - const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length - assert( - expectedLength === args.length, - `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).` - ) - if (args.length === 0) { - return msg - } - return format(msg, ...args) -} -function E(code, message, Base) { - if (!Base) { - Base = Error - } - class NodeError extends Base { - constructor(...args) { - super(getMessage(code, message, args)) - } - toString() { - return `${this.name} [${code}]: ${this.message}` - } - } - Object.defineProperties(NodeError.prototype, { - name: { - value: Base.name, - writable: true, - enumerable: false, - configurable: true - }, - toString: { - value() { - return `${this.name} [${code}]: ${this.message}` - }, - writable: true, - enumerable: false, - configurable: true - } - }) - NodeError.prototype.code = code - NodeError.prototype[kIsNodeError] = true - codes[code] = NodeError -} -function hideStackFrames(fn) { - // We rename the functions that will be hidden to cut off the stacktrace - // at the outermost one - const hidden = nodeInternalPrefix + fn.name - Object.defineProperty(fn, 'name', { - value: hidden - }) - return fn -} -function aggregateTwoErrors(innerError, outerError) { - if (innerError && outerError && innerError !== outerError) { - if (Array.isArray(outerError.errors)) { - // If `outerError` is already an `AggregateError`. 
- outerError.errors.push(innerError) - return outerError - } - const err = new AggregateError([outerError, innerError], outerError.message) - err.code = outerError.code - return err - } - return innerError || outerError -} -class AbortError extends Error { - constructor(message = 'The operation was aborted', options = undefined) { - if (options !== undefined && typeof options !== 'object') { - throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options) - } - super(message, options) - this.code = 'ABORT_ERR' - this.name = 'AbortError' - } -} -E('ERR_ASSERTION', '%s', Error) -E( - 'ERR_INVALID_ARG_TYPE', - (name, expected, actual) => { - assert(typeof name === 'string', "'name' must be a string") - if (!Array.isArray(expected)) { - expected = [expected] - } - let msg = 'The ' - if (name.endsWith(' argument')) { - // For cases like 'first argument' - msg += `${name} ` - } else { - msg += `"${name}" ${name.includes('.') ? 'property' : 'argument'} ` - } - msg += 'must be ' - const types = [] - const instances = [] - const other = [] - for (const value of expected) { - assert(typeof value === 'string', 'All expected entries have to be of type string') - if (kTypes.includes(value)) { - types.push(value.toLowerCase()) - } else if (classRegExp.test(value)) { - instances.push(value) - } else { - assert(value !== 'object', 'The value "object" should be written as "Object"') - other.push(value) - } - } - - // Special handle `object` in case other instances are allowed to outline - // the differences between each other. - if (instances.length > 0) { - const pos = types.indexOf('object') - if (pos !== -1) { - types.splice(types, pos, 1) - instances.push('Object') - } - } - if (types.length > 0) { - switch (types.length) { - case 1: - msg += `of type ${types[0]}` - break - case 2: - msg += `one of type ${types[0]} or ${types[1]}` - break - default: { - const last = types.pop() - msg += `one of type ${types.join(', ')}, or ${last}` - } - } - if (instances.length > 0 || other.length > 0) { - msg += ' or ' - } - } - if (instances.length > 0) { - switch (instances.length) { - case 1: - msg += `an instance of ${instances[0]}` - break - case 2: - msg += `an instance of ${instances[0]} or ${instances[1]}` - break - default: { - const last = instances.pop() - msg += `an instance of ${instances.join(', ')}, or ${last}` - } - } - if (other.length > 0) { - msg += ' or ' - } - } - switch (other.length) { - case 0: - break - case 1: - if (other[0].toLowerCase() !== other[0]) { - msg += 'an ' - } - msg += `${other[0]}` - break - case 2: - msg += `one of ${other[0]} or ${other[1]}` - break - default: { - const last = other.pop() - msg += `one of ${other.join(', ')}, or ${last}` - } - } - if (actual == null) { - msg += `. Received ${actual}` - } else if (typeof actual === 'function' && actual.name) { - msg += `. Received function ${actual.name}` - } else if (typeof actual === 'object') { - var _actual$constructor - if ( - (_actual$constructor = actual.constructor) !== null && - _actual$constructor !== undefined && - _actual$constructor.name - ) { - msg += `. Received an instance of ${actual.constructor.name}` - } else { - const inspected = inspect(actual, { - depth: -1 - }) - msg += `. Received ${inspected}` - } - } else { - let inspected = inspect(actual, { - colors: false - }) - if (inspected.length > 25) { - inspected = `${inspected.slice(0, 25)}...` - } - msg += `. 
Received type ${typeof actual} (${inspected})` - } - return msg - }, - TypeError -) -E( - 'ERR_INVALID_ARG_VALUE', - (name, value, reason = 'is invalid') => { - let inspected = inspect(value) - if (inspected.length > 128) { - inspected = inspected.slice(0, 128) + '...' - } - const type = name.includes('.') ? 'property' : 'argument' - return `The ${type} '${name}' ${reason}. Received ${inspected}` - }, - TypeError -) -E( - 'ERR_INVALID_RETURN_VALUE', - (input, name, value) => { - var _value$constructor - const type = - value !== null && - value !== undefined && - (_value$constructor = value.constructor) !== null && - _value$constructor !== undefined && - _value$constructor.name - ? `instance of ${value.constructor.name}` - : `type ${typeof value}` - return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.` - }, - TypeError -) -E( - 'ERR_MISSING_ARGS', - (...args) => { - assert(args.length > 0, 'At least one arg needs to be specified') - let msg - const len = args.length - args = (Array.isArray(args) ? args : [args]).map((a) => `"${a}"`).join(' or ') - switch (len) { - case 1: - msg += `The ${args[0]} argument` - break - case 2: - msg += `The ${args[0]} and ${args[1]} arguments` - break - default: - { - const last = args.pop() - msg += `The ${args.join(', ')}, and ${last} arguments` - } - break - } - return `${msg} must be specified` - }, - TypeError -) -E( - 'ERR_OUT_OF_RANGE', - (str, range, input) => { - assert(range, 'Missing "range" argument') - let received - if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) { - received = addNumericalSeparator(String(input)) - } else if (typeof input === 'bigint') { - received = String(input) - if (input > 2n ** 32n || input < -(2n ** 32n)) { - received = addNumericalSeparator(received) - } - received += 'n' - } else { - received = inspect(input) - } - return `The value of "${str}" is out of range. It must be ${range}. 
Received ${received}` - }, - RangeError -) -E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error) -E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error) -E('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error) -E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error) -E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error) -E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError) -E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error) -E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error) -E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error) -E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error) -E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError) -module.exports = { - AbortError, - aggregateTwoErrors: hideStackFrames(aggregateTwoErrors), - hideStackFrames, - codes -} diff --git a/deps/npm/node_modules/readable-stream/lib/ours/index.js b/deps/npm/node_modules/readable-stream/lib/ours/index.js deleted file mode 100644 index 6cdd2d78557677..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/ours/index.js +++ /dev/null @@ -1,65 +0,0 @@ -'use strict' - -const Stream = require('stream') -if (Stream && process.env.READABLE_STREAM === 'disable') { - const promises = Stream.promises - - // Explicit export naming is needed for ESM - module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer - module.exports._isUint8Array = Stream._isUint8Array - module.exports.isDisturbed = Stream.isDisturbed - module.exports.isErrored = Stream.isErrored - module.exports.isReadable = Stream.isReadable - module.exports.Readable = Stream.Readable - module.exports.Writable = Stream.Writable - module.exports.Duplex = Stream.Duplex - module.exports.Transform = Stream.Transform - module.exports.PassThrough = Stream.PassThrough - module.exports.addAbortSignal = Stream.addAbortSignal - module.exports.finished = Stream.finished - module.exports.destroy = Stream.destroy - module.exports.pipeline = Stream.pipeline - module.exports.compose = Stream.compose - Object.defineProperty(Stream, 'promises', { - configurable: true, - enumerable: true, - get() { - return promises - } - }) - module.exports.Stream = Stream.Stream -} else { - const CustomStream = require('../stream') - const promises = require('../stream/promises') - const originalDestroy = CustomStream.Readable.destroy - module.exports = CustomStream.Readable - - // Explicit export naming is needed for ESM - module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer - module.exports._isUint8Array = CustomStream._isUint8Array - module.exports.isDisturbed = CustomStream.isDisturbed - module.exports.isErrored = CustomStream.isErrored - module.exports.isReadable = CustomStream.isReadable - module.exports.Readable = CustomStream.Readable - module.exports.Writable = CustomStream.Writable - module.exports.Duplex = CustomStream.Duplex - module.exports.Transform = CustomStream.Transform - module.exports.PassThrough = CustomStream.PassThrough - module.exports.addAbortSignal = CustomStream.addAbortSignal - module.exports.finished = CustomStream.finished - module.exports.destroy = CustomStream.destroy - module.exports.destroy = originalDestroy - module.exports.pipeline = CustomStream.pipeline - module.exports.compose = CustomStream.compose - Object.defineProperty(CustomStream, 'promises', { - configurable: true, - enumerable: true, - get() { - return promises - } - }) - 
module.exports.Stream = CustomStream.Stream -} - -// Allow default importing -module.exports.default = module.exports diff --git a/deps/npm/node_modules/readable-stream/lib/ours/primordials.js b/deps/npm/node_modules/readable-stream/lib/ours/primordials.js deleted file mode 100644 index 9464cc7fea6a12..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/ours/primordials.js +++ /dev/null @@ -1,101 +0,0 @@ -'use strict' - -/* - This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at - - https://github.com/nodejs/node/blob/master/lib/internal/per_context/primordials.js - - Don't try to replace with the original file and keep it up to date with the upstream file. -*/ -module.exports = { - ArrayIsArray(self) { - return Array.isArray(self) - }, - ArrayPrototypeIncludes(self, el) { - return self.includes(el) - }, - ArrayPrototypeIndexOf(self, el) { - return self.indexOf(el) - }, - ArrayPrototypeJoin(self, sep) { - return self.join(sep) - }, - ArrayPrototypeMap(self, fn) { - return self.map(fn) - }, - ArrayPrototypePop(self, el) { - return self.pop(el) - }, - ArrayPrototypePush(self, el) { - return self.push(el) - }, - ArrayPrototypeSlice(self, start, end) { - return self.slice(start, end) - }, - Error, - FunctionPrototypeCall(fn, thisArgs, ...args) { - return fn.call(thisArgs, ...args) - }, - FunctionPrototypeSymbolHasInstance(self, instance) { - return Function.prototype[Symbol.hasInstance].call(self, instance) - }, - MathFloor: Math.floor, - Number, - NumberIsInteger: Number.isInteger, - NumberIsNaN: Number.isNaN, - NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER, - NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER, - NumberParseInt: Number.parseInt, - ObjectDefineProperties(self, props) { - return Object.defineProperties(self, props) - }, - ObjectDefineProperty(self, name, prop) { - return Object.defineProperty(self, name, prop) - }, - ObjectGetOwnPropertyDescriptor(self, name) { - return Object.getOwnPropertyDescriptor(self, name) - }, - ObjectKeys(obj) { - return Object.keys(obj) - }, - ObjectSetPrototypeOf(target, proto) { - return Object.setPrototypeOf(target, proto) - }, - Promise, - PromisePrototypeCatch(self, fn) { - return self.catch(fn) - }, - PromisePrototypeThen(self, thenFn, catchFn) { - return self.then(thenFn, catchFn) - }, - PromiseReject(err) { - return Promise.reject(err) - }, - ReflectApply: Reflect.apply, - RegExpPrototypeTest(self, value) { - return self.test(value) - }, - SafeSet: Set, - String, - StringPrototypeSlice(self, start, end) { - return self.slice(start, end) - }, - StringPrototypeToLowerCase(self) { - return self.toLowerCase() - }, - StringPrototypeToUpperCase(self) { - return self.toUpperCase() - }, - StringPrototypeTrim(self) { - return self.trim() - }, - Symbol, - SymbolFor: Symbol.for, - SymbolAsyncIterator: Symbol.asyncIterator, - SymbolHasInstance: Symbol.hasInstance, - SymbolIterator: Symbol.iterator, - TypedArrayPrototypeSet(self, buf, len) { - return self.set(buf, len) - }, - Uint8Array -} diff --git a/deps/npm/node_modules/readable-stream/lib/ours/util.js b/deps/npm/node_modules/readable-stream/lib/ours/util.js deleted file mode 100644 index e125ce17aa83c2..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/ours/util.js +++ /dev/null @@ -1,128 +0,0 @@ -'use strict' - -const bufferModule = require('buffer') -const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor -const Blob = globalThis.Blob || bufferModule.Blob -/* eslint-disable indent */ -const isBlob = 
- typeof Blob !== 'undefined' - ? function isBlob(b) { - // eslint-disable-next-line indent - return b instanceof Blob - } - : function isBlob(b) { - return false - } -/* eslint-enable indent */ - -// This is a simplified version of AggregateError -class AggregateError extends Error { - constructor(errors) { - if (!Array.isArray(errors)) { - throw new TypeError(`Expected input to be an Array, got ${typeof errors}`) - } - let message = '' - for (let i = 0; i < errors.length; i++) { - message += ` ${errors[i].stack}\n` - } - super(message) - this.name = 'AggregateError' - this.errors = errors - } -} -module.exports = { - AggregateError, - kEmptyObject: Object.freeze({}), - once(callback) { - let called = false - return function (...args) { - if (called) { - return - } - called = true - callback.apply(this, args) - } - }, - createDeferredPromise: function () { - let resolve - let reject - - // eslint-disable-next-line promise/param-names - const promise = new Promise((res, rej) => { - resolve = res - reject = rej - }) - return { - promise, - resolve, - reject - } - }, - promisify(fn) { - return new Promise((resolve, reject) => { - fn((err, ...args) => { - if (err) { - return reject(err) - } - return resolve(...args) - }) - }) - }, - debuglog() { - return function () {} - }, - format(format, ...args) { - // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args - return format.replace(/%([sdifj])/g, function (...[_unused, type]) { - const replacement = args.shift() - if (type === 'f') { - return replacement.toFixed(6) - } else if (type === 'j') { - return JSON.stringify(replacement) - } else if (type === 's' && typeof replacement === 'object') { - const ctor = replacement.constructor !== Object ? replacement.constructor.name : '' - return `${ctor} {}`.trim() - } else { - return replacement.toString() - } - }) - }, - inspect(value) { - // Vastly simplified version of https://nodejs.org/api/util.html#utilinspectobject-options - switch (typeof value) { - case 'string': - if (value.includes("'")) { - if (!value.includes('"')) { - return `"${value}"` - } else if (!value.includes('`') && !value.includes('${')) { - return `\`${value}\`` - } - } - return `'${value}'` - case 'number': - if (isNaN(value)) { - return 'NaN' - } else if (Object.is(value, -0)) { - return String(value) - } - return value - case 'bigint': - return `${String(value)}n` - case 'boolean': - case 'undefined': - return String(value) - case 'object': - return '{}' - } - }, - types: { - isAsyncFunction(fn) { - return fn instanceof AsyncFunction - }, - isArrayBufferView(arr) { - return ArrayBuffer.isView(arr) - } - }, - isBlob -} -module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom') diff --git a/deps/npm/node_modules/readable-stream/lib/stream.js b/deps/npm/node_modules/readable-stream/lib/stream.js deleted file mode 100644 index e9bb6ba9080331..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/stream.js +++ /dev/null @@ -1,136 +0,0 @@ -/* replacement start */ - -const { Buffer } = require('buffer') - -/* replacement end */ -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -;('use strict') -const { ObjectDefineProperty, ObjectKeys, ReflectApply } = require('./ours/primordials') -const { - promisify: { custom: customPromisify } -} = require('./ours/util') -const { streamReturningOperators, promiseReturningOperators } = require('./internal/streams/operators') -const { - codes: { ERR_ILLEGAL_CONSTRUCTOR } -} = require('./ours/errors') -const compose = require('./internal/streams/compose') -const { pipeline } = require('./internal/streams/pipeline') -const { destroyer } = require('./internal/streams/destroy') -const eos = require('./internal/streams/end-of-stream') -const internalBuffer = {} -const promises = require('./stream/promises') -const utils = require('./internal/streams/utils') -const Stream = (module.exports = require('./internal/streams/legacy').Stream) -Stream.isDisturbed = utils.isDisturbed -Stream.isErrored = utils.isErrored -Stream.isReadable = utils.isReadable -Stream.Readable = require('./internal/streams/readable') -for (const key of ObjectKeys(streamReturningOperators)) { - const op = streamReturningOperators[key] - function fn(...args) { - if (new.target) { - throw ERR_ILLEGAL_CONSTRUCTOR() - } - return Stream.Readable.from(ReflectApply(op, this, args)) - } - ObjectDefineProperty(fn, 'name', { - __proto__: null, - value: op.name - }) - ObjectDefineProperty(fn, 'length', { - __proto__: null, - value: op.length - }) - ObjectDefineProperty(Stream.Readable.prototype, key, { - __proto__: null, - value: fn, - enumerable: false, - configurable: true, - writable: true - }) -} -for (const key of ObjectKeys(promiseReturningOperators)) { - const op = promiseReturningOperators[key] - function fn(...args) { - if (new.target) { - throw ERR_ILLEGAL_CONSTRUCTOR() - } - return ReflectApply(op, this, args) - } - ObjectDefineProperty(fn, 'name', { - __proto__: null, - value: op.name - }) - ObjectDefineProperty(fn, 'length', { - __proto__: null, - value: op.length - }) - ObjectDefineProperty(Stream.Readable.prototype, key, { - __proto__: null, - value: fn, - enumerable: false, - configurable: true, - writable: true - }) -} -Stream.Writable = require('./internal/streams/writable') -Stream.Duplex = require('./internal/streams/duplex') -Stream.Transform = require('./internal/streams/transform') -Stream.PassThrough = require('./internal/streams/passthrough') -Stream.pipeline = pipeline -const { addAbortSignal } = require('./internal/streams/add-abort-signal') -Stream.addAbortSignal = 
addAbortSignal -Stream.finished = eos -Stream.destroy = destroyer -Stream.compose = compose -ObjectDefineProperty(Stream, 'promises', { - __proto__: null, - configurable: true, - enumerable: true, - get() { - return promises - } -}) -ObjectDefineProperty(pipeline, customPromisify, { - __proto__: null, - enumerable: true, - get() { - return promises.pipeline - } -}) -ObjectDefineProperty(eos, customPromisify, { - __proto__: null, - enumerable: true, - get() { - return promises.finished - } -}) - -// Backwards-compat with node 0.4.x -Stream.Stream = Stream -Stream._isUint8Array = function isUint8Array(value) { - return value instanceof Uint8Array -} -Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) -} diff --git a/deps/npm/node_modules/readable-stream/lib/stream/promises.js b/deps/npm/node_modules/readable-stream/lib/stream/promises.js deleted file mode 100644 index 5d4ce15f4904b7..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/stream/promises.js +++ /dev/null @@ -1,43 +0,0 @@ -'use strict' - -const { ArrayPrototypePop, Promise } = require('../ours/primordials') -const { isIterable, isNodeStream, isWebStream } = require('../internal/streams/utils') -const { pipelineImpl: pl } = require('../internal/streams/pipeline') -const { finished } = require('../internal/streams/end-of-stream') -require('../../lib/stream.js') -function pipeline(...streams) { - return new Promise((resolve, reject) => { - let signal - let end - const lastArg = streams[streams.length - 1] - if ( - lastArg && - typeof lastArg === 'object' && - !isNodeStream(lastArg) && - !isIterable(lastArg) && - !isWebStream(lastArg) - ) { - const options = ArrayPrototypePop(streams) - signal = options.signal - end = options.end - } - pl( - streams, - (err, value) => { - if (err) { - reject(err) - } else { - resolve(value) - } - }, - { - signal, - end - } - ) - }) -} -module.exports = { - finished, - pipeline -} diff --git a/deps/npm/node_modules/readable-stream/package.json b/deps/npm/node_modules/readable-stream/package.json deleted file mode 100644 index 289f3a45a634f3..00000000000000 --- a/deps/npm/node_modules/readable-stream/package.json +++ /dev/null @@ -1,86 +0,0 @@ -{ - "name": "readable-stream", - "version": "4.4.2", - "description": "Node.js Streams, a user-land copy of the stream library from Node.js", - "homepage": "https://github.com/nodejs/readable-stream", - "license": "MIT", - "licenses": [ - { - "type": "MIT", - "url": "https://choosealicense.com/licenses/mit/" - } - ], - "keywords": [ - "readable", - "stream", - "pipe" - ], - "repository": { - "type": "git", - "url": "git://github.com/nodejs/readable-stream" - }, - "bugs": { - "url": "https://github.com/nodejs/readable-stream/issues" - }, - "main": "lib/ours/index.js", - "files": [ - "lib", - "LICENSE", - "README.md" - ], - "browser": { - "util": "./lib/ours/util.js", - "./lib/ours/index.js": "./lib/ours/browser.js" - }, - "scripts": { - "build": "node build/build.mjs", - "postbuild": "prettier -w lib test", - "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", - "test:prepare": "node test/browser/runner-prepare.mjs", - "test:browsers": "node test/browser/runner-browser.mjs", - "test:bundlers": "node test/browser/runner-node.mjs", - "test:readable-stream-only": "node readable-stream-test/runner-prepare.mjs", - "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", - "format": "prettier -w src lib 
test", - "lint": "eslint src" - }, - "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - }, - "devDependencies": { - "@babel/core": "^7.17.10", - "@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7", - "@babel/plugin-proposal-optional-chaining": "^7.16.7", - "@rollup/plugin-commonjs": "^22.0.0", - "@rollup/plugin-inject": "^4.0.4", - "@rollup/plugin-node-resolve": "^13.3.0", - "@sinonjs/fake-timers": "^9.1.2", - "browserify": "^17.0.0", - "c8": "^7.11.2", - "esbuild": "^0.14.39", - "esbuild-plugin-alias": "^0.2.1", - "eslint": "^8.15.0", - "eslint-config-standard": "^17.0.0", - "eslint-plugin-import": "^2.26.0", - "eslint-plugin-n": "^15.2.0", - "eslint-plugin-promise": "^6.0.0", - "playwright": "^1.21.1", - "prettier": "^2.6.2", - "rollup": "^2.72.1", - "rollup-plugin-polyfill-node": "^0.9.0", - "tap": "^16.2.0", - "tap-mocha-reporter": "^5.0.3", - "tape": "^5.5.3", - "tar": "^6.1.11", - "undici": "^5.1.1", - "webpack": "^5.72.1", - "webpack-cli": "^4.9.2" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } -} diff --git a/deps/npm/node_modules/safe-buffer/LICENSE b/deps/npm/node_modules/safe-buffer/LICENSE deleted file mode 100644 index 0c068ceecbd48f..00000000000000 --- a/deps/npm/node_modules/safe-buffer/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Feross Aboukhadijeh - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/deps/npm/node_modules/safe-buffer/index.js b/deps/npm/node_modules/safe-buffer/index.js deleted file mode 100644 index f8d3ec98852f44..00000000000000 --- a/deps/npm/node_modules/safe-buffer/index.js +++ /dev/null @@ -1,65 +0,0 @@ -/*! safe-buffer. MIT License. 
Feross Aboukhadijeh */ -/* eslint-disable node/no-deprecated-api */ -var buffer = require('buffer') -var Buffer = buffer.Buffer - -// alternative to using Object.keys for old browsers -function copyProps (src, dst) { - for (var key in src) { - dst[key] = src[key] - } -} -if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { - module.exports = buffer -} else { - // Copy properties from require('buffer') - copyProps(buffer, exports) - exports.Buffer = SafeBuffer -} - -function SafeBuffer (arg, encodingOrOffset, length) { - return Buffer(arg, encodingOrOffset, length) -} - -SafeBuffer.prototype = Object.create(Buffer.prototype) - -// Copy static methods from Buffer -copyProps(Buffer, SafeBuffer) - -SafeBuffer.from = function (arg, encodingOrOffset, length) { - if (typeof arg === 'number') { - throw new TypeError('Argument must not be a number') - } - return Buffer(arg, encodingOrOffset, length) -} - -SafeBuffer.alloc = function (size, fill, encoding) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - var buf = Buffer(size) - if (fill !== undefined) { - if (typeof encoding === 'string') { - buf.fill(fill, encoding) - } else { - buf.fill(fill) - } - } else { - buf.fill(0) - } - return buf -} - -SafeBuffer.allocUnsafe = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - return Buffer(size) -} - -SafeBuffer.allocUnsafeSlow = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - return buffer.SlowBuffer(size) -} diff --git a/deps/npm/node_modules/safe-buffer/package.json b/deps/npm/node_modules/safe-buffer/package.json deleted file mode 100644 index f2869e256477a9..00000000000000 --- a/deps/npm/node_modules/safe-buffer/package.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "name": "safe-buffer", - "description": "Safer Node.js Buffer API", - "version": "5.2.1", - "author": { - "name": "Feross Aboukhadijeh", - "email": "feross@feross.org", - "url": "https://feross.org" - }, - "bugs": { - "url": "https://github.com/feross/safe-buffer/issues" - }, - "devDependencies": { - "standard": "*", - "tape": "^5.0.0" - }, - "homepage": "https://github.com/feross/safe-buffer", - "keywords": [ - "buffer", - "buffer allocate", - "node security", - "safe", - "safe-buffer", - "security", - "uninitialized" - ], - "license": "MIT", - "main": "index.js", - "types": "index.d.ts", - "repository": { - "type": "git", - "url": "git://github.com/feross/safe-buffer.git" - }, - "scripts": { - "test": "standard && tape test/*.js" - }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] -} diff --git a/deps/npm/node_modules/string_decoder/LICENSE b/deps/npm/node_modules/string_decoder/LICENSE deleted file mode 100644 index 778edb20730ef4..00000000000000 --- a/deps/npm/node_modules/string_decoder/LICENSE +++ /dev/null @@ -1,48 +0,0 @@ -Node.js is licensed for use as follows: - -""" -Copyright Node.js contributors. All rights reserved. 
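
The readable-stream files removed above mirror Node core's stream surface: the entry point wires every stream-returning operator (map, filter, take, ...) and promise-returning operator (toArray, forEach, ...) onto Readable.prototype, throwing ERR_ILLEGAL_CONSTRUCTOR if one is invoked with `new`, and the stream/promises wrapper peels a trailing { signal, end } options object off the pipeline() argument list. A minimal usage sketch against Node's built-in stream module, which exposes the same surface (the operator methods are still flagged experimental on many Node releases, so exact availability depends on the Node version):

const { Readable } = require('node:stream');
const { pipeline } = require('node:stream/promises');

async function main () {
  // Stream-returning operators chain lazily; a promise-returning one ends the chain.
  const picked = await Readable.from([1, 2, 3, 4, 5])
    .map((n) => n * 10)
    .filter((n) => n % 20 === 0)
    .toArray();
  console.log(picked); // [20, 40]

  // The promise-based pipeline resolves once the destination finishes and takes a
  // trailing options object; the removed promises.js also reads an `end` flag
  // from that same object.
  const ac = new AbortController();
  await pipeline(
    Readable.from(['a', 'b', 'c']),
    async function * (source) {
      for await (const chunk of source) yield chunk.toUpperCase();
    },
    process.stdout,
    { signal: ac.signal }
  );
}

main();

Calling one of these operators as a constructor is rejected by design, which is what the ERR_ILLEGAL_CONSTRUCTOR guard in the removed code enforces.
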
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" - -This license applies to parts of Node.js originating from the -https://github.com/joyent/node repository: - -""" -Copyright Joyent, Inc. and other Node contributors. All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" - diff --git a/deps/npm/node_modules/string_decoder/lib/string_decoder.js b/deps/npm/node_modules/string_decoder/lib/string_decoder.js deleted file mode 100644 index 2e89e63f7933e4..00000000000000 --- a/deps/npm/node_modules/string_decoder/lib/string_decoder.js +++ /dev/null @@ -1,296 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -'use strict'; - -/**/ - -var Buffer = require('safe-buffer').Buffer; -/**/ - -var isEncoding = Buffer.isEncoding || function (encoding) { - encoding = '' + encoding; - switch (encoding && encoding.toLowerCase()) { - case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': - return true; - default: - return false; - } -}; - -function _normalizeEncoding(enc) { - if (!enc) return 'utf8'; - var retried; - while (true) { - switch (enc) { - case 'utf8': - case 'utf-8': - return 'utf8'; - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return 'utf16le'; - case 'latin1': - case 'binary': - return 'latin1'; - case 'base64': - case 'ascii': - case 'hex': - return enc; - default: - if (retried) return; // undefined - enc = ('' + enc).toLowerCase(); - retried = true; - } - } -}; - -// Do not cache `Buffer.isEncoding` when checking encoding names as some -// modules monkey-patch it to support additional encodings -function normalizeEncoding(enc) { - var nenc = _normalizeEncoding(enc); - if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); - return nenc || enc; -} - -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. -exports.StringDecoder = StringDecoder; -function StringDecoder(encoding) { - this.encoding = normalizeEncoding(encoding); - var nb; - switch (this.encoding) { - case 'utf16le': - this.text = utf16Text; - this.end = utf16End; - nb = 4; - break; - case 'utf8': - this.fillLast = utf8FillLast; - nb = 4; - break; - case 'base64': - this.text = base64Text; - this.end = base64End; - nb = 3; - break; - default: - this.write = simpleWrite; - this.end = simpleEnd; - return; - } - this.lastNeed = 0; - this.lastTotal = 0; - this.lastChar = Buffer.allocUnsafe(nb); -} - -StringDecoder.prototype.write = function (buf) { - if (buf.length === 0) return ''; - var r; - var i; - if (this.lastNeed) { - r = this.fillLast(buf); - if (r === undefined) return ''; - i = this.lastNeed; - this.lastNeed = 0; - } else { - i = 0; - } - if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); - return r || ''; -}; - -StringDecoder.prototype.end = utf8End; - -// Returns only complete characters in a Buffer -StringDecoder.prototype.text = utf8Text; - -// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer -StringDecoder.prototype.fillLast = function (buf) { - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); - this.lastNeed -= buf.length; -}; - -// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a -// continuation byte. If an invalid byte is detected, -2 is returned. -function utf8CheckByte(byte) { - if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; - return byte >> 6 === 0x02 ? 
-1 : -2; -} - -// Checks at most 3 bytes at the end of a Buffer in order to detect an -// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) -// needed to complete the UTF-8 character (if applicable) are returned. -function utf8CheckIncomplete(self, buf, i) { - var j = buf.length - 1; - if (j < i) return 0; - var nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 1; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 2; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) { - if (nb === 2) nb = 0;else self.lastNeed = nb - 3; - } - return nb; - } - return 0; -} - -// Validates as many continuation bytes for a multi-byte UTF-8 character as -// needed or are available. If we see a non-continuation byte where we expect -// one, we "replace" the validated continuation bytes we've seen so far with -// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding -// behavior. The continuation byte check is included three times in the case -// where all of the continuation bytes for a character exist in the same buffer. -// It is also done this way as a slight performance increase instead of using a -// loop. -function utf8CheckExtraBytes(self, buf, p) { - if ((buf[0] & 0xC0) !== 0x80) { - self.lastNeed = 0; - return '\ufffd'; - } - if (self.lastNeed > 1 && buf.length > 1) { - if ((buf[1] & 0xC0) !== 0x80) { - self.lastNeed = 1; - return '\ufffd'; - } - if (self.lastNeed > 2 && buf.length > 2) { - if ((buf[2] & 0xC0) !== 0x80) { - self.lastNeed = 2; - return '\ufffd'; - } - } - } -} - -// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. -function utf8FillLast(buf) { - var p = this.lastTotal - this.lastNeed; - var r = utf8CheckExtraBytes(this, buf, p); - if (r !== undefined) return r; - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, p, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, p, 0, buf.length); - this.lastNeed -= buf.length; -} - -// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a -// partial character, the character's bytes are buffered until the required -// number of bytes are available. -function utf8Text(buf, i) { - var total = utf8CheckIncomplete(this, buf, i); - if (!this.lastNeed) return buf.toString('utf8', i); - this.lastTotal = total; - var end = buf.length - (total - this.lastNeed); - buf.copy(this.lastChar, 0, end); - return buf.toString('utf8', i, end); -} - -// For UTF-8, a replacement character is added when ending on a partial -// character. -function utf8End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + '\ufffd'; - return r; -} - -// UTF-16LE typically needs two bytes per character, but even if we have an even -// number of bytes available, we need to check if we end on a leading/high -// surrogate. In that case, we need to wait for the next two bytes in order to -// decode the last character properly. 
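
The buffering scheme those comments describe is easiest to see from the outside: StringDecoder holds on to the bytes of an incomplete character (the tail of a multi-byte UTF-8 sequence, or a UTF-16LE lead surrogate as in the comment just above) and only emits the character once the remaining bytes arrive. A small sketch using Node's built-in string_decoder, which this vendored copy tracks:

const { StringDecoder } = require('node:string_decoder');

// UTF-8: '€' is the three-byte sequence e2 82 ac, fed in two writes.
const utf8 = new StringDecoder('utf8');
console.log(JSON.stringify(utf8.write(Buffer.from([0xe2, 0x82])))); // "" – bytes buffered
console.log(JSON.stringify(utf8.write(Buffer.from([0xac]))));       // "€" – sequence completed

// UTF-16LE: '😀' is the surrogate pair d83d de00, i.e. the bytes 3d d8 00 de.
const utf16 = new StringDecoder('utf16le');
const bytes = Buffer.from('😀', 'utf16le');
console.log(JSON.stringify(utf16.write(bytes.subarray(0, 2)))); // "" – lead surrogate buffered
console.log(JSON.stringify(utf16.write(bytes.subarray(2))));    // "😀"
console.log(JSON.stringify(utf16.end()));                       // "" – nothing left pending
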
-function utf16Text(buf, i) { - if ((buf.length - i) % 2 === 0) { - var r = buf.toString('utf16le', i); - if (r) { - var c = r.charCodeAt(r.length - 1); - if (c >= 0xD800 && c <= 0xDBFF) { - this.lastNeed = 2; - this.lastTotal = 4; - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - return r.slice(0, -1); - } - } - return r; - } - this.lastNeed = 1; - this.lastTotal = 2; - this.lastChar[0] = buf[buf.length - 1]; - return buf.toString('utf16le', i, buf.length - 1); -} - -// For UTF-16LE we do not explicitly append special replacement characters if we -// end on a partial character, we simply let v8 handle that. -function utf16End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) { - var end = this.lastTotal - this.lastNeed; - return r + this.lastChar.toString('utf16le', 0, end); - } - return r; -} - -function base64Text(buf, i) { - var n = (buf.length - i) % 3; - if (n === 0) return buf.toString('base64', i); - this.lastNeed = 3 - n; - this.lastTotal = 3; - if (n === 1) { - this.lastChar[0] = buf[buf.length - 1]; - } else { - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - } - return buf.toString('base64', i, buf.length - n); -} - -function base64End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); - return r; -} - -// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) -function simpleWrite(buf) { - return buf.toString(this.encoding); -} - -function simpleEnd(buf) { - return buf && buf.length ? this.write(buf) : ''; -} \ No newline at end of file diff --git a/deps/npm/node_modules/string_decoder/package.json b/deps/npm/node_modules/string_decoder/package.json deleted file mode 100644 index b2bb141160cad3..00000000000000 --- a/deps/npm/node_modules/string_decoder/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "string_decoder", - "version": "1.3.0", - "description": "The string_decoder module from Node core", - "main": "lib/string_decoder.js", - "files": [ - "lib" - ], - "dependencies": { - "safe-buffer": "~5.2.0" - }, - "devDependencies": { - "babel-polyfill": "^6.23.0", - "core-util-is": "^1.0.2", - "inherits": "^2.0.3", - "tap": "~0.4.8" - }, - "scripts": { - "test": "tap test/parallel/*.js && node test/verify-dependencies", - "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" - }, - "repository": { - "type": "git", - "url": "git://github.com/nodejs/string_decoder.git" - }, - "homepage": "https://github.com/nodejs/string_decoder", - "keywords": [ - "string", - "decoder", - "browser", - "browserify" - ], - "license": "MIT" -} diff --git a/deps/npm/package.json b/deps/npm/package.json index a946e38493e0e4..7f222796a8a2f8 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "10.2.4", + "version": "10.3.0", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ @@ -57,12 +57,12 @@ "@npmcli/fs": "^3.1.0", "@npmcli/map-workspaces": "^3.0.4", "@npmcli/package-json": "^5.0.0", - "@npmcli/promise-spawn": "^7.0.0", - "@npmcli/run-script": "^7.0.2", + "@npmcli/promise-spawn": "^7.0.1", + "@npmcli/run-script": "^7.0.3", "@sigstore/tuf": "^2.2.0", "abbrev": "^2.0.0", "archy": "~1.0.0", - "cacache": "^18.0.0", + "cacache": "^18.0.2", "chalk": "^5.3.0", "ci-info": "^4.0.0", "cli-columns": "^4.0.0", @@ -76,7 +76,7 @@ "ini": "^4.1.1", "init-package-json": 
"^6.0.0", "is-cidr": "^5.0.3", - "json-parse-even-better-errors": "^3.0.0", + "json-parse-even-better-errors": "^3.0.1", "libnpmaccess": "^8.0.1", "libnpmdiff": "^6.0.3", "libnpmexec": "^7.0.4", @@ -105,7 +105,7 @@ "npm-user-validate": "^2.0.0", "npmlog": "^7.0.1", "p-map": "^4.0.0", - "pacote": "^17.0.4", + "pacote": "^17.0.5", "parse-conflict-json": "^3.0.1", "proc-log": "^3.0.0", "qrcode-terminal": "^0.12.0", @@ -199,23 +199,23 @@ "devDependencies": { "@npmcli/docs": "^1.0.0", "@npmcli/eslint-config": "^4.0.2", - "@npmcli/git": "^5.0.3", + "@npmcli/git": "^5.0.4", "@npmcli/mock-globals": "^1.0.0", "@npmcli/mock-registry": "^1.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/template-oss": "4.21.3", "@tufjs/repo-mock": "^2.0.0", "ajv": "^8.12.0", "ajv-formats": "^2.1.1", "ajv-formats-draft2019": "^1.6.1", "diff": "^5.1.0", "licensee": "^10.0.0", - "nock": "^13.3.8", - "npm-packlist": "^8.0.0", + "nock": "^13.4.0", + "npm-packlist": "^8.0.2", "remark": "^14.0.2", "remark-gfm": "^3.0.1", "remark-github": "^11.2.4", "spawk": "^1.7.1", - "tap": "^16.3.8" + "tap": "^16.3.9" }, "scripts": { "dependencies": "node scripts/bundle-and-gitignore-deps.js && node scripts/dependency-graph.js", @@ -227,7 +227,7 @@ "snap": "tap", "prepack": "node . run build -w docs", "posttest": "node . run lint", - "lint": "eslint \"**/*.js\"", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "node . run lint -- --fix", "lint-all": "node . run lint -ws -iwr --if-present", "resetdeps": "node scripts/resetdeps.js", @@ -258,7 +258,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.19.0", + "version": "4.21.3", "content": "./scripts/template-oss/root.js" }, "license": "Artistic-2.0", diff --git a/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs index b210601e9118b9..593de43c2f0193 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs @@ -35,6 +35,7 @@ exports[`test/lib/commands/config.js TAP config list --json > output matches sna "commit-hooks": true, "cpu": null, "os": null, + "libc": null, "depth": null, "description": true, "dev": false, @@ -245,6 +246,7 @@ json = false key = null legacy-bundling = false legacy-peer-deps = false +libc = null link = false local-address = null location = "user" diff --git a/deps/npm/tap-snapshots/test/lib/commands/pack.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/pack.js.test.cjs index 318b017e16e864..74b472a7e4bb64 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/pack.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/pack.js.test.cjs @@ -16,10 +16,10 @@ Array [ name: test-package version: 1.0.0 filename: test-package-1.0.0.tgz - package size: 136 B + package size: {size} unpacked size: 41 B - shasum: a92a0679a70a450f14f98a468756948a679e4107 - integrity: sha512-Gka9ZV/Bryxky[...]LgMJ+0F+FhXMA== + shasum: {sha} + integrity: {integrity} total files: 1 ), "", @@ -41,14 +41,14 @@ Array [ Object { "mode": 420, "path": "package.json", - "size": 41, + "size": "{size}", }, ], "id": "test-package@1.0.0", - "integrity": "sha512-Gka9ZV/BryxkypfvMpTvLfaJE1AUi7PK1EAbYqnVzqtucf6QvUK4CFsLVzagY1GwZVx2T1jwWLgMJ+0F+FhXMA==", + "integrity": "{integrity}", "name": "test-package", - "shasum": "a92a0679a70a450f14f98a468756948a679e4107", - "size": 136, + "shasum": "{sha}", + "size": "{size}", "unpackedSize": 41, 
"version": "1.0.0", }, @@ -71,14 +71,14 @@ Array [ Object { "mode": 420, "path": "package.json", - "size": 50, + "size": "{size}", }, ], "id": "@myscope/test-package@1.0.0", - "integrity": "sha512-bUu8iTm2E5DZMrwKeyx963K6ViEmaFocXh75EujgI+FHSaJeqvObcdk1KFwdx8CbOgsfNHEvWNQw/bONAJsoNw==", + "integrity": "{integrity}", "name": "@myscope/test-package", - "shasum": "7e6eb2e1ca46bed6b8fa8e144e0fcd1b22fe2d98", - "size": 145, + "shasum": "{sha}", + "size": "{size}", "unpackedSize": 50, "version": "1.0.0", }, @@ -97,10 +97,10 @@ Array [ name: test-package version: 1.0.0 filename: test-package-1.0.0.tgz - package size: 136 B + package size: {size} unpacked size: 41 B - shasum: a92a0679a70a450f14f98a468756948a679e4107 - integrity: sha512-Gka9ZV/Bryxky[...]LgMJ+0F+FhXMA== + shasum: {sha} + integrity: {integrity} total files: 1 ), "", diff --git a/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs index 0055c59c3641cf..597bf236f46b21 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs @@ -38,10 +38,10 @@ Array [ name: test-package version: 1.0.0 filename: test-package-1.0.0.tgz - package size: 160 B + package size: {size} unpacked size: 87 B - shasum:{sha} - integrity:{sha} + shasum: {sha} + integrity: {integrity} total files: 1 ), ], @@ -82,15 +82,15 @@ exports[`test/lib/commands/publish.js TAP json > new package json 1`] = ` "id": "test-package@1.0.0", "name": "test-package", "version": "1.0.0", - "size": 160, + "size": "{size}", "unpackedSize": 87, "shasum": "{sha}", - "integrity": "{sha}", + "integrity": "{integrity}", "filename": "test-package-1.0.0.tgz", "files": [ { "path": "package.json", - "size": 87, + "size": "{size}", "mode": 420 } ], @@ -289,10 +289,10 @@ Array [ name: @npm/test-package version: 1.0.0 filename: npm-test-package-1.0.0.tgz - package size: 147 B + package size: {size} unpacked size: 55 B - shasum:{sha} - integrity:{sha} + shasum: {sha} + integrity: {integrity} total files: 1 ), ], @@ -344,10 +344,10 @@ Array [ name: @npm/test-package version: 1.0.0 filename: npm-test-package-1.0.0.tgz - package size: 147 B + package size: {size} unpacked size: 55 B - shasum:{sha} - integrity:{sha} + shasum: {sha} + integrity: {integrity} total files: 1 ), ], @@ -398,10 +398,10 @@ Array [ name: test-tar-package version: 1.0.0 filename: test-tar-package-1.0.0.tgz - package size: 218 B + package size: {size} unpacked size: 124 B - shasum:{sha} - integrity:{sha} + shasum: {sha} + integrity: {integrity} total files: 2 ), ], @@ -550,15 +550,15 @@ exports[`test/lib/commands/publish.js TAP workspaces json > all workspaces in js "id": "workspace-a@1.2.3-a", "name": "workspace-a", "version": "1.2.3-a", - "size": 162, + "size": "{size}", "unpackedSize": 82, "shasum": "{sha}", - "integrity": "{sha}", + "integrity": "{integrity}", "filename": "workspace-a-1.2.3-a.tgz", "files": [ { "path": "package.json", - "size": 82, + "size": "{size}", "mode": 420 } ], @@ -569,15 +569,15 @@ exports[`test/lib/commands/publish.js TAP workspaces json > all workspaces in js "id": "workspace-b@1.2.3-n", "name": "workspace-b", "version": "1.2.3-n", - "size": 171, + "size": "{size}", "unpackedSize": 92, "shasum": "{sha}", - "integrity": "{sha}", + "integrity": "{integrity}", "filename": "workspace-b-1.2.3-n.tgz", "files": [ { "path": "package.json", - "size": 92, + "size": "{size}", "mode": 420 } ], @@ -588,15 +588,15 @@ exports[`test/lib/commands/publish.js TAP 
workspaces json > all workspaces in js "id": "workspace-n@1.2.3-n", "name": "workspace-n", "version": "1.2.3-n", - "size": 140, + "size": "{size}", "unpackedSize": 42, "shasum": "{sha}", - "integrity": "{sha}", + "integrity": "{integrity}", "filename": "workspace-n-1.2.3-n.tgz", "files": [ { "path": "package.json", - "size": 42, + "size": "{size}", "mode": 420 } ], diff --git a/deps/npm/tap-snapshots/test/lib/commands/sbom.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/sbom.js.test.cjs index 0079832f7427fb..826cf074e6038f 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/sbom.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/sbom.js.test.cjs @@ -82,14 +82,14 @@ exports[`test/lib/commands/sbom.js TAP sbom --omit dev > must match snapshot 1`] "relationshipType": "DESCRIBES" }, { - "spdxElementId": "SPDXRef-Package-test-npm-sbom-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-foo-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-foo-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-test-npm-sbom-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-foo-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-dog-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-dog-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-foo-1.0.0", + "relationshipType": "DEPENDENCY_OF" } ] } @@ -155,9 +155,9 @@ exports[`test/lib/commands/sbom.js TAP sbom --omit optional > must match snapsho "relationshipType": "DESCRIBES" }, { - "spdxElementId": "SPDXRef-Package-test-npm-sbom-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-chai-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-chai-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-test-npm-sbom-1.0.0", + "relationshipType": "DEPENDENCY_OF" } ] } @@ -223,9 +223,9 @@ exports[`test/lib/commands/sbom.js TAP sbom --omit peer > must match snapshot 1` "relationshipType": "DESCRIBES" }, { - "spdxElementId": "SPDXRef-Package-test-npm-sbom-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-chai-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-chai-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-test-npm-sbom-1.0.0", + "relationshipType": "DEPENDENCY_OF" } ] } @@ -435,19 +435,19 @@ exports[`test/lib/commands/sbom.js TAP sbom basic sbom - spdx > must match snaps "relationshipType": "DESCRIBES" }, { - "spdxElementId": "SPDXRef-Package-test-npm-sbom-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-foo-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-foo-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-test-npm-sbom-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-test-npm-sbom-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-chai-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-chai-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-test-npm-sbom-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-foo-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-dog-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-dog-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-foo-1.0.0", + "relationshipType": "DEPENDENCY_OF" } ] } @@ -547,18 +547,18 @@ exports[`test/lib/commands/sbom.js TAP sbom extraneous dep > must match snapshot "relationshipType": "DESCRIBES" }, { - "spdxElementId": "SPDXRef-Package-test-npm-ls-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-foo-1.0.0", - 
"relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-foo-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-test-npm-ls-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-foo-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-dog-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-dog-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-foo-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-test-npm-ls-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-chai-1.0.0", + "spdxElementId": "SPDXRef-Package-chai-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-test-npm-ls-1.0.0", "relationshipType": "OPTIONAL_DEPENDENCY_OF" } ] @@ -710,39 +710,39 @@ exports[`test/lib/commands/sbom.js TAP sbom loading a tree containing workspaces "relationshipType": "DESCRIBES" }, { - "spdxElementId": "SPDXRef-Package-workspaces-tree-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-a-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-a-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-workspaces-tree-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-workspaces-tree-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-d-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-d-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-workspaces-tree-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-a-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-c-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-c-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-a-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-a-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-d-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-d-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-a-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-a-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-baz-1.0.0", + "spdxElementId": "SPDXRef-Package-baz-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-a-1.0.0", "relationshipType": "DEV_DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-d-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-foo-1.1.1", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-foo-1.1.1", + "relatedSpdxElement": "SPDXRef-Package-d-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-foo-1.1.1", - "relatedSpdxElement": "SPDXRef-Package-bar-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-bar-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-foo-1.1.1", + "relationshipType": "DEPENDENCY_OF" } ] } @@ -825,14 +825,14 @@ exports[`test/lib/commands/sbom.js TAP sbom loading a tree containing workspaces "relationshipType": "DESCRIBES" }, { - "spdxElementId": "SPDXRef-Package-workspaces-tree-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-e-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-e-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-workspaces-tree-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-workspaces-tree-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-f-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-f-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-workspaces-tree-1.0.0", + "relationshipType": "DEPENDENCY_OF" } 
] } @@ -1051,59 +1051,59 @@ exports[`test/lib/commands/sbom.js TAP sbom loading a tree containing workspaces "relationshipType": "DESCRIBES" }, { - "spdxElementId": "SPDXRef-Package-workspaces-tree-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-a-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-a-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-workspaces-tree-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-workspaces-tree-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-b-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-b-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-workspaces-tree-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-workspaces-tree-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-d-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-d-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-workspaces-tree-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-workspaces-tree-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-e-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-e-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-workspaces-tree-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-workspaces-tree-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-f-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-f-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-workspaces-tree-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-workspaces-tree-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-pacote-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-pacote-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-workspaces-tree-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-a-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-c-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-c-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-a-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-a-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-d-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-d-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-a-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-a-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-baz-1.0.0", + "spdxElementId": "SPDXRef-Package-baz-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-a-1.0.0", "relationshipType": "DEV_DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-d-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-foo-1.1.1", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-foo-1.1.1", + "relatedSpdxElement": "SPDXRef-Package-d-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-foo-1.1.1", - "relatedSpdxElement": "SPDXRef-Package-bar-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-bar-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-foo-1.1.1", + "relationshipType": "DEPENDENCY_OF" } ] } @@ -1169,9 +1169,9 @@ exports[`test/lib/commands/sbom.js TAP sbom loading a tree containing workspaces "relationshipType": "DESCRIBES" }, { - "spdxElementId": "SPDXRef-Package-workspaces-tree-1.0.0", - "relatedSpdxElement": 
"SPDXRef-Package-pacote-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-pacote-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-workspaces-tree-1.0.0", + "relationshipType": "DEPENDENCY_OF" } ] } @@ -1275,19 +1275,19 @@ exports[`test/lib/commands/sbom.js TAP sbom lock file only > must match snapshot "relationshipType": "DESCRIBES" }, { - "spdxElementId": "SPDXRef-Package-test-npm-ls-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-foo-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-foo-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-test-npm-ls-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-test-npm-ls-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-chai-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-chai-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-test-npm-ls-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-foo-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-dog-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-dog-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-foo-1.0.0", + "relationshipType": "DEPENDENCY_OF" } ] } @@ -1387,19 +1387,19 @@ exports[`test/lib/commands/sbom.js TAP sbom missing (optional) dep > must match "relationshipType": "DESCRIBES" }, { - "spdxElementId": "SPDXRef-Package-test-npm-ls-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-foo-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-foo-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-test-npm-ls-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-test-npm-ls-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-chai-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-chai-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-test-npm-ls-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-foo-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-dog-1.0.0", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-dog-1.0.0", + "relatedSpdxElement": "SPDXRef-Package-foo-1.0.0", + "relationshipType": "DEPENDENCY_OF" } ] } diff --git a/deps/npm/tap-snapshots/test/lib/commands/view.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/view.js.test.cjs index 5248d439afad95..1c37e26db7f824 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/view.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/view.js.test.cjs @@ -12,23 +12,23 @@ green is a very important color DEPRECATED!! 
- true -keywords:,colors, green, crayola +keywords: colors, green, crayola -bin:,green +bin: green dist -.tarball:,http://hm.green.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1.0 GB +.tarball: http://hm.green.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1.0 GB dependencies: red: 1.0.0 yellow: 1.0.0 maintainers: --,claudia <c@yellow.com> --,isaacs <i@yellow.com> +- claudia <c@yellow.com> +- isaacs <i@yellow.com> dist-tags: latest: 1.0.0 @@ -41,23 +41,23 @@ green is a very important color DEPRECATED ⚠️ - true -keywords:,colors, green, crayola +keywords: colors, green, crayola -bin:,green +bin: green dist -.tarball:,http://hm.green.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1.0 GB +.tarball: http://hm.green.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1.0 GB dependencies: red: 1.0.0 yellow: 1.0.0 maintainers: --,claudia <c@yellow.com> --,isaacs <i@yellow.com> +- claudia <c@yellow.com> +- isaacs <i@yellow.com> dist-tags: latest: 1.0.0 @@ -70,23 +70,23 @@ green is a very important color DEPRECATED!! - true -keywords:,colors, green, crayola +keywords: colors, green, crayola -bin:,green +bin: green dist -.tarball:,http://hm.green.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1.0 GB +.tarball: http://hm.green.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1.0 GB dependencies: red: 1.0.0 yellow: 1.0.0 maintainers: --,claudia <c@yellow.com> --,isaacs <i@yellow.com> +- claudia <c@yellow.com> +- isaacs <i@yellow.com> dist-tags: latest: 1.0.0 @@ -97,10 +97,10 @@ exports[`test/lib/commands/view.js TAP package in cwd directory > must match sna blue@1.0.0 | Proprietary | deps: none | versions: 2 dist -.tarball:,http://hm.blue.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1 B +.tarball: http://hm.blue.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1 B dist-tags: latest: 1.0.0 @@ -113,10 +113,10 @@ exports[`test/lib/commands/view.js TAP package in cwd non-specific version > mus blue@1.0.0 | Proprietary | deps: none | versions: 2 dist -.tarball:,http://hm.blue.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1 B +.tarball: http://hm.blue.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1 B dist-tags: latest: 1.0.0 @@ -129,10 +129,10 @@ exports[`test/lib/commands/view.js TAP package in cwd specific version > must ma blue@1.0.0 | Proprietary | deps: none | versions: 2 dist -.tarball:,http://hm.blue.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1 B +.tarball: http://hm.blue.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1 B dist-tags: latest: 1.0.0 @@ -181,10 +181,10 @@ exports[`test/lib/commands/view.js TAP package with homepage > must match snapsh http://hm.orange.com dist -.tarball:,http://hm.orange.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1 B +.tarball: http://hm.orange.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1 B dist-tags: latest: 1.0.0 @@ -195,10 +195,10 @@ exports[`test/lib/commands/view.js TAP package with maintainers info as object > pink@1.0.0 | Proprietary | deps: none | versions: 2 dist -.tarball:,http://hm.pink.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1 B +.tarball: http://hm.pink.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1 B dist-tags: latest: 1.0.0 @@ -209,10 +209,10 @@ exports[`test/lib/commands/view.js TAP package with more than 25 deps > must mat black@1.0.0 | Proprietary | deps: 25 | versions: 2 dist 
-.tarball:,http://hm.black.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1 B +.tarball: http://hm.black.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1 B dependencies: 0: 1.0.0 @@ -250,10 +250,10 @@ exports[`test/lib/commands/view.js TAP package with no modified time > must matc cyan@1.0.0 | Proprietary | deps: none | versions: 2 dist -.tarball:,http://hm.cyan.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1.0 MB +.tarball: http://hm.cyan.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1.0 MB dist-tags: latest: 1.0.0 @@ -266,10 +266,10 @@ exports[`test/lib/commands/view.js TAP package with no repo or homepage > must m blue@1.0.0 | Proprietary | deps: none | versions: 2 dist -.tarball:,http://hm.blue.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1 B +.tarball: http://hm.blue.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1 B dist-tags: latest: 1.0.0 @@ -282,10 +282,10 @@ exports[`test/lib/commands/view.js TAP package with semver range > must match sn blue@1.0.0 | Proprietary | deps: none | versions: 2 dist -.tarball:,http://hm.blue.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1 B +.tarball: http://hm.blue.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1 B dist-tags: latest: 1.0.0 @@ -295,10 +295,10 @@ published {TIME} ago blue@1.0.1 | Proprietary | deps: none | versions: 2 dist -.tarball:,http://hm.blue.com/1.0.1.tgz -.shasum:,124 -.integrity:,--- -.unpackedSize:,1.0 kB +.tarball: http://hm.blue.com/1.0.1.tgz +.shasum: 124 +.integrity: --- +.unpackedSize: 1.0 kB dist-tags: latest: 1.0.0 @@ -436,23 +436,23 @@ green is a very important color DEPRECATED!! - true -keywords:,colors, green, crayola +keywords: colors, green, crayola -bin:,green +bin: green dist -.tarball:,http://hm.green.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1.0 GB +.tarball: http://hm.green.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1.0 GB dependencies: red: 1.0.0 yellow: 1.0.0 maintainers: --,claudia <c@yellow.com> --,isaacs <i@yellow.com> +- claudia <c@yellow.com> +- isaacs <i@yellow.com> dist-tags: latest: 1.0.0 @@ -461,10 +461,10 @@ dist-tags: http://hm.orange.com dist -.tarball:,http://hm.orange.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1 B +.tarball: http://hm.orange.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1 B dist-tags: latest: 1.0.0 @@ -500,23 +500,23 @@ green is a very important color DEPRECATED!! 
- true -keywords:,colors, green, crayola +keywords: colors, green, crayola -bin:,green +bin: green dist -.tarball:,http://hm.green.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1.0 GB +.tarball: http://hm.green.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1.0 GB dependencies: red: 1.0.0 yellow: 1.0.0 maintainers: --,claudia <c@yellow.com> --,isaacs <i@yellow.com> +- claudia <c@yellow.com> +- isaacs <i@yellow.com> dist-tags: latest: 1.0.0 @@ -527,10 +527,10 @@ exports[`test/lib/commands/view.js TAP workspaces remote package name > must mat pink@1.0.0 | Proprietary | deps: none | versions: 2 dist -.tarball:,http://hm.pink.com/1.0.0.tgz -.shasum:,123 -.integrity:,--- -.unpackedSize:,1 B +.tarball: http://hm.pink.com/1.0.0.tgz +.shasum: 123 +.integrity: --- +.unpackedSize: 1 B dist-tags: latest: 1.0.0 diff --git a/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs b/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs index 55397121bc0da8..ba1fe703d39d5c 100644 --- a/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs @@ -903,6 +903,16 @@ Use of \`legacy-peer-deps\` is not recommended, as it will not enforce the +#### \`libc\` + +* Default: null +* Type: null or String + +Override libc of native modules to install. Acceptable values are same as +\`libc\` field of package.json + + + #### \`link\` * Default: false @@ -1421,7 +1431,7 @@ SBOM format to use when generating SBOMs. * Type: "library", "application", or "framework" The type of package described by the generated SBOM. For SPDX, this is the -value for the \`primaryPackagePurpose\` fieled. For CycloneDX, this is the +value for the \`primaryPackagePurpose\` field. For CycloneDX, this is the value for the \`type\` field. @@ -2049,6 +2059,7 @@ Array [ "commit-hooks", "cpu", "os", + "libc", "depth", "description", "dev", @@ -2206,6 +2217,7 @@ Array [ "commit-hooks", "cpu", "os", + "libc", "depth", "description", "dev", @@ -2395,6 +2407,7 @@ Object { "json": false, "key": null, "legacyPeerDeps": false, + "libc": null, "localAddress": null, "location": "user", "lockfileVersion": null, @@ -3240,7 +3253,7 @@ Options: [--include [--include ...]] [--strict-peer-deps] [--prefer-dedupe] [--no-package-lock] [--package-lock-only] [--foreground-scripts] [--ignore-scripts] [--no-audit] [--no-bin-links] -[--no-fund] [--dry-run] [--cpu ] [--os ] +[--no-fund] [--dry-run] [--cpu ] [--os ] [--libc ] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] [--include-workspace-root] [--install-links] @@ -3274,6 +3287,7 @@ aliases: add, i, in, ins, inst, insta, instal, isnt, isnta, isntal, isntall #### \`dry-run\` #### \`cpu\` #### \`os\` +#### \`libc\` #### \`workspace\` #### \`workspaces\` #### \`include-workspace-root\` @@ -3337,7 +3351,7 @@ Options: [--include [--include ...]] [--strict-peer-deps] [--prefer-dedupe] [--no-package-lock] [--package-lock-only] [--foreground-scripts] [--ignore-scripts] [--no-audit] [--no-bin-links] -[--no-fund] [--dry-run] [--cpu ] [--os ] +[--no-fund] [--dry-run] [--cpu ] [--os ] [--libc ] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] [--include-workspace-root] [--install-links] @@ -3371,6 +3385,7 @@ alias: it #### \`dry-run\` #### \`cpu\` #### \`os\` +#### \`libc\` #### \`workspace\` #### \`workspaces\` #### \`include-workspace-root\` diff --git a/deps/npm/tap-snapshots/test/lib/utils/open-url-prompt.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/open-url-prompt.js.test.cjs index 968b14a20d90f5..f31ec8e041f517 100644 --- 
a/deps/npm/tap-snapshots/test/lib/utils/open-url-prompt.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/utils/open-url-prompt.js.test.cjs @@ -5,6 +5,14 @@ * Make sure to inspect the output below. Do not ignore changes! */ 'use strict' +exports[`test/lib/utils/open-url-prompt.js TAP does not error when opener can not find command > Outputs extra Browser unavailable message and url 1`] = ` +npm home: +https://www.npmjs.com +Browser unavailable. Please open the URL manually: + https://www.npmjs.com + +` + exports[`test/lib/utils/open-url-prompt.js TAP opens a url > must match snapshot 1`] = ` npm home: https://www.npmjs.com diff --git a/deps/npm/tap-snapshots/test/lib/utils/sbom-spdx.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/sbom-spdx.js.test.cjs index aeda27793a04ff..b887e13ca7dc08 100644 --- a/deps/npm/tap-snapshots/test/lib/utils/sbom-spdx.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/utils/sbom-spdx.js.test.cjs @@ -149,33 +149,33 @@ exports[`test/lib/utils/sbom-spdx.js TAP node - with deps > must match snapshot "relationshipType": "DESCRIBES" }, { - "spdxElementId": "SPDXRef-Package-root-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-dep1-0.0.1", - "relationshipType": "HAS_PREREQUISITE" + "spdxElementId": "SPDXRef-Package-dep1-0.0.1", + "relatedSpdxElement": "SPDXRef-Package-root-1.0.0", + "relationshipType": "PREREQUISITE_FOR" }, { - "spdxElementId": "SPDXRef-Package-root-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-dep2-0.0.2", + "spdxElementId": "SPDXRef-Package-dep2-0.0.2", + "relatedSpdxElement": "SPDXRef-Package-root-1.0.0", "relationshipType": "OPTIONAL_DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-root-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-dep3-0.0.3", + "spdxElementId": "SPDXRef-Package-dep3-0.0.3", + "relatedSpdxElement": "SPDXRef-Package-root-1.0.0", "relationshipType": "DEV_DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-root-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-dep4-0.0.4", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-dep4-0.0.4", + "relatedSpdxElement": "SPDXRef-Package-root-1.0.0", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-dep4-0.0.4", - "relatedSpdxElement": "SPDXRef-Package-dep5-0.0.5", - "relationshipType": "DEPENDS_ON" + "spdxElementId": "SPDXRef-Package-dep5-0.0.5", + "relatedSpdxElement": "SPDXRef-Package-dep4-0.0.4", + "relationshipType": "DEPENDENCY_OF" }, { - "spdxElementId": "SPDXRef-Package-root-1.0.0", - "relatedSpdxElement": "SPDXRef-Package-dep6-0.0.6", + "spdxElementId": "SPDXRef-Package-dep6-0.0.6", + "relatedSpdxElement": "SPDXRef-Package-root-1.0.0", "relationshipType": "OPTIONAL_DEPENDENCY_OF" } ] diff --git a/deps/npm/tap-snapshots/test/lib/utils/tar.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/tar.js.test.cjs index b34b12df8336d0..e92314b57025eb 100644 --- a/deps/npm/tap-snapshots/test/lib/utils/tar.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/utils/tar.js.test.cjs @@ -23,10 +23,10 @@ bundle-dep name: my-cool-pkg version: 1.0.0 filename: my-cool-pkg-1.0.0.tgz -package size: 271 B +package size: {size} unpacked size: 126 B -shasum: 23e31c8ad422f96301c07730e61ff403b10306f1 -integrity: sha512-/Lg5tEGQv5A5y[...]gq8T9D5+Wat1A== +shasum: {sha} +integrity: {integrity} bundled deps: 1 bundled files: 0 own files: 5 @@ -53,10 +53,10 @@ bundle-dep name: @myscope/my-cool-pkg version: 1.0.0 filename: myscope-my-cool-pkg-1.0.0.tgz -package size: 280 B +package size: {size} unpacked size: 135 B -shasum: 
a4f63307f2211e8fde72cd39bc1176b4fe997b71 -integrity: sha512-b+RavF8JiErJt[...]YpwkJc8ycaabA== +shasum: {sha} +integrity: {integrity} bundled deps: 1 bundled files: 0 own files: 5 diff --git a/deps/npm/test/fixtures/clean-snapshot.js b/deps/npm/test/fixtures/clean-snapshot.js index 83ddc00f4b7877..bd4ce1d01df93d 100644 --- a/deps/npm/test/fixtures/clean-snapshot.js +++ b/deps/npm/test/fixtures/clean-snapshot.js @@ -33,11 +33,21 @@ const cleanDate = (str) => const cleanTime = str => str.replace(/in [0-9]+m?s\s*$/gm, 'in {TIME}') +const cleanZlib = str => str + .replace(/shasum:( *)[0-9a-f]{40}/g, 'shasum:$1{sha}') + .replace(/integrity:( *).*/g, 'integrity:$1{integrity}') + .replace(/package size:( *)[0-9 A-Z]*/g, 'package size:$1{size}') + + .replace(/"shasum": "[0-9a-f]{40}",/g, '"shasum": "{sha}",') + .replace(/"integrity": ".*",/g, '"integrity": "{integrity}",') + .replace(/"size": [0-9]*,/g, '"size": "{size}",') + module.exports = { - normalizePath, - pathRegex, cleanCwd, cleanDate, - cleanTime, cleanNewlines, + cleanTime, + cleanZlib, + normalizePath, + pathRegex, } diff --git a/deps/npm/test/lib/commands/pack.js b/deps/npm/test/lib/commands/pack.js index 61296cc93a53ae..658fd3489abea1 100644 --- a/deps/npm/test/lib/commands/pack.js +++ b/deps/npm/test/lib/commands/pack.js @@ -1,8 +1,11 @@ const t = require('tap') const { load: loadMockNpm } = require('../../fixtures/mock-npm') +const { cleanZlib } = require('../../fixtures/clean-snapshot') const path = require('path') const fs = require('fs') +t.cleanSnapshot = data => cleanZlib(data) + t.test('should pack current directory with no arguments', async t => { const { npm, outputs, logs } = await loadMockNpm(t, { prefixDir: { diff --git a/deps/npm/test/lib/commands/publish.js b/deps/npm/test/lib/commands/publish.js index c10b380ca95e8e..a5644ce224d670 100644 --- a/deps/npm/test/lib/commands/publish.js +++ b/deps/npm/test/lib/commands/publish.js @@ -1,5 +1,6 @@ const t = require('tap') const { load: loadMockNpm } = require('../../fixtures/mock-npm') +const { cleanZlib } = require('../../fixtures/clean-snapshot') const MockRegistry = require('@npmcli/mock-registry') const pacote = require('pacote') const Arborist = require('@npmcli/arborist') @@ -19,12 +20,7 @@ const pkgJson = { version: '1.0.0', } -t.cleanSnapshot = data => { - return data.replace(/shasum:.*/g, 'shasum:{sha}') - .replace(/integrity:.*/g, 'integrity:{sha}') - .replace(/"shasum": ".*",/g, '"shasum": "{sha}",') - .replace(/"integrity": ".*",/g, '"integrity": "{sha}",') -} +t.cleanSnapshot = data => cleanZlib(data) t.test('respects publishConfig.registry, runs appropriate scripts', async t => { const { npm, joinedOutput, prefix } = await loadMockNpm(t, { diff --git a/deps/npm/test/lib/commands/unpublish.js b/deps/npm/test/lib/commands/unpublish.js index 6e898bd3d07e4b..097309393a2585 100644 --- a/deps/npm/test/lib/commands/unpublish.js +++ b/deps/npm/test/lib/commands/unpublish.js @@ -26,10 +26,10 @@ t.test('no args --force success', async t => { authorization: 'test-auth-token', }) const manifest = registry.manifest({ name: pkg }) - await registry.package({ manifest, query: { write: true } }) + await registry.package({ manifest, query: { write: true }, times: 2 }) registry.unpublish({ manifest }) await npm.exec('unpublish', []) - t.equal(joinedOutput(), '- test-package@1.0.0') + t.equal(joinedOutput(), '- test-package') }) t.test('no args --force missing package.json', async t => { @@ -63,11 +63,28 @@ t.test('no args --force error reading package.json', async t => { ) }) 
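
The cleanZlib helper added to test/fixtures/clean-snapshot.js above is what lets the pack, publish, and tar snapshots record {size}, {sha}, and {integrity} placeholders instead of literal values; the name suggests those fields vary with the zlib build that gzips the tarball, so they cannot be pinned in a snapshot. A sketch of the normalization, reusing the same regular expressions on sample values taken from the old snapshots:

// Illustration only: the human-readable replacements cleanZlib applies,
// run over output copied from the previous pack snapshot.
const cleanZlib = (str) => str
  .replace(/shasum:( *)[0-9a-f]{40}/g, 'shasum:$1{sha}')
  .replace(/integrity:( *).*/g, 'integrity:$1{integrity}')
  .replace(/package size:( *)[0-9 A-Z]*/g, 'package size:$1{size}');

const sample = [
  'package size: 136 B',
  'shasum: a92a0679a70a450f14f98a468756948a679e4107',
  'integrity: sha512-Gka9ZV/Bryxky[...]LgMJ+0F+FhXMA==',
].join('\n');

console.log(cleanZlib(sample));
// package size: {size}
// shasum: {sha}
// integrity: {integrity}

Tests then opt in with a one-liner (t.cleanSnapshot = data => cleanZlib(data)), which is exactly how pack.js and publish.js are updated above; the JSON-shaped replacements in the real helper do the same for the machine-readable output.
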
-t.test('no args entire project', async t => { +t.test('with args --force error reading package.json', async t => { + const { npm } = await loadMockNpm(t, { + config: { + force: true, + }, + prefixDir: { + 'package.json': '{ not valid json ]', + }, + }) + + await t.rejects( + npm.exec('unpublish', [pkg]), + /Invalid package.json/, + 'should throw error from reading package.json' + ) +}) + +t.test('no force entire project', async t => { const { npm } = await loadMockNpm(t) await t.rejects( - npm.exec('unpublish', []), + npm.exec('unpublish', ['@npmcli/unpublish-test']), /Refusing to delete entire project/ ) }) @@ -82,6 +99,26 @@ t.test('too many args', async t => { ) }) +t.test('range', async t => { + const { npm } = await loadMockNpm(t) + + await t.rejects( + npm.exec('unpublish', ['a@>1.0.0']), + { code: 'EUSAGE' }, + /single version/ + ) +}) + +t.test('tag', async t => { + const { npm } = await loadMockNpm(t) + + await t.rejects( + npm.exec('unpublish', ['a@>1.0.0']), + { code: 'EUSAGE' }, + /single version/ + ) +}) + t.test('unpublish @version not the last version', async t => { const { joinedOutput, npm } = await loadMockNpm(t, { config: { @@ -129,7 +166,24 @@ t.test('unpublish @version last version', async t => { ) }) -t.test('no version found in package.json', async t => { +t.test('no version found in package.json no force', async t => { + const { npm } = await loadMockNpm(t, { + config: { + ...auth, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: pkg, + }, null, 2), + }, + }) + await t.rejects( + npm.exec('unpublish', []), + /Refusing to delete entire project/ + ) +}) + +t.test('no version found in package.json with force', async t => { const { joinedOutput, npm } = await loadMockNpm(t, { config: { force: true, @@ -147,7 +201,7 @@ t.test('no version found in package.json', async t => { authorization: 'test-auth-token', }) const manifest = registry.manifest({ name: pkg }) - await registry.package({ manifest, query: { write: true } }) + await registry.package({ manifest, query: { write: true }, times: 2 }) registry.unpublish({ manifest }) await npm.exec('unpublish', []) @@ -219,7 +273,7 @@ t.test('workspaces', async t => { 'workspace-b': { 'package.json': JSON.stringify({ name: 'workspace-b', - version: '1.2.3-n', + version: '1.2.3-b', repository: 'https://github.com/npm/workspace-b', }), }, @@ -231,20 +285,20 @@ t.test('workspaces', async t => { }, } - t.test('no force', async t => { + t.test('with package name no force', async t => { const { npm } = await loadMockNpm(t, { config: { - workspaces: true, + workspace: ['workspace-a'], }, prefixDir, }) await t.rejects( - npm.exec('unpublish', []), + npm.exec('unpublish', ['workspace-a']), /Refusing to delete entire project/ ) }) - t.test('all workspaces --force', async t => { + t.test('all workspaces last version --force', async t => { const { joinedOutput, npm } = await loadMockNpm(t, { config: { workspaces: true, @@ -258,9 +312,9 @@ t.test('workspaces', async t => { registry: npm.config.get('registry'), authorization: 'test-auth-token', }) - const manifestA = registry.manifest({ name: 'workspace-a' }) - const manifestB = registry.manifest({ name: 'workspace-b' }) - const manifestN = registry.manifest({ name: 'workspace-n' }) + const manifestA = registry.manifest({ name: 'workspace-a', versions: ['1.2.3-a'] }) + const manifestB = registry.manifest({ name: 'workspace-b', versions: ['1.2.3-b'] }) + const manifestN = registry.manifest({ name: 'workspace-n', versions: ['1.2.3-n'] }) await registry.package({ manifest: 
manifestA, query: { write: true }, times: 2 }) await registry.package({ manifest: manifestB, query: { write: true }, times: 2 }) await registry.package({ manifest: manifestN, query: { write: true }, times: 2 }) @@ -271,28 +325,6 @@ t.test('workspaces', async t => { await npm.exec('unpublish', []) t.equal(joinedOutput(), '- workspace-a\n- workspace-b\n- workspace-n') }) - - t.test('one workspace --force', async t => { - const { joinedOutput, npm } = await loadMockNpm(t, { - config: { - workspace: ['workspace-a'], - force: true, - ...auth, - }, - prefixDir, - }) - const registry = new MockRegistry({ - tap: t, - registry: npm.config.get('registry'), - authorization: 'test-auth-token', - }) - const manifestA = registry.manifest({ name: 'workspace-a' }) - await registry.package({ manifest: manifestA, query: { write: true }, times: 2 }) - registry.nock.delete(`/workspace-a/-rev/${manifestA._rev}`).reply(201) - - await npm.exec('unpublish', []) - t.equal(joinedOutput(), '- workspace-a') - }) }) t.test('dryRun with spec', async t => { @@ -331,6 +363,16 @@ t.test('dryRun with no args', async t => { }, null, 2), }, }) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: 'test-auth-token', + }) + const manifest = registry.manifest({ + name: pkg, + packuments: ['1.0.0', '1.0.1'], + }) + await registry.package({ manifest, query: { write: true } }) await npm.exec('unpublish', []) t.equal(joinedOutput(), '- test-package@1.0.0') @@ -360,10 +402,10 @@ t.test('publishConfig no spec', async t => { authorization: 'test-other-token', }) const manifest = registry.manifest({ name: pkg }) - await registry.package({ manifest, query: { write: true } }) + await registry.package({ manifest, query: { write: true }, times: 2 }) registry.unpublish({ manifest }) await npm.exec('unpublish', []) - t.equal(joinedOutput(), '- test-package@1.0.0') + t.equal(joinedOutput(), '- test-package') }) t.test('publishConfig with spec', async t => { @@ -421,7 +463,7 @@ t.test('scoped registry config', async t => { authorization: 'test-other-token', }) const manifest = registry.manifest({ name: scopedPkg }) - await registry.package({ manifest, query: { write: true } }) + await registry.package({ manifest, query: { write: true }, times: 2 }) registry.unpublish({ manifest }) await npm.exec('unpublish', [scopedPkg]) }) diff --git a/deps/npm/test/lib/npm.js b/deps/npm/test/lib/npm.js index 162e8c83ca4a4d..e9300ecfa6bd10 100644 --- a/deps/npm/test/lib/npm.js +++ b/deps/npm/test/lib/npm.js @@ -388,15 +388,18 @@ t.test('debug log', async t => { const log1 = ['silly', 'test', 'before load'] const log2 = ['silly', 'test', 'after load'] + const log3 = ['silly', 'test', 'hello\x00world'] process.emit('log', ...log1) await npm.load() process.emit('log', ...log2) + process.emit('log', ...log3) const debug = await debugFile() t.equal(npm.logFiles.length, 1, 'one debug file') t.match(debug, log1.join(' '), 'before load appears') t.match(debug, log2.join(' '), 'after load log appears') + t.match(debug, 'hello^@world') }) t.test('can load with bad dir', async t => { @@ -517,7 +520,7 @@ t.test('timings', async t => { } }) -t.test('output clears progress and console.logs the message', async t => { +t.test('output clears progress and console.logs cleaned messages', async t => { t.plan(4) let showingProgress = true const logs = [] @@ -541,11 +544,11 @@ t.test('output clears progress and console.logs the message', async t => { }, }, }) - npm.originalOutput('hello') - npm.originalOutputError('error') + 
npm.originalOutput('hello\x00world') + npm.originalOutputError('error\x00world') - t.match(logs, [['hello']]) - t.match(errors, [['error']]) + t.match(logs, [['hello^@world']]) + t.match(errors, [['error^@world']]) }) t.test('aliases and typos', async t => { diff --git a/deps/npm/test/lib/utils/display.js b/deps/npm/test/lib/utils/display.js index b8f047668bfe4c..2b9db0e6725100 100644 --- a/deps/npm/test/lib/utils/display.js +++ b/deps/npm/test/lib/utils/display.js @@ -3,6 +3,7 @@ const log = require('../../../lib/utils/log-shim') const mockLogs = require('../../fixtures/mock-logs') const mockGlobals = require('@npmcli/mock-globals') const tmock = require('../../fixtures/tmock') +const util = require('util') const mockDisplay = (t, mocks) => { const { logs, logMocks } = mockLogs(mocks) @@ -38,7 +39,7 @@ t.test('setup', async (t) => { t.equal(log.progressEnabled, true) }) -t.test('can log', async (t) => { +t.test('can log cleanly', async (t) => { const explains = [] const { display, logs } = mockDisplay(t, { npmlog: { @@ -53,8 +54,8 @@ t.test('can log', async (t) => { }, }) - display.log('error', 'test') - t.match(logs.error, [['test']]) + display.log('error', 'test\x00message') + t.match(logs.error, [['test^@message']]) display.log('warn', 'ERESOLVE', 'hello', { some: 'object' }) t.match(logs.warn, [['ERESOLVE', 'hello']]) @@ -84,3 +85,77 @@ t.test('handles log throwing', async (t) => { [/attempt to log .* crashed/, Error('explain'), Error('verbose')], ]) }) + +class CustomObj { + [util.inspect.custom] () { + return this.inspected + } +} + +t.test('Display.clean', async (t) => { + const Display = require('../../../lib/utils/display') + const customNaN = new CustomObj() + const customNull = new CustomObj() + const customNumber = new CustomObj() + const customObject = new CustomObj() + const customString = new CustomObj() + const customUndefined = new CustomObj() + customNaN.inspected = NaN + customNull.inspected = null + customNumber.inspected = 477 + customObject.inspected = { custom: 'rend\x00ering' } + customString.inspected = 'custom\x00rendering' + customUndefined.inspected = undefined + t.test('strings', async (t) => { + const tests = [ + [477, '477'], + [null, 'null'], + [NaN, 'NaN'], + [true, 'true'], + [undefined, 'undefined'], + ['🚀', '🚀'], + // Cover the bounds of each range and a few characters from inside each range + // \x00 through \x1f + ['hello\x00world', 'hello^@world'], + ['hello\x07world', 'hello^Gworld'], + ['hello\x1bworld', 'hello^[world'], + ['hello\x1eworld', 'hello^^world'], + ['hello\x1fworld', 'hello^_world'], + // \x7f is C0 + ['hello\x7fworld', 'hello^?world'], + // \x80 through \x9f + ['hello\x80world', 'hello^@world'], + ['hello\x87world', 'hello^Gworld'], + ['hello\x9eworld', 'hello^^world'], + ['hello\x9fworld', 'hello^_world'], + // Allowed C0 + ['hello\tworld', 'hello\tworld'], + ['hello\nworld', 'hello\nworld'], + ['hello\vworld', 'hello\vworld'], + ['hello\rworld', 'hello\rworld'], + // Allowed SGR + ['hello\x1b[38;5;254mworld', 'hello\x1b[38;5;254mworld'], + ['hello\x1b[mworld', 'hello\x1b[mworld'], + // Unallowed CSI / OSC + ['hello\x1b[2Aworld', 'hello^[[2Aworld'], + ['hello\x9b[2Aworld', 'hello^[[2Aworld'], + ['hello\x9decho goodbye\x9cworld', 'hello^]echo goodbye^\\world'], + // This is done twice to ensure we define inspect.custom as writable + [{ test: 'object' }, "{ test: 'object' }"], + [{ test: 'object' }, "{ test: 'object' }"], + // Make sure custom util.inspect doesn't bypass our cleaning + [customNaN, 'NaN'], + [customNull, 'null'], + 
[customNumber, '477'], + [customObject, "{ custom: 'rend\\x00ering' }"], + [customString, 'custom^@rendering'], + [customUndefined, 'undefined'], + // UTF-16 form of 8-bit C1 + ['hello\xc2\x9bworld', 'hello\xc2^[world'], + ] + for (const [dirty, clean] of tests) { + const cleaned = Display.clean(dirty) + t.equal(util.format(cleaned), clean) + } + }) +}) diff --git a/deps/npm/test/lib/utils/exit-handler.js b/deps/npm/test/lib/utils/exit-handler.js index 3eb5840985b8f5..b48f96d581775a 100644 --- a/deps/npm/test/lib/utils/exit-handler.js +++ b/deps/npm/test/lib/utils/exit-handler.js @@ -344,12 +344,12 @@ t.test('no logs dir', async (t) => { const { exitHandler, logs } = await mockExitHandler(t, { config: { 'logs-max': 0 }, }) - await exitHandler(new Error()) t.match(logs.error.filter(([t]) => t === ''), [ ['', 'Log files were not written due to the config logs-max=0'], ]) + t.match(logs.filter(([_, task]) => task === 'npm.load.mkdirplogs'), []) }) t.test('timers fail to write', async (t) => { diff --git a/deps/npm/test/lib/utils/open-url-prompt.js b/deps/npm/test/lib/utils/open-url-prompt.js index c889313e162c7f..91058ec9e29a48 100644 --- a/deps/npm/test/lib/utils/open-url-prompt.js +++ b/deps/npm/test/lib/utils/open-url-prompt.js @@ -126,13 +126,24 @@ t.test('does not open url if canceled', async t => { t.test('returns error when opener errors', async t => { const { error, openerUrl } = await mockOpenUrlPrompt(t, { - openerResult: new Error('Opener failed'), + openerResult: Object.assign(new Error('Opener failed'), { code: 1 }), }) t.match(error, /Opener failed/, 'got the correct error') t.equal(openerUrl, 'https://www.npmjs.com', 'did not open') }) +t.test('does not error when opener can not find command', async t => { + const { OUTPUT, error, openerUrl } = await mockOpenUrlPrompt(t, { + // openerResult: new Error('Opener failed'), + openerResult: Object.assign(new Error('Opener failed'), { code: 127 }), + }) + + t.notOk(error, 'Did not error') + t.equal(openerUrl, 'https://www.npmjs.com', 'did not open') + t.matchSnapshot(OUTPUT, 'Outputs extra Browser unavailable message and url') +}) + t.test('throws "canceled" error on SIGINT', async t => { const emitter = new EventEmitter() const { open } = await mockOpenUrlPrompt(t, { diff --git a/deps/npm/test/lib/utils/tar.js b/deps/npm/test/lib/utils/tar.js index 78c01f3f57ae40..274bad95c0af3f 100644 --- a/deps/npm/test/lib/utils/tar.js +++ b/deps/npm/test/lib/utils/tar.js @@ -2,8 +2,10 @@ const t = require('tap') const pack = require('libnpmpack') const ssri = require('ssri') const tmock = require('../../fixtures/tmock') +const { cleanZlib } = require('../../fixtures/clean-snapshot') const { getContents } = require('../../../lib/utils/tar.js') +t.cleanSnapshot = data => cleanZlib(data) const mockTar = ({ notice }) => tmock(t, '{LIB}/utils/tar.js', { 'proc-log': { @@ -121,13 +123,15 @@ t.test('should getContents of a tarball', async (t) => { algorithms: ['sha1', 'sha512'], }) + // zlib is nondeterministic + t.match(tarballContents.shasum, /^[0-9a-f]{40}$/) + delete tarballContents.shasum t.strictSame(tarballContents, { id: 'my-cool-pkg@1.0.0', name: 'my-cool-pkg', version: '1.0.0', - size: 146, + size: tarball.length, unpackedSize: 49, - shasum: 'b8379c5e69693cdda73aec3d81dae1d11c1e75bd', integrity: ssri.parse(integrity.sha512[0]), filename: 'my-cool-pkg-1.0.0.tgz', files: [{ path: 'package.json', size: 49, mode: 420 }], diff --git a/deps/undici/src/README.md b/deps/undici/src/README.md index cec6f032e99be3..e1234a3b0c6de8 100644 --- 
a/deps/undici/src/README.md +++ b/deps/undici/src/README.md @@ -119,7 +119,7 @@ Returns a promise with the result of the `Dispatcher.request` method. Calls `options.dispatcher.request(options)`. -See [Dispatcher.request](./docs/api/Dispatcher.md#dispatcherrequestoptions-callback) for more details. +See [Dispatcher.request](./docs/api/Dispatcher.md#dispatcherrequestoptions-callback) for more details, and [request examples](./examples/README.md) for examples. ### `undici.stream([url, options, ]factory): Promise` diff --git a/deps/undici/src/docs/api/Debug.md b/deps/undici/src/docs/api/Debug.md new file mode 100644 index 00000000000000..1a1794c0ad9ccd --- /dev/null +++ b/deps/undici/src/docs/api/Debug.md @@ -0,0 +1,62 @@ +# Debug + +Undici (and subsenquently `fetch` and `websocket`) exposes a debug statement that can be enabled by setting `NODE_DEBUG` within the environment. + +The flags availabile are: + +## `undici` + +This flag enables debug statements for the core undici library. + +```sh +NODE_DEBUG=undici node script.js + +UNDICI 16241: connecting to nodejs.org using https:h1 +UNDICI 16241: connecting to nodejs.org using https:h1 +UNDICI 16241: connected to nodejs.org using https:h1 +UNDICI 16241: sending request to GET https://nodejs.org// +UNDICI 16241: received response to GET https://nodejs.org// - HTTP 307 +UNDICI 16241: connecting to nodejs.org using https:h1 +UNDICI 16241: trailers received from GET https://nodejs.org// +UNDICI 16241: connected to nodejs.org using https:h1 +UNDICI 16241: sending request to GET https://nodejs.org//en +UNDICI 16241: received response to GET https://nodejs.org//en - HTTP 200 +UNDICI 16241: trailers received from GET https://nodejs.org//en +``` + +## `fetch` + +This flag enables debug statements for the `fetch` API. + +> **Note**: statements are pretty similar to the ones in the `undici` flag, but scoped to `fetch` + +```sh +NODE_DEBUG=fetch node script.js + +FETCH 16241: connecting to nodejs.org using https:h1 +FETCH 16241: connecting to nodejs.org using https:h1 +FETCH 16241: connected to nodejs.org using https:h1 +FETCH 16241: sending request to GET https://nodejs.org// +FETCH 16241: received response to GET https://nodejs.org// - HTTP 307 +FETCH 16241: connecting to nodejs.org using https:h1 +FETCH 16241: trailers received from GET https://nodejs.org// +FETCH 16241: connected to nodejs.org using https:h1 +FETCH 16241: sending request to GET https://nodejs.org//en +FETCH 16241: received response to GET https://nodejs.org//en - HTTP 200 +FETCH 16241: trailers received from GET https://nodejs.org//en +``` + +## `websocket` + +This flag enables debug statements for the `Websocket` API. + +> **Note**: statements can overlap with `UNDICI` ones if `undici` or `fetch` flag has been enabled as well. + +```sh +NODE_DEBUG=fetch node script.js + +WEBSOCKET 18309: connecting to echo.websocket.org using https:h1 +WEBSOCKET 18309: connected to echo.websocket.org using https:h1 +WEBSOCKET 18309: sending request to GET https://echo.websocket.org// +WEBSOCKET 18309: connection opened +``` \ No newline at end of file diff --git a/deps/undici/src/docs/api/DiagnosticsChannel.md b/deps/undici/src/docs/api/DiagnosticsChannel.md index 0aa0b9a0783d7f..6e6ad6e4de851f 100644 --- a/deps/undici/src/docs/api/DiagnosticsChannel.md +++ b/deps/undici/src/docs/api/DiagnosticsChannel.md @@ -105,7 +105,7 @@ You can not assume that this event is related to any specific request. 
import diagnosticsChannel from 'diagnostics_channel' diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(({ connectParams, connector }) => { - // const { host, hostname, protocol, port, servername } = connectParams + // const { host, hostname, protocol, port, servername, version } = connectParams // connector is a function that creates the socket }) ``` @@ -118,7 +118,7 @@ This message is published after a connection is established. import diagnosticsChannel from 'diagnostics_channel' diagnosticsChannel.channel('undici:client:connected').subscribe(({ socket, connectParams, connector }) => { - // const { host, hostname, protocol, port, servername } = connectParams + // const { host, hostname, protocol, port, servername, version } = connectParams // connector is a function that creates the socket }) ``` @@ -131,7 +131,7 @@ This message is published if it did not succeed to create new connection import diagnosticsChannel from 'diagnostics_channel' diagnosticsChannel.channel('undici:client:connectError').subscribe(({ error, socket, connectParams, connector }) => { - // const { host, hostname, protocol, port, servername } = connectParams + // const { host, hostname, protocol, port, servername, version } = connectParams // connector is a function that creates the socket console.log(`Connect failed with ${error.message}`) }) diff --git a/deps/undici/src/index-fetch.js b/deps/undici/src/index-fetch.js index ba31a65f25c184..db17a828c21e7f 100644 --- a/deps/undici/src/index-fetch.js +++ b/deps/undici/src/index-fetch.js @@ -4,7 +4,9 @@ const fetchImpl = require('./lib/fetch').fetch module.exports.fetch = function fetch (resource, init = undefined) { return fetchImpl(resource, init).catch((err) => { - Error.captureStackTrace(err, this) + if (typeof err === 'object') { + Error.captureStackTrace(err, this) + } throw err }) } diff --git a/deps/undici/src/lib/api/abort-signal.js b/deps/undici/src/lib/api/abort-signal.js index 2985c1efa969b3..3c15202ff44044 100644 --- a/deps/undici/src/lib/api/abort-signal.js +++ b/deps/undici/src/lib/api/abort-signal.js @@ -6,9 +6,9 @@ const kSignal = Symbol('kSignal') function abort (self) { if (self.abort) { - self.abort() + self.abort(self[kSignal]?.reason) } else { - self.onError(new RequestAbortedError()) + self.onError(self[kSignal]?.reason ?? new RequestAbortedError()) } } diff --git a/deps/undici/src/lib/cache/cache.js b/deps/undici/src/lib/cache/cache.js index 9b3110860cd6b8..9fe24b742ad807 100644 --- a/deps/undici/src/lib/cache/cache.js +++ b/deps/undici/src/lib/cache/cache.js @@ -112,14 +112,12 @@ class Cache { // 5.5.2 for (const response of responses) { // 5.5.2.1 - const responseObject = new Response(response.body?.source ?? null) - const body = responseObject[kState].body + const responseObject = new Response(null) responseObject[kState] = response - responseObject[kState].body = body responseObject[kHeaders][kHeadersList] = response.headersList responseObject[kHeaders][kGuard] = 'immutable' - responseList.push(responseObject) + responseList.push(responseObject.clone()) } // 6. @@ -146,8 +144,6 @@ class Cache { webidl.brandCheck(this, Cache) webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' }) - requests = webidl.converters['sequence'](requests) - // 1. const responsePromises = [] @@ -155,7 +151,17 @@ class Cache { const requestList = [] // 3. 
- for (const request of requests) { + for (let request of requests) { + if (request === undefined) { + throw webidl.errors.conversionFailed({ + prefix: 'Cache.addAll', + argument: 'Argument 1', + types: ['undefined is not allowed'] + }) + } + + request = webidl.converters.RequestInfo(request) + if (typeof request === 'string') { continue } diff --git a/deps/undici/src/lib/client.js b/deps/undici/src/lib/client.js index bd4a400ad721ff..cc5c4890552d91 100644 --- a/deps/undici/src/lib/client.js +++ b/deps/undici/src/lib/client.js @@ -9,6 +9,7 @@ const net = require('net') const http = require('http') const { pipeline } = require('stream') const util = require('./core/util') +const { channels } = require('./core/diagnostics') const timers = require('./timers') const Request = require('./core/request') const DispatcherBase = require('./dispatcher-base') @@ -108,21 +109,6 @@ const FastBuffer = Buffer[Symbol.species] const kClosedResolve = Symbol('kClosedResolve') -const channels = {} - -try { - const diagnosticsChannel = require('diagnostics_channel') - channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders') - channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect') - channels.connectError = diagnosticsChannel.channel('undici:client:connectError') - channels.connected = diagnosticsChannel.channel('undici:client:connected') -} catch { - channels.sendHeaders = { hasSubscribers: false } - channels.beforeConnect = { hasSubscribers: false } - channels.connectError = { hasSubscribers: false } - channels.connected = { hasSubscribers: false } -} - /** * @type {import('../types/client').default} */ @@ -1191,6 +1177,7 @@ async function connect (client) { hostname, protocol, port, + version: client[kHTTPConnVersion], servername: client[kServerName], localAddress: client[kLocalAddress] }, @@ -1284,6 +1271,7 @@ async function connect (client) { hostname, protocol, port, + version: client[kHTTPConnVersion], servername: client[kServerName], localAddress: client[kLocalAddress] }, @@ -1306,6 +1294,7 @@ async function connect (client) { hostname, protocol, port, + version: client[kHTTPConnVersion], servername: client[kServerName], localAddress: client[kLocalAddress] }, @@ -1658,19 +1647,6 @@ function writeH2 (client, session, request) { return false } - try { - // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event? - request.onConnect((err) => { - if (request.aborted || request.completed) { - return - } - - errorRequest(client, request, err || new RequestAbortedError()) - }) - } catch (err) { - errorRequest(client, request, err) - } - if (request.aborted) { return false } @@ -1682,9 +1658,34 @@ function writeH2 (client, session, request) { headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost] headers[HTTP2_HEADER_METHOD] = method + try { + // We are already connected, streams are pending. + // We can call on connect, and wait for abort + request.onConnect((err) => { + if (request.aborted || request.completed) { + return + } + + err = err || new RequestAbortedError() + + if (stream != null) { + util.destroy(stream, err) + + h2State.openStreams -= 1 + if (h2State.openStreams === 0) { + session.unref() + } + } + + errorRequest(client, request, err) + }) + } catch (err) { + errorRequest(client, request, err) + } + if (method === 'CONNECT') { session.ref() - // we are already connected, streams are pending, first request + // We are already connected, streams are pending, first request // will create a new stream. 
We trigger a request to create the stream and wait until // `ready` event is triggered // We disabled endStream to allow the user to write to the stream diff --git a/deps/undici/src/lib/compat/dispatcher-weakref.js b/deps/undici/src/lib/compat/dispatcher-weakref.js index a2fd0020416c62..463b29ca31972a 100644 --- a/deps/undici/src/lib/compat/dispatcher-weakref.js +++ b/deps/undici/src/lib/compat/dispatcher-weakref.js @@ -28,6 +28,8 @@ class CompatFinalizer { }) } } + + unregister (key) {} } module.exports = function () { diff --git a/deps/undici/src/lib/core/diagnostics.js b/deps/undici/src/lib/core/diagnostics.js new file mode 100644 index 00000000000000..e76f73a8c3a6be --- /dev/null +++ b/deps/undici/src/lib/core/diagnostics.js @@ -0,0 +1,202 @@ +'use strict' +const diagnosticsChannel = require('diagnostics_channel') +const util = require('util') + +const undiciDebugLog = util.debuglog('undici') +const fetchDebuglog = util.debuglog('fetch') +const websocketDebuglog = util.debuglog('websocket') +let isClientSet = false +const channels = { + // Client + beforeConnect: diagnosticsChannel.channel('undici:client:beforeConnect'), + connected: diagnosticsChannel.channel('undici:client:connected'), + connectError: diagnosticsChannel.channel('undici:client:connectError'), + sendHeaders: diagnosticsChannel.channel('undici:client:sendHeaders'), + // Request + create: diagnosticsChannel.channel('undici:request:create'), + bodySent: diagnosticsChannel.channel('undici:request:bodySent'), + headers: diagnosticsChannel.channel('undici:request:headers'), + trailers: diagnosticsChannel.channel('undici:request:trailers'), + error: diagnosticsChannel.channel('undici:request:error'), + // WebSocket + open: diagnosticsChannel.channel('undici:websocket:open'), + close: diagnosticsChannel.channel('undici:websocket:close'), + socketError: diagnosticsChannel.channel('undici:websocket:socket_error'), + ping: diagnosticsChannel.channel('undici:websocket:ping'), + pong: diagnosticsChannel.channel('undici:websocket:pong') +} + +if (undiciDebugLog.enabled || fetchDebuglog.enabled) { + const debuglog = fetchDebuglog.enabled ? fetchDebuglog : undiciDebugLog + + // Track all Client events + diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(evt => { + const { + connectParams: { version, protocol, port, host } + } = evt + debuglog( + 'connecting to %s using %s%s', + `${host}${port ? `:${port}` : ''}`, + protocol, + version + ) + }) + + diagnosticsChannel.channel('undici:client:connected').subscribe(evt => { + const { + connectParams: { version, protocol, port, host } + } = evt + debuglog( + 'connected to %s using %s%s', + `${host}${port ? `:${port}` : ''}`, + protocol, + version + ) + }) + + diagnosticsChannel.channel('undici:client:connectError').subscribe(evt => { + const { + connectParams: { version, protocol, port, host }, + error + } = evt + debuglog( + 'connection to %s using %s%s errored - %s', + `${host}${port ? 
`:${port}` : ''}`, + protocol, + version, + error.message + ) + }) + + diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(evt => { + const { + request: { method, path, origin } + } = evt + debuglog('sending request to %s %s/%s', method, origin, path) + }) + + // Track Request events + diagnosticsChannel.channel('undici:request:headers').subscribe(evt => { + const { + request: { method, path, origin }, + response: { statusCode } + } = evt + debuglog( + 'received response to %s %s/%s - HTTP %d', + method, + origin, + path, + statusCode + ) + }) + + diagnosticsChannel.channel('undici:request:trailers').subscribe(evt => { + const { + request: { method, path, origin } + } = evt + debuglog('trailers received from %s %s/%s', method, origin, path) + }) + + diagnosticsChannel.channel('undici:request:error').subscribe(evt => { + const { + request: { method, path, origin }, + error + } = evt + debuglog( + 'request to %s %s/%s errored - %s', + method, + origin, + path, + error.message + ) + }) + + isClientSet = true +} + +if (websocketDebuglog.enabled) { + if (!isClientSet) { + const debuglog = undiciDebugLog.enabled ? undiciDebugLog : websocketDebuglog + diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(evt => { + const { + connectParams: { version, protocol, port, host } + } = evt + debuglog( + 'connecting to %s%s using %s%s', + host, + port ? `:${port}` : '', + protocol, + version + ) + }) + + diagnosticsChannel.channel('undici:client:connected').subscribe(evt => { + const { + connectParams: { version, protocol, port, host } + } = evt + debuglog( + 'connected to %s%s using %s%s', + host, + port ? `:${port}` : '', + protocol, + version + ) + }) + + diagnosticsChannel.channel('undici:client:connectError').subscribe(evt => { + const { + connectParams: { version, protocol, port, host }, + error + } = evt + debuglog( + 'connection to %s%s using %s%s errored - %s', + host, + port ? `:${port}` : '', + protocol, + version, + error.message + ) + }) + + diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(evt => { + const { + request: { method, path, origin } + } = evt + debuglog('sending request to %s %s/%s', method, origin, path) + }) + } + + // Track all WebSocket events + diagnosticsChannel.channel('undici:websocket:open').subscribe(evt => { + const { + address: { address, port } + } = evt + websocketDebuglog('connection opened %s%s', address, port ? 
`:${port}` : '') + }) + + diagnosticsChannel.channel('undici:websocket:close').subscribe(evt => { + const { websocket, code, reason } = evt + websocketDebuglog( + 'closed connection to %s - %s %s', + websocket.url, + code, + reason + ) + }) + + diagnosticsChannel.channel('undici:websocket:socket_error').subscribe(err => { + websocketDebuglog('connection errored - %s', err.message) + }) + + diagnosticsChannel.channel('undici:websocket:ping').subscribe(evt => { + websocketDebuglog('ping received') + }) + + diagnosticsChannel.channel('undici:websocket:pong').subscribe(evt => { + websocketDebuglog('pong received') + }) +} + +module.exports = { + channels +} diff --git a/deps/undici/src/lib/core/request.js b/deps/undici/src/lib/core/request.js index fe63434ea98b74..4a61da0e454ad6 100644 --- a/deps/undici/src/lib/core/request.js +++ b/deps/undici/src/lib/core/request.js @@ -7,6 +7,7 @@ const { const assert = require('assert') const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require('./symbols') const util = require('./util') +const { channels } = require('./diagnostics.js') const { headerNameLowerCasedRecord } = require('./constants') // headerCharRegex have been lifted from @@ -25,25 +26,8 @@ const invalidPathRegex = /[^\u0021-\u00ff]/ const kHandler = Symbol('handler') -const channels = {} - let extractBody -try { - const diagnosticsChannel = require('diagnostics_channel') - channels.create = diagnosticsChannel.channel('undici:request:create') - channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent') - channels.headers = diagnosticsChannel.channel('undici:request:headers') - channels.trailers = diagnosticsChannel.channel('undici:request:trailers') - channels.error = diagnosticsChannel.channel('undici:request:error') -} catch { - channels.create = { hasSubscribers: false } - channels.bodySent = { hasSubscribers: false } - channels.headers = { hasSubscribers: false } - channels.trailers = { hasSubscribers: false } - channels.error = { hasSubscribers: false } -} - class Request { constructor (origin, { path, diff --git a/deps/undici/src/lib/fetch/body.js b/deps/undici/src/lib/fetch/body.js index 6202887289a02d..16914c14c8fd7a 100644 --- a/deps/undici/src/lib/fetch/body.js +++ b/deps/undici/src/lib/fetch/body.js @@ -14,7 +14,7 @@ const { FormData } = require('./formdata') const { kState } = require('./symbols') const { webidl } = require('./webidl') const { Blob, File: NativeFile } = require('buffer') -const { kBodyUsed } = require('../core/symbols') +const { kBodyUsed, kHeadersList } = require('../core/symbols') const assert = require('assert') const { isErrored } = require('../core/util') const { isUint8Array, isArrayBuffer } = require('util/types') @@ -369,10 +369,12 @@ function bodyMixinMethods (instance) { throwIfAborted(this[kState]) - const contentType = this.headers.get('Content-Type') + const contentType = this.headers[kHeadersList].get('content-type', true) + + const mimeType = contentType !== null ? 
parseMIMEType(contentType) : 'failure' // If mimeType’s essence is "multipart/form-data", then: - if (/multipart\/form-data/.test(contentType)) { + if (mimeType !== 'failure' && mimeType.essence === 'multipart/form-data') { const headers = {} for (const [key, value] of this.headers) headers[key] = value @@ -430,7 +432,7 @@ function bodyMixinMethods (instance) { await busboyResolve return responseFormData - } else if (/application\/x-www-form-urlencoded/.test(contentType)) { + } else if (mimeType !== 'failure' && mimeType.essence === 'application/x-www-form-urlencoded') { // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then: // 1. Let entries be the result of parsing bytes. diff --git a/deps/undici/src/lib/fetch/dataURL.js b/deps/undici/src/lib/fetch/dataURL.js index 762017c8cc9917..d7a638866a3581 100644 --- a/deps/undici/src/lib/fetch/dataURL.js +++ b/deps/undici/src/lib/fetch/dataURL.js @@ -188,11 +188,28 @@ function stringPercentDecode (input) { return percentDecode(bytes) } +/** + * @param {number} byte + */ function isHexCharByte (byte) { // 0-9 A-F a-f return (byte >= 0x30 && byte <= 0x39) || (byte >= 0x41 && byte <= 0x46) || (byte >= 0x61 && byte <= 0x66) } +/** + * @param {number} byte + */ +function hexByteToNumber (byte) { + return ( + // 0-9 + byte >= 0x30 && byte <= 0x39 + ? (byte - 48) + // Convert to uppercase + // ((byte & 0xDF) - 65) + 10 + : ((byte & 0xDF) - 55) + ) +} + // https://url.spec.whatwg.org/#percent-decode /** @param {Uint8Array} input */ function percentDecode (input) { @@ -224,11 +241,8 @@ function percentDecode (input) { } else { // 1. Let bytePoint be the two bytes after byte in input, // decoded, and then interpreted as hexadecimal number. - const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2]) - const bytePoint = Number.parseInt(nextTwoBytes, 16) - // 2. Append a byte whose value is bytePoint to output. - output[j++] = bytePoint + output[j++] = (hexByteToNumber(input[i + 1]) << 4) | hexByteToNumber(input[i + 2]) // 3. Skip the next two bytes in input. i += 2 @@ -590,14 +604,18 @@ function isHTTPWhiteSpace (char) { * @param {boolean} [trailing=true] */ function removeHTTPWhitespace (str, leading = true, trailing = true) { - let i = 0; let j = str.length + let lead = 0 + let trail = str.length - 1 + if (leading) { - while (j > i && isHTTPWhiteSpace(str.charCodeAt(i))) --i + while (lead < str.length && isHTTPWhiteSpace(str.charCodeAt(lead))) lead++ } + if (trailing) { - while (j > i && isHTTPWhiteSpace(str.charCodeAt(j - 1))) --j + while (trail > 0 && isHTTPWhiteSpace(str.charCodeAt(trail))) trail-- } - return i === 0 && j === str.length ? str : str.substring(i, j) + + return lead === 0 && trail === str.length - 1 ? str : str.slice(lead, trail + 1) } /** @@ -616,14 +634,18 @@ function isASCIIWhitespace (char) { * @param {boolean} [trailing=true] */ function removeASCIIWhitespace (str, leading = true, trailing = true) { - let i = 0; let j = str.length + let lead = 0 + let trail = str.length - 1 + if (leading) { - while (j > i && isASCIIWhitespace(str.charCodeAt(i))) --i + while (lead < str.length && isASCIIWhitespace(str.charCodeAt(lead))) lead++ } + if (trailing) { - while (j > i && isASCIIWhitespace(str.charCodeAt(j - 1))) --j + while (trail > 0 && isASCIIWhitespace(str.charCodeAt(trail))) trail-- } - return i === 0 && j === str.length ? str : str.substring(i, j) + + return lead === 0 && trail === str.length - 1 ? 
str : str.slice(lead, trail + 1) } module.exports = { diff --git a/deps/undici/src/lib/fetch/file.js b/deps/undici/src/lib/fetch/file.js index 3133d255ecdcdb..397994c0d937df 100644 --- a/deps/undici/src/lib/fetch/file.js +++ b/deps/undici/src/lib/fetch/file.js @@ -211,10 +211,7 @@ webidl.converters.BlobPart = function (V, opts) { return webidl.converters.Blob(V, { strict: false }) } - if ( - ArrayBuffer.isView(V) || - types.isAnyArrayBuffer(V) - ) { + if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) { return webidl.converters.BufferSource(V, opts) } } @@ -282,10 +279,7 @@ function processBlobParts (parts, options) { // 3. Append the result of UTF-8 encoding s to bytes. bytes.push(encoder.encode(s)) - } else if ( - types.isAnyArrayBuffer(element) || - types.isTypedArray(element) - ) { + } else if (ArrayBuffer.isView(element) || types.isArrayBuffer(element)) { // 2. If element is a BufferSource, get a copy of the // bytes held by the buffer source, and append those // bytes to bytes. diff --git a/deps/undici/src/lib/fetch/index.js b/deps/undici/src/lib/fetch/index.js index d64dd90596c9da..f16ce4b4536f20 100644 --- a/deps/undici/src/lib/fetch/index.js +++ b/deps/undici/src/lib/fetch/index.js @@ -1206,7 +1206,7 @@ async function httpFetch (fetchParams) { // encouraged to, transmit an RST_STREAM frame. // See, https://github.com/whatwg/fetch/issues/1288 if (request.redirect !== 'manual') { - fetchParams.controller.connection.destroy() + fetchParams.controller.connection.destroy(undefined, false) } // 2. Switch on request’s redirect mode: @@ -1718,10 +1718,12 @@ async function httpNetworkFetch ( fetchParams.controller.connection = { abort: null, destroyed: false, - destroy (err) { + destroy (err, abort = true) { if (!this.destroyed) { this.destroyed = true - this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError')) + if (abort) { + this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError')) + } } } } @@ -2152,7 +2154,8 @@ async function httpNetworkFetch ( } else { const keys = Object.keys(rawHeaders) for (let i = 0; i < keys.length; ++i) { - headersList.append(keys[i], rawHeaders[keys[i]]) + // The header names are already in lowercase. + headersList.append(keys[i], rawHeaders[keys[i]], true) } // For H2, The header names are already in lowercase, // so we can avoid the `HeadersList#get` call here. diff --git a/deps/undici/src/lib/fetch/request.js b/deps/undici/src/lib/fetch/request.js index 85b8bb0df2f886..f5522486c1df01 100644 --- a/deps/undici/src/lib/fetch/request.js +++ b/deps/undici/src/lib/fetch/request.js @@ -38,6 +38,8 @@ const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { signal.removeEventListener('abort', abort) }) +let patchMethodWarning = false + // https://fetch.spec.whatwg.org/#request-class class Request { // https://fetch.spec.whatwg.org/#dom-request @@ -313,21 +315,36 @@ class Request { // 1. Let method be init["method"]. let method = init.method - // 2. If method is not a method or method is a forbidden method, then - // throw a TypeError. - if (!isValidHTTPToken(method)) { - throw new TypeError(`'${method}' is not a valid HTTP method.`) - } + const mayBeNormalized = normalizeMethodRecord[method] + + if (mayBeNormalized !== undefined) { + // Note: Bypass validation DELETE, GET, HEAD, OPTIONS, POST, PUT, PATCH and these lowercase ones + request.method = mayBeNormalized + } else { + // 2. If method is not a method or method is a forbidden method, then + // throw a TypeError. 
+ if (!isValidHTTPToken(method)) { + throw new TypeError(`'${method}' is not a valid HTTP method.`) + } - if (forbiddenMethodsSet.has(method.toUpperCase())) { - throw new TypeError(`'${method}' HTTP method is unsupported.`) + if (forbiddenMethodsSet.has(method.toUpperCase())) { + throw new TypeError(`'${method}' HTTP method is unsupported.`) + } + + // 3. Normalize method. + method = normalizeMethod(method) + + // 4. Set request’s method to method. + request.method = method } - // 3. Normalize method. - method = normalizeMethodRecord[method] ?? normalizeMethod(method) + if (!patchMethodWarning && request.method === 'patch') { + process.emitWarning('Using `patch` is highly likely to result in a `405 Method Not Allowed`. `PATCH` is much more likely to succeed.', { + code: 'UNDICI-FETCH-patch' + }) - // 4. Set request’s method to method. - request.method = method + patchMethodWarning = true + } } // 26. If init["signal"] exists, then set signal to it. @@ -371,6 +388,18 @@ class Request { const abort = function () { const ac = acRef.deref() if (ac !== undefined) { + // Currently, there is a problem with FinalizationRegistry. + // https://github.com/nodejs/node/issues/49344 + // https://github.com/nodejs/node/issues/47748 + // In the case of abort, the first step is to unregister from it. + // If the controller can refer to it, it is still registered. + // It will be removed in the future. + requestFinalizer.unregister(abort) + + // Unsubscribe a listener. + // FinalizationRegistry will no longer be called, so this must be done. + this.removeEventListener('abort', abort) + ac.abort(this.reason) } } @@ -388,7 +417,11 @@ class Request { } catch {} util.addAbortListener(signal, abort) - requestFinalizer.register(ac, { signal, abort }) + // The third argument must be a registry key to be unregistered. + // Without it, you cannot unregister. + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry + // abort is used as the unregister key. (because it is unique) + requestFinalizer.register(ac, { signal, abort }, abort) } } @@ -471,7 +504,7 @@ class Request { // 3, If Content-Type is non-null and this’s headers’s header list does // not contain `Content-Type`, then append `Content-Type`/Content-Type to // this’s headers. 
- if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) { + if (contentType && !this[kHeaders][kHeadersList].contains('content-type', true)) { this[kHeaders].append('content-type', contentType) } } diff --git a/deps/undici/src/lib/fetch/response.js b/deps/undici/src/lib/fetch/response.js index f8894b692ebc58..5be1f438a12fae 100644 --- a/deps/undici/src/lib/fetch/response.js +++ b/deps/undici/src/lib/fetch/response.js @@ -524,7 +524,7 @@ webidl.converters.XMLHttpRequestBodyInit = function (V) { return webidl.converters.Blob(V, { strict: false }) } - if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { + if (ArrayBuffer.isView(V) || types.isArrayBuffer(V)) { return webidl.converters.BufferSource(V) } diff --git a/deps/undici/src/lib/fetch/util.js b/deps/undici/src/lib/fetch/util.js index 32983720cc0581..03f0e004037ec4 100644 --- a/deps/undici/src/lib/fetch/util.js +++ b/deps/undici/src/lib/fetch/util.js @@ -584,7 +584,7 @@ function bytesMatch (bytes, metadataList) { // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options // https://www.w3.org/TR/CSP2/#source-list-syntax // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 -const parseHashWithOptions = /((?sha256|sha384|sha512)-(?[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i +const parseHashWithOptions = /(?sha256|sha384|sha512)-(?[A-Za-z0-9+/]+={0,2}(?=\s|$))( +[!-~]*)?/i /** * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata @@ -681,7 +681,7 @@ function isCancelled (fetchParams) { fetchParams.controller.state === 'terminated' } -const normalizeMethodRecord = { +const normalizeMethodRecordBase = { delete: 'DELETE', DELETE: 'DELETE', get: 'GET', @@ -696,7 +696,14 @@ const normalizeMethodRecord = { PUT: 'PUT' } +const normalizeMethodRecord = { + ...normalizeMethodRecordBase, + patch: 'patch', + PATCH: 'PATCH' +} + // Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(normalizeMethodRecordBase, null) Object.setPrototypeOf(normalizeMethodRecord, null) /** @@ -704,7 +711,7 @@ Object.setPrototypeOf(normalizeMethodRecord, null) * @param {string} method */ function normalizeMethod (method) { - return normalizeMethodRecord[method.toLowerCase()] ?? method + return normalizeMethodRecordBase[method.toLowerCase()] ?? 
method } // https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string @@ -1213,5 +1220,6 @@ module.exports = { readAllBytes, normalizeMethodRecord, simpleRangeHeaderValue, - buildContentRange + buildContentRange, + parseMetadata } diff --git a/deps/undici/src/lib/fetch/webidl.js b/deps/undici/src/lib/fetch/webidl.js index ca1019221d9ab3..e43cdf6aecc67d 100644 --- a/deps/undici/src/lib/fetch/webidl.js +++ b/deps/undici/src/lib/fetch/webidl.js @@ -614,15 +614,15 @@ webidl.converters.DataView = function (V, opts = {}) { // https://webidl.spec.whatwg.org/#BufferSource webidl.converters.BufferSource = function (V, opts = {}) { if (types.isAnyArrayBuffer(V)) { - return webidl.converters.ArrayBuffer(V, opts) + return webidl.converters.ArrayBuffer(V, { ...opts, allowShared: false }) } if (types.isTypedArray(V)) { - return webidl.converters.TypedArray(V, V.constructor) + return webidl.converters.TypedArray(V, V.constructor, { ...opts, allowShared: false }) } if (types.isDataView(V)) { - return webidl.converters.DataView(V, opts) + return webidl.converters.DataView(V, opts, { ...opts, allowShared: false }) } throw new TypeError(`Could not convert ${V} to a BufferSource.`) diff --git a/deps/undici/src/lib/proxy-agent.js b/deps/undici/src/lib/proxy-agent.js index e3c0f6f3d46d90..c0b5a24bfd19fd 100644 --- a/deps/undici/src/lib/proxy-agent.js +++ b/deps/undici/src/lib/proxy-agent.js @@ -66,7 +66,7 @@ class ProxyAgent extends DispatcherBase { this[kProxyHeaders] = opts.headers || {} const resolvedUrl = new URL(opts.uri) - const { origin, port, host, username, password } = resolvedUrl + const { origin, port, username, password } = resolvedUrl if (opts.auth && opts.token) { throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token') @@ -97,7 +97,7 @@ class ProxyAgent extends DispatcherBase { signal: opts.signal, headers: { ...this[kProxyHeaders], - host + host: requestedHost } }) if (statusCode !== 200) { diff --git a/deps/undici/src/lib/websocket/connection.js b/deps/undici/src/lib/websocket/connection.js index 4bc60b3b90be6b..d8a4f04f63715b 100644 --- a/deps/undici/src/lib/websocket/connection.js +++ b/deps/undici/src/lib/websocket/connection.js @@ -1,6 +1,5 @@ 'use strict' -const diagnosticsChannel = require('diagnostics_channel') const { uid, states } = require('./constants') const { kReadyState, @@ -9,6 +8,7 @@ const { kReceivedClose } = require('./symbols') const { fireEvent, failWebsocketConnection } = require('./util') +const { channels } = require('../core/diagnostics') const { CloseEvent } = require('./events') const { makeRequest } = require('../fetch/request') const { fetching } = require('../fetch/index') @@ -16,11 +16,6 @@ const { Headers } = require('../fetch/headers') const { getGlobalDispatcher } = require('../global') const { kHeadersList } = require('../core/symbols') -const channels = {} -channels.open = diagnosticsChannel.channel('undici:websocket:open') -channels.close = diagnosticsChannel.channel('undici:websocket:close') -channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error') - /** @type {import('crypto')} */ let crypto try { diff --git a/deps/undici/src/lib/websocket/receiver.js b/deps/undici/src/lib/websocket/receiver.js index bdd2031b418af1..512ef42a06ba67 100644 --- a/deps/undici/src/lib/websocket/receiver.js +++ b/deps/undici/src/lib/websocket/receiver.js @@ -1,9 +1,9 @@ 'use strict' const { Writable } = require('stream') -const diagnosticsChannel = require('diagnostics_channel') const { parserStates, 
opcodes, states, emptyBuffer } = require('./constants') const { kReadyState, kSentClose, kResponse, kReceivedClose } = require('./symbols') +const { channels } = require('../core/diagnostics') const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = require('./util') const { WebsocketFrameSend } = require('./frame') @@ -12,10 +12,6 @@ const { WebsocketFrameSend } = require('./frame') // Copyright (c) 2013 Arnout Kazemier and contributors // Copyright (c) 2016 Luigi Pinca and contributors -const channels = {} -channels.ping = diagnosticsChannel.channel('undici:websocket:ping') -channels.pong = diagnosticsChannel.channel('undici:websocket:pong') - class ByteParser extends Writable { #buffers = [] #byteOffset = 0 diff --git a/deps/undici/src/lib/websocket/websocket.js b/deps/undici/src/lib/websocket/websocket.js index 1eac67b7828144..77a12396dcdb05 100644 --- a/deps/undici/src/lib/websocket/websocket.js +++ b/deps/undici/src/lib/websocket/websocket.js @@ -627,7 +627,7 @@ webidl.converters.WebSocketSendData = function (V) { return webidl.converters.Blob(V, { strict: false }) } - if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) { + if (ArrayBuffer.isView(V) || types.isArrayBuffer(V)) { return webidl.converters.BufferSource(V) } } diff --git a/deps/undici/src/package-lock.json b/deps/undici/src/package-lock.json index 1f94d7f37f4afa..ea184eba995570 100644 --- a/deps/undici/src/package-lock.json +++ b/deps/undici/src/package-lock.json @@ -1,21 +1,23 @@ { "name": "undici", - "version": "6.2.1", + "version": "6.3.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "undici", - "version": "6.2.1", + "version": "6.3.0", "license": "MIT", "dependencies": { "@fastify/busboy": "^2.0.0" }, "devDependencies": { + "@matteo.collina/tspl": "^0.1.1", "@sinonjs/fake-timers": "^11.1.0", "@types/node": "^18.0.3", "abort-controller": "^3.0.0", "atomic-sleep": "^1.0.0", + "borp": "^0.5.0", "chai": "^4.3.4", "chai-as-promised": "^7.1.1", "chai-iterator": "^3.0.2", @@ -45,7 +47,7 @@ "standard": "^17.0.0", "table": "^6.8.0", "tap": "^16.1.0", - "tsd": "^0.29.0", + "tsd": "^0.30.1", "typescript": "^5.0.2", "wait-on": "^7.0.1", "ws": "^8.11.0" @@ -76,6 +78,17 @@ "node": ">=6.0.0" } }, + "node_modules/@asamuzakjp/dom-selector": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-2.0.2.tgz", + "integrity": "sha512-x1KXOatwofR6ZAYzXRBL5wrdV0vwNxlTCK9NCuLqAzQYARqGcvFwiJA6A1ERuh+dgeA4Dxm3JBYictIes+SqUQ==", + "dev": true, + "dependencies": { + "bidi-js": "^1.0.3", + "css-tree": "^2.3.1", + "is-potential-custom-element-name": "^1.0.1" + } + }, "node_modules/@assemblyscript/loader": { "version": "0.19.23", "resolved": "https://registry.npmjs.org/@assemblyscript/loader/-/loader-0.19.23.tgz", @@ -403,9 +416,9 @@ } }, "node_modules/@babel/helpers": { - "version": "7.23.7", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.7.tgz", - "integrity": "sha512-6AMnjCoC8wjqBzDHkuqpa7jAKwvMo4dC+lr/TFBz+ucfulO1XMpDnwWPGBNwClOKZ8h6xn5N81W/R5OrcKtCbQ==", + "version": "7.23.8", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.8.tgz", + "integrity": "sha512-KDqYz4PiOWvDFrdHLPhKtCThtIcKVy6avWD2oG4GEvyQ+XDZwHD4YQd+H2vNMnq2rkdxsDkU82T+Vk8U/WXHRQ==", "dev": true, "dependencies": { "@babel/template": "^7.22.15", @@ -691,9 +704,9 @@ } }, "node_modules/@babel/runtime": { - "version": "7.23.7", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.7.tgz", - "integrity": 
"sha512-w06OXVOFso7LcbzMiDGt+3X7Rh7Ho8MmgPoWU3rarH+8upf+wSU/grlGbWzQyr3DkdN6ZeuMFjpdwW0Q+HxobA==", + "version": "7.23.8", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.8.tgz", + "integrity": "sha512-Y7KbAP984rn1VGMbGqKmBLio9V7y5Je9GvU4rQPCPinCyNfUcToxIXl06d59URp/F3LwinvODxab5N/G6qggkw==", "dev": true, "dependencies": { "regenerator-runtime": "^0.14.0" @@ -833,6 +846,16 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true }, + "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/@eslint/eslintrc/node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -877,6 +900,18 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/@eslint/eslintrc/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -927,19 +962,29 @@ } }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.13", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.13.tgz", - "integrity": "sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ==", + "version": "0.11.14", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", + "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", "dev": true, "dependencies": { - "@humanwhocodes/object-schema": "^2.0.1", - "debug": "^4.1.1", + "@humanwhocodes/object-schema": "^2.0.2", + "debug": "^4.3.1", "minimatch": "^3.0.5" }, "engines": { "node": ">=10.10.0" } }, + "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/@humanwhocodes/config-array/node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -957,6 +1002,18 @@ } } }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/@humanwhocodes/config-array/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -977,11 +1034,55 @@ } }, 
"node_modules/@humanwhocodes/object-schema": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.1.tgz", - "integrity": "sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz", + "integrity": "sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw==", "dev": true }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "node_modules/@istanbuljs/load-nyc-config": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", @@ -998,6 +1099,67 @@ "node": ">=8" } }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": { + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/@istanbuljs/load-nyc-config/node_modules/resolve-from": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", @@ -1204,6 +1366,48 @@ } } }, + "node_modules/@jest/reporters/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@jest/reporters/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@jest/reporters/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/@jest/schemas": { "version": "29.6.3", "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", @@ -1342,9 +1546,9 @@ "dev": true }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.20", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.20.tgz", - "integrity": "sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q==", + "version": "0.3.21", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.21.tgz", + "integrity": "sha512-SRfKmRe1KvYnxjEMtxEr+J4HIeMX5YBg/qhRHpxEIGjhX1rshcHlnFUE9K0GazhVKWM7B+nARSkV8LuvJdJ5/g==", "dev": true, "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", @@ -1357,6 +1561,12 @@ "integrity": "sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A==", "dev": true }, + "node_modules/@matteo.collina/tspl": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@matteo.collina/tspl/-/tspl-0.1.1.tgz", + "integrity": "sha512-jJFj8RzdExJGmZOVbyMViYkEgpyxqj/2InjRqnmFvYss+cXQEg47dTjADvL+ZGFRsJf6w5mtI5F+cNUBq1MVvA==", + "dev": true + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -1392,6 +1602,16 @@ "node": ">= 8" } }, + "node_modules/@pkgjs/parseargs": { + "version": 
"0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "optional": true, + "engines": { + "node": ">=14" + } + }, "node_modules/@sideway/address": { "version": "4.1.4", "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz", @@ -1485,9 +1705,9 @@ } }, "node_modules/@tsd/typescript": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/@tsd/typescript/-/typescript-5.2.2.tgz", - "integrity": "sha512-VtjHPAKJqLJoHHKBDNofzvQB2+ZVxjXU/Gw6INAS9aINLQYVsxfzrQ2s84huCeYWZRTtrr7R0J7XgpZHjNwBCw==", + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/@tsd/typescript/-/typescript-5.3.3.tgz", + "integrity": "sha512-CQlfzol0ldaU+ftWuG52vH29uRoKboLinLy84wS8TQOu+m+tWoaUfk4svL4ij2V8M5284KymJBlHUusKj6k34w==", "dev": true, "engines": { "node": ">=14.17" @@ -1626,9 +1846,9 @@ "dev": true }, "node_modules/@types/node": { - "version": "18.19.4", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.4.tgz", - "integrity": "sha512-xNzlUhzoHotIsnFoXmJB+yWmBvFZgKCI9TtPIEdYIMM1KWfwuY8zh7wvc1u1OAXlC7dlf6mZVx/s+Y5KfFz19A==", + "version": "18.19.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.6.tgz", + "integrity": "sha512-X36s5CXMrrJOs2lQCdDF68apW4Rfx9ixYMawlepwmE4Anezv/AV2LSpKD1Ub8DAc+urp5bk0BGZ6NtmBitfnsg==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -1791,6 +2011,26 @@ "string-width": "^4.1.0" } }, + "node_modules/ansi-align/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/ansi-align/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/ansi-colors": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", @@ -2344,6 +2584,15 @@ "integrity": "sha512-Y7OBvWn+JnW45JWHLY6ybYub2k9cXCMrtCyO1Hds2s6eqClqWhPnOQpgXUPjAiMHj+A8TEPIQQ1dYENnJoBOHQ==", "dev": true }, + "node_modules/bidi-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/bidi-js/-/bidi-js-1.0.3.tgz", + "integrity": "sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==", + "dev": true, + "dependencies": { + "require-from-string": "^2.0.2" + } + }, "node_modules/binary-extensions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", @@ -2362,6 +2611,21 @@ "node": ">=10" } }, + "node_modules/borp": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/borp/-/borp-0.5.0.tgz", + "integrity": "sha512-7noQDTw5NGme7lVSJu1H+Sy0S48PF+H1Eat9F6ZM0d10CfnCqcghMI/m+o8om1axuJOTg1YRVIl0kFcktWpPKQ==", + "dev": true, + "dependencies": { + "c8": "^8.0.1", + "execa": "^8.0.1", + "find-up": "^7.0.0", + "glob": "^10.3.10" + }, + "bin": { + "borp": "borp.js" + } + }, "node_modules/boxen": { "version": "4.2.0", "resolved": 
"https://registry.npmjs.org/boxen/-/boxen-4.2.0.tgz", @@ -2397,6 +2661,26 @@ "node": ">=8" } }, + "node_modules/boxen/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/boxen/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/boxen/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -2419,13 +2703,12 @@ } }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", "dev": true, "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" + "balanced-match": "^1.0.0" } }, "node_modules/braces": { @@ -2502,23 +2785,104 @@ "semver": "^7.0.0" } }, - "node_modules/cacheable-request": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", - "integrity": "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", + "node_modules/c8": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/c8/-/c8-8.0.1.tgz", + "integrity": "sha512-EINpopxZNH1mETuI0DzRA4MZpAUH+IFiRhnmFD3vFr3vdrgxqi3VfE3KL0AIL+zDq8rC9bZqwM/VDmmoe04y7w==", "dev": true, "dependencies": { - "clone-response": "^1.0.2", - "get-stream": "^5.1.0", - "http-cache-semantics": "^4.0.0", - "keyv": "^3.0.0", - "lowercase-keys": "^2.0.0", - "normalize-url": "^4.1.0", - "responselike": "^1.0.2" - }, - "engines": { - "node": ">=8" - } + "@bcoe/v8-coverage": "^0.2.3", + "@istanbuljs/schema": "^0.1.3", + "find-up": "^5.0.0", + "foreground-child": "^2.0.0", + "istanbul-lib-coverage": "^3.2.0", + "istanbul-lib-report": "^3.0.1", + "istanbul-reports": "^3.1.6", + "rimraf": "^3.0.2", + "test-exclude": "^6.0.0", + "v8-to-istanbul": "^9.0.0", + "yargs": "^17.7.2", + "yargs-parser": "^21.1.1" + }, + "bin": { + "c8": "bin/c8.js" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/c8/node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/c8/node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": 
"sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/c8/node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/c8/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cacheable-request": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", + "integrity": "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", + "dev": true, + "dependencies": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^3.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^4.1.0", + "responselike": "^1.0.2" + }, + "engines": { + "node": ">=8" + } }, "node_modules/cacheable-request/node_modules/get-stream": { "version": "5.2.0", @@ -2596,6 +2960,12 @@ "semver": "bin/semver" } }, + "node_modules/caching-transform/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, "node_modules/caching-transform/node_modules/write-file-atomic": { "version": "2.4.3", "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", @@ -2657,9 +3027,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001574", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001574.tgz", - "integrity": "sha512-BtYEK4r/iHt/txm81KBudCUcTy7t+s9emrIaHqjYurQ10x71zJ5VQ9x1dYPcz/b+pKSp4y/v1xSI67A+LzpNyg==", + "version": "1.0.30001576", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001576.tgz", + "integrity": "sha512-ff5BdakGe2P3SQsMsiqmt1Lc8221NR1VzHj5jXN5vBny9A6fpze94HiVV/n7XRosOlsShJcvMv5mdnpjOGCEgg==", "dev": true, "funding": [ { @@ -2677,9 +3047,9 @@ ] }, "node_modules/chai": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/chai/-/chai-4.4.0.tgz", - "integrity": "sha512-x9cHNq1uvkCdU+5xTkNh5WtgD4e4yDFCsp9jVc7N7qVeKeftv3gO/ZrviX5d+3ZfxdYnZXZYujjRInu1RogU6A==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.4.1.tgz", + "integrity": "sha512-13sOfMv2+DWduEU+/xbun3LScLoqN17nBeTLUsmDfKdoiC1fr0n9PU4guu4AhRcOVFk/sW8LyZWHuhWtQZiF+g==", "dev": true, "dependencies": { "assertion-error": "^1.1.0", @@ -2846,14 +3216,54 @@ } }, "node_modules/cliui": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "version": "8.0.1", + "resolved": 
"https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", "dev": true, "dependencies": { "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", + "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cliui/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/cliui/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, "node_modules/clone-response": { @@ -3030,6 +3440,12 @@ "node": ">=8" } }, + "node_modules/configstore/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, "node_modules/configstore/node_modules/write-file-atomic": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", @@ -3160,6 +3576,19 @@ "node": ">=8" } }, + "node_modules/css-tree": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz", + "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==", + "dev": true, + "dependencies": { + "mdn-data": "2.0.30", + "source-map-js": "^1.0.1" + }, + "engines": { + "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" + } + }, "node_modules/cssstyle": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.0.1.tgz", @@ -3581,6 +4010,12 @@ "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", "dev": true }, + "node_modules/docsify-cli/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, "node_modules/docsify-cli/node_modules/escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", @@ -3590,6 +4025,19 @@ "node": ">=0.8.0" } }, + "node_modules/docsify-cli/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": 
"sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/docsify-cli/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -3599,6 +4047,68 @@ "node": ">=4" } }, + "node_modules/docsify-cli/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/docsify-cli/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/docsify-cli/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/docsify-cli/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/docsify-cli/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/docsify-cli/node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -3764,6 +4274,12 @@ "integrity": "sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==", "dev": true }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true + }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -3771,9 +4287,9 @@ "dev": true }, "node_modules/electron-to-chromium": { - "version": "1.4.623", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.623.tgz", - "integrity": "sha512-lKoz10iCYlP1WtRYdh5MvocQPWVRoI7ysp6qf18bmeBgR8abE6+I2CsfyNKztRDZvhdWc+krKT6wS7Neg8sw3A==", + "version": "1.4.630", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.630.tgz", + "integrity": 
"sha512-osHqhtjojpCsACVnuD11xO5g9xaCyw7Qqn/C2KParkMv42i8jrJJgx3g7mkHfpxwhy9MnOJr8+pKOdZ7qzgizg==", "dev": true }, "node_modules/emittery": { @@ -3789,9 +4305,9 @@ } }, "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", "dev": true }, "node_modules/encodeurl": { @@ -4155,6 +4671,26 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/eslint-formatter-pretty/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/eslint-formatter-pretty/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/eslint-import-resolver-node": { "version": "0.3.9", "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", @@ -4287,6 +4823,16 @@ "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" } }, + "node_modules/eslint-plugin-import/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/eslint-plugin-import/node_modules/debug": { "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", @@ -4308,10 +4854,22 @@ "node": ">=0.10.0" } }, - "node_modules/eslint-plugin-import/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "node_modules/eslint-plugin-import/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/eslint-plugin-import/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "dev": true }, "node_modules/eslint-plugin-import/node_modules/semver": { @@ -4348,6 +4906,28 @@ "eslint": ">=7.0.0" } }, + "node_modules/eslint-plugin-n/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + 
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint-plugin-n/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/eslint-plugin-promise": { "version": "6.1.1", "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.1.1.tgz", @@ -4390,6 +4970,16 @@ "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" } }, + "node_modules/eslint-plugin-react/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/eslint-plugin-react/node_modules/doctrine": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", @@ -4411,6 +5001,18 @@ "node": ">=4.0" } }, + "node_modules/eslint-plugin-react/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/eslint-plugin-react/node_modules/resolve": { "version": "2.0.0-next.5", "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", @@ -4513,6 +5115,16 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true }, + "node_modules/eslint/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/eslint/node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -4613,6 +5225,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/eslint/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -4651,6 +5275,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/eslint/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + 
"engines": { + "node": ">=8" + } + }, "node_modules/eslint/node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -4799,23 +5432,23 @@ "dev": true }, "node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", + "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", "dev": true, "dependencies": { "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", + "get-stream": "^8.0.1", + "human-signals": "^5.0.0", + "is-stream": "^3.0.0", "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^3.0.0" }, "engines": { - "node": ">=10" + "node": ">=16.17" }, "funding": { "url": "https://github.com/sindresorhus/execa?sponsor=1" @@ -5075,16 +5708,20 @@ } }, "node_modules/find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-7.0.0.tgz", + "integrity": "sha512-YyZM99iHrqLKjmt4LJDj58KI+fYyufRLBSYcqycxf//KpBk9FoewoGX0450m9nB44qrZnovzC2oeP5hUibxc/g==", "dev": true, "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" + "locate-path": "^7.2.0", + "path-exists": "^5.0.0", + "unicorn-magic": "^0.1.0" }, "engines": { - "node": ">=8" + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/findit": { @@ -5116,21 +5753,6 @@ "node": "^10.12.0 || >=12.0.0" } }, - "node_modules/flat-cache/node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/flatted": { "version": "3.2.9", "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.9.tgz", @@ -5138,9 +5760,9 @@ "dev": true }, "node_modules/follow-redirects": { - "version": "1.15.4", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", - "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", + "version": "1.15.5", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz", + "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==", "dev": true, "funding": [ { @@ -5167,51 +5789,22 @@ } }, "node_modules/foreground-child": { - "version": "1.5.6", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz", - "integrity": "sha512-3TOY+4TKV0Ml83PXJQY+JFQaHNV38lzQDIzzXYg1kWdBLenGgoZhAs0CKgzI31vi2pWEpQMq/Yi4bpKwCPkw7g==", - "dev": true, - "dependencies": { - "cross-spawn": "^4", - "signal-exit": 
"^3.0.0" - } - }, - "node_modules/foreground-child/node_modules/cross-spawn": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-4.0.2.tgz", - "integrity": "sha512-yAXz/pA1tD8Gtg2S98Ekf/sewp3Lcp3YoFKJ4Hkp5h5yLWnKVTDU0kwjKJ8NDCYcfTLfyGkzTikst+jWypT1iA==", - "dev": true, - "dependencies": { - "lru-cache": "^4.0.1", - "which": "^1.2.9" - } - }, - "node_modules/foreground-child/node_modules/lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", - "dev": true, - "dependencies": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" - } - }, - "node_modules/foreground-child/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", + "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", "dev": true, "dependencies": { - "isexe": "^2.0.0" + "cross-spawn": "^7.0.0", + "signal-exit": "^3.0.2" }, - "bin": { - "which": "bin/which" + "engines": { + "node": ">=8.0.0" } }, - "node_modules/foreground-child/node_modules/yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", + "node_modules/foreground-child/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "dev": true }, "node_modules/form-data": { @@ -5424,12 +6017,12 @@ } }, "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", + "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", "dev": true, "engines": { - "node": ">=10" + "node": ">=16" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -5452,20 +6045,22 @@ } }, "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "version": "10.3.10", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.10.tgz", + "integrity": "sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==", "dev": true, "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "foreground-child": "^3.1.0", + "jackspeak": "^2.3.5", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", + "path-scurry": "^1.10.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" }, "engines": { - "node": "*" + "node": ">=16 || 14 >=14.17" }, "funding": { "url": 
"https://github.com/sponsors/isaacs" @@ -5483,6 +6078,22 @@ "node": ">= 6" } }, + "node_modules/glob/node_modules/foreground-child": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", + "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/global-dirs": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-2.1.0.tgz", @@ -5926,12 +6537,12 @@ "dev": true }, "node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", + "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", "dev": true, "engines": { - "node": ">=10.17.0" + "node": ">=16.17.0" } }, "node_modules/husky": { @@ -6447,12 +7058,12 @@ } }, "node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", "dev": true, "engines": { - "node": ">=8" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -6580,9 +7191,9 @@ "dev": true }, "node_modules/isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", "dev": true }, "node_modules/isexe": { @@ -6645,21 +7256,6 @@ "node": ">=8" } }, - "node_modules/istanbul-lib-processinfo/node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/istanbul-lib-processinfo/node_modules/uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", @@ -6774,15 +7370,21 @@ } }, "node_modules/jackspeak": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-1.4.2.tgz", - "integrity": "sha512-GHeGTmnuaHnvS+ZctRB01bfxARuu9wW83ENbuiweu07SFcVlZrJpcshSre/keGT7YGBhLHg/+rXCNSrsEHKU4Q==", + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", + "integrity": 
"sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==", "dev": true, "dependencies": { - "cliui": "^7.0.4" + "@isaacs/cliui": "^8.0.2" }, "engines": { - "node": ">=8" + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" } }, "node_modules/jest": { @@ -6825,27 +7427,134 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/jest-circus": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", - "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", + "node_modules/jest-changed-files/node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "dev": true, "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/expect": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "co": "^4.6.0", - "dedent": "^1.0.0", - "is-generator-fn": "^2.0.0", - "jest-each": "^29.7.0", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/jest-changed-files/node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-changed-files/node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/jest-changed-files/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-changed-files/node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/jest-changed-files/node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", 
+ "dev": true, + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-changed-files/node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-changed-files/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/jest-changed-files/node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/jest-circus": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", + "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^1.0.0", + "is-generator-fn": "^2.0.0", + "jest-each": "^29.7.0", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", "p-limit": "^3.1.0", "pretty-format": "^29.7.0", "pure-rand": "^6.0.0", @@ -6934,6 +7643,48 @@ } } }, + "node_modules/jest-config/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/jest-config/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/jest-config/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/jest-diff": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", @@ -7214,6 +7965,48 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + 
"node_modules/jest-runtime/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/jest-runtime/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/jest-runtime/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/jest-snapshot": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", @@ -7358,11 +8151,12 @@ } }, "node_modules/jsdom": { - "version": "23.1.0", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-23.1.0.tgz", - "integrity": "sha512-wRscu8dBFxi7O65Cvi0jFRDv0Qa7XEHPix8Qg/vlXHLAMQsRWV1EDeQHBermzXf4Dt7JtFgBLbva3iTcBZDXEQ==", + "version": "23.2.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-23.2.0.tgz", + "integrity": "sha512-L88oL7D/8ufIES+Zjz7v0aes+oBMh2Xnh3ygWvL0OaICOomKEPKuPnIfBJekiXr+BHbbMjrWn/xqrDQuxFTeyA==", "dev": true, "dependencies": { + "@asamuzakjp/dom-selector": "^2.0.1", "cssstyle": "^4.0.1", "data-urls": "^5.0.0", "decimal.js": "^10.4.3", @@ -7371,7 +8165,6 @@ "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.2", "is-potential-custom-element-name": "^1.0.1", - "nwsapi": "^2.2.7", "parse5": "^7.1.2", "rrweb-cssom": "^0.6.0", "saxes": "^6.0.0", @@ -7698,9 +8491,9 @@ } }, "node_modules/just-extend": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.2.1.tgz", - "integrity": "sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-6.2.0.tgz", + "integrity": "sha512-cYofQu2Xpom82S6qD778jBDpwvvy39s1l/hrYij2u9AMdQcGRpaBu6kY4mVhuno5kJVi1DAz4aiphA2WI1/OAw==", "dev": true }, "node_modules/keyv": { @@ -7800,6 +8593,30 @@ "node": ">=0.3.1" } }, + "node_modules/libtap/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/libtap/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/libtap/node_modules/yallist": { + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, "node_modules/lines-and-columns": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", @@ -7899,15 +8716,18 @@ } }, "node_modules/locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz", + "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==", "dev": true, "dependencies": { - "p-locate": "^4.1.0" + "p-locate": "^6.0.0" }, "engines": { - "node": ">=8" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/lodash": { @@ -8052,6 +8872,12 @@ "node": ">= 8.16.2" } }, + "node_modules/mdn-data": { + "version": "2.0.30", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz", + "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==", + "dev": true + }, "node_modules/medium-zoom": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/medium-zoom/-/medium-zoom-1.1.0.tgz", @@ -8096,6 +8922,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/meow/node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "engines": { + "node": ">=10" + } + }, "node_modules/merge-descriptors": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", @@ -8176,12 +9011,15 @@ } }, "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", "dev": true, "engines": { - "node": ">=6" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/mimic-response": { @@ -8203,15 +9041,18 @@ } }, "node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", "dev": true, "dependencies": { - "brace-expansion": "^1.1.7" + "brace-expansion": "^2.0.1" }, "engines": { - "node": "*" + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/minimist": { @@ -8238,23 +9079,14 @@ } }, "node_modules/minipass": { - "version": "3.3.6", - "resolved": 
"https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.4.tgz", + "integrity": "sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==", "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, "engines": { - "node": ">=8" + "node": ">=16 || 14 >=14.17" } }, - "node_modules/minipass/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/mitata": { "version": "0.1.6", "resolved": "https://registry.npmjs.org/mitata/-/mitata-0.1.6.tgz", @@ -8328,6 +9160,17 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true }, + "node_modules/mocha/node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, "node_modules/mocha/node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -8351,6 +9194,12 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, + "node_modules/mocha/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, "node_modules/mocha/node_modules/find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", @@ -8387,6 +9236,16 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/mocha/node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/mocha/node_modules/glob/node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -8438,15 +9297,6 @@ "node": ">=10" } }, - "node_modules/mocha/node_modules/minimatch/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, "node_modules/mocha/node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -8468,6 +9318,46 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/mocha/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": 
"sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/mocha/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/mocha/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/mocha/node_modules/yargs": { "version": "16.2.0", "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", @@ -8486,6 +9376,15 @@ "node": ">=10" } }, + "node_modules/mocha/node_modules/yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, "node_modules/module-not-found-error": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/module-not-found-error/-/module-not-found-error-1.0.1.tgz", @@ -8532,43 +9431,16 @@ "dev": true }, "node_modules/nise": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/nise/-/nise-5.1.5.tgz", - "integrity": "sha512-VJuPIfUFaXNRzETTQEEItTOP8Y171ijr+JLq42wHes3DiryR8vT+1TXQW/Rx8JNUhyYYWyIvjXTU6dOhJcs9Nw==", - "dev": true, - "dependencies": { - "@sinonjs/commons": "^2.0.0", - "@sinonjs/fake-timers": "^10.0.2", - "@sinonjs/text-encoding": "^0.7.1", - "just-extend": "^4.0.2", - "path-to-regexp": "^1.7.0" - } - }, - "node_modules/nise/node_modules/@sinonjs/commons": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-2.0.0.tgz", - "integrity": "sha512-uLa0j859mMrg2slwQYdO/AkrOfmH+X6LTVmNTS9CqexuE2IvVORIkSpJLqePAbEnKJ77aMmCwr1NUZ57120Xcg==", - "dev": true, - "dependencies": { - "type-detect": "4.0.8" - } - }, - "node_modules/nise/node_modules/@sinonjs/fake-timers": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", - "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", - "dev": true, - "dependencies": { - "@sinonjs/commons": "^3.0.0" - } - }, - "node_modules/nise/node_modules/@sinonjs/fake-timers/node_modules/@sinonjs/commons": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.0.tgz", - "integrity": "sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA==", + "version": "5.1.7", + "resolved": "https://registry.npmjs.org/nise/-/nise-5.1.7.tgz", + "integrity": "sha512-wWtNUhkT7k58uvWTB/Gy26eA/EJKtPZFVAhEilN5UYVmmGRYOURbejRUyKm0Uu9XVEW7K5nBOZfR8VMB4QR2RQ==", "dev": true, "dependencies": { - "type-detect": "4.0.8" + "@sinonjs/commons": "^3.0.0", + "@sinonjs/fake-timers": 
"^11.2.2", + "@sinonjs/text-encoding": "^0.7.2", + "just-extend": "^6.2.0", + "path-to-regexp": "^6.2.1" } }, "node_modules/node-fetch": { @@ -8680,22 +9552,31 @@ } }, "node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.2.0.tgz", + "integrity": "sha512-W4/tgAXFqFA0iL7fk0+uQ3g7wkL8xJmx3XdK0VGb4cHW//eZTtKGvFBBoRKVTpY7n6ze4NL9ly7rgXcHufqXKg==", "dev": true, "dependencies": { - "path-key": "^3.0.0" + "path-key": "^4.0.0" }, "engines": { - "node": ">=8" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/nwsapi": { - "version": "2.2.7", - "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.7.tgz", - "integrity": "sha512-ub5E4+FBPKwAZx0UwIQOjYWGHTEq5sPqHQNRN8Z9e4A7u3Tj1weLJsL59yH9vmvqEtBHaOmT6cYQKIZOxp35FQ==", - "dev": true + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, "node_modules/nyc": { "version": "14.1.1", @@ -8757,6 +9638,16 @@ "node": ">=4" } }, + "node_modules/nyc/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/nyc/node_modules/cliui": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", @@ -8805,6 +9696,16 @@ "node": ">=6" } }, + "node_modules/nyc/node_modules/cross-spawn": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-4.0.2.tgz", + "integrity": "sha512-yAXz/pA1tD8Gtg2S98Ekf/sewp3Lcp3YoFKJ4Hkp5h5yLWnKVTDU0kwjKJ8NDCYcfTLfyGkzTikst+jWypT1iA==", + "dev": true, + "dependencies": { + "lru-cache": "^4.0.1", + "which": "^1.2.9" + } + }, "node_modules/nyc/node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -8840,6 +9741,36 @@ "node": ">=6" } }, + "node_modules/nyc/node_modules/foreground-child": { + "version": "1.5.6", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz", + "integrity": "sha512-3TOY+4TKV0Ml83PXJQY+JFQaHNV38lzQDIzzXYg1kWdBLenGgoZhAs0CKgzI31vi2pWEpQMq/Yi4bpKwCPkw7g==", + "dev": true, + "dependencies": { + "cross-spawn": "^4", + "signal-exit": "^3.0.0" + } + }, + "node_modules/nyc/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": 
"https://github.com/sponsors/isaacs" + } + }, "node_modules/nyc/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -8970,6 +9901,16 @@ "node": ">=6" } }, + "node_modules/nyc/node_modules/lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dev": true, + "dependencies": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, "node_modules/nyc/node_modules/make-dir": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", @@ -8992,6 +9933,18 @@ "semver": "bin/semver" } }, + "node_modules/nyc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/nyc/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -9116,6 +10069,18 @@ "node": ">=6" } }, + "node_modules/nyc/node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, "node_modules/nyc/node_modules/semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -9125,6 +10090,12 @@ "semver": "bin/semver.js" } }, + "node_modules/nyc/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, "node_modules/nyc/node_modules/string-width": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", @@ -9187,6 +10158,18 @@ "node": ">=6" } }, + "node_modules/nyc/node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, "node_modules/nyc/node_modules/wrap-ansi": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", @@ -9207,6 +10190,12 @@ "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", "dev": true }, + "node_modules/nyc/node_modules/yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", + "dev": true + }, "node_modules/nyc/node_modules/yargs": { "version": "13.3.2", "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", @@ -9391,15 +10380,15 @@ } }, "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": 
"sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", "dev": true, "dependencies": { - "mimic-fn": "^2.1.0" + "mimic-fn": "^4.0.0" }, "engines": { - "node": ">=6" + "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -9540,39 +10529,54 @@ } }, "node_modules/p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-6.0.0.tgz", + "integrity": "sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==", "dev": true, "dependencies": { - "p-limit": "^2.2.0" + "p-limit": "^4.0.0" }, "engines": { - "node": ">=8" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/p-locate/node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz", + "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==", "dev": true, "dependencies": { - "p-try": "^2.0.0" + "yocto-queue": "^1.0.0" }, "engines": { - "node": ">=6" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/p-map": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", - "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "node_modules/p-locate/node_modules/yocto-queue": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz", + "integrity": "sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==", "dev": true, - "dependencies": { - "aggregate-error": "^3.0.0" + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-map": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "dev": true, + "dependencies": { + "aggregate-error": "^3.0.0" }, "engines": { "node": ">=8" @@ -9705,12 +10709,12 @@ } }, "node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz", + "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==", "dev": true, "engines": { - "node": ">=8" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" } }, "node_modules/path-is-absolute": { @@ -9737,15 +10741,37 @@ "integrity": 
"sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, - "node_modules/path-to-regexp": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", - "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", + "node_modules/path-scurry": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.1.tgz", + "integrity": "sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==", "dev": true, "dependencies": { - "isarray": "0.0.1" + "lru-cache": "^9.1.1 || ^10.0.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.1.0.tgz", + "integrity": "sha512-/1clY/ui8CzjKFyjdvwPWJUYKiFVXG2I2cY0ssG7h4+hwk+XOIX7ZSG9Q7TW8TW3Kp3BUSqgFWBLgL4PJ+Blag==", + "dev": true, + "engines": { + "node": "14 || >=16.14" } }, + "node_modules/path-to-regexp": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.1.tgz", + "integrity": "sha512-JLyh7xT1kizaEvcaXOQwOc2/Yhw6KZOvPf1S8401UyLk86CU79LN3vl7ztXGm/pZ+YjoyAJ4rxmHwbkBXJX+yw==", + "dev": true + }, "node_modules/path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -9898,6 +10924,67 @@ "node": ">=8" } }, + "node_modules/pkg-dir/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/plur": { 
"version": "4.0.0", "resolved": "https://registry.npmjs.org/plur/-/plur-4.0.0.tgz", @@ -10314,6 +11401,67 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/read-pkg-up/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg-up/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/read-pkg-up/node_modules/type-fest": { "version": "0.8.1", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", @@ -10607,15 +11755,60 @@ } }, "node_modules/rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "dev": true, "dependencies": { "glob": "^7.1.3" }, "bin": { "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/rimraf/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + 
"inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" } }, "node_modules/rrweb-cssom": { @@ -10674,12 +11867,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/safe-array-concat/node_modules/isarray": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", - "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", - "dev": true - }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -10701,15 +11888,18 @@ ] }, "node_modules/safe-regex-test": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.2.tgz", + "integrity": "sha512-83S9w6eFq12BBIJYvjMux6/dkirb8+4zJRA9cxNBVb7Wq5fJBW+Xze48WqR8pxua7bDuAaaAxtVVd4Idjp1dBQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", + "call-bind": "^1.0.5", + "get-intrinsic": "^1.2.2", "is-regex": "^1.1.4" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -10960,10 +12150,16 @@ } }, "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } }, "node_modules/sinon": { "version": "17.0.1", @@ -11074,50 +12270,149 @@ "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">=0.10.0" + } + }, + "node_modules/source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/spawn-command": { + "version": "0.0.2", + "resolved": 
"https://registry.npmjs.org/spawn-command/-/spawn-command-0.0.2.tgz", + "integrity": "sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ==", + "dev": true + }, + "node_modules/spawn-sync": { + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/spawn-sync/-/spawn-sync-1.0.15.tgz", + "integrity": "sha512-9DWBgrgYZzNghseho0JOuh+5fg9u6QWhAWa51QC7+U5rCheZ/j1DrEZnyE0RBBRqZ9uEXGPgSSM0nky6burpVw==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "concat-stream": "^1.4.7", + "os-shim": "^0.1.2" + } + }, + "node_modules/spawn-wrap": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-1.4.3.tgz", + "integrity": "sha512-IgB8md0QW/+tWqcavuFgKYR/qIRvJkRLPJDFaoXtLLUaVcCDK0+HeFTkmQHj3eprcYhc+gOl0aEA1w7qZlYezw==", + "dev": true, + "dependencies": { + "foreground-child": "^1.5.6", + "mkdirp": "^0.5.0", + "os-homedir": "^1.0.1", + "rimraf": "^2.6.2", + "signal-exit": "^3.0.2", + "which": "^1.3.0" + } + }, + "node_modules/spawn-wrap/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/spawn-wrap/node_modules/cross-spawn": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-4.0.2.tgz", + "integrity": "sha512-yAXz/pA1tD8Gtg2S98Ekf/sewp3Lcp3YoFKJ4Hkp5h5yLWnKVTDU0kwjKJ8NDCYcfTLfyGkzTikst+jWypT1iA==", + "dev": true, + "dependencies": { + "lru-cache": "^4.0.1", + "which": "^1.2.9" + } + }, + "node_modules/spawn-wrap/node_modules/foreground-child": { + "version": "1.5.6", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz", + "integrity": "sha512-3TOY+4TKV0Ml83PXJQY+JFQaHNV38lzQDIzzXYg1kWdBLenGgoZhAs0CKgzI31vi2pWEpQMq/Yi4bpKwCPkw7g==", + "dev": true, + "dependencies": { + "cross-spawn": "^4", + "signal-exit": "^3.0.0" + } + }, + "node_modules/spawn-wrap/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/source-map-support": { - "version": "0.5.13", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", - "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "node_modules/spawn-wrap/node_modules/lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", "dev": true, "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" } }, - "node_modules/spawn-command": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/spawn-command/-/spawn-command-0.0.2.tgz", - "integrity": 
"sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ==", - "dev": true - }, - "node_modules/spawn-sync": { - "version": "1.0.15", - "resolved": "https://registry.npmjs.org/spawn-sync/-/spawn-sync-1.0.15.tgz", - "integrity": "sha512-9DWBgrgYZzNghseho0JOuh+5fg9u6QWhAWa51QC7+U5rCheZ/j1DrEZnyE0RBBRqZ9uEXGPgSSM0nky6burpVw==", + "node_modules/spawn-wrap/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, - "hasInstallScript": true, "dependencies": { - "concat-stream": "^1.4.7", - "os-shim": "^0.1.2" + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" } }, - "node_modules/spawn-wrap": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-1.4.3.tgz", - "integrity": "sha512-IgB8md0QW/+tWqcavuFgKYR/qIRvJkRLPJDFaoXtLLUaVcCDK0+HeFTkmQHj3eprcYhc+gOl0aEA1w7qZlYezw==", + "node_modules/spawn-wrap/node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", "dev": true, "dependencies": { - "foreground-child": "^1.5.6", - "mkdirp": "^0.5.0", - "os-homedir": "^1.0.1", - "rimraf": "^2.6.2", - "signal-exit": "^3.0.2", - "which": "^1.3.0" + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" } }, + "node_modules/spawn-wrap/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, "node_modules/spawn-wrap/node_modules/which": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", @@ -11130,6 +12425,12 @@ "which": "bin/which" } }, + "node_modules/spawn-wrap/node_modules/yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", + "dev": true + }, "node_modules/spdx-correct": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", @@ -11314,6 +12615,24 @@ } }, "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", @@ -11327,6 +12646,39 @@ "node": ">=8" } }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + 
"dev": true + }, + "node_modules/string-width/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/string-width/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "node_modules/string.prototype.matchall": { "version": "4.0.10", "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.10.tgz", @@ -11404,6 +12756,19 @@ "node": ">=8" } }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/strip-bom": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", @@ -11414,12 +12779,15 @@ } }, "node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", "dev": true, "engines": { - "node": ">=6" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/strip-indent": { @@ -11536,12 +12904,32 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "node_modules/table/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, "node_modules/table/node_modules/json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", "dev": true }, + "node_modules/table/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/tap": { "version": "16.3.10", "resolved": "https://registry.npmjs.org/tap/-/tap-16.3.10.tgz", @@ -11634,6 +13022,16 @@ "node": ">= 8" } }, + 
"node_modules/tap-mocha-reporter/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/tap-mocha-reporter/node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -11669,6 +13067,38 @@ "node": ">=8" } }, + "node_modules/tap-mocha-reporter/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/tap-mocha-reporter/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/tap-mocha-reporter/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -11692,6 +13122,24 @@ "node": ">= 8" } }, + "node_modules/tap-parser/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tap-parser/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, "node_modules/tap-yaml": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/tap-yaml/-/tap-yaml-1.0.2.tgz", @@ -12492,14 +13940,64 @@ } }, "node_modules/tap/node_modules/cliui": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", - "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", "dev": true, "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", - "wrap-ansi": "^6.2.0" + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/tap/node_modules/cliui/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + 
"node_modules/tap/node_modules/cliui/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/tap/node_modules/cliui/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/tap/node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, "node_modules/tap/node_modules/code-excerpt": { @@ -12666,24 +14164,11 @@ "inBundle": true, "license": "MIT", "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/tap/node_modules/foreground-child": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", - "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", - "dev": true, - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^3.0.2" + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" }, "engines": { - "node": ">=8.0.0" + "node": ">=8" } }, "node_modules/tap/node_modules/fs.realpath": { @@ -12923,6 +14408,18 @@ "node": ">=8" } }, + "node_modules/tap/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/tap/node_modules/istanbul-lib-hook": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz", @@ -12950,6 +14447,18 @@ "node": ">=8" } }, + "node_modules/tap/node_modules/jackspeak": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-1.4.2.tgz", + "integrity": "sha512-GHeGTmnuaHnvS+ZctRB01bfxARuu9wW83ENbuiweu07SFcVlZrJpcshSre/keGT7YGBhLHg/+rXCNSrsEHKU4Q==", + "dev": true, + "dependencies": { + "cliui": "^7.0.4" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/tap/node_modules/js-tokens": { "version": "4.0.0", "dev": true, @@ -13870,6 +15379,64 @@ "node": ">=6" } }, + "node_modules/tap/node_modules/yargs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + 
"node_modules/tap/node_modules/yargs/node_modules/cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + } + }, + "node_modules/tap/node_modules/yargs/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/tap/node_modules/yargs/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/tap/node_modules/yargs/node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/tap/node_modules/yoga-layout-prebuilt": { "version": "1.10.0", "dev": true, @@ -13929,6 +15496,48 @@ "node": ">=8" } }, + "node_modules/test-exclude/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/test-exclude/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/test-exclude/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -14086,12 +15695,12 @@ } }, "node_modules/tsd": { - "version": "0.29.0", - "resolved": "https://registry.npmjs.org/tsd/-/tsd-0.29.0.tgz", - "integrity": "sha512-5B7jbTj+XLMg6rb9sXRBGwzv7h8KJlGOkTHxY63eWpZJiQ5vJbXEjL0u7JkIxwi5EsrRE1kRVUWmy6buK/ii8A==", + "version": "0.30.3", + "resolved": "https://registry.npmjs.org/tsd/-/tsd-0.30.3.tgz", + "integrity": "sha512-xoEp6JPqpT9Ti9wGX5qgy7URp0lrmxN7YkbsyphBzdc1SYiXvJYgRXSIVvSZz42+/Wd/R1kBOMbgGC6rtiKxqQ==", "dev": 
true, "dependencies": { - "@tsd/typescript": "~5.2.2", + "@tsd/typescript": "~5.3.3", "eslint-formatter-pretty": "^4.1.0", "globby": "^11.0.1", "jest-diff": "^29.0.3", @@ -14106,6 +15715,15 @@ "node": ">=14.16" } }, + "node_modules/tsd/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/tslib": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", @@ -14274,6 +15892,18 @@ "punycode": "^2.0.0" } }, + "node_modules/unicorn-magic": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz", + "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/unique-string": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", @@ -14615,12 +16245,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/which-builtin-type/node_modules/isarray": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", - "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", - "dev": true - }, "node_modules/which-collection": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", @@ -14673,6 +16297,26 @@ "node": ">=8" } }, + "node_modules/widest-line/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/widest-line/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/word-wrap": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", @@ -14689,6 +16333,24 @@ "dev": true }, "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", @@ -14705,6 +16367,65 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + 
"version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", @@ -14724,6 +16445,12 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, + "node_modules/write-file-atomic/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, "node_modules/ws": { "version": "8.16.0", "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz", @@ -14887,12 +16614,12 @@ } }, "node_modules/yargs-parser": { - "version": "20.2.4", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", - "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", "dev": true, "engines": { - "node": ">=10" + "node": ">=12" } }, "node_modules/yargs-unparser": { @@ -14943,27 +16670,24 @@ "node": ">=8" } }, - "node_modules/yargs/node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "node_modules/yargs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": 
"https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/yargs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" }, "engines": { - "node": ">=12" - } - }, - "node_modules/yargs/node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "dev": true, - "engines": { - "node": ">=12" + "node": ">=8" } }, "node_modules/yocto-queue": { diff --git a/deps/undici/src/package.json b/deps/undici/src/package.json index 197a968b392f15..43d11d2538903c 100644 --- a/deps/undici/src/package.json +++ b/deps/undici/src/package.json @@ -1,6 +1,6 @@ { "name": "undici", - "version": "6.2.1", + "version": "6.3.0", "description": "An HTTP/1.1 client, written from scratch for Node.js", "homepage": "https://undici.nodejs.org", "bugs": { @@ -75,16 +75,18 @@ "build:wasm": "node build/wasm.js --docker", "lint": "standard | snazzy", "lint:fix": "standard --fix | snazzy", - "test": "node scripts/generate-pem && npm run test:tap && npm run test:node-fetch && npm run test:fetch && npm run test:cookies && npm run test:wpt && npm run test:websocket && npm run test:jest && npm run test:typescript", - "test:cookies": "node scripts/verifyVersion 16 || tap test/cookie/*.js", - "test:node-fetch": "node scripts/verifyVersion.js 16 || mocha --exit test/node-fetch", - "test:fetch": "node scripts/verifyVersion.js 16 || (npm run build:node && tap --expose-gc test/fetch/*.js && tap test/webidl/*.js)", - "test:jest": "node scripts/verifyVersion.js 14 || jest", - "test:tap": "tap test/*.js test/diagnostics-channel/*.js", - "test:tdd": "tap test/*.js test/diagnostics-channel/*.js -w", - "test:typescript": "node scripts/verifyVersion.js 14 || tsd && tsc --skipLibCheck test/imports/undici-import.ts", - "test:websocket": "node scripts/verifyVersion.js 18 || tap test/websocket/*.js", - "test:wpt": "node scripts/verifyVersion 18 || (node test/wpt/start-fetch.mjs && node test/wpt/start-FileAPI.mjs && node test/wpt/start-mimesniff.mjs && node test/wpt/start-xhr.mjs && node test/wpt/start-websockets.mjs)", + "test": "node scripts/generate-pem && npm run test:tap && npm run test:node-fetch && npm run test:fetch && npm run test:cookies && npm run test:wpt && npm run test:websocket && npm run test:jest && npm run test:typescript && npm run test:node-test", + "test:cookies": "borp --coverage -p \"test/cookie/*.js\"", + "test:node-fetch": "mocha --exit test/node-fetch", + "test:fetch": "npm run build:node && borp --expose-gc --coverage -p \"test/fetch/*.js\" && borp --coverage -p \"test/webidl/*.js\"", + "test:jest": "jest", + "test:tap": "tap test/*.js", + "test:node-test": "borp --coverage -p \"test/node-test/**/*.js\"", + "test:tdd": "tap test/*.js --coverage -w", + "test:tdd:node-test": "borp -p \"test/node-test/**/*.js\" -w", + "test:typescript": "tsd && tsc --skipLibCheck test/imports/undici-import.ts", + 
"test:websocket": "borp --coverage -p \"test/websocket/*.js\"", + "test:wpt": "node test/wpt/start-fetch.mjs && node test/wpt/start-FileAPI.mjs && node test/wpt/start-mimesniff.mjs && node test/wpt/start-xhr.mjs && node test/wpt/start-websockets.mjs && node test/wpt/start-cacheStorage.mjs", "coverage": "nyc --reporter=text --reporter=html npm run test", "coverage:ci": "nyc --reporter=lcov npm run test", "bench": "PORT=3042 concurrently -k -s first npm:bench:server npm:bench:run", @@ -96,10 +98,12 @@ "fuzz": "jsfuzz test/fuzzing/fuzz.js corpus" }, "devDependencies": { + "@matteo.collina/tspl": "^0.1.1", "@sinonjs/fake-timers": "^11.1.0", "@types/node": "^18.0.3", "abort-controller": "^3.0.0", "atomic-sleep": "^1.0.0", + "borp": "^0.5.0", "chai": "^4.3.4", "chai-as-promised": "^7.1.1", "chai-iterator": "^3.0.2", @@ -129,7 +133,7 @@ "standard": "^17.0.0", "table": "^6.8.0", "tap": "^16.1.0", - "tsd": "^0.29.0", + "tsd": "^0.30.1", "typescript": "^5.0.2", "wait-on": "^7.0.1", "ws": "^8.11.0" diff --git a/deps/undici/src/types/balanced-pool.d.ts b/deps/undici/src/types/balanced-pool.d.ts index d1e9375875f391..7f930f4108c092 100644 --- a/deps/undici/src/types/balanced-pool.d.ts +++ b/deps/undici/src/types/balanced-pool.d.ts @@ -4,6 +4,8 @@ import { URL } from 'url' export default BalancedPool +type BalancedPoolConnectOptions = Omit; + declare class BalancedPool extends Dispatcher { constructor(url: string | string[] | URL | URL[], options?: Pool.Options); @@ -15,4 +17,13 @@ declare class BalancedPool extends Dispatcher { closed: boolean; /** `true` after `pool.destroyed()` has been called or `pool.close()` has been called and the pool shutdown has completed. */ destroyed: boolean; + + // Override dispatcher APIs. + override connect( + options: BalancedPoolConnectOptions + ): Promise; + override connect( + options: BalancedPoolConnectOptions, + callback: (err: Error | null, data: Dispatcher.ConnectData) => void + ): void; } diff --git a/deps/undici/src/types/client.d.ts b/deps/undici/src/types/client.d.ts index 56e78cc9765bf2..d0a5379f33cd70 100644 --- a/deps/undici/src/types/client.d.ts +++ b/deps/undici/src/types/client.d.ts @@ -3,6 +3,8 @@ import { TlsOptions } from 'tls' import Dispatcher from './dispatcher' import buildConnector from "./connector"; +type ClientConnectOptions = Omit; + /** * A basic HTTP/1.1 client, mapped on top a single TCP/TLS connection. Pipelining is disabled by default. */ @@ -14,6 +16,15 @@ export class Client extends Dispatcher { closed: boolean; /** `true` after `client.destroyed()` has been called or `client.close()` has been called and the client shutdown has completed. */ destroyed: boolean; + + // Override dispatcher APIs. 
+ override connect( + options: ClientConnectOptions + ): Promise<Dispatcher.ConnectData>; + override connect( + options: ClientConnectOptions, + callback: (err: Error | null, data: Dispatcher.ConnectData) => void + ): void; } export declare namespace Client { diff --git a/deps/undici/src/types/dispatcher.d.ts b/deps/undici/src/types/dispatcher.d.ts index 24bf1519a257b8..5988c8a9c7dd88 100644 --- a/deps/undici/src/types/dispatcher.d.ts +++ b/deps/undici/src/types/dispatcher.d.ts @@ -121,6 +121,7 @@ declare namespace Dispatcher { expectContinue?: boolean; } export interface ConnectOptions { + origin: string | URL; path: string; /** Default: `null` */ headers?: IncomingHttpHeaders | string[] | null; diff --git a/deps/undici/src/types/pool.d.ts b/deps/undici/src/types/pool.d.ts index 7747d48261ba84..bad5ba0308e978 100644 --- a/deps/undici/src/types/pool.d.ts +++ b/deps/undici/src/types/pool.d.ts @@ -5,6 +5,8 @@ import Dispatcher from "./dispatcher"; export default Pool +type PoolConnectOptions = Omit<Dispatcher.ConnectOptions, "origin">; + declare class Pool extends Dispatcher { constructor(url: string | URL, options?: Pool.Options) /** `true` after `pool.close()` has been called. */ @@ -13,6 +15,15 @@ destroyed: boolean; /** Aggregate stats for a Pool. */ readonly stats: TPoolStats; + + // Override dispatcher APIs. + override connect( + options: PoolConnectOptions + ): Promise<Dispatcher.ConnectData>; + override connect( + options: PoolConnectOptions, + callback: (err: Error | null, data: Dispatcher.ConnectData) => void + ): void; } declare namespace Pool { diff --git a/deps/undici/src/types/proxy-agent.d.ts b/deps/undici/src/types/proxy-agent.d.ts index 96b26381ced5df..32e3acbdaad499 100644 --- a/deps/undici/src/types/proxy-agent.d.ts +++ b/deps/undici/src/types/proxy-agent.d.ts @@ -1,9 +1,7 @@ import Agent from './agent' import buildConnector from './connector'; -import Client from './client' import Dispatcher from './dispatcher' import { IncomingHttpHeaders } from './header' -import Pool from './pool' export default ProxyAgent diff --git a/deps/undici/undici.js b/deps/undici/undici.js index 5508ae0fed78c9..2fe8eb10fc36e0 100644 --- a/deps/undici/undici.js +++ b/deps/undici/undici.js @@ -1344,6 +1344,13 @@ var require_dataURL = __commonJS({ return byte >= 48 && byte <= 57 || byte >= 65 && byte <= 70 || byte >= 97 && byte <= 102; } __name(isHexCharByte, "isHexCharByte"); + function hexByteToNumber(byte) { + return ( + // 0-9 + byte >= 48 && byte <= 57 ? 
byte - 48 : (byte & 223) - 55 + ); + } + __name(hexByteToNumber, "hexByteToNumber"); function percentDecode(input) { const length = input.length; const output = new Uint8Array(length); @@ -1355,9 +1362,7 @@ var require_dataURL = __commonJS({ } else if (byte === 37 && !(isHexCharByte(input[i + 1]) && isHexCharByte(input[i + 2]))) { output[j++] = 37; } else { - const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2]); - const bytePoint = Number.parseInt(nextTwoBytes, 16); - output[j++] = bytePoint; + output[j++] = hexByteToNumber(input[i + 1]) << 4 | hexByteToNumber(input[i + 2]); i += 2; } } @@ -1525,17 +1530,17 @@ var require_dataURL = __commonJS({ } __name(isHTTPWhiteSpace, "isHTTPWhiteSpace"); function removeHTTPWhitespace(str, leading = true, trailing = true) { - let i = 0; - let j = str.length; + let lead = 0; + let trail = str.length - 1; if (leading) { - while (j > i && isHTTPWhiteSpace(str.charCodeAt(i))) - --i; + while (lead < str.length && isHTTPWhiteSpace(str.charCodeAt(lead))) + lead++; } if (trailing) { - while (j > i && isHTTPWhiteSpace(str.charCodeAt(j - 1))) - --j; + while (trail > 0 && isHTTPWhiteSpace(str.charCodeAt(trail))) + trail--; } - return i === 0 && j === str.length ? str : str.substring(i, j); + return lead === 0 && trail === str.length - 1 ? str : str.slice(lead, trail + 1); } __name(removeHTTPWhitespace, "removeHTTPWhitespace"); function isASCIIWhitespace(char) { @@ -1543,17 +1548,17 @@ var require_dataURL = __commonJS({ } __name(isASCIIWhitespace, "isASCIIWhitespace"); function removeASCIIWhitespace(str, leading = true, trailing = true) { - let i = 0; - let j = str.length; + let lead = 0; + let trail = str.length - 1; if (leading) { - while (j > i && isASCIIWhitespace(str.charCodeAt(i))) - --i; + while (lead < str.length && isASCIIWhitespace(str.charCodeAt(lead))) + lead++; } if (trailing) { - while (j > i && isASCIIWhitespace(str.charCodeAt(j - 1))) - --j; + while (trail > 0 && isASCIIWhitespace(str.charCodeAt(trail))) + trail--; } - return i === 0 && j === str.length ? str : str.substring(i, j); + return lead === 0 && trail === str.length - 1 ? str : str.slice(lead, trail + 1); } __name(removeASCIIWhitespace, "removeASCIIWhitespace"); module2.exports = { @@ -1896,7 +1901,7 @@ var require_util2 = __commonJS({ return false; } __name(bytesMatch, "bytesMatch"); - var parseHashWithOptions = /((?sha256|sha384|sha512)-(?[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i; + var parseHashWithOptions = /(?sha256|sha384|sha512)-(?[A-Za-z0-9+/]+={0,2}(?=\s|$))( +[!-~]*)?/i; function parseMetadata(metadata) { const result = []; let empty = true; @@ -1949,7 +1954,7 @@ var require_util2 = __commonJS({ return fetchParams.controller.state === "aborted" || fetchParams.controller.state === "terminated"; } __name(isCancelled, "isCancelled"); - var normalizeMethodRecord = { + var normalizeMethodRecordBase = { delete: "DELETE", DELETE: "DELETE", get: "GET", @@ -1963,9 +1968,15 @@ var require_util2 = __commonJS({ put: "PUT", PUT: "PUT" }; + var normalizeMethodRecord = { + ...normalizeMethodRecordBase, + patch: "patch", + PATCH: "PATCH" + }; + Object.setPrototypeOf(normalizeMethodRecordBase, null); Object.setPrototypeOf(normalizeMethodRecord, null); function normalizeMethod(method) { - return normalizeMethodRecord[method.toLowerCase()] ?? method; + return normalizeMethodRecordBase[method.toLowerCase()] ?? 
method; } __name(normalizeMethod, "normalizeMethod"); function serializeJavascriptValueToJSONString(value) { @@ -2250,7 +2261,8 @@ var require_util2 = __commonJS({ readAllBytes, normalizeMethodRecord, simpleRangeHeaderValue, - buildContentRange + buildContentRange, + parseMetadata }; } }); @@ -2598,13 +2610,13 @@ var require_webidl = __commonJS({ }; webidl.converters.BufferSource = function(V, opts = {}) { if (types.isAnyArrayBuffer(V)) { - return webidl.converters.ArrayBuffer(V, opts); + return webidl.converters.ArrayBuffer(V, { ...opts, allowShared: false }); } if (types.isTypedArray(V)) { - return webidl.converters.TypedArray(V, V.constructor); + return webidl.converters.TypedArray(V, V.constructor, { ...opts, allowShared: false }); } if (types.isDataView(V)) { - return webidl.converters.DataView(V, opts); + return webidl.converters.DataView(V, opts, { ...opts, allowShared: false }); } throw new TypeError(`Could not convert ${V} to a BufferSource.`); }; @@ -5211,7 +5223,7 @@ var require_file = __commonJS({ s = convertLineEndingsNative(s); } bytes.push(encoder.encode(s)); - } else if (types.isAnyArrayBuffer(element) || types.isTypedArray(element)) { + } else if (ArrayBuffer.isView(element) || types.isArrayBuffer(element)) { if (!element.buffer) { bytes.push(new Uint8Array(element)); } else { @@ -5420,7 +5432,7 @@ var require_body = __commonJS({ var { kState } = require_symbols2(); var { webidl } = require_webidl(); var { Blob: Blob2, File: NativeFile } = require("buffer"); - var { kBodyUsed } = require_symbols(); + var { kBodyUsed, kHeadersList } = require_symbols(); var assert = require("assert"); var { isErrored } = require_util(); var { isUint8Array, isArrayBuffer } = require("util/types"); @@ -5636,8 +5648,9 @@ Content-Type: ${value.type || "application/octet-stream"}\r async formData() { webidl.brandCheck(this, instance); throwIfAborted(this[kState]); - const contentType = this.headers.get("Content-Type"); - if (/multipart\/form-data/.test(contentType)) { + const contentType = this.headers[kHeadersList].get("content-type", true); + const mimeType = contentType !== null ? 
parseMIMEType(contentType) : "failure"; + if (mimeType !== "failure" && mimeType.essence === "multipart/form-data") { const headers = {}; for (const [key, value] of this.headers) headers[key] = value; @@ -5654,7 +5667,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r busboy.on("field", (name, value) => { responseFormData.append(name, value); }); - busboy.on("file", (name, value, filename, encoding, mimeType) => { + busboy.on("file", (name, value, filename, encoding, mimeType2) => { const chunks = []; if (encoding === "base64" || encoding.toLowerCase() === "base64") { let base64chunk = ""; @@ -5666,14 +5679,14 @@ Content-Type: ${value.type || "application/octet-stream"}\r }); value.on("end", () => { chunks.push(Buffer.from(base64chunk, "base64")); - responseFormData.append(name, new File(chunks, filename, { type: mimeType })); + responseFormData.append(name, new File(chunks, filename, { type: mimeType2 })); }); } else { value.on("data", (chunk) => { chunks.push(chunk); }); value.on("end", () => { - responseFormData.append(name, new File(chunks, filename, { type: mimeType })); + responseFormData.append(name, new File(chunks, filename, { type: mimeType2 })); }); } }); @@ -5687,7 +5700,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r busboy.end(); await busboyResolve; return responseFormData; - } else if (/application\/x-www-form-urlencoded/.test(contentType)) { + } else if (mimeType !== "failure" && mimeType.essence === "application/x-www-form-urlencoded") { let entries; try { let text = ""; @@ -6136,7 +6149,7 @@ var require_response = __commonJS({ if (isBlobLike(V)) { return webidl.converters.Blob(V, { strict: false }); } - if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { + if (ArrayBuffer.isView(V) || types.isArrayBuffer(V)) { return webidl.converters.BufferSource(V); } if (util.isFormDataLike(V)) { @@ -6215,6 +6228,8 @@ var require_dispatcher_weakref = __commonJS({ }); } } + unregister(key) { + } }; module2.exports = function() { if (process.env.NODE_V8_COVERAGE) { @@ -6265,6 +6280,7 @@ var require_request = __commonJS({ var requestFinalizer = new FinalizationRegistry2(({ signal, abort }) => { signal.removeEventListener("abort", abort); }); + var patchMethodWarning = false; var Request = class _Request { static { __name(this, "Request"); @@ -6434,14 +6450,25 @@ var require_request = __commonJS({ } if (init.method !== void 0) { let method = init.method; - if (!isValidHTTPToken(method)) { - throw new TypeError(`'${method}' is not a valid HTTP method.`); + const mayBeNormalized = normalizeMethodRecord[method]; + if (mayBeNormalized !== void 0) { + request.method = mayBeNormalized; + } else { + if (!isValidHTTPToken(method)) { + throw new TypeError(`'${method}' is not a valid HTTP method.`); + } + if (forbiddenMethodsSet.has(method.toUpperCase())) { + throw new TypeError(`'${method}' HTTP method is unsupported.`); + } + method = normalizeMethod(method); + request.method = method; } - if (forbiddenMethodsSet.has(method.toUpperCase())) { - throw new TypeError(`'${method}' HTTP method is unsupported.`); + if (!patchMethodWarning && request.method === "patch") { + process.emitWarning("Using `patch` is highly likely to result in a `405 Method Not Allowed`. `PATCH` is much more likely to succeed.", { + code: "UNDICI-FETCH-patch" + }); + patchMethodWarning = true; } - method = normalizeMethodRecord[method] ?? 
normalizeMethod(method); - request.method = method; } if (init.signal !== void 0) { signal = init.signal; @@ -6464,6 +6491,8 @@ var require_request = __commonJS({ const abort = /* @__PURE__ */ __name(function() { const ac2 = acRef.deref(); if (ac2 !== void 0) { + requestFinalizer.unregister(abort); + this.removeEventListener("abort", abort); ac2.abort(this.reason); } }, "abort"); @@ -6476,7 +6505,7 @@ var require_request = __commonJS({ } catch { } util.addAbortListener(signal, abort); - requestFinalizer.register(ac, { signal, abort }); + requestFinalizer.register(ac, { signal, abort }, abort); } } this[kHeaders] = new Headers(kConstruct); @@ -6515,7 +6544,7 @@ var require_request = __commonJS({ request.keepalive ); initBody = extractedBody; - if (contentType && !this[kHeaders][kHeadersList].contains("content-type")) { + if (contentType && !this[kHeaders][kHeadersList].contains("content-type", true)) { this[kHeaders].append("content-type", contentType); } } @@ -7313,6 +7342,191 @@ var require_pool_base = __commonJS({ } }); +// lib/core/diagnostics.js +var require_diagnostics = __commonJS({ + "lib/core/diagnostics.js"(exports2, module2) { + "use strict"; + var diagnosticsChannel = require("diagnostics_channel"); + var util = require("util"); + var undiciDebugLog = util.debuglog("undici"); + var fetchDebuglog = util.debuglog("fetch"); + var websocketDebuglog = util.debuglog("websocket"); + var isClientSet = false; + var channels = { + // Client + beforeConnect: diagnosticsChannel.channel("undici:client:beforeConnect"), + connected: diagnosticsChannel.channel("undici:client:connected"), + connectError: diagnosticsChannel.channel("undici:client:connectError"), + sendHeaders: diagnosticsChannel.channel("undici:client:sendHeaders"), + // Request + create: diagnosticsChannel.channel("undici:request:create"), + bodySent: diagnosticsChannel.channel("undici:request:bodySent"), + headers: diagnosticsChannel.channel("undici:request:headers"), + trailers: diagnosticsChannel.channel("undici:request:trailers"), + error: diagnosticsChannel.channel("undici:request:error"), + // WebSocket + open: diagnosticsChannel.channel("undici:websocket:open"), + close: diagnosticsChannel.channel("undici:websocket:close"), + socketError: diagnosticsChannel.channel("undici:websocket:socket_error"), + ping: diagnosticsChannel.channel("undici:websocket:ping"), + pong: diagnosticsChannel.channel("undici:websocket:pong") + }; + if (undiciDebugLog.enabled || fetchDebuglog.enabled) { + const debuglog = fetchDebuglog.enabled ? fetchDebuglog : undiciDebugLog; + diagnosticsChannel.channel("undici:client:beforeConnect").subscribe((evt) => { + const { + connectParams: { version, protocol, port, host } + } = evt; + debuglog( + "connecting to %s using %s%s", + `${host}${port ? `:${port}` : ""}`, + protocol, + version + ); + }); + diagnosticsChannel.channel("undici:client:connected").subscribe((evt) => { + const { + connectParams: { version, protocol, port, host } + } = evt; + debuglog( + "connected to %s using %s%s", + `${host}${port ? `:${port}` : ""}`, + protocol, + version + ); + }); + diagnosticsChannel.channel("undici:client:connectError").subscribe((evt) => { + const { + connectParams: { version, protocol, port, host }, + error + } = evt; + debuglog( + "connection to %s using %s%s errored - %s", + `${host}${port ? 
`:${port}` : ""}`, + protocol, + version, + error.message + ); + }); + diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { + const { + request: { method, path, origin } + } = evt; + debuglog("sending request to %s %s/%s", method, origin, path); + }); + diagnosticsChannel.channel("undici:request:headers").subscribe((evt) => { + const { + request: { method, path, origin }, + response: { statusCode } + } = evt; + debuglog( + "received response to %s %s/%s - HTTP %d", + method, + origin, + path, + statusCode + ); + }); + diagnosticsChannel.channel("undici:request:trailers").subscribe((evt) => { + const { + request: { method, path, origin } + } = evt; + debuglog("trailers received from %s %s/%s", method, origin, path); + }); + diagnosticsChannel.channel("undici:request:error").subscribe((evt) => { + const { + request: { method, path, origin }, + error + } = evt; + debuglog( + "request to %s %s/%s errored - %s", + method, + origin, + path, + error.message + ); + }); + isClientSet = true; + } + if (websocketDebuglog.enabled) { + if (!isClientSet) { + const debuglog = undiciDebugLog.enabled ? undiciDebugLog : websocketDebuglog; + diagnosticsChannel.channel("undici:client:beforeConnect").subscribe((evt) => { + const { + connectParams: { version, protocol, port, host } + } = evt; + debuglog( + "connecting to %s%s using %s%s", + host, + port ? `:${port}` : "", + protocol, + version + ); + }); + diagnosticsChannel.channel("undici:client:connected").subscribe((evt) => { + const { + connectParams: { version, protocol, port, host } + } = evt; + debuglog( + "connected to %s%s using %s%s", + host, + port ? `:${port}` : "", + protocol, + version + ); + }); + diagnosticsChannel.channel("undici:client:connectError").subscribe((evt) => { + const { + connectParams: { version, protocol, port, host }, + error + } = evt; + debuglog( + "connection to %s%s using %s%s errored - %s", + host, + port ? `:${port}` : "", + protocol, + version, + error.message + ); + }); + diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { + const { + request: { method, path, origin } + } = evt; + debuglog("sending request to %s %s/%s", method, origin, path); + }); + } + diagnosticsChannel.channel("undici:websocket:open").subscribe((evt) => { + const { + address: { address, port } + } = evt; + websocketDebuglog("connection opened %s%s", address, port ? 
`:${port}` : ""); + }); + diagnosticsChannel.channel("undici:websocket:close").subscribe((evt) => { + const { websocket, code, reason } = evt; + websocketDebuglog( + "closed connection to %s - %s %s", + websocket.url, + code, + reason + ); + }); + diagnosticsChannel.channel("undici:websocket:socket_error").subscribe((err) => { + websocketDebuglog("connection errored - %s", err.message); + }); + diagnosticsChannel.channel("undici:websocket:ping").subscribe((evt) => { + websocketDebuglog("ping received"); + }); + diagnosticsChannel.channel("undici:websocket:pong").subscribe((evt) => { + websocketDebuglog("pong received"); + }); + } + module2.exports = { + channels + }; + } +}); + // lib/timers.js var require_timers = __commonJS({ "lib/timers.js"(exports2, module2) { @@ -7411,26 +7625,12 @@ var require_request2 = __commonJS({ var assert = require("assert"); var { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require_symbols(); var util = require_util(); + var { channels } = require_diagnostics(); var { headerNameLowerCasedRecord } = require_constants(); var headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/; var invalidPathRegex = /[^\u0021-\u00ff]/; var kHandler = Symbol("handler"); - var channels = {}; var extractBody; - try { - const diagnosticsChannel = require("diagnostics_channel"); - channels.create = diagnosticsChannel.channel("undici:request:create"); - channels.bodySent = diagnosticsChannel.channel("undici:request:bodySent"); - channels.headers = diagnosticsChannel.channel("undici:request:headers"); - channels.trailers = diagnosticsChannel.channel("undici:request:trailers"); - channels.error = diagnosticsChannel.channel("undici:request:error"); - } catch { - channels.create = { hasSubscribers: false }; - channels.bodySent = { hasSubscribers: false }; - channels.headers = { hasSubscribers: false }; - channels.trailers = { hasSubscribers: false }; - channels.error = { hasSubscribers: false }; - } var Request = class _Request { static { __name(this, "Request"); @@ -8501,6 +8701,7 @@ var require_client = __commonJS({ var http = require("http"); var { pipeline } = require("stream"); var util = require_util(); + var { channels } = require_diagnostics(); var timers = require_timers(); var Request = require_request2(); var DispatcherBase = require_dispatcher_base(); @@ -8591,19 +8792,6 @@ var require_client = __commonJS({ var h2ExperimentalWarned = false; var FastBuffer = Buffer[Symbol.species]; var kClosedResolve = Symbol("kClosedResolve"); - var channels = {}; - try { - const diagnosticsChannel = require("diagnostics_channel"); - channels.sendHeaders = diagnosticsChannel.channel("undici:client:sendHeaders"); - channels.beforeConnect = diagnosticsChannel.channel("undici:client:beforeConnect"); - channels.connectError = diagnosticsChannel.channel("undici:client:connectError"); - channels.connected = diagnosticsChannel.channel("undici:client:connected"); - } catch { - channels.sendHeaders = { hasSubscribers: false }; - channels.beforeConnect = { hasSubscribers: false }; - channels.connectError = { hasSubscribers: false }; - channels.connected = { hasSubscribers: false }; - } var Client = class extends DispatcherBase { static { __name(this, "Client"); @@ -9411,6 +9599,7 @@ var require_client = __commonJS({ hostname, protocol, port, + version: client[kHTTPConnVersion], servername: client[kServerName], localAddress: client[kLocalAddress] }, @@ -9488,6 +9677,7 @@ var require_client = __commonJS({ hostname, protocol, port, + version: client[kHTTPConnVersion], servername: 
client[kServerName], localAddress: client[kLocalAddress] }, @@ -9508,6 +9698,7 @@ var require_client = __commonJS({ hostname, protocol, port, + version: client[kHTTPConnVersion], servername: client[kServerName], localAddress: client[kLocalAddress] }, @@ -9770,23 +9961,31 @@ upgrade: ${upgrade}\r errorRequest(client, request, new Error("Upgrade not supported for H2")); return false; } + if (request.aborted) { + return false; + } + let stream; + const h2State = client[kHTTP2SessionState]; + headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost]; + headers[HTTP2_HEADER_METHOD] = method; try { request.onConnect((err) => { if (request.aborted || request.completed) { return; } - errorRequest(client, request, err || new RequestAbortedError()); + err = err || new RequestAbortedError(); + if (stream != null) { + util.destroy(stream, err); + h2State.openStreams -= 1; + if (h2State.openStreams === 0) { + session.unref(); + } + } + errorRequest(client, request, err); }); } catch (err) { errorRequest(client, request, err); } - if (request.aborted) { - return false; - } - let stream; - const h2State = client[kHTTP2SessionState]; - headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost]; - headers[HTTP2_HEADER_METHOD] = method; if (method === "CONNECT") { session.ref(); stream = session.request(headers, { endStream: false, signal }); @@ -11064,7 +11263,7 @@ var require_fetch = __commonJS({ } if (redirectStatusSet.has(actualResponse.status)) { if (request.redirect !== "manual") { - fetchParams.controller.connection.destroy(); + fetchParams.controller.connection.destroy(void 0, false); } if (request.redirect === "error") { response = makeNetworkError("unexpected redirect"); @@ -11265,10 +11464,12 @@ var require_fetch = __commonJS({ fetchParams.controller.connection = { abort: null, destroyed: false, - destroy(err) { + destroy(err, abort = true) { if (!this.destroyed) { this.destroyed = true; - this.abort?.(err ?? new DOMException("The operation was aborted.", "AbortError")); + if (abort) { + this.abort?.(err ?? 
new DOMException("The operation was aborted.", "AbortError")); + } } } }; @@ -11470,7 +11671,7 @@ var require_fetch = __commonJS({ } else { const keys = Object.keys(rawHeaders); for (let i = 0; i < keys.length; ++i) { - headersList.append(keys[i], rawHeaders[keys[i]]); + headersList.append(keys[i], rawHeaders[keys[i]], true); } const contentEncoding = rawHeaders["content-encoding"]; if (contentEncoding) { @@ -11982,7 +12183,6 @@ var require_util3 = __commonJS({ var require_connection = __commonJS({ "lib/websocket/connection.js"(exports2, module2) { "use strict"; - var diagnosticsChannel = require("diagnostics_channel"); var { uid, states } = require_constants4(); var { kReadyState, @@ -11991,16 +12191,13 @@ var require_connection = __commonJS({ kReceivedClose } = require_symbols3(); var { fireEvent, failWebsocketConnection } = require_util3(); + var { channels } = require_diagnostics(); var { CloseEvent } = require_events(); var { makeRequest } = require_request(); var { fetching } = require_fetch(); var { Headers } = require_headers(); var { getGlobalDispatcher } = require_global2(); var { kHeadersList } = require_symbols(); - var channels = {}; - channels.open = diagnosticsChannel.channel("undici:websocket:open"); - channels.close = diagnosticsChannel.channel("undici:websocket:close"); - channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error"); var crypto; try { crypto = require("crypto"); @@ -12195,14 +12392,11 @@ var require_receiver = __commonJS({ "lib/websocket/receiver.js"(exports2, module2) { "use strict"; var { Writable } = require("stream"); - var diagnosticsChannel = require("diagnostics_channel"); var { parserStates, opcodes, states, emptyBuffer } = require_constants4(); var { kReadyState, kSentClose, kResponse, kReceivedClose } = require_symbols3(); + var { channels } = require_diagnostics(); var { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = require_util3(); var { WebsocketFrameSend } = require_frame(); - var channels = {}; - channels.ping = diagnosticsChannel.channel("undici:websocket:ping"); - channels.pong = diagnosticsChannel.channel("undici:websocket:pong"); var ByteParser = class extends Writable { static { __name(this, "ByteParser"); @@ -12824,7 +13018,7 @@ var require_websocket = __commonJS({ if (isBlobLike(V)) { return webidl.converters.Blob(V, { strict: false }); } - if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) { + if (ArrayBuffer.isView(V) || types.isArrayBuffer(V)) { return webidl.converters.BufferSource(V); } } @@ -12840,7 +13034,9 @@ var require_websocket = __commonJS({ var fetchImpl = require_fetch().fetch; module.exports.fetch = /* @__PURE__ */ __name(function fetch(resource, init = void 0) { return fetchImpl(resource, init).catch((err) => { - Error.captureStackTrace(err, this); + if (typeof err === "object") { + Error.captureStackTrace(err, this); + } throw err; }); }, "fetch"); diff --git a/doc/api/child_process.md b/doc/api/child_process.md index 5d7842a0730138..eb4095b6b71a3e 100644 --- a/doc/api/child_process.md +++ b/doc/api/child_process.md @@ -873,12 +873,6 @@ child registers an event handler for the [`'disconnect'`][] event or the [`'message'`][] event. This allows the child to exit normally without the process being held open by the open IPC channel._ -On Unix-like operating systems, the [`child_process.spawn()`][] method -performs memory operations synchronously before decoupling the event loop -from the child. 
Applications with a large memory footprint may find frequent -[`child_process.spawn()`][] calls to be a bottleneck. For more information, -see [V8 issue 7381](https://bugs.chromium.org/p/v8/issues/detail?id=7381). - See also: [`child_process.exec()`][] and [`child_process.fork()`][]. ## Synchronous process creation diff --git a/doc/api/cli.md b/doc/api/cli.md index f7724f60ab239b..840585765e7fa6 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -106,7 +106,7 @@ If this flag is passed, the behavior can still be set to not abort through ### `--allow-addons` > Stability: 1.1 - Active development @@ -367,7 +367,7 @@ Currently the support for run-time snapshot is experimental in that: ### `--build-snapshot-config` > Stability: 1 - Experimental diff --git a/doc/api/deprecations.md b/doc/api/deprecations.md index 1a60bb81dad0da..ef54e200f814ad 100644 --- a/doc/api/deprecations.md +++ b/doc/api/deprecations.md @@ -3527,6 +3527,9 @@ deprecated. Get them from `fs.constants` or `fs.promises.constants` instead. -Type: Documentation-only +Type: End-of-Life -The [`util.types.isWebAssemblyCompiledModule`][] API is deprecated. Please use -`value instanceof WebAssembly.Module` instead. +The `util.types.isWebAssemblyCompiledModule` API has been removed. +Please use `value instanceof WebAssembly.Module` instead. ### DEP0178: `dirent.path` @@ -3715,7 +3718,6 @@ Please use the [`crypto.createHash()`][] method to create Hash instances. [`util.log()`]: util.md#utillogstring [`util.promisify`]: util.md#utilpromisifyoriginal [`util.toUSVString()`]: util.md#utiltousvstringstring -[`util.types.isWebAssemblyCompiledModule`]: util.md#utiltypesiswebassemblycompiledmodulevalue [`util.types`]: util.md#utiltypes [`util`]: util.md [`worker.exitedAfterDisconnect`]: cluster.md#workerexitedafterdisconnect diff --git a/doc/api/globals.md b/doc/api/globals.md index e17e0f7eebf10d..e8a46b67d6007d 100644 --- a/doc/api/globals.md +++ b/doc/api/globals.md @@ -484,7 +484,7 @@ changes: description: No longer experimental. - version: v18.0.0 pr-url: https://github.com/nodejs/node/pull/41811 - description: No longer behind `--experimental-global-fetch` CLI flag. + description: No longer behind `--experimental-fetch` CLI flag. --> > Stability: 2 - Stable @@ -514,7 +514,7 @@ changes: description: No longer experimental. - version: v18.0.0 pr-url: https://github.com/nodejs/node/pull/41811 - description: No longer behind `--experimental-global-fetch` CLI flag. + description: No longer behind `--experimental-fetch` CLI flag. --> > Stability: 2 - Stable @@ -553,7 +553,7 @@ changes: description: No longer experimental. - version: v18.0.0 pr-url: https://github.com/nodejs/node/pull/41811 - description: No longer behind `--experimental-global-fetch` CLI flag. + description: No longer behind `--experimental-fetch` CLI flag. --> > Stability: 2 - Stable @@ -901,7 +901,7 @@ changes: description: No longer experimental. - version: v18.0.0 pr-url: https://github.com/nodejs/node/pull/41811 - description: No longer behind `--experimental-global-fetch` CLI flag. + description: No longer behind `--experimental-fetch` CLI flag. --> > Stability: 2 - Stable @@ -921,7 +921,7 @@ changes: description: No longer experimental. - version: v18.0.0 pr-url: https://github.com/nodejs/node/pull/41811 - description: No longer behind `--experimental-global-fetch` CLI flag. + description: No longer behind `--experimental-fetch` CLI flag. 
--> > Stability: 2 - Stable diff --git a/doc/api/http.md b/doc/api/http.md index e0b2c55a56a3d0..d7b77f1a4c55ad 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -2991,12 +2991,12 @@ added: * `value` {string|string\[]} Header value * Returns: {this} -Append a single header value for the header object. +Append a single header value to the header object. -If the value is an array, this is equivalent of calling this method multiple +If the value is an array, this is equivalent to calling this method multiple times. -If there were no previous value for the header, this is equivalent of calling +If there were no previous values for the header, this is equivalent to calling [`outgoingMessage.setHeader(name, value)`][]. Depending of the value of `options.uniqueHeaders` when the client request or the diff --git a/doc/api/http2.md b/doc/api/http2.md index 8b3ce0aad24d3f..288dccd0472ac6 100644 --- a/doc/api/http2.md +++ b/doc/api/http2.md @@ -2890,6 +2890,19 @@ added: v8.4.0 Returns a [HTTP/2 Settings Object][] containing the deserialized settings from the given `Buffer` as generated by `http2.getPackedSettings()`. +### `http2.performServerHandshake(socket[, options])` + + + +* `socket` {stream.Duplex} +* `options` {Object} + * ...: Any [`http2.createServer()`][] option can be provided. +* Returns: {ServerHttp2Session} + +Create an HTTP/2 server session from an existing socket. + ### `http2.sensitiveHeaders` + +* `name` {string} +* `value` {string|string\[]} + +Append a single header value to the header object. + +If the value is an array, this is equivalent to calling this method multiple +times. + +If there were no previous values for the header, this is equivalent to calling +[`response.setHeader()`][]. + +Attempting to set a header field name or value that contains invalid characters +will result in a [`TypeError`][] being thrown. + +```js +// Returns headers including "set-cookie: a" and "set-cookie: b" +const server = http2.createServer((req, res) => { + res.setHeader('set-cookie', 'a'); + res.appendHeader('set-cookie', 'b'); + res.writeHead(200); + res.end('ok'); +}); +``` + #### `response.connection` > Stability: 1 - Experimental diff --git a/doc/api/net.md b/doc/api/net.md index 8a66a0a68b4c60..c34bd0ced5b6e1 100644 --- a/doc/api/net.md +++ b/doc/api/net.md @@ -694,10 +694,10 @@ See [`net.createConnection()`][]. ### Event: `'connectionAttempt'` -* `ip` {number} The IP which the socket is attempting to connect to. +* `ip` {string} The IP which the socket is attempting to connect to. * `port` {number} The port which the socket is attempting to connect to. * `family` {number} The family of the IP. It can be `6` for IPv6 or `4` for IPv4. @@ -707,10 +707,10 @@ if the family autoselection algorithm is enabled in [`socket.connect(options)`][ ### Event: `'connectionAttemptFailed'` -* `ip` {number} The IP which the socket attempted to connect to. +* `ip` {string} The IP which the socket attempted to connect to. * `port` {number} The port which the socket attempted to connect to. * `family` {number} The family of the IP. It can be `6` for IPv6 or `4` for IPv4. \* `error` {Error} The error associated with the failure. @@ -721,10 +721,10 @@ if the family autoselection algorithm is enabled in [`socket.connect(options)`][ ### Event: `'connectionAttemptTimeout'` -* `ip` {number} The IP which the socket attempted to connect to. +* `ip` {string} The IP which the socket attempted to connect to. * `port` {number} The port which the socket attempted to connect to. * `family` {number} The family of the IP. 
It can be `6` for IPv6 or `4` for IPv4. diff --git a/doc/api/process.md b/doc/api/process.md index b53986f04ae18c..fcec605380b46b 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -2260,6 +2260,29 @@ process.kill(process.pid, 'SIGHUP'); When `SIGUSR1` is received by a Node.js process, Node.js will start the debugger. See [Signal Events][]. +## `process.loadEnvFile(path)` + + + +> Stability: 1.1 - Active development + +* `path` {string | URL | Buffer | undefined}. **Default:** `'./.env'` + +Loads the `.env` file into `process.env`. Usage of `NODE_OPTIONS` +in the `.env` file will not have any effect on Node.js. + +```cjs +const { loadEnvFile } = require('node:process'); +loadEnvFile(); +``` + +```mjs +import { loadEnvFile } from 'node:process'; +loadEnvFile(); +``` + ## `process.mainModule` + +* `content` {string} + +The raw contents of a `.env` file. + +* Returns: {Object} + +Given an example `.env` file: + +```cjs +const { parseEnv } = require('node:util'); + +parseEnv('HELLO=world\nHELLO=oh my\n'); +// Returns: { HELLO: 'oh my' } +``` + +```mjs +import { parseEnv } from 'node:util'; + +parseEnv('HELLO=world\nHELLO=oh my\n'); +// Returns: { HELLO: 'oh my' } +``` + ## `util.promisify(original)` - -> Stability: 0 - Deprecated: Use `value instanceof WebAssembly.Module` instead. - -* `value` {any} -* Returns: {boolean} - -Returns `true` if the value is a built-in [`WebAssembly.Module`][] instance. - -```js -const module = new WebAssembly.Module(wasmBuffer); -util.types.isWebAssemblyCompiledModule(module); // Returns true -``` - ## Deprecated APIs The following APIs are deprecated and should no longer be used. Existing @@ -3362,7 +3373,6 @@ util.log('Timestamped message.'); [`Uint8ClampedArray`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8ClampedArray [`WeakMap`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakMap [`WeakSet`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet -[`WebAssembly.Module`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/Module [`assert.deepStrictEqual()`]: assert.md#assertdeepstrictequalactual-expected-message [`console.error()`]: console.md#consoleerrordata-args [`mime.toString()`]: #mimetostring diff --git a/doc/changelogs/CHANGELOG_V21.md b/doc/changelogs/CHANGELOG_V21.md index 1f53b574441ebd..18b0c539d3115b 100644 --- a/doc/changelogs/CHANGELOG_V21.md +++ b/doc/changelogs/CHANGELOG_V21.md @@ -8,6 +8,8 @@ +21.6.1
    +21.6.0
    21.5.0
    21.4.0
    21.3.0
    @@ -41,6 +43,189 @@ * [io.js](CHANGELOG_IOJS.md) * [Archive](CHANGELOG_ARCHIVE.md) + + +## 2024-01-22, Version 21.6.1 (Current), @RafaelGSS + +### Notable Changes + +This release fixes a bug in `undici` using WebStreams + +### Commits + +* \[[`662ac95729`](https://github.com/nodejs/node/commit/662ac95729)] - _**Revert**_ "**stream**: fix cloned webstreams not being unref'd" (Matteo Collina) [#51491](https://github.com/nodejs/node/pull/51491) +* \[[`1b8bba8aee`](https://github.com/nodejs/node/commit/1b8bba8aee)] - **test**: add regression test for 51586 (Matteo Collina) [#51491](https://github.com/nodejs/node/pull/51491) + + + +## 2024-01-15, Version 21.6.0 (Current), @RafaelGSS + +### New connection attempt events + +Three new events were added in the `net.createConnection` flow: + +* `connectionAttempt`: Emitted when a new connection attempt is established. In case of Happy Eyeballs, this might emitted multiple times. +* `connectionAttemptFailed`: Emitted when a connection attempt failed. In case of Happy Eyeballs, this might emitted multiple times. +* `connectionAttemptTimeout`: Emitted when a connection attempt timed out. In case of Happy Eyeballs, this will not be emitted for the last attempt. This is not emitted at all if Happy Eyeballs is not used. + +Additionally, a previous bug has been fixed where a new connection attempt could have been started after a previous one failed and after the connection was destroyed by the user. +This led to a failed assertion. + +Contributed by Paolo Insogna in [#51045](https://github.com/nodejs/node/pull/51045). + +### Changes to the Permission Model + +Node.js 21.6.0 comes with several fixes for the experimental permission model and two new semver-minor commits. +We're adding a new flag `--allow-addons` to enable addon usage when using the Permission Model. + +```console +$ node --experimental-permission --allow-addons +``` + +Contributed by Rafael Gonzaga in [#51183](https://github.com/nodejs/node/pull/51183) + +And relative paths are now supported through the `--allow-fs-*` flags. +Therefore, with this release one can use: + +```console +$ node --experimental-permission --allow-fs-read=./index.js +``` + +To give only read access to the entrypoint of the application. + +Contributed by Rafael Gonzaga and Carlos Espa in [#50758](https://github.com/nodejs/node/pull/50758) + +### Support configurable snapshot through `--build-snapshot-config` flag + +We are adding a new flag `--build-snapshot-config` to configure snapshots through a custom JSON configuration file. + +```console +$ node --build-snapshot-config=/path/to/myconfig.json +``` + +When using this flag, additional script files provided on the command line will +not be executed and instead be interpreted as regular command line arguments. 
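As a rough illustration of the behaviour described above (a sketch, not taken from the release notes: the file names are hypothetical, and the `builder` field is assumed from the `--build-snapshot-config` documentation added alongside this release):

```console
$ # Sketch only: file names are hypothetical; the "builder" key is assumed from
$ # the new --build-snapshot-config documentation, not quoted from these notes.
$ cat snapshot.config.json
{
  "builder": "./snapshot-builder.js"
}
$ node --build-snapshot-config=snapshot.config.json index.js
```

With the configuration file in play, `index.js` is not executed directly; it is treated as an ordinary command-line argument instead, as noted above.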
+ +These changes were contributed by Joyee Cheung and Anna Henningsen in [#50453](https://github.com/nodejs/node/pull/50453) + +### Other Notable Changes + +* \[[`c31ed51373`](https://github.com/nodejs/node/commit/c31ed51373)] - **(SEMVER-MINOR)** **timers**: export timers.promises (Marco Ippolito) [#51246](https://github.com/nodejs/node/pull/51246) + +### Commits + +* \[[`13a1241b83`](https://github.com/nodejs/node/commit/13a1241b83)] - **assert,crypto**: make KeyObject and CryptoKey testable for equality (Filip Skokan) [#50897](https://github.com/nodejs/node/pull/50897) +* \[[`4dcc5114aa`](https://github.com/nodejs/node/commit/4dcc5114aa)] - **benchmark**: remove dependency on unshipped tools (Adam Majer) [#51146](https://github.com/nodejs/node/pull/51146) +* \[[`2eb41f86b3`](https://github.com/nodejs/node/commit/2eb41f86b3)] - **build**: fix for VScode "Reopen in Container" (Serg Kryvonos) [#51271](https://github.com/nodejs/node/pull/51271) +* \[[`e03ac83c19`](https://github.com/nodejs/node/commit/e03ac83c19)] - **build**: fix arm64 cross-compilation (Michaël Zasso) [#51256](https://github.com/nodejs/node/pull/51256) +* \[[`cd61fce34e`](https://github.com/nodejs/node/commit/cd61fce34e)] - **build**: add `-flax-vector-conversions` to V8 build (Michaël Zasso) [#51257](https://github.com/nodejs/node/pull/51257) +* \[[`e5017a522e`](https://github.com/nodejs/node/commit/e5017a522e)] - **crypto**: update CryptoKey symbol properties (Filip Skokan) [#50897](https://github.com/nodejs/node/pull/50897) +* \[[`c0d2e8be11`](https://github.com/nodejs/node/commit/c0d2e8be11)] - **deps**: update corepack to 0.24.0 (Node.js GitHub Bot) [#51318](https://github.com/nodejs/node/pull/51318) +* \[[`24a9a72492`](https://github.com/nodejs/node/commit/24a9a72492)] - **deps**: update acorn to 8.11.3 (Node.js GitHub Bot) [#51317](https://github.com/nodejs/node/pull/51317) +* \[[`e53cbb22c2`](https://github.com/nodejs/node/commit/e53cbb22c2)] - **deps**: update ngtcp2 and nghttp3 (James M Snell) [#51291](https://github.com/nodejs/node/pull/51291) +* \[[`f00f1204f1`](https://github.com/nodejs/node/commit/f00f1204f1)] - **deps**: update brotli to 1.1.0 (Node.js GitHub Bot) [#50804](https://github.com/nodejs/node/pull/50804) +* \[[`a41dca0c51`](https://github.com/nodejs/node/commit/a41dca0c51)] - **deps**: update zlib to 1.3.0.1-motley-40e35a7 (Node.js GitHub Bot) [#51274](https://github.com/nodejs/node/pull/51274) +* \[[`efa12a89c6`](https://github.com/nodejs/node/commit/efa12a89c6)] - **deps**: update simdutf to 4.0.8 (Node.js GitHub Bot) [#51000](https://github.com/nodejs/node/pull/51000) +* \[[`25eba3d20b`](https://github.com/nodejs/node/commit/25eba3d20b)] - **deps**: V8: cherry-pick de611e69ad51 (Keyhan Vakil) [#51200](https://github.com/nodejs/node/pull/51200) +* \[[`a07d6e23e4`](https://github.com/nodejs/node/commit/a07d6e23e4)] - **deps**: update simdjson to 3.6.3 (Node.js GitHub Bot) [#51104](https://github.com/nodejs/node/pull/51104) +* \[[`6d1bfcb2dd`](https://github.com/nodejs/node/commit/6d1bfcb2dd)] - **deps**: update googletest to 530d5c8 (Node.js GitHub Bot) [#51191](https://github.com/nodejs/node/pull/51191) +* \[[`75e5615c43`](https://github.com/nodejs/node/commit/75e5615c43)] - **deps**: update acorn-walk to 8.3.1 (Node.js GitHub Bot) [#50457](https://github.com/nodejs/node/pull/50457) +* \[[`3ecc7dcc00`](https://github.com/nodejs/node/commit/3ecc7dcc00)] - **deps**: update acorn-walk to 8.3.0 (Node.js GitHub Bot) [#50457](https://github.com/nodejs/node/pull/50457) +* 
\[[`e2f8d741c8`](https://github.com/nodejs/node/commit/e2f8d741c8)] - **deps**: update zlib to 1.3.0.1-motley-dd5fc13 (Node.js GitHub Bot) [#51105](https://github.com/nodejs/node/pull/51105) +* \[[`4a5d3bda72`](https://github.com/nodejs/node/commit/4a5d3bda72)] - **doc**: the GN files should use Node's license (Cheng Zhao) [#50694](https://github.com/nodejs/node/pull/50694) +* \[[`84127514ba`](https://github.com/nodejs/node/commit/84127514ba)] - **doc**: improve localWindowSize event descriptions (Davy Landman) [#51071](https://github.com/nodejs/node/pull/51071) +* \[[`8ee882a49c`](https://github.com/nodejs/node/commit/8ee882a49c)] - **doc**: mark `--jitless` as experimental (Antoine du Hamel) [#51247](https://github.com/nodejs/node/pull/51247) +* \[[`876743ece1`](https://github.com/nodejs/node/commit/876743ece1)] - **doc**: run license-builder (github-actions\[bot]) [#51199](https://github.com/nodejs/node/pull/51199) +* \[[`ec6fcff009`](https://github.com/nodejs/node/commit/ec6fcff009)] - **doc**: fix limitations and known issues in pm (Rafael Gonzaga) [#51184](https://github.com/nodejs/node/pull/51184) +* \[[`c13a5c0373`](https://github.com/nodejs/node/commit/c13a5c0373)] - **doc**: mention node:wasi in the Threat Model (Rafael Gonzaga) [#51211](https://github.com/nodejs/node/pull/51211) +* \[[`4b19e62444`](https://github.com/nodejs/node/commit/4b19e62444)] - **doc**: remove ambiguous 'considered' (Rich Trott) [#51207](https://github.com/nodejs/node/pull/51207) +* \[[`5453abd6ad`](https://github.com/nodejs/node/commit/5453abd6ad)] - **doc**: set exit code in custom test runner example (Matteo Collina) [#51056](https://github.com/nodejs/node/pull/51056) +* \[[`f9d4e07faf`](https://github.com/nodejs/node/commit/f9d4e07faf)] - **doc**: remove version from `maintaining-dependencies.md` (Antoine du Hamel) [#51195](https://github.com/nodejs/node/pull/51195) +* \[[`df8927a073`](https://github.com/nodejs/node/commit/df8927a073)] - **doc**: mention native addons are restricted in pm (Rafael Gonzaga) [#51185](https://github.com/nodejs/node/pull/51185) +* \[[`e636d83914`](https://github.com/nodejs/node/commit/e636d83914)] - **doc**: correct note on behavior of stats.isDirectory (Nick Reilingh) [#50946](https://github.com/nodejs/node/pull/50946) +* \[[`1c71435c2a`](https://github.com/nodejs/node/commit/1c71435c2a)] - **doc**: fix `TestsStream` parent class (Jungku Lee) [#51181](https://github.com/nodejs/node/pull/51181) +* \[[`2c227b0d64`](https://github.com/nodejs/node/commit/2c227b0d64)] - **doc**: fix simdjson wrong link (Marco Ippolito) [#51177](https://github.com/nodejs/node/pull/51177) +* \[[`efa13e1943`](https://github.com/nodejs/node/commit/efa13e1943)] - **(SEMVER-MINOR)** **doc**: add documentation for --build-snapshot-config (Anna Henningsen) [#50453](https://github.com/nodejs/node/pull/50453) +* \[[`941aedc6fc`](https://github.com/nodejs/node/commit/941aedc6fc)] - **errors**: fix stacktrace of SystemError (uzlopak) [#49956](https://github.com/nodejs/node/pull/49956) +* \[[`47548d9e61`](https://github.com/nodejs/node/commit/47548d9e61)] - **esm**: fix hint on invalid module specifier (Antoine du Hamel) [#51223](https://github.com/nodejs/node/pull/51223) +* \[[`091098f40a`](https://github.com/nodejs/node/commit/091098f40a)] - **fs**: fix fs.promises.realpath for long paths on Windows (翠 / green) [#51032](https://github.com/nodejs/node/pull/51032) +* \[[`e5a8fa01aa`](https://github.com/nodejs/node/commit/e5a8fa01aa)] - **fs**: make offset, position & length args in fh.read() optional 
(Pulkit Gupta) [#51087](https://github.com/nodejs/node/pull/51087) +* \[[`c87e5d51cc`](https://github.com/nodejs/node/commit/c87e5d51cc)] - **fs**: add missing jsdoc parameters to `readSync` (Yagiz Nizipli) [#51225](https://github.com/nodejs/node/pull/51225) +* \[[`e24249cf37`](https://github.com/nodejs/node/commit/e24249cf37)] - **fs**: remove `internalModuleReadJSON` binding (Yagiz Nizipli) [#51224](https://github.com/nodejs/node/pull/51224) +* \[[`7421467812`](https://github.com/nodejs/node/commit/7421467812)] - **fs**: improve mkdtemp performance for buffer prefix (Yagiz Nizipli) [#51078](https://github.com/nodejs/node/pull/51078) +* \[[`5b229d775f`](https://github.com/nodejs/node/commit/5b229d775f)] - **fs**: validate fd synchronously on c++ (Yagiz Nizipli) [#51027](https://github.com/nodejs/node/pull/51027) +* \[[`c7a135962d`](https://github.com/nodejs/node/commit/c7a135962d)] - **http**: remove misleading warning (Luigi Pinca) [#51204](https://github.com/nodejs/node/pull/51204) +* \[[`a325746ff4`](https://github.com/nodejs/node/commit/a325746ff4)] - **http**: do not override user-provided options object (KuthorX) [#33633](https://github.com/nodejs/node/pull/33633) +* \[[`89eee7763f`](https://github.com/nodejs/node/commit/89eee7763f)] - **http2**: addtl http/2 settings (Marten Richter) [#49025](https://github.com/nodejs/node/pull/49025) +* \[[`624142947f`](https://github.com/nodejs/node/commit/624142947f)] - **lib**: fix use of `--frozen-intrinsics` with `--jitless` (Antoine du Hamel) [#51248](https://github.com/nodejs/node/pull/51248) +* \[[`8f845eb001`](https://github.com/nodejs/node/commit/8f845eb001)] - **lib**: move function declaration outside of loop (Sanjaiyan Parthipan) [#51242](https://github.com/nodejs/node/pull/51242) +* \[[`ed7305e49b`](https://github.com/nodejs/node/commit/ed7305e49b)] - **lib**: reduce overhead of `SafePromiseAllSettledReturnVoid` calls (Antoine du Hamel) [#51243](https://github.com/nodejs/node/pull/51243) +* \[[`291265ce27`](https://github.com/nodejs/node/commit/291265ce27)] - **lib**: expose default prepareStackTrace (Chengzhong Wu) [#50827](https://github.com/nodejs/node/pull/50827) +* \[[`8ff6bc45ca`](https://github.com/nodejs/node/commit/8ff6bc45ca)] - **lib,permission**: handle buffer on fs.symlink (Rafael Gonzaga) [#51212](https://github.com/nodejs/node/pull/51212) +* \[[`416b4f8063`](https://github.com/nodejs/node/commit/416b4f8063)] - **(SEMVER-MINOR)** **lib,src,permission**: port path.resolve to C++ (Rafael Gonzaga) [#50758](https://github.com/nodejs/node/pull/50758) +* \[[`6648a5c576`](https://github.com/nodejs/node/commit/6648a5c576)] - **meta**: notify tsc on changes in SECURITY.md (Rafael Gonzaga) [#51259](https://github.com/nodejs/node/pull/51259) +* \[[`83a99ccedd`](https://github.com/nodejs/node/commit/83a99ccedd)] - **meta**: update artifact actions to v4 (Michaël Zasso) [#51219](https://github.com/nodejs/node/pull/51219) +* \[[`b621ada69a`](https://github.com/nodejs/node/commit/b621ada69a)] - **module**: move the CJS exports cache to internal/modules/cjs/loader (Joyee Cheung) [#51157](https://github.com/nodejs/node/pull/51157) +* \[[`e4be5b60f0`](https://github.com/nodejs/node/commit/e4be5b60f0)] - **(SEMVER-MINOR)** **net**: add connection attempt events (Paolo Insogna) [#51045](https://github.com/nodejs/node/pull/51045) +* \[[`3a492056e2`](https://github.com/nodejs/node/commit/3a492056e2)] - **node-api**: type tag external values without v8::Private (Chengzhong Wu) [#51149](https://github.com/nodejs/node/pull/51149) +* 
\[[`b2135ae7dc`](https://github.com/nodejs/node/commit/b2135ae7dc)] - **node-api**: segregate nogc APIs from rest via type system (Gabriel Schulhof) [#50060](https://github.com/nodejs/node/pull/50060) +* \[[`8f4325dcd5`](https://github.com/nodejs/node/commit/8f4325dcd5)] - **permission**: fix wildcard when children > 1 (Rafael Gonzaga) [#51209](https://github.com/nodejs/node/pull/51209) +* \[[`7ecf99404e`](https://github.com/nodejs/node/commit/7ecf99404e)] - **quic**: update quic impl to use latest ngtcp2/nghttp3 (James M Snell) [#51291](https://github.com/nodejs/node/pull/51291) +* \[[`5b32e21f3b`](https://github.com/nodejs/node/commit/5b32e21f3b)] - **quic**: add quic internalBinding, refine Endpoint, add types (James M Snell) [#51112](https://github.com/nodejs/node/pull/51112) +* \[[`3310095bea`](https://github.com/nodejs/node/commit/3310095bea)] - **repl**: fix prepareStackTrace frames array order (Chengzhong Wu) [#50827](https://github.com/nodejs/node/pull/50827) +* \[[`a0ff00b526`](https://github.com/nodejs/node/commit/a0ff00b526)] - **src**: avoid draining platform tasks at FreeEnvironment (Chengzhong Wu) [#51290](https://github.com/nodejs/node/pull/51290) +* \[[`115e0585cd`](https://github.com/nodejs/node/commit/115e0585cd)] - **src**: add fast api for Histogram (James M Snell) [#51296](https://github.com/nodejs/node/pull/51296) +* \[[`29b81576c6`](https://github.com/nodejs/node/commit/29b81576c6)] - **src**: refactor `GetCreationContext` calls (Yagiz Nizipli) [#51287](https://github.com/nodejs/node/pull/51287) +* \[[`54dd978400`](https://github.com/nodejs/node/commit/54dd978400)] - **src**: enter isolate before destructing IsolateData (Ben Noordhuis) [#51138](https://github.com/nodejs/node/pull/51138) +* \[[`864ecb0dfa`](https://github.com/nodejs/node/commit/864ecb0dfa)] - **src**: do not treat all paths ending with node\_modules as such (Michaël Zasso) [#51269](https://github.com/nodejs/node/pull/51269) +* \[[`df31c8114c`](https://github.com/nodejs/node/commit/df31c8114c)] - **src**: eliminate duplicate code in histogram.cc (James M Snell) [#51263](https://github.com/nodejs/node/pull/51263) +* \[[`17c73e6d0c`](https://github.com/nodejs/node/commit/17c73e6d0c)] - **src**: fix unix abstract socket path for trace event (theanarkh) [#50858](https://github.com/nodejs/node/pull/50858) +* \[[`96d64edc94`](https://github.com/nodejs/node/commit/96d64edc94)] - **src**: use BignumPointer and use BN\_clear\_free (James M Snell) [#50454](https://github.com/nodejs/node/pull/50454) +* \[[`8a2dd93a14`](https://github.com/nodejs/node/commit/8a2dd93a14)] - **src**: implement FastByteLengthUtf8 with simdutf::utf8\_length\_from\_latin1 (Daniel Lemire) [#50840](https://github.com/nodejs/node/pull/50840) +* \[[`e54ddf898f`](https://github.com/nodejs/node/commit/e54ddf898f)] - **(SEMVER-MINOR)** **src**: support configurable snapshot (Joyee Cheung) [#50453](https://github.com/nodejs/node/pull/50453) +* \[[`a69c7d7bc3`](https://github.com/nodejs/node/commit/a69c7d7bc3)] - **(SEMVER-MINOR)** **src,permission**: add --allow-addon flag (Rafael Gonzaga) [#51183](https://github.com/nodejs/node/pull/51183) +* \[[`e7925e66fc`](https://github.com/nodejs/node/commit/e7925e66fc)] - **src,stream**: improve WriteString (ywave620) [#51155](https://github.com/nodejs/node/pull/51155) +* \[[`82de6603af`](https://github.com/nodejs/node/commit/82de6603af)] - **stream**: fix code style (Mattias Buelens) [#51168](https://github.com/nodejs/node/pull/51168) +* 
\[[`e443953656`](https://github.com/nodejs/node/commit/e443953656)] - **stream**: fix cloned webstreams not being unref'd (James M Snell) [#51255](https://github.com/nodejs/node/pull/51255) +* \[[`757a84c9ea`](https://github.com/nodejs/node/commit/757a84c9ea)] - **test**: fix flaky conditions for ppc64 SEA tests (Richard Lau) [#51422](https://github.com/nodejs/node/pull/51422) +* \[[`85ee2f7255`](https://github.com/nodejs/node/commit/85ee2f7255)] - **test**: replace forEach() with for...of (Alexander Jones) [#50608](https://github.com/nodejs/node/pull/50608) +* \[[`549e4b4142`](https://github.com/nodejs/node/commit/549e4b4142)] - **test**: replace forEach with for...of (Ospite Privilegiato) [#50787](https://github.com/nodejs/node/pull/50787) +* \[[`ef44f9bef2`](https://github.com/nodejs/node/commit/ef44f9bef2)] - **test**: replace foreach with for of (lucacapocci94-dev) [#50790](https://github.com/nodejs/node/pull/50790) +* \[[`652af45485`](https://github.com/nodejs/node/commit/652af45485)] - **test**: replace forEach() with for...of (Jia) [#50610](https://github.com/nodejs/node/pull/50610) +* \[[`684dd9db2f`](https://github.com/nodejs/node/commit/684dd9db2f)] - **test**: fix inconsistency write size in `test-fs-readfile-tostring-fail` (Jungku Lee) [#51141](https://github.com/nodejs/node/pull/51141) +* \[[`aaf710f535`](https://github.com/nodejs/node/commit/aaf710f535)] - **test**: replace forEach test-http-server-multiheaders2 (Marco Mac) [#50794](https://github.com/nodejs/node/pull/50794) +* \[[`57c64550cc`](https://github.com/nodejs/node/commit/57c64550cc)] - **test**: replace forEach with for-of in test-webcrypto-export-import-ec (Chiara Ricciardi) [#51249](https://github.com/nodejs/node/pull/51249) +* \[[`88e865181b`](https://github.com/nodejs/node/commit/88e865181b)] - **test**: move to for of loop in test-http-hostname-typechecking.js (Luca Del Puppo) [#50782](https://github.com/nodejs/node/pull/50782) +* \[[`3db376f67a`](https://github.com/nodejs/node/commit/3db376f67a)] - **test**: skip test-watch-mode-inspect on arm (Michael Dawson) [#51210](https://github.com/nodejs/node/pull/51210) +* \[[`38232d1c52`](https://github.com/nodejs/node/commit/38232d1c52)] - **test**: replace forEach with for of in file test-trace-events-net.js (Ianna83) [#50789](https://github.com/nodejs/node/pull/50789) +* \[[`f1cb58355a`](https://github.com/nodejs/node/commit/f1cb58355a)] - **test**: replace forEach() with for...of in test/parallel/test-util-log.js (Edoardo Dusi) [#50783](https://github.com/nodejs/node/pull/50783) +* \[[`9bfd84c117`](https://github.com/nodejs/node/commit/9bfd84c117)] - **test**: replace forEach with for of in test-trace-events-api.js (Andrea Pavone) [#50784](https://github.com/nodejs/node/pull/50784) +* \[[`7e9834915a`](https://github.com/nodejs/node/commit/7e9834915a)] - **test**: replace forEach with for-of in test-v8-serders.js (Mattia Iannone) [#50791](https://github.com/nodejs/node/pull/50791) +* \[[`b6f232e841`](https://github.com/nodejs/node/commit/b6f232e841)] - **test**: add URL tests to fs-read in pm (Rafael Gonzaga) [#51213](https://github.com/nodejs/node/pull/51213) +* \[[`8a2178c5f5`](https://github.com/nodejs/node/commit/8a2178c5f5)] - **test**: use tmpdir.refresh() in test-esm-loader-resolve-type.mjs (Luigi Pinca) [#51206](https://github.com/nodejs/node/pull/51206) +* \[[`7e9a0b192a`](https://github.com/nodejs/node/commit/7e9a0b192a)] - **test**: use tmpdir.refresh() in test-esm-json.mjs (Luigi Pinca) [#51205](https://github.com/nodejs/node/pull/51205) +* 
\[[`d7c2572fe0`](https://github.com/nodejs/node/commit/d7c2572fe0)] - **test**: fix flakiness in worker\*.test-free-called (Jithil P Ponnan) [#51013](https://github.com/nodejs/node/pull/51013) +* \[[`979cebc955`](https://github.com/nodejs/node/commit/979cebc955)] - **test\_runner**: fixed test object is incorrectly passed to setup() (Pulkit Gupta) [#50982](https://github.com/nodejs/node/pull/50982) +* \[[`63db82abe6`](https://github.com/nodejs/node/commit/63db82abe6)] - **test\_runner**: fixed to run after hook if before throws an error (Pulkit Gupta) [#51062](https://github.com/nodejs/node/pull/51062) +* \[[`c31ed51373`](https://github.com/nodejs/node/commit/c31ed51373)] - **(SEMVER-MINOR)** **timers**: export timers.promises (Marco Ippolito) [#51246](https://github.com/nodejs/node/pull/51246) +* \[[`fc10f889eb`](https://github.com/nodejs/node/commit/fc10f889eb)] - **tools**: update lint-md-dependencies to rollup\@4.9.2 (Node.js GitHub Bot) [#51320](https://github.com/nodejs/node/pull/51320) +* \[[`d5a5f12d15`](https://github.com/nodejs/node/commit/d5a5f12d15)] - **tools**: fix dep\_updaters dir updates (Michaël Zasso) [#51294](https://github.com/nodejs/node/pull/51294) +* \[[`bdcb5ed510`](https://github.com/nodejs/node/commit/bdcb5ed510)] - **tools**: update inspector\_protocol to c488ba2 (cola119) [#51293](https://github.com/nodejs/node/pull/51293) +* \[[`69a46add77`](https://github.com/nodejs/node/commit/69a46add77)] - **tools**: update inspector\_protocol to 9b4a4aa (cola119) [#51293](https://github.com/nodejs/node/pull/51293) +* \[[`e325f49d19`](https://github.com/nodejs/node/commit/e325f49d19)] - **tools**: update inspector\_protocol to 2f51e05 (cola119) [#51293](https://github.com/nodejs/node/pull/51293) +* \[[`60d804851b`](https://github.com/nodejs/node/commit/60d804851b)] - **tools**: update inspector\_protocol to d7b099b (cola119) [#51293](https://github.com/nodejs/node/pull/51293) +* \[[`d18168489f`](https://github.com/nodejs/node/commit/d18168489f)] - **tools**: update inspector\_protocol to 912eb68 (cola119) [#51293](https://github.com/nodejs/node/pull/51293) +* \[[`ef4f46fc39`](https://github.com/nodejs/node/commit/ef4f46fc39)] - **tools**: update inspector\_protocol to 547c5b8 (cola119) [#51293](https://github.com/nodejs/node/pull/51293) +* \[[`c3126fc016`](https://github.com/nodejs/node/commit/c3126fc016)] - **tools**: update inspector\_protocol to ca525fc (cola119) [#51293](https://github.com/nodejs/node/pull/51293) +* \[[`917d887dde`](https://github.com/nodejs/node/commit/917d887dde)] - **tools**: update lint-md-dependencies to rollup\@4.9.1 (Node.js GitHub Bot) [#51276](https://github.com/nodejs/node/pull/51276) +* \[[`37594918e0`](https://github.com/nodejs/node/commit/37594918e0)] - **tools**: check timezone current version (Marco Ippolito) [#51178](https://github.com/nodejs/node/pull/51178) +* \[[`d0d2faf899`](https://github.com/nodejs/node/commit/d0d2faf899)] - **tools**: update lint-md-dependencies to rollup\@4.9.0 (Node.js GitHub Bot) [#51193](https://github.com/nodejs/node/pull/51193) +* \[[`c96ef6533c`](https://github.com/nodejs/node/commit/c96ef6533c)] - **tools**: update eslint to 8.56.0 (Node.js GitHub Bot) [#51194](https://github.com/nodejs/node/pull/51194) +* \[[`f4f781d493`](https://github.com/nodejs/node/commit/f4f781d493)] - **util**: pass invalidSubtypeIndex instead of trimmedSubtype to error (Gaurish Sethia) [#51264](https://github.com/nodejs/node/pull/51264) +* \[[`867b484429`](https://github.com/nodejs/node/commit/867b484429)] - **watch**: clarify 
that the fileName parameter can be null (Luigi Pinca) [#51305](https://github.com/nodejs/node/pull/51305) +* \[[`56e8969b65`](https://github.com/nodejs/node/commit/56e8969b65)] - **watch**: fix null `fileName` on windows systems (vnc5) [#49891](https://github.com/nodejs/node/pull/49891) +* \[[`3f4fd6efbb`](https://github.com/nodejs/node/commit/3f4fd6efbb)] - **watch**: fix infinite loop when passing --watch=true flag (Pulkit Gupta) [#51160](https://github.com/nodejs/node/pull/51160) + ## 2023-12-19, Version 21.5.0 (Current), @RafaelGSS diff --git a/doc/contributing/offboarding.md b/doc/contributing/offboarding.md index f30688a33d0d87..f9d8140b54b4d8 100644 --- a/doc/contributing/offboarding.md +++ b/doc/contributing/offboarding.md @@ -3,7 +3,7 @@ This document is a checklist of things to do when a collaborator becomes emeritus or leaves the project. -* Remove the collaborator from the @nodejs/collaborators team. +* Remove the collaborator from the [`@nodejs/collaborators`][] team. * Open a fast-track pull request to move the collaborator to the collaborator emeriti list in README.md. * Determine what GitHub teams the collaborator belongs to. In consultation with @@ -15,6 +15,11 @@ emeritus or leaves the project. * When in doubt, especially if you are unable to get in contact with the collaborator, remove them from all teams. It is easy enough to add them back later, so we err on the side of privacy and security. -* Open an issue in the [build](https://github.com/nodejs/build) repository - titled `Remove Collaborator from Coverity` asking that the collaborator - be removed from the Node.js coverity project if they had access. +* Remove them from the [`@nodejs`](https://github.com/orgs/nodejs/people) GitHub + org unless they are members for a reason other than being a Collaborator. +* [Open an issue](https://github.com/nodejs/build/issues/new) in the + nodejs/build repository titled `Remove Collaborator from Coverity` asking that + the collaborator be removed from the Node.js coverity project if they had + access. + +[`@nodejs/collaborators`]: https://github.com/orgs/nodejs/teams/collaborators/members diff --git a/doc/contributing/security-release-process.md b/doc/contributing/security-release-process.md index 4408782069970f..4524df2a90047d 100644 --- a/doc/contributing/security-release-process.md +++ b/doc/contributing/security-release-process.md @@ -124,7 +124,7 @@ out a better way, forward the email you receive to `oss-security@lists.openwall.com` as a CC. * [ ] Post in the [nodejs-social channel][] - in the OpenJS slack asking for amplication of the blog post. + in the OpenJS slack asking for amplification of the blog post. ```text Security release pre-alert: @@ -184,7 +184,7 @@ out a better way, forward the email you receive to ``` * [ ] Post in the [nodejs-social channel][] - in the OpenJS slack asking for amplication of the blog post. + in the OpenJS slack asking for amplification of the blog post. ```text Security release: diff --git a/doc/contributing/strategic-initiatives.md b/doc/contributing/strategic-initiatives.md index ca4308daa3ebb8..5af550441db29a 100644 --- a/doc/contributing/strategic-initiatives.md +++ b/doc/contributing/strategic-initiatives.md @@ -14,7 +14,7 @@ agenda to ensure they are active and have the support they need. 
| Startup Snapshot | [Joyee Cheung][joyeecheung] | | | V8 Currency | [Michaël Zasso][targos] | | | Next-10 | [Michael Dawson][mhdawson] | | -| Single executable apps | [Darshan Sen][RaisinTen] | | +| Single executable apps | | | | Performance | | | | Primordials | [Benjamin Gruenbaum][benjamingr] | | @@ -39,7 +39,6 @@ agenda to ensure they are active and have the support they need. -[RaisinTen]: https://github.com/RaisinTen [aduh95]: https://github.com/aduh95 [benjamingr]: https://github.com/benjamingr [jasnell]: https://github.com/jasnell diff --git a/lib/fs.js b/lib/fs.js index 00ca3f50acef53..22688f631daf88 100644 --- a/lib/fs.js +++ b/lib/fs.js @@ -2379,6 +2379,7 @@ function writeFileSync(path, data, options) { * encoding?: string | null; * mode?: number; * flag?: string; + * flush?: boolean; * } | string} [options] * @param {(err?: Error) => any} callback * @returns {void} diff --git a/lib/http2.js b/lib/http2.js index 8db4d918629a1b..8c6ebebb7b6a4c 100644 --- a/lib/http2.js +++ b/lib/http2.js @@ -8,6 +8,7 @@ const { getDefaultSettings, getPackedSettings, getUnpackedSettings, + performServerHandshake, sensitiveHeaders, Http2ServerRequest, Http2ServerResponse, @@ -21,6 +22,7 @@ module.exports = { getDefaultSettings, getPackedSettings, getUnpackedSettings, + performServerHandshake, sensitiveHeaders, Http2ServerRequest, Http2ServerResponse, diff --git a/lib/internal/bootstrap/node.js b/lib/internal/bootstrap/node.js index 7a773d5208e250..8812c3b35184b2 100644 --- a/lib/internal/bootstrap/node.js +++ b/lib/internal/bootstrap/node.js @@ -172,6 +172,7 @@ const rawMethods = internalBinding('process_methods'); process._kill = rawMethods._kill; const wrapped = perThreadSetup.wrapProcessMethods(rawMethods); + process.loadEnvFile = wrapped.loadEnvFile; process._rawDebug = wrapped._rawDebug; process.cpuUsage = wrapped.cpuUsage; process.resourceUsage = wrapped.resourceUsage; diff --git a/lib/internal/bootstrap/realm.js b/lib/internal/bootstrap/realm.js index 57ab47178d033d..f030f537a084d7 100644 --- a/lib/internal/bootstrap/realm.js +++ b/lib/internal/bootstrap/realm.js @@ -350,7 +350,9 @@ class BuiltinModule { const url = `node:${this.id}`; const builtin = this; const exportsKeys = ArrayPrototypeSlice(this.exportKeys); - ArrayPrototypePush(exportsKeys, 'default'); + if (!ArrayPrototypeIncludes(exportsKeys, 'default')) { + ArrayPrototypePush(exportsKeys, 'default'); + } this.module = new ModuleWrap( url, undefined, exportsKeys, function() { diff --git a/lib/internal/fs/promises.js b/lib/internal/fs/promises.js index 42364c78c667cb..3272608bf7d04d 100644 --- a/lib/internal/fs/promises.js +++ b/lib/internal/fs/promises.js @@ -365,6 +365,7 @@ class FileHandle extends EventEmitter { * autoClose?: boolean; * emitClose?: boolean; * start: number; + * highWaterMark?: number; * flush?: boolean; * }} [options] * @returns {WriteStream} @@ -1254,7 +1255,7 @@ async function* _watch(filename, options = kEmptyObject) { // e.g. Linux due to the limitations of inotify. 
if (options.recursive && !isOSX && !isWindows) { const watcher = new nonNativeWatcher.FSWatcher(options); - await watcher[kFSWatchStart](filename); + watcher[kFSWatchStart](filename); yield* watcher; return; } diff --git a/lib/internal/fs/recursive_watch.js b/lib/internal/fs/recursive_watch.js index 54298832da5a1b..7d8b12eeb93445 100644 --- a/lib/internal/fs/recursive_watch.js +++ b/lib/internal/fs/recursive_watch.js @@ -1,10 +1,7 @@ 'use strict'; const { - ArrayPrototypePush, - SafePromiseAllReturnVoid, Promise, - PromisePrototypeThen, SafeMap, SafeSet, StringPrototypeStartsWith, @@ -31,47 +28,19 @@ const { } = require('path'); let internalSync; -let internalPromises; - -function lazyLoadFsPromises() { - internalPromises ??= require('fs/promises'); - return internalPromises; -} function lazyLoadFsSync() { internalSync ??= require('fs'); return internalSync; } -let kResistStopPropagation; - -async function traverse(dir, files = new SafeMap(), symbolicLinks = new SafeSet()) { - const { opendir } = lazyLoadFsPromises(); - - const filenames = await opendir(dir); - const subdirectories = []; - - for await (const file of filenames) { - const f = pathJoin(dir, file.name); - - files.set(f, file); - - // Do not follow symbolic links - if (file.isSymbolicLink()) { - symbolicLinks.add(f); - } else if (file.isDirectory()) { - ArrayPrototypePush(subdirectories, traverse(f, files)); - } - } - - await SafePromiseAllReturnVoid(subdirectories); - return files; -} +let kResistStopPropagation; class FSWatcher extends EventEmitter { #options = null; #closed = false; #files = new SafeMap(); + #watchers = new SafeMap(); #symbolicFiles = new SafeSet(); #rootPath = pathResolve(); #watchingFile = false; @@ -111,11 +80,11 @@ class FSWatcher extends EventEmitter { return; } - const { unwatchFile } = lazyLoadFsSync(); this.#closed = true; for (const file of this.#files.keys()) { - unwatchFile(file); + this.#watchers.get(file).close(); + this.#watchers.delete(file); } this.#files.clear(); @@ -124,24 +93,26 @@ class FSWatcher extends EventEmitter { } #unwatchFiles(file) { - const { unwatchFile } = lazyLoadFsSync(); - this.#symbolicFiles.delete(file); for (const filename of this.#files.keys()) { if (StringPrototypeStartsWith(filename, file)) { - unwatchFile(filename); + this.#files.delete(filename); + this.#watchers.get(filename).close(); + this.#watchers.delete(filename); } } } - async #watchFolder(folder) { - const { opendir } = lazyLoadFsPromises(); + #watchFolder(folder) { + const { readdirSync } = lazyLoadFsSync(); try { - const files = await opendir(folder); + const files = readdirSync(folder, { + withFileTypes: true, + }); - for await (const file of files) { + for (const file of files) { if (this.#closed) { break; } @@ -155,11 +126,9 @@ class FSWatcher extends EventEmitter { this.#symbolicFiles.add(f); } - this.#files.set(f, file); - if (file.isFile()) { - this.#watchFile(f); - } else if (file.isDirectory() && !file.isSymbolicLink()) { - await this.#watchFolder(f); + this.#watchFile(f); + if (file.isDirectory() && !file.isSymbolicLink()) { + this.#watchFolder(f); } } } @@ -173,22 +142,30 @@ class FSWatcher extends EventEmitter { return; } - const { watchFile } = lazyLoadFsSync(); - const existingStat = this.#files.get(file); + const { watch, statSync } = lazyLoadFsSync(); + + if (this.#files.has(file)) { + return; + } + + { + const existingStat = statSync(file); + this.#files.set(file, existingStat); + } - watchFile(file, { + const watcher = watch(file, { persistent: this.#options.persistent, - }, (currentStats, 
previousStats) => { - if (existingStat && !existingStat.isDirectory() && - currentStats.nlink !== 0 && existingStat.mtimeMs === currentStats.mtimeMs) { - return; - } + }, (eventType, filename) => { + const existingStat = this.#files.get(file); + const currentStats = statSync(file); this.#files.set(file, currentStats); - if (currentStats.birthtimeMs === 0 && previousStats.birthtimeMs !== 0) { + if (currentStats.birthtimeMs === 0 && existingStat.birthtimeMs !== 0) { // The file is now deleted this.#files.delete(file); + this.#watchers.delete(file); + watcher.close(); this.emit('change', 'rename', pathRelative(this.#rootPath, file)); this.#unwatchFiles(file); } else if (file === this.#rootPath && this.#watchingFile) { @@ -205,6 +182,7 @@ class FSWatcher extends EventEmitter { this.emit('change', 'change', pathRelative(this.#rootPath, file)); } }); + this.#watchers.set(file, watcher); } [kFSWatchStart](filename) { @@ -217,19 +195,9 @@ class FSWatcher extends EventEmitter { this.#closed = false; this.#watchingFile = file.isFile(); + this.#watchFile(filename); if (file.isDirectory()) { - this.#files.set(filename, file); - - PromisePrototypeThen( - traverse(filename, this.#files, this.#symbolicFiles), - () => { - for (const f of this.#files.keys()) { - this.#watchFile(f); - } - }, - ); - } else { - this.#watchFile(filename); + this.#watchFolder(filename); } } catch (error) { if (error.code === 'ENOENT') { @@ -264,7 +232,10 @@ class FSWatcher extends EventEmitter { resolve({ __proto__: null, value: { eventType, filename } }); }); } : (resolve, reject) => { - const onAbort = () => reject(new AbortError(undefined, { cause: signal.reason })); + const onAbort = () => { + this.close(); + reject(new AbortError(undefined, { cause: signal.reason })); + }; if (signal.aborted) return onAbort(); kResistStopPropagation ??= require('internal/event_target').kResistStopPropagation; signal.addEventListener('abort', onAbort, { __proto__: null, once: true, [kResistStopPropagation]: true }); @@ -277,6 +248,10 @@ class FSWatcher extends EventEmitter { next: () => (this.#closed ? 
{ __proto__: null, done: true } : new Promise(promiseExecutor)), + return: () => { + this.close(); + return { __proto__: null, done: true }; + }, [SymbolAsyncIterator]() { return this; }, }; } diff --git a/lib/internal/http2/compat.js b/lib/internal/http2/compat.js index 7bf079900c652f..f0dd26de0d6359 100644 --- a/lib/internal/http2/compat.js +++ b/lib/internal/http2/compat.js @@ -76,6 +76,7 @@ const kRawHeaders = Symbol('rawHeaders'); const kTrailers = Symbol('trailers'); const kRawTrailers = Symbol('rawTrailers'); const kSetHeader = Symbol('setHeader'); +const kAppendHeader = Symbol('appendHeader'); const kAborted = Symbol('aborted'); let statusMessageWarned = false; @@ -652,6 +653,47 @@ class Http2ServerResponse extends Stream { this[kHeaders][name] = value; } + appendHeader(name, value) { + validateString(name, 'name'); + if (this[kStream].headersSent) + throw new ERR_HTTP2_HEADERS_SENT(); + + this[kAppendHeader](name, value); + } + + [kAppendHeader](name, value) { + name = StringPrototypeToLowerCase(StringPrototypeTrim(name)); + assertValidHeader(name, value); + + if (!isConnectionHeaderAllowed(name, value)) { + return; + } + + if (name[0] === ':') + assertValidPseudoHeader(name); + else if (!checkIsHttpToken(name)) + this.destroy(new ERR_INVALID_HTTP_TOKEN('Header name', name)); + + // Handle various possible cases the same as OutgoingMessage.appendHeader: + const headers = this[kHeaders]; + if (headers === null || !headers[name]) { + return this.setHeader(name, value); + } + + if (!ArrayIsArray(headers[name])) { + headers[name] = [headers[name]]; + } + + const existingValues = headers[name]; + if (ArrayIsArray(value)) { + for (let i = 0, length = value.length; i < length; i++) { + existingValues.push(value[i]); + } + } else { + existingValues.push(value); + } + } + get statusMessage() { statusMessageWarn(); @@ -684,10 +726,33 @@ class Http2ServerResponse extends Stream { let i; if (ArrayIsArray(headers)) { + if (this[kHeaders]) { + // Headers in obj should override previous headers but still + // allow explicit duplicates. To do so, we first remove any + // existing conflicts, then use appendHeader. This is the + // slow path, which only applies when you use setHeader and + // then pass headers in writeHead too. + + // We need to handle both the tuple and flat array formats, just + // like the logic further below. 
+ if (headers.length && ArrayIsArray(headers[0])) { + for (let n = 0; n < headers.length; n += 1) { + const key = headers[n + 0][0]; + this.removeHeader(key); + } + } else { + for (let n = 0; n < headers.length; n += 2) { + const key = headers[n + 0]; + this.removeHeader(key); + } + } + } + + // Append all the headers provided in the array: if (headers.length && ArrayIsArray(headers[0])) { for (i = 0; i < headers.length; i++) { const header = headers[i]; - this[kSetHeader](header[0], header[1]); + this[kAppendHeader](header[0], header[1]); } } else { if (headers.length % 2 !== 0) { @@ -695,7 +760,7 @@ class Http2ServerResponse extends Stream { } for (i = 0; i < headers.length; i += 2) { - this[kSetHeader](headers[i], headers[i + 1]); + this[kAppendHeader](headers[i], headers[i + 1]); } } } else if (typeof headers === 'object') { diff --git a/lib/internal/http2/core.js b/lib/internal/http2/core.js index 69956d2885e1f6..4e67eba4ecac40 100644 --- a/lib/internal/http2/core.js +++ b/lib/internal/http2/core.js @@ -1228,12 +1228,6 @@ class Http2Session extends EventEmitter { constructor(type, options, socket) { super(); - if (!socket._handle || !socket._handle.isStreamBase) { - socket = new JSStreamSocket(socket); - } - socket.on('error', socketOnError); - socket.on('close', socketOnClose); - // No validation is performed on the input parameters because this // constructor is not exported directly for users. @@ -1245,6 +1239,12 @@ class Http2Session extends EventEmitter { socket[kSession] = this; + if (!socket._handle || !socket._handle.isStreamBase) { + socket = new JSStreamSocket(socket); + } + socket.on('error', socketOnError); + socket.on('close', socketOnClose); + this[kState] = { destroyCode: NGHTTP2_NO_ERROR, flags: SESSION_FLAGS_PENDING, @@ -1644,7 +1644,7 @@ class ServerHttp2Session extends Http2Session { // not be an issue in practice. Additionally, the 'priority' event on // server instances (or any other object) is fully undocumented. this[kNativeFields][kSessionPriorityListenerCount] = - server.listenerCount('priority'); + server ? 
server.listenerCount('priority') : 0; } get server() { @@ -3435,6 +3435,11 @@ function getUnpackedSettings(buf, options = kEmptyObject) { return settings; } +function performServerHandshake(socket, options = {}) { + options = initializeOptions(options); + return new ServerHttp2Session(options, socket, undefined); +} + binding.setCallbackFunctions( onSessionInternalError, onPriority, @@ -3458,6 +3463,7 @@ module.exports = { getDefaultSettings, getPackedSettings, getUnpackedSettings, + performServerHandshake, sensitiveHeaders: kSensitiveHeaders, Http2Session, Http2Stream, diff --git a/lib/internal/js_stream_socket.js b/lib/internal/js_stream_socket.js index a6aee73f468b08..3e01327202be1a 100644 --- a/lib/internal/js_stream_socket.js +++ b/lib/internal/js_stream_socket.js @@ -17,6 +17,7 @@ let debug = require('internal/util/debuglog').debuglog( ); const { owner_symbol } = require('internal/async_hooks').symbols; const { ERR_STREAM_WRAP } = require('internal/errors').codes; +const { kSession } = require('internal/stream_base_commons'); const kCurrentWriteRequest = Symbol('kCurrentWriteRequest'); const kCurrentShutdownRequest = Symbol('kCurrentShutdownRequest'); @@ -263,6 +264,14 @@ class JSStreamSocket extends Socket { cb(); }); } + + get [kSession]() { + return this.stream[kSession]; + } + + set [kSession](session) { + this.stream[kSession] = session; + } } module.exports = JSStreamSocket; diff --git a/lib/internal/modules/cjs/loader.js b/lib/internal/modules/cjs/loader.js index 44abacb41a3430..e5b47d8874aeb7 100644 --- a/lib/internal/modules/cjs/loader.js +++ b/lib/internal/modules/cjs/loader.js @@ -469,15 +469,15 @@ function tryPackage(requestPath, exts, isMain, originalPath) { } /** - * Check if the file exists and is not a directory if using `--preserve-symlinks` and `isMain` is false, keep symlinks - * intact, otherwise resolve to the absolute realpath. + * Check if the file exists and is not a directory if using `--preserve-symlinks` and `isMain` is false or + * `--preserve-symlinks-main` and `isMain` is true , keep symlinks intact, otherwise resolve to the absolute realpath. * @param {string} requestPath The path to the file to load. * @param {boolean} isMain Whether the file is the main module. */ function tryFile(requestPath, isMain) { const rc = _stat(requestPath); if (rc !== 0) { return; } - if (getOptionValue('--preserve-symlinks') && !isMain) { + if (getOptionValue(isMain ? '--preserve-symlinks-main' : '--preserve-symlinks')) { return path.resolve(requestPath); } return toRealPath(requestPath); diff --git a/lib/internal/process/per_thread.js b/lib/internal/process/per_thread.js index 9b86f20053da3b..b45f2a61e0ddaf 100644 --- a/lib/internal/process/per_thread.js +++ b/lib/internal/process/per_thread.js @@ -46,6 +46,8 @@ const { validateNumber, validateObject, } = require('internal/validators'); +const { getValidatedPath } = require('internal/fs/utils'); +const { toNamespacedPath } = require('path'); const constants = internalBinding('constants').os.signals; const kInternal = Symbol('internal properties'); @@ -57,21 +59,13 @@ const { exitCodes: { kNoFailure } } = internalBinding('errors'); const binding = internalBinding('process_methods'); -let hrValues; -let hrBigintValues; - -function refreshHrtimeBuffer() { - // The 3 entries filled in by the original process.hrtime contains - // the upper/lower 32 bits of the second part of the value, - // and the remaining nanoseconds of the value. 
- hrValues = binding.hrtimeBuffer; - // Use a BigUint64Array in the closure because this is actually a bit - // faster than simply returning a BigInt from C++ in V8 7.1. - hrBigintValues = new BigUint64Array(binding.hrtimeBuffer.buffer, 0, 1); -} - -// Create the buffers. -refreshHrtimeBuffer(); +// The 3 entries filled in by the original process.hrtime contains +// the upper/lower 32 bits of the second part of the value, +// and the remaining nanoseconds of the value. +const hrValues = binding.hrtimeBuffer; +// Use a BigUint64Array because this is actually a bit +// faster than simply returning a BigInt from C++ in V8 7.1. +const hrBigintValues = new BigUint64Array(binding.hrtimeBuffer.buffer, 0, 1); function hrtime(time) { binding.hrtime(); @@ -108,6 +102,7 @@ function wrapProcessMethods(binding) { memoryUsage: _memoryUsage, rss, resourceUsage: _resourceUsage, + loadEnvFile: _loadEnvFile, } = binding; function _rawDebug(...args) { @@ -258,6 +253,19 @@ function wrapProcessMethods(binding) { }; } + /** + * Loads the `.env` file to process.env. + * @param {string | URL | Buffer | undefined} path + */ + function loadEnvFile(path = undefined) { // Provide optional value so that `loadEnvFile.length` returns 0 + if (path != null) { + path = getValidatedPath(path); + _loadEnvFile(toNamespacedPath(path)); + } else { + _loadEnvFile(); + } + } + return { _rawDebug, @@ -266,6 +274,7 @@ function wrapProcessMethods(binding) { memoryUsage, kill, exit, + loadEnvFile, }; } @@ -425,5 +434,4 @@ module.exports = { wrapProcessMethods, hrtime, hrtimeBigInt, - refreshHrtimeBuffer, }; diff --git a/lib/internal/process/pre_execution.js b/lib/internal/process/pre_execution.js index b6bdb4785003f7..98533b7828d3ff 100644 --- a/lib/internal/process/pre_execution.js +++ b/lib/internal/process/pre_execution.js @@ -211,8 +211,6 @@ function patchProcessObject(expandArgv1) { const binding = internalBinding('process_methods'); binding.patchProcessObject(process); - require('internal/process/per_thread').refreshHrtimeBuffer(); - // Since we replace process.argv[0] below, preserve the original value in case the user needs it. ObjectDefineProperty(process, 'argv0', { __proto__: null, diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index e38b1a67c09660..ee2c345e7a0403 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -132,6 +132,14 @@ class TestContext { return this.#test.name; } + get error() { + return this.#test.error; + } + + get passed() { + return this.#test.passed; + } + diagnostic(message) { this.#test.diagnostic(message); } @@ -639,12 +647,17 @@ class Test extends AsyncResource { return; } - await afterEach(); - await after(); this.pass(); + try { + await afterEach(); + await after(); + } catch (err) { + // If one of the after hooks has thrown unset endTime so that the + // catch below can do its cancel/fail logic. + this.endTime = null; + throw err; + } } catch (err) { - try { await afterEach(); } catch { /* test is already failing, let's ignore the error */ } - try { await after(); } catch { /* Ignore error. */ } if (isTestFailureError(err)) { if (err.failureType === kTestTimeoutFailure) { this.#cancel(err); @@ -654,6 +667,8 @@ class Test extends AsyncResource { } else { this.fail(new ERR_TEST_FAILURE(err, kTestCodeFailure)); } + try { await afterEach(); } catch { /* test is already failing, let's ignore the error */ } + try { await after(); } catch { /* Ignore error. 
*/ } } finally { stopPromise?.[SymbolDispose](); diff --git a/lib/internal/url.js b/lib/internal/url.js index 38f97926064595..0e69ff52b5edef 100644 --- a/lib/internal/url.js +++ b/lib/internal/url.js @@ -206,6 +206,7 @@ class URLContext { } } +let setURLSearchParamsModified; let setURLSearchParamsContext; let getURLSearchParamsList; let setURLSearchParams; @@ -475,8 +476,9 @@ class URLSearchParams { name = StringPrototypeToWellFormed(`${name}`); value = StringPrototypeToWellFormed(`${value}`); ArrayPrototypePush(this.#searchParams, name, value); + if (this.#context) { - this.#context.search = this.toString(); + setURLSearchParamsModified(this.#context); } } @@ -509,8 +511,9 @@ class URLSearchParams { } } } + if (this.#context) { - this.#context.search = this.toString(); + setURLSearchParamsModified(this.#context); } } @@ -615,7 +618,7 @@ class URLSearchParams { } if (this.#context) { - this.#context.search = this.toString(); + setURLSearchParamsModified(this.#context); } } @@ -664,7 +667,7 @@ class URLSearchParams { } if (this.#context) { - this.#context.search = this.toString(); + setURLSearchParamsModified(this.#context); } } @@ -769,6 +772,20 @@ function isURL(self) { class URL { #context = new URLContext(); #searchParams; + #searchParamsModified; + + static { + setURLSearchParamsModified = (obj) => { + // When URLSearchParams changes, we lazily update URL on the next read/write for performance. + obj.#searchParamsModified = true; + + // If URL has an existing search, remove it without cascading back to URLSearchParams. + // Do this to avoid any internal confusion about whether URLSearchParams or URL is up-to-date. + if (obj.#context.hasSearch) { + obj.#updateContext(bindingUrl.update(obj.#context.href, updateActions.kSearch, '')); + } + }; + } constructor(input, base = undefined) { markTransferMode(this, false, false); @@ -814,7 +831,37 @@ class URL { return `${constructor.name} ${inspect(obj, opts)}`; } - #updateContext(href) { + #getSearchFromContext() { + if (!this.#context.hasSearch) return ''; + let endsAt = this.#context.href.length; + if (this.#context.hasHash) endsAt = this.#context.hash_start; + if (endsAt - this.#context.search_start <= 1) return ''; + return StringPrototypeSlice(this.#context.href, this.#context.search_start, endsAt); + } + + #getSearchFromParams() { + if (!this.#searchParams?.size) return ''; + return `?${this.#searchParams}`; + } + + #ensureSearchParamsUpdated() { + // URL is updated lazily to greatly improve performance when URLSearchParams is updated repeatedly. + // If URLSearchParams has been modified, reflect that back into URL, without cascading back. + if (this.#searchParamsModified) { + this.#searchParamsModified = false; + this.#updateContext(bindingUrl.update(this.#context.href, updateActions.kSearch, this.#getSearchFromParams())); + } + } + + /** + * Update the internal context state for URL. + * @param {string} href New href string from `bindingUrl.update`. + * @param {boolean} [shouldUpdateSearchParams] If the update has potential to update search params (href/search). + */ + #updateContext(href, shouldUpdateSearchParams = false) { + const previousSearch = shouldUpdateSearchParams && this.#searchParams && + (this.#searchParamsModified ? 
this.#getSearchFromParams() : this.#getSearchFromContext()); + this.#context.href = href; const { @@ -840,19 +887,31 @@ class URL { this.#context.scheme_type = scheme_type; if (this.#searchParams) { - if (this.#context.hasSearch) { - setURLSearchParams(this.#searchParams, this.search); - } else { - setURLSearchParams(this.#searchParams, undefined); + // If the search string has updated, URL becomes the source of truth, and we update URLSearchParams. + // Only do this when we're expecting it to have changed, otherwise a change to hash etc. + // would incorrectly compare the URLSearchParams state to the empty URL search state. + if (shouldUpdateSearchParams) { + const currentSearch = this.#getSearchFromContext(); + if (previousSearch !== currentSearch) { + setURLSearchParams(this.#searchParams, currentSearch); + this.#searchParamsModified = false; + } } + + // If we have a URLSearchParams, ensure that URL is up-to-date with any modification to it. + this.#ensureSearchParamsUpdated(); } } toString() { + // Updates to URLSearchParams are lazily propagated to URL, so we need to check we're in sync. + this.#ensureSearchParamsUpdated(); return this.#context.href; } get href() { + // Updates to URLSearchParams are lazily propagated to URL, so we need to check we're in sync. + this.#ensureSearchParamsUpdated(); return this.#context.href; } @@ -860,7 +919,7 @@ class URL { value = `${value}`; const href = bindingUrl.update(this.#context.href, updateActions.kHref, value); if (!href) { throw new ERR_INVALID_URL(value); } - this.#updateContext(href); + this.#updateContext(href, true); } // readonly @@ -1002,17 +1061,15 @@ class URL { } get search() { - if (!this.#context.hasSearch) { return ''; } - let endsAt = this.#context.href.length; - if (this.#context.hasHash) { endsAt = this.#context.hash_start; } - if (endsAt - this.#context.search_start <= 1) { return ''; } - return StringPrototypeSlice(this.#context.href, this.#context.search_start, endsAt); + // Updates to URLSearchParams are lazily propagated to URL, so we need to check we're in sync. + this.#ensureSearchParamsUpdated(); + return this.#getSearchFromContext(); } set search(value) { const href = bindingUrl.update(this.#context.href, updateActions.kSearch, StringPrototypeToWellFormed(`${value}`)); if (href) { - this.#updateContext(href); + this.#updateContext(href, true); } } @@ -1020,8 +1077,9 @@ class URL { get searchParams() { // Create URLSearchParams on demand to greatly improve the URL performance. if (this.#searchParams == null) { - this.#searchParams = new URLSearchParams(this.search); + this.#searchParams = new URLSearchParams(this.#getSearchFromContext()); setURLSearchParamsContext(this.#searchParams, this); + this.#searchParamsModified = false; } return this.#searchParams; } @@ -1041,6 +1099,8 @@ class URL { } toJSON() { + // Updates to URLSearchParams are lazily propagated to URL, so we need to check we're in sync. 
+ this.#ensureSearchParamsUpdated(); return this.#context.href; } diff --git a/lib/internal/webstreams/readablestream.js b/lib/internal/webstreams/readablestream.js index 5103591312e479..d4526011bec46d 100644 --- a/lib/internal/webstreams/readablestream.js +++ b/lib/internal/webstreams/readablestream.js @@ -598,8 +598,6 @@ class ReadableStream { [kTransferList]() { const { port1, port2 } = new MessageChannel(); - port1.unref(); - port2.unref(); this[kState].transfer.port1 = port1; this[kState].transfer.port2 = port2; return [ port2 ]; @@ -609,7 +607,11 @@ class ReadableStream { const transfer = lazyTransfer(); setupReadableStreamDefaultControllerFromSource( this, - new transfer.CrossRealmTransformReadableSource(port), + // The MessagePort is set to be referenced when reading. + // After two MessagePorts are closed, there is a problem with + // lingering promise not being properly resolved. + // https://github.com/nodejs/node/issues/51486 + new transfer.CrossRealmTransformReadableSource(port, true), 0, () => 1); } } diff --git a/lib/internal/webstreams/transfer.js b/lib/internal/webstreams/transfer.js index c4cb4077f88403..9835e6ab272c98 100644 --- a/lib/internal/webstreams/transfer.js +++ b/lib/internal/webstreams/transfer.js @@ -102,10 +102,11 @@ function InternalCloneableDOMException() { InternalCloneableDOMException[kDeserialize] = () => {}; class CrossRealmTransformReadableSource { - constructor(port) { + constructor(port, unref) { this[kState] = { port, controller: undefined, + unref, }; port.onmessage = ({ data }) => { @@ -152,6 +153,10 @@ class CrossRealmTransformReadableSource { } async pull() { + if (this[kState].unref) { + this[kState].unref = false; + this[kState].port.ref(); + } this[kState].port.postMessage({ type: 'pull' }); } @@ -172,11 +177,12 @@ class CrossRealmTransformReadableSource { } class CrossRealmTransformWritableSink { - constructor(port) { + constructor(port, unref) { this[kState] = { port, controller: undefined, backpressurePromise: createDeferredPromise(), + unref, }; port.onmessage = ({ data }) => { @@ -212,6 +218,7 @@ class CrossRealmTransformWritableSink { error); port.close(); }; + port.unref(); } @@ -220,6 +227,10 @@ class CrossRealmTransformWritableSink { } async write(chunk) { + if (this[kState].unref) { + this[kState].unref = false; + this[kState].port.ref(); + } if (this[kState].backpressurePromise === undefined) { this[kState].backpressurePromise = { promise: PromiseResolve(), @@ -264,12 +275,12 @@ class CrossRealmTransformWritableSink { } function newCrossRealmReadableStream(writable, port) { - const readable = - new ReadableStream( - new CrossRealmTransformReadableSource(port)); + // MessagePort should always be unref. + // There is a problem with the process not terminating. + // https://github.com/nodejs/node/issues/44985 + const readable = new ReadableStream(new CrossRealmTransformReadableSource(port, false)); - const promise = - readableStreamPipeTo(readable, writable, false, false, false); + const promise = readableStreamPipeTo(readable, writable, false, false, false); setPromiseHandled(promise); @@ -280,12 +291,15 @@ function newCrossRealmReadableStream(writable, port) { } function newCrossRealmWritableSink(readable, port) { - const writable = - new WritableStream( - new CrossRealmTransformWritableSink(port)); + // MessagePort should always be unref. + // There is a problem with the process not terminating. 
+ // https://github.com/nodejs/node/issues/44985 + const writable = new WritableStream(new CrossRealmTransformWritableSink(port, false)); const promise = readableStreamPipeTo(readable, writable, false, false, false); + setPromiseHandled(promise); + return { writable, promise, diff --git a/lib/internal/webstreams/writablestream.js b/lib/internal/webstreams/writablestream.js index 3bc77fc6fb7067..121afc0e181ec6 100644 --- a/lib/internal/webstreams/writablestream.js +++ b/lib/internal/webstreams/writablestream.js @@ -263,8 +263,6 @@ class WritableStream { this[kState].transfer.readable = readable; this[kState].transfer.promise = promise; - setPromiseHandled(this[kState].transfer.promise); - return { data: { port: this[kState].transfer.port2 }, deserializeInfo: @@ -274,8 +272,6 @@ class WritableStream { [kTransferList]() { const { port1, port2 } = new MessageChannel(); - port1.unref(); - port2.unref(); this[kState].transfer.port1 = port1; this[kState].transfer.port2 = port2; return [ port2 ]; @@ -285,7 +281,11 @@ class WritableStream { const transfer = lazyTransfer(); setupWritableStreamDefaultControllerFromSink( this, - new transfer.CrossRealmTransformWritableSink(port), + // The MessagePort is set to be referenced when reading. + // After two MessagePorts are closed, there is a problem with + // lingering promise not being properly resolved. + // https://github.com/nodejs/node/issues/51486 + new transfer.CrossRealmTransformWritableSink(port, true), 1, () => 1); } diff --git a/lib/util.js b/lib/util.js index 13a437c9318d05..e4cb01a8bd13f2 100644 --- a/lib/util.js +++ b/lib/util.js @@ -67,9 +67,11 @@ const { debuglog } = require('internal/util/debuglog'); const { validateFunction, validateNumber, + validateString, } = require('internal/validators'); const { isBuffer } = require('buffer').Buffer; const types = require('internal/util/types'); +const binding = internalBinding('util'); const { deprecate, @@ -371,6 +373,16 @@ function _exceptionWithHostPort(...args) { return new ExceptionWithHostPort(...args); } +/** + * Parses the content of a `.env` file. + * @param {string} content + * @returns {Record} + */ +function parseEnv(content) { + validateString(content, 'content'); + return binding.parseEnv(content); +} + // Keep the `exports =` so that various functions can still be monkeypatched module.exports = { _errnoException, @@ -465,6 +477,7 @@ module.exports = { return lazyAbortController().aborted; }, types, + parseEnv, }; defineLazyProperties( diff --git a/node.gni b/node.gni index 2be97a17a2f710..2445d09b8179a6 100644 --- a/node.gni +++ b/node.gni @@ -1,8 +1,3 @@ -# Copyright 2019 the V8 project authors. All rights reserved. -# Copyright 2023 Microsoft Inc. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - # This file is used by GN for building, which is NOT the build system used for # building official binaries. # Please take a look at node.gyp if you are making changes to build system. 
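The lib/util.js hunk above exposes a new `util.parseEnv(content)` helper backed by the native `parseEnv` binding, and the process hunks elsewhere in this patch wire up `process.loadEnvFile(path)`. The sketch below is illustrative only and is not part of the patch: it assumes a Node.js build that already includes these additions, and the `.env` contents and working-directory file it relies on are hypothetical.

'use strict';
// Illustrative usage sketch (not part of the patch). Assumes a build of
// Node.js containing the util.parseEnv and process.loadEnvFile changes.
const util = require('node:util');

// parseEnv takes the raw text of a .env file and returns a plain object of
// key/value pairs; it does not modify process.env.
const parsed = util.parseEnv('BASIC=basic\nAPP_NAME=demo');
console.log(parsed.BASIC);    // 'basic'
console.log(parsed.APP_NAME); // 'demo'

// loadEnvFile reads a .env file (defaulting to ./.env when no path is given)
// and assigns the parsed entries to process.env. It throws ENOENT if the
// file does not exist, so a .env file must be present in the CWD here.
process.loadEnvFile();
console.log(process.env.BASIC);

diff --git a/src/async_wrap.h b/src/async_wrap.h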
diff --git a/src/async_wrap.h b/src/async_wrap.h index 01e981aa671a23..7234d88b67a961 100644 --- a/src/async_wrap.h +++ b/src/async_wrap.h @@ -102,17 +102,9 @@ namespace node { #define NODE_ASYNC_CRYPTO_PROVIDER_TYPES(V) #endif // HAVE_OPENSSL -#if HAVE_INSPECTOR -#define NODE_ASYNC_INSPECTOR_PROVIDER_TYPES(V) \ - V(INSPECTORJSBINDING) -#else -#define NODE_ASYNC_INSPECTOR_PROVIDER_TYPES(V) -#endif // HAVE_INSPECTOR - -#define NODE_ASYNC_PROVIDER_TYPES(V) \ - NODE_ASYNC_NON_CRYPTO_PROVIDER_TYPES(V) \ - NODE_ASYNC_CRYPTO_PROVIDER_TYPES(V) \ - NODE_ASYNC_INSPECTOR_PROVIDER_TYPES(V) +#define NODE_ASYNC_PROVIDER_TYPES(V) \ + NODE_ASYNC_NON_CRYPTO_PROVIDER_TYPES(V) \ + NODE_ASYNC_CRYPTO_PROVIDER_TYPES(V) class Environment; class DestroyParam; diff --git a/src/base64_version.h b/src/base64_version.h index c3737f4beebfcb..ce3d7c03f8c979 100644 --- a/src/base64_version.h +++ b/src/base64_version.h @@ -2,5 +2,5 @@ // Refer to tools/dep_updaters/update-base64.sh #ifndef SRC_BASE64_VERSION_H_ #define SRC_BASE64_VERSION_H_ -#define BASE64_VERSION "0.5.1" +#define BASE64_VERSION "0.5.2" #endif // SRC_BASE64_VERSION_H_ diff --git a/src/inspector/node_inspector.gypi b/src/inspector/node_inspector.gypi index ba24e6acfc0744..a2dfdcb42db196 100644 --- a/src/inspector/node_inspector.gypi +++ b/src/inspector/node_inspector.gypi @@ -39,7 +39,6 @@ ], 'node_protocol_files': [ '<(protocol_tool_path)/lib/Allocator_h.template', - '<(protocol_tool_path)/lib/Array_h.template', '<(protocol_tool_path)/lib/base_string_adapter_cc.template', '<(protocol_tool_path)/lib/base_string_adapter_h.template', '<(protocol_tool_path)/lib/DispatcherBase_cpp.template', diff --git a/src/inspector/tracing_agent.cc b/src/inspector/tracing_agent.cc index cdbdd63f8aff9f..e7b6d3b3ea63bd 100644 --- a/src/inspector/tracing_agent.cc +++ b/src/inspector/tracing_agent.cc @@ -147,8 +147,8 @@ DispatchResponse TracingAgent::start( std::set categories_set; protocol::Array* categories = traceConfig->getIncludedCategories(); - for (size_t i = 0; i < categories->length(); i++) - categories_set.insert(categories->get(i)); + for (size_t i = 0; i < categories->size(); i++) + categories_set.insert((*categories)[i]); if (categories_set.empty()) return DispatchResponse::Error("At least one category should be enabled"); @@ -172,29 +172,29 @@ DispatchResponse TracingAgent::stop() { DispatchResponse TracingAgent::getCategories( std::unique_ptr>* categories) { - *categories = Array::create(); + *categories = std::make_unique>(); protocol::Array* categories_list = categories->get(); // In alphabetical order - categories_list->addItem("node"); - categories_list->addItem("node.async_hooks"); - categories_list->addItem("node.bootstrap"); - categories_list->addItem("node.console"); - categories_list->addItem("node.dns.native"); - categories_list->addItem("node.environment"); - categories_list->addItem("node.fs.async"); - categories_list->addItem("node.fs.sync"); - categories_list->addItem("node.fs_dir.async"); - categories_list->addItem("node.fs_dir.sync"); - categories_list->addItem("node.http"); - categories_list->addItem("node.net.native"); - categories_list->addItem("node.perf"); - categories_list->addItem("node.perf.timerify"); - categories_list->addItem("node.perf.usertiming"); - categories_list->addItem("node.promises.rejections"); - categories_list->addItem("node.threadpoolwork.async"); - categories_list->addItem("node.threadpoolwork.sync"); - categories_list->addItem("node.vm.script"); - categories_list->addItem("v8"); + categories_list->emplace_back("node"); 
+ categories_list->emplace_back("node.async_hooks"); + categories_list->emplace_back("node.bootstrap"); + categories_list->emplace_back("node.console"); + categories_list->emplace_back("node.dns.native"); + categories_list->emplace_back("node.environment"); + categories_list->emplace_back("node.fs.async"); + categories_list->emplace_back("node.fs.sync"); + categories_list->emplace_back("node.fs_dir.async"); + categories_list->emplace_back("node.fs_dir.sync"); + categories_list->emplace_back("node.http"); + categories_list->emplace_back("node.net.native"); + categories_list->emplace_back("node.perf"); + categories_list->emplace_back("node.perf.timerify"); + categories_list->emplace_back("node.perf.usertiming"); + categories_list->emplace_back("node.promises.rejections"); + categories_list->emplace_back("node.threadpoolwork.async"); + categories_list->emplace_back("node.threadpoolwork.sync"); + categories_list->emplace_back("node.vm.script"); + categories_list->emplace_back("v8"); return DispatchResponse::OK(); } diff --git a/src/inspector_js_api.cc b/src/inspector_js_api.cc index 5c66757afd1a7a..0a2d9e2ec84b08 100644 --- a/src/inspector_js_api.cc +++ b/src/inspector_js_api.cc @@ -1,4 +1,3 @@ -#include "async_wrap-inl.h" #include "base_object-inl.h" #include "inspector_agent.h" #include "inspector_io.h" @@ -61,7 +60,7 @@ struct MainThreadConnection { }; template -class JSBindingsConnection : public AsyncWrap { +class JSBindingsConnection : public BaseObject { public: class JSBindingsSessionDelegate : public InspectorSessionDelegate { public: @@ -91,15 +90,16 @@ class JSBindingsConnection : public AsyncWrap { JSBindingsConnection(Environment* env, Local wrap, Local callback) - : AsyncWrap(env, wrap, PROVIDER_INSPECTORJSBINDING), - callback_(env->isolate(), callback) { + : BaseObject(env, wrap), callback_(env->isolate(), callback) { Agent* inspector = env->inspector_agent(); session_ = ConnectionType::Connect( inspector, std::make_unique(env, this)); } void OnMessage(Local value) { - MakeCallback(callback_.Get(env()->isolate()), 1, &value); + auto result = callback_.Get(env()->isolate()) + ->Call(env()->context(), object(), 1, &value); + (void)result; } static void Bind(Environment* env, Local target) { @@ -108,7 +108,6 @@ class JSBindingsConnection : public AsyncWrap { NewFunctionTemplate(isolate, JSBindingsConnection::New); tmpl->InstanceTemplate()->SetInternalFieldCount( JSBindingsConnection::kInternalFieldCount); - tmpl->Inherit(AsyncWrap::GetConstructorTemplate(env)); SetProtoMethod(isolate, tmpl, "dispatch", JSBindingsConnection::Dispatch); SetProtoMethod( isolate, tmpl, "disconnect", JSBindingsConnection::Disconnect); diff --git a/src/node.cc b/src/node.cc index cec88c4f4ecd84..f053474decc238 100644 --- a/src/node.cc +++ b/src/node.cc @@ -869,13 +869,20 @@ static ExitCode InitializeNodeWithArgsInternal( if (!file_paths.empty()) { CHECK(!per_process::v8_initialized); - auto cwd = Environment::GetCwd(Environment::GetExecPath(*argv)); for (const auto& file_path : file_paths) { - std::string path = cwd + kPathSeparator + file_path; - auto path_exists = per_process::dotenv_file.ParsePath(path); - - if (!path_exists) errors->push_back(file_path + ": not found"); + switch (per_process::dotenv_file.ParsePath(file_path)) { + case Dotenv::ParseResult::Valid: + break; + case Dotenv::ParseResult::InvalidContent: + errors->push_back(file_path + ": invalid format"); + break; + case Dotenv::ParseResult::FileError: + errors->push_back(file_path + ": not found"); + break; + default: + UNREACHABLE(); + } 
} per_process::dotenv_file.AssignNodeOptionsIfAvailable(&node_options); diff --git a/src/node_dotenv.cc b/src/node_dotenv.cc index 992633c50b9a14..718e5407040505 100644 --- a/src/node_dotenv.cc +++ b/src/node_dotenv.cc @@ -5,7 +5,9 @@ namespace node { +using v8::Local; using v8::NewStringType; +using v8::Object; using v8::String; std::vector Dotenv::GetPathFromArgs( @@ -64,14 +66,47 @@ void Dotenv::SetEnvironment(node::Environment* env) { } } -bool Dotenv::ParsePath(const std::string_view path) { +Local Dotenv::ToObject(Environment* env) { + Local result = Object::New(env->isolate()); + + for (const auto& entry : store_) { + auto key = entry.first; + auto value = entry.second; + + result + ->Set( + env->context(), + v8::String::NewFromUtf8( + env->isolate(), key.data(), NewStringType::kNormal, key.size()) + .ToLocalChecked(), + v8::String::NewFromUtf8(env->isolate(), + value.data(), + NewStringType::kNormal, + value.size()) + .ToLocalChecked()) + .Check(); + } + + return result; +} + +void Dotenv::ParseContent(const std::string_view content) { + using std::string_view_literals::operator""sv; + auto lines = SplitString(content, "\n"sv); + + for (const auto& line : lines) { + ParseLine(line); + } +} + +Dotenv::ParseResult Dotenv::ParsePath(const std::string_view path) { uv_fs_t req; auto defer_req_cleanup = OnScopeLeave([&req]() { uv_fs_req_cleanup(&req); }); uv_file file = uv_fs_open(nullptr, &req, path.data(), 0, 438, nullptr); if (req.result < 0) { // req will be cleaned up by scope leave. - return false; + return ParseResult::FileError; } uv_fs_req_cleanup(&req); @@ -89,7 +124,7 @@ bool Dotenv::ParsePath(const std::string_view path) { auto r = uv_fs_read(nullptr, &req, file, &buf, 1, -1, nullptr); if (req.result < 0) { // req will be cleaned up by scope leave. - return false; + return ParseResult::InvalidContent; } uv_fs_req_cleanup(&req); if (r <= 0) { @@ -98,13 +133,8 @@ bool Dotenv::ParsePath(const std::string_view path) { result.append(buf.base, r); } - using std::string_view_literals::operator""sv; - auto lines = SplitString(result, "\n"sv); - - for (const auto& line : lines) { - ParseLine(line); - } - return true; + ParseContent(result); + return ParseResult::Valid; } void Dotenv::AssignNodeOptionsIfAvailable(std::string* node_options) { @@ -142,7 +172,7 @@ void Dotenv::ParseLine(const std::string_view line) { auto quote_character = value[quotation_index]; value.erase(0, 1); - auto end_quotation_index = value.find_last_of(quote_character); + auto end_quotation_index = value.find(quote_character); // We couldn't find the closing quotation character. Terminate. 
if (end_quotation_index == std::string::npos) { diff --git a/src/node_dotenv.h b/src/node_dotenv.h index cc87008d149f43..f2a9ce56f41570 100644 --- a/src/node_dotenv.h +++ b/src/node_dotenv.h @@ -4,6 +4,7 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS #include "util-inl.h" +#include "v8.h" #include @@ -11,6 +12,8 @@ namespace node { class Dotenv { public: + enum ParseResult { Valid, FileError, InvalidContent }; + Dotenv() = default; Dotenv(const Dotenv& d) = default; Dotenv(Dotenv&& d) noexcept = default; @@ -18,9 +21,11 @@ class Dotenv { Dotenv& operator=(const Dotenv& d) = default; ~Dotenv() = default; - bool ParsePath(const std::string_view path); + void ParseContent(const std::string_view content); + ParseResult ParsePath(const std::string_view path); void AssignNodeOptionsIfAvailable(std::string* node_options); void SetEnvironment(Environment* env); + v8::Local ToObject(Environment* env); static std::vector GetPathFromArgs( const std::vector& args); diff --git a/src/node_process.h b/src/node_process.h index ee6e6a81676bd6..0a1f65e9bdfa24 100644 --- a/src/node_process.h +++ b/src/node_process.h @@ -84,6 +84,8 @@ class BindingData : public SnapshotableObject { static void SlowBigInt(const v8::FunctionCallbackInfo& args); + static void LoadEnvFile(const v8::FunctionCallbackInfo& args); + private: // Buffer length in uint32. static constexpr size_t kHrTimeBufferLength = 3; diff --git a/src/node_process_methods.cc b/src/node_process_methods.cc index cdbe07947b01d1..e8d621ddb48bd2 100644 --- a/src/node_process_methods.cc +++ b/src/node_process_methods.cc @@ -4,6 +4,7 @@ #include "env-inl.h" #include "memory_tracker-inl.h" #include "node.h" +#include "node_dotenv.h" #include "node_errors.h" #include "node_external_reference.h" #include "node_internals.h" @@ -463,6 +464,38 @@ static void ReallyExit(const FunctionCallbackInfo& args) { env->Exit(code); } +static void LoadEnvFile(const v8::FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + std::string path = ".env"; + if (args.Length() == 1) { + Utf8Value path_value(args.GetIsolate(), args[0]); + path = path_value.ToString(); + } + + THROW_IF_INSUFFICIENT_PERMISSIONS( + env, permission::PermissionScope::kFileSystemRead, path); + + Dotenv dotenv{}; + + switch (dotenv.ParsePath(path)) { + case dotenv.ParseResult::Valid: { + dotenv.SetEnvironment(env); + break; + } + case dotenv.ParseResult::InvalidContent: { + THROW_ERR_INVALID_ARG_TYPE( + env, "Contents of '%s' should be a valid string.", path.c_str()); + break; + } + case dotenv.ParseResult::FileError: { + env->ThrowUVException(UV_ENOENT, "Failed to load '%s'.", path.c_str()); + break; + } + default: + UNREACHABLE(); + } +} + namespace process { BindingData::BindingData(Realm* realm, @@ -616,6 +649,8 @@ static void CreatePerIsolateProperties(IsolateData* isolate_data, SetMethod(isolate, target, "reallyExit", ReallyExit); SetMethodNoSideEffect(isolate, target, "uptime", Uptime); SetMethod(isolate, target, "patchProcessObject", PatchProcessObject); + + SetMethod(isolate, target, "loadEnvFile", LoadEnvFile); } static void CreatePerContextProperties(Local target, @@ -653,6 +688,8 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(ReallyExit); registry->Register(Uptime); registry->Register(PatchProcessObject); + + registry->Register(LoadEnvFile); } } // namespace process diff --git a/src/node_util.cc b/src/node_util.cc index dfdd87d297e1e2..8fb7426605869d 100644 --- a/src/node_util.cc +++ b/src/node_util.cc @@ -1,4 
+1,5 @@ #include "base_object-inl.h" +#include "node_dotenv.h" #include "node_errors.h" #include "node_external_reference.h" #include "util-inl.h" @@ -237,6 +238,16 @@ static uint32_t FastGuessHandleType(Local receiver, const uint32_t fd) { CFunction fast_guess_handle_type_(CFunction::Make(FastGuessHandleType)); +static void ParseEnv(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + CHECK_EQ(args.Length(), 1); // content + CHECK(args[0]->IsString()); + Utf8Value content(env->isolate(), args[0]); + Dotenv dotenv{}; + dotenv.ParseContent(content.ToStringView()); + args.GetReturnValue().Set(dotenv.ToObject(env)); +} + void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(GetPromiseDetails); registry->Register(GetProxyDetails); @@ -251,6 +262,7 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(GuessHandleType); registry->Register(FastGuessHandleType); registry->Register(fast_guess_handle_type_.GetTypeInfo()); + registry->Register(ParseEnv); } void Initialize(Local target, @@ -348,6 +360,7 @@ void Initialize(Local target, context, target, "getConstructorName", GetConstructorName); SetMethodNoSideEffect(context, target, "getExternalValue", GetExternalValue); SetMethod(context, target, "sleep", Sleep); + SetMethod(context, target, "parseEnv", ParseEnv); SetMethod( context, target, "arrayBufferViewHasBuffer", ArrayBufferViewHasBuffer); diff --git a/src/undici_version.h b/src/undici_version.h index 3c74788caee88a..99bfb55d3118e6 100644 --- a/src/undici_version.h +++ b/src/undici_version.h @@ -2,5 +2,5 @@ // Refer to tools/dep_updaters/update-undici.sh #ifndef SRC_UNDICI_VERSION_H_ #define SRC_UNDICI_VERSION_H_ -#define UNDICI_VERSION "6.2.1" +#define UNDICI_VERSION "6.3.0" #endif // SRC_UNDICI_VERSION_H_ diff --git a/test/es-module/test-esm-preserve-symlinks-main.js b/test/es-module/test-esm-preserve-symlinks-main.js index 6f921f656fe22f..5eb58ca8c1bbbd 100644 --- a/test/es-module/test-esm-preserve-symlinks-main.js +++ b/test/es-module/test-esm-preserve-symlinks-main.js @@ -1,12 +1,19 @@ 'use strict'; -const common = require('../common'); -const { spawn } = require('child_process'); -const assert = require('assert'); -const path = require('path'); -const fs = require('fs'); - +const { spawnPromisified, skip } = require('../common'); const tmpdir = require('../common/tmpdir'); + +// Invoke the main file via a symlink. 
In this case --preserve-symlinks-main +// dictates that it'll resolve relative imports in the main file relative to +// the symlink, and not relative to the symlink target; the file structure set +// up below requires this to not crash when loading ./submodule_link.js + +const assert = require('node:assert'); +const fs = require('node:fs'); +const path = require('node:path'); +const { execPath } = require('node:process'); +const { describe, it } = require('node:test'); + tmpdir.refresh(); const tmpDir = tmpdir.path; @@ -14,7 +21,7 @@ fs.mkdirSync(path.join(tmpDir, 'nested')); fs.mkdirSync(path.join(tmpDir, 'nested2')); const entry = path.join(tmpDir, 'nested', 'entry.js'); -const entry_link_absolute_path = path.join(tmpDir, 'link.js'); +const entry_link_absolute_path = path.join(tmpDir, 'index.js'); const submodule = path.join(tmpDir, 'nested2', 'submodule.js'); const submodule_link_absolute_path = path.join(tmpDir, 'submodule_link.js'); @@ -31,27 +38,39 @@ try { fs.symlinkSync(submodule, submodule_link_absolute_path); } catch (err) { if (err.code !== 'EPERM') throw err; - common.skip('insufficient privileges for symlinks'); + skip('insufficient privileges for symlinks'); } -function doTest(flags, done) { - // Invoke the main file via a symlink. In this case --preserve-symlinks-main - // dictates that it'll resolve relative imports in the main file relative to - // the symlink, and not relative to the symlink target; the file structure set - // up above requires this to not crash when loading ./submodule_link.js - spawn(process.execPath, [ - '--preserve-symlinks', - '--preserve-symlinks-main', - entry_link_absolute_path, - ], { stdio: 'inherit' }) - .on('exit', (code) => { - assert.strictEqual(code, 0); - done(); - }); -} +describe('Invoke the main file via a symlink.', { concurrency: true }, () => { + it('should resolve relative imports in the main file', async () => { + const { code } = await spawnPromisified(execPath, [ + '--preserve-symlinks', + '--preserve-symlinks-main', + entry_link_absolute_path, + ]); + + assert.strictEqual(code, 0); + }); + + it('should resolve relative imports in the main file when file extension is omitted', async () => { + const entry_link_absolute_path_without_ext = path.join(tmpDir, 'index'); + + const { code } = await spawnPromisified(execPath, [ + '--preserve-symlinks', + '--preserve-symlinks-main', + entry_link_absolute_path_without_ext, + ]); + + assert.strictEqual(code, 0); + }); + + it('should resolve relative imports in the main file when filename(index.js) is omitted', async () => { + const { code } = await spawnPromisified(execPath, [ + '--preserve-symlinks', + '--preserve-symlinks-main', + tmpDir, + ]); -// First test the commonjs module loader -doTest([], () => { - // Now test the new loader - doTest([], () => {}); + assert.strictEqual(code, 0); + }); }); diff --git a/test/es-module/test-vm-source-text-module-leak.js b/test/es-module/test-vm-source-text-module-leak.js index d05e812ac32c95..6e770f897528b4 100644 --- a/test/es-module/test-vm-source-text-module-leak.js +++ b/test/es-module/test-vm-source-text-module-leak.js @@ -1,21 +1,24 @@ -// Flags: --experimental-vm-modules --max-old-space-size=16 --trace-gc +// Flags: --expose-internals --experimental-vm-modules --max-old-space-size=16 --trace-gc 'use strict'; // This tests that vm.SourceTextModule() does not leak. 
// See: https://github.com/nodejs/node/issues/33439 -require('../common'); -const { checkIfCollectable } = require('../common/gc'); +const common = require('../common'); +const { checkIfCollectableByCounting } = require('../common/gc'); const vm = require('vm'); -async function createSourceTextModule() { - // Try to reach the maximum old space size. - const m = new vm.SourceTextModule(` - const bar = new Array(512).fill("----"); - export { bar }; - `); - await m.link(() => {}); - await m.evaluate(); - return m; -} +const outer = 32; +const inner = 128; -checkIfCollectable(createSourceTextModule, 4096, 1024); +checkIfCollectableByCounting(async (i) => { + for (let j = 0; j < inner; j++) { + // Try to reach the maximum old space size. + const m = new vm.SourceTextModule(` + const bar = new Array(512).fill("----"); + export { bar }; + `); + await m.link(() => {}); + await m.evaluate(); + } + return inner; +}, vm.SourceTextModule, outer).then(common.mustCall()); diff --git a/test/fixtures/dotenv/.env b/test/fixtures/dotenv/.env new file mode 100644 index 00000000000000..f098ff6c5e8258 --- /dev/null +++ b/test/fixtures/dotenv/.env @@ -0,0 +1 @@ +BASIC=basic diff --git a/test/fixtures/dotenv/basic-valid.env b/test/fixtures/dotenv/basic-valid.env new file mode 100644 index 00000000000000..8b7407914c7404 --- /dev/null +++ b/test/fixtures/dotenv/basic-valid.env @@ -0,0 +1 @@ +BASIC=overriden diff --git a/test/fixtures/dotenv/valid.env b/test/fixtures/dotenv/valid.env index c1c12b112b965b..980d3621b0c4df 100644 --- a/test/fixtures/dotenv/valid.env +++ b/test/fixtures/dotenv/valid.env @@ -33,3 +33,4 @@ RETAIN_INNER_QUOTES_AS_BACKTICKS=`{"foo": "bar's"}` TRIM_SPACE_FROM_UNQUOTED= some spaced out string EMAIL=therealnerdybeast@example.tld SPACED_KEY = parsed +EDGE_CASE_INLINE_COMMENTS="VALUE1" # or "VALUE2" or "VALUE3" diff --git a/test/fixtures/permission/fs-write.js b/test/fixtures/permission/fs-write.js index 390ac4e53b1910..5e7e1ac588ee76 100644 --- a/test/fixtures/permission/fs-write.js +++ b/test/fixtures/permission/fs-write.js @@ -11,6 +11,7 @@ const regularFolder = process.env.ALLOWEDFOLDER; const regularFile = process.env.ALLOWEDFILE; const blockedFolder = process.env.BLOCKEDFOLDER; const blockedFile = process.env.BLOCKEDFILE; +const blockedFileURL = require('url').pathToFileURL(process.env.BLOCKEDFILE); const relativeProtectedFile = process.env.RELATIVEBLOCKEDFILE; const relativeProtectedFolder = process.env.RELATIVEBLOCKEDFOLDER; const absoluteProtectedFile = path.resolve(relativeProtectedFile); @@ -30,6 +31,13 @@ const absoluteProtectedFolder = path.resolve(relativeProtectedFolder); permission: 'FileSystemWrite', resource: path.toNamespacedPath(blockedFile), }); + assert.throws(() => { + fs.writeFile(blockedFileURL, 'example', () => {}); + }, { + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemWrite', + resource: path.toNamespacedPath(blockedFile), + }); assert.throws(() => { fs.writeFile(relativeProtectedFile, 'example', () => {}); }, { @@ -91,6 +99,13 @@ const absoluteProtectedFolder = path.resolve(relativeProtectedFolder); permission: 'FileSystemWrite', resource: path.toNamespacedPath(blockedFile), }); + assert.throws(() => { + fs.utimes(blockedFileURL, new Date(), new Date(), () => {}); + }, { + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemWrite', + resource: path.toNamespacedPath(blockedFile), + }); assert.throws(() => { fs.utimes(relativeProtectedFile, new Date(), new Date(), () => {}); }, { @@ -117,6 +132,13 @@ const absoluteProtectedFolder = 
path.resolve(relativeProtectedFolder); permission: 'FileSystemWrite', resource: path.toNamespacedPath(blockedFile), }); + assert.throws(() => { + fs.lutimes(blockedFileURL, new Date(), new Date(), () => {}); + }, { + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemWrite', + resource: path.toNamespacedPath(blockedFile), + }); } // fs.mkdir @@ -169,6 +191,15 @@ const absoluteProtectedFolder = path.resolve(relativeProtectedFolder); permission: 'FileSystemWrite', resource: path.toNamespacedPath(blockedFile), }); + assert.throws(() => { + fs.rename(blockedFileURL, path.join(blockedFile, 'renamed'), (err) => { + assert.ifError(err); + }); + }, { + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemWrite', + resource: path.toNamespacedPath(blockedFile), + }); assert.throws(() => { fs.rename(relativeProtectedFile, path.join(relativeProtectedFile, 'renamed'), (err) => { assert.ifError(err); @@ -263,6 +294,12 @@ const absoluteProtectedFolder = path.resolve(relativeProtectedFolder); code: 'ERR_ACCESS_DENIED', permission: 'FileSystemWrite', }); + assert.throws(() => { + fs.open(blockedFileURL, fs.constants.O_RDWR | 0x10000000, common.mustNotCall()); + }, { + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemWrite', + }); assert.rejects(async () => { await fs.promises.open(blockedFile, fs.constants.O_RDWR | fs.constants.O_NOFOLLOW); }, { @@ -290,6 +327,12 @@ const absoluteProtectedFolder = path.resolve(relativeProtectedFolder); code: 'ERR_ACCESS_DENIED', permission: 'FileSystemWrite', }); + assert.throws(() => { + fs.chmod(blockedFileURL, 0o755, common.mustNotCall()); + }, { + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemWrite', + }); assert.rejects(async () => { await fs.promises.chmod(blockedFile, 0o755); }, { @@ -324,6 +367,12 @@ const absoluteProtectedFolder = path.resolve(relativeProtectedFolder); code: 'ERR_ACCESS_DENIED', permission: 'FileSystemWrite', }); + assert.throws(() => { + fs.appendFile(blockedFileURL, 'new data', common.mustNotCall()); + }, { + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemWrite', + }); assert.rejects(async () => { await fs.promises.appendFile(blockedFile, 'new data'); }, { @@ -340,6 +389,12 @@ const absoluteProtectedFolder = path.resolve(relativeProtectedFolder); code: 'ERR_ACCESS_DENIED', permission: 'FileSystemWrite', }); + assert.throws(() => { + fs.chown(blockedFileURL, 1541, 999, common.mustNotCall()); + }, { + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemWrite', + }); assert.rejects(async () => { await fs.promises.chown(blockedFile, 1541, 999); }, { @@ -356,6 +411,12 @@ const absoluteProtectedFolder = path.resolve(relativeProtectedFolder); code: 'ERR_ACCESS_DENIED', permission: 'FileSystemWrite', }); + assert.throws(() => { + fs.lchown(blockedFileURL, 1541, 999, common.mustNotCall()); + }, { + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemWrite', + }); assert.rejects(async () => { await fs.promises.lchown(blockedFile, 1541, 999); }, { @@ -372,6 +433,12 @@ const absoluteProtectedFolder = path.resolve(relativeProtectedFolder); code: 'ERR_ACCESS_DENIED', permission: 'FileSystemWrite', }); + assert.throws(() => { + fs.link(blockedFileURL, path.join(blockedFolder, '/linked'), common.mustNotCall()); + }, { + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemWrite', + }); assert.rejects(async () => { await fs.promises.link(blockedFile, path.join(blockedFolder, '/linked')); }, { @@ -391,4 +458,13 @@ const absoluteProtectedFolder = path.resolve(relativeProtectedFolder); permission: 'FileSystemWrite', resource: 
path.toNamespacedPath(blockedFile), }); -} + assert.throws(() => { + fs.unlink(blockedFileURL, (err) => { + assert.ifError(err); + }); + }, { + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemWrite', + resource: path.toNamespacedPath(blockedFile), + }); +} \ No newline at end of file diff --git a/test/fixtures/test-runner/output/hooks.js b/test/fixtures/test-runner/output/hooks.js index 531615fd97d69b..38de02e1119da4 100644 --- a/test/fixtures/test-runner/output/hooks.js +++ b/test/fixtures/test-runner/output/hooks.js @@ -152,6 +152,25 @@ test('afterEach when test fails', async (t) => { await t.test('2', () => {}); }); +test('afterEach context when test passes', async (t) => { + t.afterEach(common.mustCall((ctx) => { + assert.strictEqual(ctx.name, '1'); + assert.strictEqual(ctx.passed, true); + assert.strictEqual(ctx.error, null); + })); + await t.test('1', () => {}); +}); + +test('afterEach context when test fails', async (t) => { + const err = new Error('test'); + t.afterEach(common.mustCall((ctx) => { + assert.strictEqual(ctx.name, '1'); + assert.strictEqual(ctx.passed, false); + assert.strictEqual(ctx.error, err); + })); + await t.test('1', () => { throw err }); +}); + test('afterEach throws and test fails', async (t) => { t.after(common.mustCall()); t.afterEach(() => { throw new Error('afterEach'); }); diff --git a/test/fixtures/test-runner/output/hooks.snapshot b/test/fixtures/test-runner/output/hooks.snapshot index edb455b025ff58..c9c1632e2b9e45 100644 --- a/test/fixtures/test-runner/output/hooks.snapshot +++ b/test/fixtures/test-runner/output/hooks.snapshot @@ -505,6 +505,41 @@ not ok 12 - afterEach when test fails error: '1 subtest failed' code: 'ERR_TEST_FAILURE' ... +# Subtest: afterEach context when test passes + # Subtest: 1 + ok 1 - 1 + --- + duration_ms: * + ... + 1..1 +ok 13 - afterEach context when test passes + --- + duration_ms: * + ... +# Subtest: afterEach context when test fails + # Subtest: 1 + not ok 1 - 1 + --- + duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):11' + failureType: 'testCodeFailure' + error: 'test' + code: 'ERR_TEST_FAILURE' + stack: |- + * + * + * + * + ... + 1..1 +not ok 14 - afterEach context when test fails + --- + duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' + failureType: 'subtestsFailed' + error: '1 subtest failed' + code: 'ERR_TEST_FAILURE' + ... # Subtest: afterEach throws and test fails # Subtest: 1 not ok 1 - 1 @@ -546,7 +581,7 @@ not ok 12 - afterEach when test fails * ... 1..2 -not ok 13 - afterEach throws and test fails +not ok 15 - afterEach throws and test fails --- duration_ms: * location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' @@ -555,7 +590,7 @@ not ok 13 - afterEach throws and test fails code: 'ERR_TEST_FAILURE' ... # Subtest: t.after() is called if test body throws -not ok 14 - t.after() is called if test body throws +not ok 16 - t.after() is called if test body throws --- duration_ms: * location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' @@ -580,7 +615,7 @@ not ok 14 - t.after() is called if test body throws code: 'ERR_TEST_FAILURE' ... 1..1 -not ok 15 - run after when before throws +not ok 17 - run after when before throws --- duration_ms: * type: 'suite' @@ -599,15 +634,15 @@ not ok 15 - run after when before throws * * ... 
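As an aside, here is a minimal standalone sketch, not taken from the patch itself, of the afterEach hook context that the new hooks.js subtests assert against: the hook receives the finished subtest's name, a passed flag, and the error (null when the subtest succeeds).

'use strict';
const test = require('node:test');
const assert = require('node:assert');

test('afterEach receives the finished subtest context', async (t) => {
  t.afterEach((ctx) => {
    // Mirrors the assertions added in hooks.js above.
    assert.strictEqual(ctx.name, '1');
    assert.strictEqual(ctx.passed, true);
    assert.strictEqual(ctx.error, null);
  });
  await t.test('1', () => {});
});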
-1..15 +1..17 # before 1 called # before 2 called # after 1 called # after 2 called -# tests 39 +# tests 43 # suites 9 -# pass 14 -# fail 22 +# pass 16 +# fail 24 # cancelled 3 # skipped 0 # todo 0 diff --git a/test/fixtures/test-runner/output/hooks_spec_reporter.snapshot b/test/fixtures/test-runner/output/hooks_spec_reporter.snapshot index 4cd47bf042b95d..c9b749d571674f 100644 --- a/test/fixtures/test-runner/output/hooks_spec_reporter.snapshot +++ b/test/fixtures/test-runner/output/hooks_spec_reporter.snapshot @@ -258,6 +258,20 @@ 2 (*ms) afterEach when test fails (*ms) + afterEach context when test passes + 1 (*ms) + afterEach context when test passes (*ms) + + afterEach context when test fails + 1 (*ms) + Error: test + * + * + * + * + + afterEach context when test fails (*ms) + afterEach throws and test fails 1 (*ms) Error: test @@ -315,10 +329,10 @@ before 2 called after 1 called after 2 called - tests 39 + tests 43 suites 9 - pass 14 - fail 22 + pass 16 + fail 24 cancelled 3 skipped 0 todo 0 @@ -551,6 +565,14 @@ * * +* + 1 (*ms) + Error: test + * + * + * + * + * 1 (*ms) Error: test diff --git a/test/fixtures/test-runner/output/junit_reporter.snapshot b/test/fixtures/test-runner/output/junit_reporter.snapshot index e0a4c331dd5c22..562c7696cbd0ec 100644 --- a/test/fixtures/test-runner/output/junit_reporter.snapshot +++ b/test/fixtures/test-runner/output/junit_reporter.snapshot @@ -155,6 +155,7 @@ true !== false Error [ERR_TEST_FAILURE]: thrown from subtest sync throw fail + * * { code: 'ERR_TEST_FAILURE', failureType: 'testCodeFailure', @@ -338,6 +339,7 @@ Error [ERR_TEST_FAILURE]: thrown from callback async throw Error [ERR_TEST_FAILURE]: thrown from subtest sync throw fails at first + * * { code: 'ERR_TEST_FAILURE', failureType: 'testCodeFailure', diff --git a/test/fixtures/tz-version.txt b/test/fixtures/tz-version.txt index 7daa77e00d9977..cd9c3f6d6e52d0 100644 --- a/test/fixtures/tz-version.txt +++ b/test/fixtures/tz-version.txt @@ -1 +1 @@ -2023c +2023d diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md index 33af47703674bb..a4ca98dc7ffa4f 100644 --- a/test/fixtures/wpt/README.md +++ b/test/fixtures/wpt/README.md @@ -32,7 +32,7 @@ Last update: - user-timing: https://github.com/web-platform-tests/wpt/tree/5ae85bf826/user-timing - wasm/jsapi: https://github.com/web-platform-tests/wpt/tree/cde25e7e3c/wasm/jsapi - wasm/webapi: https://github.com/web-platform-tests/wpt/tree/fd1b23eeaa/wasm/webapi -- WebCryptoAPI: https://github.com/web-platform-tests/wpt/tree/d4e14d714c/WebCryptoAPI +- WebCryptoAPI: https://github.com/web-platform-tests/wpt/tree/5e042cbc4e/WebCryptoAPI - webidl/ecmascript-binding/es-exceptions: https://github.com/web-platform-tests/wpt/tree/a370aad338/webidl/ecmascript-binding/es-exceptions - webmessaging/broadcastchannel: https://github.com/web-platform-tests/wpt/tree/e97fac4791/webmessaging/broadcastchannel diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits.js index 4e12ca0eb711fa..ef6905e574c158 100644 --- a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits.js +++ b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits.js @@ -60,6 +60,15 @@ function define_tests() { }, algorithmName + " mixed case parameters"); // Null length + // "Null" is not valid per the current spec + // - https://github.com/w3c/webcrypto/issues/322 + // - https://github.com/w3c/webcrypto/issues/329 + // + // Proposal for a spec change: + // - 
https://github.com/w3c/webcrypto/pull/345 + // + // This test case may be replaced by these new tests: + // - https://github.com/web-platform-tests/wpt/pull/43400 promise_test(function(test) { return subtle.deriveBits({name: algorithmName, public: publicKeys[algorithmName]}, privateKeys[algorithmName], null) .then(function(derivation) { diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/ecdh_bits.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/ecdh_bits.js index e52ffc6bfdb6f6..cb9747a529fd53 100644 --- a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/ecdh_bits.js +++ b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/ecdh_bits.js @@ -56,6 +56,15 @@ function define_tests() { }, namedCurve + " mixed case parameters"); // Null length + // "Null" is not valid per the current spec + // - https://github.com/w3c/webcrypto/issues/322 + // - https://github.com/w3c/webcrypto/issues/329 + // + // Proposal for a spec change: + // - https://github.com/w3c/webcrypto/pull/345 + // + // This test case may be replaced by these new tests: + // - https://github.com/web-platform-tests/wpt/pull/43400 promise_test(function(test) { return subtle.deriveBits({name: "ECDH", public: publicKeys[namedCurve]}, privateKeys[namedCurve], null) .then(function(derivation) { diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/hkdf.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/hkdf.js index 2bb58533473eb9..3903da5cddff94 100644 --- a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/hkdf.js +++ b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/hkdf.js @@ -140,6 +140,15 @@ function define_tests() { }, testName + " with missing info"); // length null (OperationError) + // "Null" is not valid per the current spec + // - https://github.com/w3c/webcrypto/issues/322 + // - https://github.com/w3c/webcrypto/issues/329 + // + // Proposal for a spec change: + // - https://github.com/w3c/webcrypto/pull/345 + // + // This test case may be replaced by these new tests: + // - https://github.com/web-platform-tests/wpt/pull/43400 subsetTest(promise_test, function(test) { return subtle.deriveBits(algorithm, baseKeys[derivedKeySize], null) .then(function(derivation) { diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/pbkdf2.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/pbkdf2.js index 0403f382e1479c..4e4ae79d800a40 100644 --- a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/pbkdf2.js +++ b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/pbkdf2.js @@ -105,6 +105,15 @@ function define_tests() { // Test various error conditions for deriveBits below: // length null (OperationError) + // "Null" is not valid per the current spec + // - https://github.com/w3c/webcrypto/issues/322 + // - https://github.com/w3c/webcrypto/issues/329 + // + // Proposal for a spec change: + // - https://github.com/w3c/webcrypto/pull/345 + // + // This test case may be replaced by these new tests: + // - https://github.com/web-platform-tests/wpt/pull/43400 subsetTest(promise_test, function(test) { return subtle.deriveBits({name: "PBKDF2", salt: salts[saltSize], hash: hashName, iterations: parseInt(iterations)}, baseKeys[passwordSize], null) .then(function(derivation) { diff --git a/test/fixtures/wpt/WebCryptoAPI/import_export/ec_importKey.https.any.js b/test/fixtures/wpt/WebCryptoAPI/import_export/ec_importKey.https.any.js index 31f062e313f6fe..a01bfbb0ef2e18 100644 --- a/test/fixtures/wpt/WebCryptoAPI/import_export/ec_importKey.https.any.js +++ b/test/fixtures/wpt/WebCryptoAPI/import_export/ec_importKey.https.any.js @@ 
-80,6 +80,9 @@ } testFormat(format, algorithm, data, curve, usages, extractable); + if (vector.name === 'ECDH' && format === 'jwk') { + testEcdhJwkAlg(algorithm, { ...data.jwk, alg: 'any alg works here' }, curve, usages, extractable); + } }); }); @@ -90,11 +93,13 @@ var data = keyData[curve]; allValidUsages(vector.privateUsages).forEach(function(usages) { testFormat(format, algorithm, data, curve, usages, extractable); + if (vector.name === 'ECDH' && format === 'jwk') { + testEcdhJwkAlg(algorithm, { ...data.jwk, alg: 'any alg works here' }, curve, usages, extractable); + } }); testEmptyUsages(format, algorithm, data, curve, extractable); }); }); - }); }); @@ -151,6 +156,21 @@ }, "Empty Usages: " + keySize.toString() + " bits " + parameterString(format, false, keyData, algorithm, extractable, usages)); } + // Test ECDH importKey with a JWK format + // Should succeed with any "alg" value + function testEcdhJwkAlg(algorithm, keyData, keySize, usages, extractable) { + const format = "jwk"; + promise_test(function(test) { + return subtle.importKey(format, keyData, algorithm, extractable, usages). + then(function(key) { + assert_equals(key.constructor, CryptoKey, "Imported a CryptoKey object"); + assert_goodCryptoKey(key, algorithm, extractable, usages, keyData.d ? 'private' : 'public'); + }, function(err) { + assert_unreached("Threw an unexpected error: " + err.toString()); + }); + }, "ECDH any JWK alg: " + keySize.toString() + " bits " + parameterString(format, false, keyData, algorithm, extractable, usages)); + } + // Helper methods follow: diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json index 0e04684b381f2f..a7c655125b5c22 100644 --- a/test/fixtures/wpt/versions.json +++ b/test/fixtures/wpt/versions.json @@ -88,7 +88,7 @@ "path": "wasm/webapi" }, "WebCryptoAPI": { - "commit": "d4e14d714c5242e174ba9aec43caf5eb514d0f09", + "commit": "5e042cbc4ecab7b2279a5fd411c6daa24ca886c6", "path": "WebCryptoAPI" }, "webidl/ecmascript-binding/es-exceptions": { diff --git a/test/parallel/parallel.status b/test/parallel/parallel.status index 98288c5c1228c2..7b7a72675234b9 100644 --- a/test/parallel/parallel.status +++ b/test/parallel/parallel.status @@ -26,8 +26,6 @@ test-worker-nearheaplimit-deadlock: PASS, FLAKY [$system==linux] # https://github.com/nodejs/node/issues/39368 test-domain-error-types: PASS,FLAKY -# https://github.com/nodejs/node/issues/47420 -test-file-write-stream4: PASS,FLAKY # https://github.com/nodejs/node/issues/43465 test-http-server-request-timeouts-mixed: PASS, FLAKY diff --git a/test/parallel/test-dotenv-edge-cases.js b/test/parallel/test-dotenv-edge-cases.js index 9735a89618172c..23e3e037418351 100644 --- a/test/parallel/test-dotenv-edge-cases.js +++ b/test/parallel/test-dotenv-edge-cases.js @@ -2,6 +2,7 @@ const common = require('../common'); const assert = require('node:assert'); +const path = require('node:path'); const { describe, it } = require('node:test'); const validEnvFilePath = '../fixtures/dotenv/valid.env'; @@ -25,6 +26,18 @@ describe('.env supports edge cases', () => { assert.strictEqual(child.code, 0); }); + it('supports absolute paths', async () => { + const code = ` + require('assert').strictEqual(process.env.BASIC, 'basic'); + `.trim(); + const child = await common.spawnPromisified( + process.execPath, + [ `--env-file=${path.resolve(__dirname, validEnvFilePath)}`, '--eval', code ], + ); + assert.strictEqual(child.stderr, ''); + assert.strictEqual(child.code, 0); + }); + it('should handle non-existent .env file', async () => { const code 
= ` require('assert').strictEqual(1, 1) diff --git a/test/parallel/test-dotenv.js b/test/parallel/test-dotenv.js index 9c374c8735910d..efc5a164b39334 100644 --- a/test/parallel/test-dotenv.js +++ b/test/parallel/test-dotenv.js @@ -68,3 +68,5 @@ assert.strictEqual(process.env.TRIM_SPACE_FROM_UNQUOTED, 'some spaced out string assert.strictEqual(process.env.EMAIL, 'therealnerdybeast@example.tld'); // Parses keys and values surrounded by spaces assert.strictEqual(process.env.SPACED_KEY, 'parsed'); +// Parse inline comments correctly when multiple quotes +assert.strictEqual(process.env.EDGE_CASE_INLINE_COMMENTS, 'VALUE1'); diff --git a/test/parallel/test-fs-watch-recursive-add-file-to-existing-subfolder.js b/test/parallel/test-fs-watch-recursive-add-file-to-existing-subfolder.js index 5563dc6a525958..995c82743e49ea 100644 --- a/test/parallel/test-fs-watch-recursive-add-file-to-existing-subfolder.js +++ b/test/parallel/test-fs-watch-recursive-add-file-to-existing-subfolder.js @@ -1,7 +1,6 @@ 'use strict'; const common = require('../common'); -const { setTimeout } = require('timers/promises'); if (common.isIBMi) common.skip('IBMi does not support `fs.watch()`'); @@ -21,39 +20,36 @@ const tmpdir = require('../common/tmpdir'); const testDir = tmpdir.path; tmpdir.refresh(); -(async () => { - // Add a file to subfolder of a watching folder +// Add a file to subfolder of a watching folder - const rootDirectory = fs.mkdtempSync(testDir + path.sep); - const testDirectory = path.join(rootDirectory, 'test-4'); - fs.mkdirSync(testDirectory); +const rootDirectory = fs.mkdtempSync(testDir + path.sep); +const testDirectory = path.join(rootDirectory, 'test-4'); +fs.mkdirSync(testDirectory); - const file = 'folder-5'; - const filePath = path.join(testDirectory, file); - fs.mkdirSync(filePath); +const file = 'folder-5'; +const filePath = path.join(testDirectory, file); +fs.mkdirSync(filePath); - const subfolderPath = path.join(filePath, 'subfolder-6'); - fs.mkdirSync(subfolderPath); +const subfolderPath = path.join(filePath, 'subfolder-6'); +fs.mkdirSync(subfolderPath); - const childrenFile = 'file-7.txt'; - const childrenAbsolutePath = path.join(subfolderPath, childrenFile); - const relativePath = path.join(file, path.basename(subfolderPath), childrenFile); +const childrenFile = 'file-7.txt'; +const childrenAbsolutePath = path.join(subfolderPath, childrenFile); +const relativePath = path.join(file, path.basename(subfolderPath), childrenFile); - const watcher = fs.watch(testDirectory, { recursive: true }); - let watcherClosed = false; - watcher.on('change', function(event, filename) { - assert.strictEqual(event, 'rename'); +const watcher = fs.watch(testDirectory, { recursive: true }); +let watcherClosed = false; +watcher.on('change', function(event, filename) { + assert.strictEqual(event, 'rename'); - if (filename === relativePath) { - watcher.close(); - watcherClosed = true; - } - }); + if (filename === relativePath) { + watcher.close(); + watcherClosed = true; + } +}); - await setTimeout(common.platformTimeout(100)); - fs.writeFileSync(childrenAbsolutePath, 'world'); +fs.writeFileSync(childrenAbsolutePath, 'world'); - process.once('exit', function() { - assert(watcherClosed, 'watcher Object was not closed'); - }); -})().then(common.mustCall()); +process.once('exit', function() { + assert(watcherClosed, 'watcher Object was not closed'); +}); diff --git a/test/parallel/test-fs-watch-recursive-add-file-to-new-folder.js b/test/parallel/test-fs-watch-recursive-add-file-to-new-folder.js index 
9b74cd281b62ec..1d5f0098428c03 100644 --- a/test/parallel/test-fs-watch-recursive-add-file-to-new-folder.js +++ b/test/parallel/test-fs-watch-recursive-add-file-to-new-folder.js @@ -1,7 +1,6 @@ 'use strict'; const common = require('../common'); -const { setTimeout } = require('timers/promises'); if (common.isIBMi) common.skip('IBMi does not support `fs.watch()`'); @@ -21,37 +20,33 @@ const tmpdir = require('../common/tmpdir'); const testDir = tmpdir.path; tmpdir.refresh(); -(async () => { - // Add a file to newly created folder to already watching folder +// Add a file to newly created folder to already watching folder - const rootDirectory = fs.mkdtempSync(testDir + path.sep); - const testDirectory = path.join(rootDirectory, 'test-3'); - fs.mkdirSync(testDirectory); +const rootDirectory = fs.mkdtempSync(testDir + path.sep); +const testDirectory = path.join(rootDirectory, 'test-3'); +fs.mkdirSync(testDirectory); - const filePath = path.join(testDirectory, 'folder-3'); +const filePath = path.join(testDirectory, 'folder-3'); - const childrenFile = 'file-4.txt'; - const childrenAbsolutePath = path.join(filePath, childrenFile); - const childrenRelativePath = path.join(path.basename(filePath), childrenFile); +const childrenFile = 'file-4.txt'; +const childrenAbsolutePath = path.join(filePath, childrenFile); +const childrenRelativePath = path.join(path.basename(filePath), childrenFile); - const watcher = fs.watch(testDirectory, { recursive: true }); - let watcherClosed = false; - watcher.on('change', function(event, filename) { - assert.strictEqual(event, 'rename'); - assert.ok(filename === path.basename(filePath) || filename === childrenRelativePath); +const watcher = fs.watch(testDirectory, { recursive: true }); +let watcherClosed = false; +watcher.on('change', function(event, filename) { + assert.strictEqual(event, 'rename'); + assert.ok(filename === path.basename(filePath) || filename === childrenRelativePath); - if (filename === childrenRelativePath) { - watcher.close(); - watcherClosed = true; - } - }); + if (filename === childrenRelativePath) { + watcher.close(); + watcherClosed = true; + } +}); - await setTimeout(common.platformTimeout(100)); - fs.mkdirSync(filePath); - await setTimeout(common.platformTimeout(100)); - fs.writeFileSync(childrenAbsolutePath, 'world'); +fs.mkdirSync(filePath); +fs.writeFileSync(childrenAbsolutePath, 'world'); - process.once('exit', function() { - assert(watcherClosed, 'watcher Object was not closed'); - }); -})().then(common.mustCall()); +process.once('exit', function() { + assert(watcherClosed, 'watcher Object was not closed'); +}); diff --git a/test/parallel/test-fs-watch-recursive-add-file.js b/test/parallel/test-fs-watch-recursive-add-file.js index d23d417cfaa410..d03a4144ac81bb 100644 --- a/test/parallel/test-fs-watch-recursive-add-file.js +++ b/test/parallel/test-fs-watch-recursive-add-file.js @@ -1,7 +1,6 @@ 'use strict'; const common = require('../common'); -const { setTimeout } = require('timers/promises'); if (common.isIBMi) common.skip('IBMi does not support `fs.watch()`'); @@ -21,30 +20,27 @@ const tmpdir = require('../common/tmpdir'); const testDir = tmpdir.path; tmpdir.refresh(); -(async () => { - // Add a file to already watching folder +// Add a file to already watching folder - const rootDirectory = fs.mkdtempSync(testDir + path.sep); - const testDirectory = path.join(rootDirectory, 'test-1'); - fs.mkdirSync(testDirectory); +const rootDirectory = fs.mkdtempSync(testDir + path.sep); +const testDirectory = path.join(rootDirectory, 'test-1'); 
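For context, a minimal sketch, not taken from the patch, of the synchronous pattern these reworked fs.watch tests now share: start a recursive watcher and write the file immediately, with no timers/promises delay and no async wrapper. It assumes the platform supports recursive watching (the real tests skip IBMi and AIX).

'use strict';
const fs = require('node:fs');
const os = require('node:os');
const path = require('node:path');

const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'watch-test-'));
const target = path.join(dir, 'file-1.txt');

const watcher = fs.watch(dir, { recursive: true }, (event, filename) => {
  // A 'rename' event is emitted when the new file appears.
  if (filename === path.basename(target)) {
    watcher.close();
  }
});

// No artificial delay is needed before triggering the event.
fs.writeFileSync(target, 'world');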
+fs.mkdirSync(testDirectory); - const testFile = path.join(testDirectory, 'file-1.txt'); +const testFile = path.join(testDirectory, 'file-1.txt'); - const watcher = fs.watch(testDirectory, { recursive: true }); - let watcherClosed = false; - watcher.on('change', function(event, filename) { - assert.strictEqual(event, 'rename'); +const watcher = fs.watch(testDirectory, { recursive: true }); +let watcherClosed = false; +watcher.on('change', function(event, filename) { + assert.strictEqual(event, 'rename'); - if (filename === path.basename(testFile)) { - watcher.close(); - watcherClosed = true; - } - }); + if (filename === path.basename(testFile)) { + watcher.close(); + watcherClosed = true; + } +}); - await setTimeout(common.platformTimeout(100)); - fs.writeFileSync(testFile, 'world'); +fs.writeFileSync(testFile, 'world'); - process.once('exit', function() { - assert(watcherClosed, 'watcher Object was not closed'); - }); -})().then(common.mustCall()); +process.once('exit', function() { + assert(watcherClosed, 'watcher Object was not closed'); +}); diff --git a/test/parallel/test-fs-watch-recursive-assert-leaks.js b/test/parallel/test-fs-watch-recursive-assert-leaks.js index ac2010cfb26376..9d178fcfe8212b 100644 --- a/test/parallel/test-fs-watch-recursive-assert-leaks.js +++ b/test/parallel/test-fs-watch-recursive-assert-leaks.js @@ -21,28 +21,25 @@ const tmpdir = require('../common/tmpdir'); const testDir = tmpdir.path; tmpdir.refresh(); -(async () => { - // Assert recursive watch does not leak handles - const rootDirectory = fs.mkdtempSync(testDir + path.sep); - const testDirectory = path.join(rootDirectory, 'test-7'); - const filePath = path.join(testDirectory, 'only-file.txt'); - fs.mkdirSync(testDirectory); - - let watcherClosed = false; - const watcher = fs.watch(testDirectory, { recursive: true }); - watcher.on('change', common.mustCallAtLeast(async (event, filename) => { - await setTimeout(common.platformTimeout(100)); - if (filename === path.basename(filePath)) { - watcher.close(); - watcherClosed = true; - } - await setTimeout(common.platformTimeout(100)); - assert(!process._getActiveHandles().some((handle) => handle.constructor.name === 'StatWatcher')); - })); - - process.on('exit', function() { - assert(watcherClosed, 'watcher Object was not closed'); - }); +// Assert recursive watch does not leak handles +const rootDirectory = fs.mkdtempSync(testDir + path.sep); +const testDirectory = path.join(rootDirectory, 'test-7'); +const filePath = path.join(testDirectory, 'only-file.txt'); +fs.mkdirSync(testDirectory); + +let watcherClosed = false; +const watcher = fs.watch(testDirectory, { recursive: true }); +watcher.on('change', common.mustCallAtLeast(async (event, filename) => { await setTimeout(common.platformTimeout(100)); - fs.writeFileSync(filePath, 'content'); -})().then(common.mustCall()); + if (filename === path.basename(filePath)) { + watcher.close(); + watcherClosed = true; + } + await setTimeout(common.platformTimeout(100)); + assert(!process._getActiveHandles().some((handle) => handle.constructor.name === 'StatWatcher')); +})); + +process.on('exit', function() { + assert(watcherClosed, 'watcher Object was not closed'); +}); +fs.writeFileSync(filePath, 'content'); diff --git a/test/parallel/test-fs-watch-recursive-sync-write.js b/test/parallel/test-fs-watch-recursive-sync-write.js new file mode 100644 index 00000000000000..38dce82fb115aa --- /dev/null +++ b/test/parallel/test-fs-watch-recursive-sync-write.js @@ -0,0 +1,35 @@ +'use strict'; + +const common = 
require('../common'); +const { watch, writeFileSync } = require('node:fs'); +const { join } = require('node:path'); +const tmpdir = require('../common/tmpdir.js'); +const assert = require('assert'); + +if (common.isIBMi) + common.skip('IBMi does not support `fs.watch()`'); + +// fs-watch on folders have limited capability in AIX. +// The testcase makes use of folder watching, and causes +// hang. This behavior is documented. Skip this for AIX. + +if (common.isAIX) + common.skip('folder watch capability is limited in AIX.'); + +tmpdir.refresh(); + +const tmpDir = tmpdir.path; +const filename = join(tmpDir, 'test.file'); + +const keepalive = setTimeout(() => { + throw new Error('timed out'); +}, common.platformTimeout(30_000)); + +const watcher = watch(tmpDir, { recursive: true }, common.mustCall((eventType, _filename) => { + clearTimeout(keepalive); + watcher.close(); + assert.strictEqual(eventType, 'rename'); + assert.strictEqual(join(tmpDir, _filename), filename); +})); + +writeFileSync(filename, 'foobar2'); diff --git a/test/parallel/test-fs-watch-recursive-update-file.js b/test/parallel/test-fs-watch-recursive-update-file.js index 57d3bffc7a92b0..ee8e8fe52b4374 100644 --- a/test/parallel/test-fs-watch-recursive-update-file.js +++ b/test/parallel/test-fs-watch-recursive-update-file.js @@ -1,7 +1,6 @@ 'use strict'; const common = require('../common'); -const { setTimeout } = require('timers/promises'); if (common.isIBMi) common.skip('IBMi does not support `fs.watch()`'); @@ -21,32 +20,23 @@ const tmpdir = require('../common/tmpdir'); const testDir = tmpdir.path; tmpdir.refresh(); -(async () => { - // Watch a folder and update an already existing file in it. +// Watch a folder and update an already existing file in it. - const rootDirectory = fs.mkdtempSync(testDir + path.sep); - const testDirectory = path.join(rootDirectory, 'test-0'); - fs.mkdirSync(testDirectory); +const rootDirectory = fs.mkdtempSync(testDir + path.sep); +const testDirectory = path.join(rootDirectory, 'test-0'); +fs.mkdirSync(testDirectory); - const testFile = path.join(testDirectory, 'file-1.txt'); - fs.writeFileSync(testFile, 'hello'); +const testFile = path.join(testDirectory, 'file-1.txt'); +fs.writeFileSync(testFile, 'hello'); - const watcher = fs.watch(testDirectory, { recursive: true }); - let watcherClosed = false; - watcher.on('change', common.mustCallAtLeast(function(event, filename) { - // Libuv inconsistenly emits a rename event for the file we are watching - assert.ok(event === 'change' || event === 'rename'); +const watcher = fs.watch(testDirectory, { recursive: true }); +watcher.on('change', common.mustCallAtLeast(function(event, filename) { + // Libuv inconsistenly emits a rename event for the file we are watching + assert.ok(event === 'change' || event === 'rename'); - if (filename === path.basename(testFile)) { - watcher.close(); - watcherClosed = true; - } - })); + if (filename === path.basename(testFile)) { + watcher.close(); + } +})); - await setTimeout(common.platformTimeout(100)); - fs.writeFileSync(testFile, 'hello'); - - process.once('exit', function() { - assert(watcherClosed, 'watcher Object was not closed'); - }); -})().then(common.mustCall()); +fs.writeFileSync(testFile, 'hello'); diff --git a/test/parallel/test-http2-compat-serverresponse-headers.js b/test/parallel/test-http2-compat-serverresponse-headers.js index 6065a9f8f28e16..95423fd09d8e57 100644 --- a/test/parallel/test-http2-compat-serverresponse-headers.js +++ b/test/parallel/test-http2-compat-serverresponse-headers.js @@ -38,8 
+38,18 @@ server.listen(0, common.mustCall(function() { response.setHeader(denormalised, expectedValue); assert.strictEqual(response.getHeader(denormalised), expectedValue); assert.strictEqual(response.hasHeader(denormalised), true); + assert.strictEqual(response.hasHeader(real), true); + + response.appendHeader(real, expectedValue); + assert.deepStrictEqual(response.getHeader(real), [ + expectedValue, + expectedValue, + ]); + assert.strictEqual(response.hasHeader(real), true); + response.removeHeader(denormalised); assert.strictEqual(response.hasHeader(denormalised), false); + assert.strictEqual(response.hasHeader(real), false); ['hasHeader', 'getHeader', 'removeHeader'].forEach((fnName) => { assert.throws( diff --git a/test/parallel/test-http2-compat-serverresponse-writehead-array.js b/test/parallel/test-http2-compat-serverresponse-writehead-array.js index 1d0706f5ed945f..a0cb65d4bfb70f 100644 --- a/test/parallel/test-http2-compat-serverresponse-writehead-array.js +++ b/test/parallel/test-http2-compat-serverresponse-writehead-array.js @@ -16,6 +16,7 @@ const http2 = require('http2'); server.once('request', common.mustCall((request, response) => { const returnVal = response.writeHead(200, [ ['foo', 'bar'], + ['foo', 'baz'], ['ABC', 123], ]); assert.strictEqual(returnVal, response); @@ -26,7 +27,7 @@ const http2 = require('http2'); const request = client.request(); request.on('response', common.mustCall((headers) => { - assert.strictEqual(headers.foo, 'bar'); + assert.strictEqual(headers.foo, 'bar, baz'); assert.strictEqual(headers.abc, '123'); assert.strictEqual(headers[':status'], 200); }, 1)); @@ -45,7 +46,7 @@ const http2 = require('http2'); const port = server.address().port; server.once('request', common.mustCall((request, response) => { - const returnVal = response.writeHead(200, ['foo', 'bar', 'ABC', 123]); + const returnVal = response.writeHead(200, ['foo', 'bar', 'foo', 'baz', 'ABC', 123]); assert.strictEqual(returnVal, response); response.end(common.mustCall(() => { server.close(); })); })); @@ -54,7 +55,7 @@ const http2 = require('http2'); const request = client.request(); request.on('response', common.mustCall((headers) => { - assert.strictEqual(headers.foo, 'bar'); + assert.strictEqual(headers.foo, 'bar, baz'); assert.strictEqual(headers.abc, '123'); assert.strictEqual(headers[':status'], 200); }, 1)); diff --git a/test/parallel/test-http2-perform-server-handshake.js b/test/parallel/test-http2-perform-server-handshake.js new file mode 100644 index 00000000000000..b7fa67ec59d8fa --- /dev/null +++ b/test/parallel/test-http2-perform-server-handshake.js @@ -0,0 +1,48 @@ +'use strict'; + +const common = require('../common'); + +if (!common.hasCrypto) + common.skip('missing crypto'); + +const assert = require('assert'); +const http2 = require('http2'); +const stream = require('stream'); +const makeDuplexPair = require('../common/duplexpair'); + +// Basic test +{ + const { clientSide, serverSide } = makeDuplexPair(); + + const client = http2.connect('http://example.com', { + createConnection: () => clientSide, + }); + + const session = http2.performServerHandshake(serverSide); + + session.on('stream', common.mustCall((stream, headers) => { + assert.strictEqual(headers[':path'], '/test'); + stream.respond({ + ':status': 200, + }); + stream.end('hi!'); + })); + + const req = client.request({ ':path': '/test' }); + req.on('response', common.mustCall()); + req.end(); +} + +// Double bind should fail +{ + const socket = new stream.Duplex({ + read() {}, + write() {}, + }); + + 
http2.performServerHandshake(socket); + + assert.throws(() => { + http2.performServerHandshake(socket); + }, { code: 'ERR_HTTP2_SOCKET_BOUND' }); +} diff --git a/test/parallel/test-inspector-async-context-brk.js b/test/parallel/test-inspector-async-context-brk.js new file mode 100644 index 00000000000000..1fd2b45e535966 --- /dev/null +++ b/test/parallel/test-inspector-async-context-brk.js @@ -0,0 +1,56 @@ +'use strict'; +const common = require('../common'); +const { AsyncLocalStorage } = require('async_hooks'); +const als = new AsyncLocalStorage(); + +function getStore() { + return als.getStore(); +} + +common.skipIfInspectorDisabled(); + +const assert = require('assert'); +const { Session } = require('inspector'); +const path = require('path'); +const { pathToFileURL } = require('url'); + +let valueInFunction = 0; +let valueInBreakpoint = 0; + +function debugged() { + valueInFunction = getStore(); + return 42; +} + +async function test() { + const session = new Session(); + + session.connect(); + session.post('Debugger.enable'); + + session.on('Debugger.paused', () => { + valueInBreakpoint = getStore(); + }); + + await new Promise((resolve, reject) => { + session.post('Debugger.setBreakpointByUrl', { + 'lineNumber': 22, + 'url': pathToFileURL(path.resolve(__dirname, __filename)).toString(), + 'columnNumber': 0, + 'condition': '' + }, (error, result) => { + return error ? reject(error) : resolve(result); + }); + }); + + als.run(1, debugged); + assert.strictEqual(valueInFunction, valueInBreakpoint); + assert.strictEqual(valueInFunction, 1); + + session.disconnect(); +} + +const interval = setInterval(() => {}, 1000); +test().then(common.mustCall(() => { + clearInterval(interval); +})); diff --git a/test/parallel/test-inspector-multisession-js.js b/test/parallel/test-inspector-multisession-js.js index 31aa0c5f569854..81c29e49183645 100644 --- a/test/parallel/test-inspector-multisession-js.js +++ b/test/parallel/test-inspector-multisession-js.js @@ -1,4 +1,3 @@ -// Flags: --expose-internals 'use strict'; const common = require('../common'); diff --git a/test/parallel/test-process-default.js b/test/parallel/test-process-default.js new file mode 100644 index 00000000000000..a6ceda2af3ee25 --- /dev/null +++ b/test/parallel/test-process-default.js @@ -0,0 +1,8 @@ +'use strict'; +const common = require('../common'); +const assert = require('node:assert'); + +process.default = 1; +import('node:process').then(common.mustCall((processModule) => { + assert.strictEqual(processModule.default.default, 1); +})); diff --git a/test/parallel/test-process-load-env-file.js b/test/parallel/test-process-load-env-file.js new file mode 100644 index 00000000000000..a07ba6771bb381 --- /dev/null +++ b/test/parallel/test-process-load-env-file.js @@ -0,0 +1,89 @@ +'use strict'; + +const common = require('../common'); +const fixtures = require('../../test/common/fixtures'); +const assert = require('node:assert'); +const { describe, it } = require('node:test'); + +const basicValidEnvFilePath = fixtures.path('dotenv/basic-valid.env'); +const validEnvFilePath = fixtures.path('dotenv/valid.env'); +const missingEnvFile = fixtures.path('dotenv/non-existent-file.env'); + +describe('process.loadEnvFile()', () => { + + it('supports passing path', async () => { + const code = ` + process.loadEnvFile(${JSON.stringify(validEnvFilePath)}); + const assert = require('assert'); + assert.strictEqual(process.env.BASIC, 'basic'); + `.trim(); + const child = await common.spawnPromisified( + process.execPath, + [ '--eval', code ], + { cwd: 
__dirname }, + ); + assert.strictEqual(child.stderr, ''); + assert.strictEqual(child.code, 0); + }); + + it('supports not-passing a path', async () => { + // Uses `../fixtures/dotenv/.env` file. + const code = ` + process.loadEnvFile(); + const assert = require('assert'); + assert.strictEqual(process.env.BASIC, 'basic'); + `.trim(); + const child = await common.spawnPromisified( + process.execPath, + [ '--eval', code ], + { cwd: fixtures.path('dotenv/') }, + ); + assert.strictEqual(child.stderr, ''); + assert.strictEqual(child.code, 0); + }); + + it('should throw when file does not exist', async () => { + assert.throws(() => { + process.loadEnvFile(missingEnvFile); + }, { code: 'ENOENT' }); + }); + + it('should throw when `.env` does not exist', async () => { + assert.throws(() => { + process.loadEnvFile(); + }, { code: 'ENOENT' }); + }); + + it('should check for permissions', async () => { + const code = ` + process.loadEnvFile(${JSON.stringify(missingEnvFile)}); + `.trim(); + const child = await common.spawnPromisified( + process.execPath, + [ '--eval', code, '--experimental-permission' ], + { cwd: __dirname }, + ); + assert.match(child.stderr, /Error: Access to this API has been restricted/); + assert.match(child.stderr, /code: 'ERR_ACCESS_DENIED'/); + assert.match(child.stderr, /permission: 'FileSystemRead'/); + if (!common.isWindows) { + assert(child.stderr.includes(`resource: '${JSON.stringify(missingEnvFile).replaceAll('"', '')}'`)); + } + assert.strictEqual(child.code, 1); + }); + + it('loadEnvFile does not mutate --env-file output', async () => { + const code = ` + process.loadEnvFile(${JSON.stringify(basicValidEnvFilePath)}); + require('assert')(process.env.BASIC === 'basic'); + `.trim(); + const child = await common.spawnPromisified( + process.execPath, + [ `--env-file=${validEnvFilePath}`, '--eval', code ], + { cwd: __dirname }, + ); + assert.strictEqual(child.stdout, ''); + assert.strictEqual(child.stderr, ''); + assert.strictEqual(child.code, 0); + }); +}); diff --git a/test/parallel/test-webstream-structured-clone-no-leftovers.mjs b/test/parallel/test-webstream-structured-clone-no-leftovers.mjs new file mode 100644 index 00000000000000..e8af095cce18d5 --- /dev/null +++ b/test/parallel/test-webstream-structured-clone-no-leftovers.mjs @@ -0,0 +1,28 @@ +import '../common/index.mjs'; +import { test } from 'node:test'; +import assert from 'node:assert'; + +test('do not leak promises', async () => { + const buf = new Uint8Array(1); + const readable = new ReadableStream({ + start(controller) { + controller.enqueue(buf); + controller.close(); + } + }); + + const [out1, out2] = readable.tee(); + const cloned = structuredClone(out2, { transfer: [out2] }); + + for await (const chunk of cloned) { + assert.deepStrictEqual(chunk, buf); + } + + for await (const chunk of out2) { + assert.deepStrictEqual(chunk, buf); + } + + for await (const chunk of out1) { + assert.deepStrictEqual(chunk, buf); + } +}); diff --git a/test/parallel/test-whatwg-url-custom-searchparams.js b/test/parallel/test-whatwg-url-custom-searchparams.js index 75fa1779bdeb45..faec86e017a2ec 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams.js +++ b/test/parallel/test-whatwg-url-custom-searchparams.js @@ -43,6 +43,42 @@ assert.strictEqual(sp.toString(), serialized); assert.strictEqual(m.search, `?${serialized}`); +sp.delete('a'); +values.forEach((i) => sp.append('a', i)); +assert.strictEqual(m.href, `http://example.org/?${serialized}`); + +sp.delete('a'); +values.forEach((i) => sp.append('a', i)); 
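As a side note, a small sketch, not part of the patch, of the URL and URLSearchParams synchronization that the added test-whatwg-url-custom-searchparams.js assertions exercise: appends made through url.searchParams show up in href, toString() and toJSON(), and assigning href or search resets the live URLSearchParams object.

const url = new URL('http://example.org/');
url.searchParams.append('a', '1');
url.searchParams.append('a', '2');
console.log(url.href);              // http://example.org/?a=1&a=2
console.log(url.toJSON());          // http://example.org/?a=1&a=2

url.search = '';
console.log(url.href);              // http://example.org/
console.log(url.searchParams.size); // 0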
+assert.strictEqual(m.toString(), `http://example.org/?${serialized}`); + +sp.delete('a'); +values.forEach((i) => sp.append('a', i)); +assert.strictEqual(m.toJSON(), `http://example.org/?${serialized}`); + +sp.delete('a'); +values.forEach((i) => sp.append('a', i)); +m.href = 'http://example.org'; +assert.strictEqual(m.href, 'http://example.org/'); +assert.strictEqual(sp.size, 0); + +sp.delete('a'); +values.forEach((i) => sp.append('a', i)); +m.search = ''; +assert.strictEqual(m.href, 'http://example.org/'); +assert.strictEqual(sp.size, 0); + +sp.delete('a'); +values.forEach((i) => sp.append('a', i)); +m.pathname = '/test'; +assert.strictEqual(m.href, `http://example.org/test?${serialized}`); +m.pathname = ''; + +sp.delete('a'); +values.forEach((i) => sp.append('a', i)); +m.hash = '#test'; +assert.strictEqual(m.href, `http://example.org/?${serialized}#test`); +m.hash = ''; + assert.strictEqual(sp[Symbol.iterator], sp.entries); let key, val; diff --git a/test/parallel/test-whatwg-url-invalidthis.js b/test/parallel/test-whatwg-url-invalidthis.js index b46b5d8cceb8fa..f4d9f91ada0095 100644 --- a/test/parallel/test-whatwg-url-invalidthis.js +++ b/test/parallel/test-whatwg-url-invalidthis.js @@ -11,12 +11,26 @@ const assert = require('assert'); ].forEach((i) => { assert.throws(() => Reflect.apply(URL.prototype[i], [], {}), { name: 'TypeError', - message: /Cannot read private member/, + message: /Receiver must be an instance of class/, }); }); [ 'href', + 'search', +].forEach((i) => { + assert.throws(() => Reflect.get(URL.prototype, i, {}), { + name: 'TypeError', + message: /Receiver must be an instance of class/, + }); + + assert.throws(() => Reflect.set(URL.prototype, i, null, {}), { + name: 'TypeError', + message: /Cannot read private member/, + }); +}); + +[ 'protocol', 'username', 'password', @@ -24,7 +38,6 @@ const assert = require('assert'); 'hostname', 'port', 'pathname', - 'search', 'hash', ].forEach((i) => { assert.throws(() => Reflect.get(URL.prototype, i, {}), { diff --git a/test/parallel/util-parse-env.js b/test/parallel/util-parse-env.js new file mode 100644 index 00000000000000..54488e50da91c8 --- /dev/null +++ b/test/parallel/util-parse-env.js @@ -0,0 +1,59 @@ +'use strict'; + +require('../common'); +const fixtures = require('../../test/common/fixtures'); +const assert = require('node:assert'); +const util = require('node:util'); +const fs = require('node:fs'); + +{ + const validEnvFilePath = fixtures.path('dotenv/valid.env'); + const validContent = fs.readFileSync(validEnvFilePath, 'utf8'); + + assert.deepStrictEqual(util.parseEnv(validContent), { + AFTER_LINE: 'after_line', + BACKTICKS: 'backticks', + BACKTICKS_INSIDE_DOUBLE: '`backticks` work inside double quotes', + BACKTICKS_INSIDE_SINGLE: '`backticks` work inside single quotes', + BACKTICKS_SPACED: ' backticks ', + BASIC: 'basic', + DOUBLE_AND_SINGLE_QUOTES_INSIDE_BACKTICKS: 'double "quotes" and single \'quotes\' work inside backticks', + DOUBLE_QUOTES: 'double_quotes', + DOUBLE_QUOTES_INSIDE_BACKTICKS: 'double "quotes" work inside backticks', + DOUBLE_QUOTES_INSIDE_SINGLE: 'double "quotes" work inside single quotes', + DOUBLE_QUOTES_SPACED: ' double quotes ', + DOUBLE_QUOTES_WITH_NO_SPACE_BRACKET: '{ port: $MONGOLAB_PORT}', + EMAIL: 'therealnerdybeast@example.tld', + EMPTY: '', + EMPTY_BACKTICKS: '', + EMPTY_DOUBLE_QUOTES: '', + EMPTY_SINGLE_QUOTES: '', + EQUAL_SIGNS: 'equals==', + INLINE_COMMENTS: 'inline comments', + INLINE_COMMENTS_BACKTICKS: 'inline comments outside of #backticks', + INLINE_COMMENTS_DOUBLE_QUOTES: 
'inline comments outside of #doublequotes', + INLINE_COMMENTS_SINGLE_QUOTES: 'inline comments outside of #singlequotes', + INLINE_COMMENTS_SPACE: 'inline comments start with a', + RETAIN_INNER_QUOTES: '{"foo": "bar"}', + RETAIN_INNER_QUOTES_AS_BACKTICKS: '{"foo": "bar\'s"}', + RETAIN_INNER_QUOTES_AS_STRING: '{"foo": "bar"}', + SINGLE_QUOTES: 'single_quotes', + SINGLE_QUOTES_INSIDE_BACKTICKS: "single 'quotes' work inside backticks", + SINGLE_QUOTES_INSIDE_DOUBLE: "single 'quotes' work inside double quotes", + SINGLE_QUOTES_SPACED: ' single quotes ', + SPACED_KEY: 'parsed', + TRIM_SPACE_FROM_UNQUOTED: 'some spaced out string' + }); +} + +assert.deepStrictEqual(util.parseEnv(''), {}); +assert.deepStrictEqual(util.parseEnv('FOO=bar\nFOO=baz\n'), { FOO: 'baz' }); + +// Test for invalid input. +assert.throws(() => { + for (const value of [null, undefined, {}, []]) { + util.parseEnv(value); + } +}, { + code: 'ERR_INVALID_ARG_TYPE', +}); diff --git a/test/sequential/sequential.status b/test/sequential/sequential.status index c3f2cda226dcbe..1549463cec3282 100644 --- a/test/sequential/sequential.status +++ b/test/sequential/sequential.status @@ -10,6 +10,8 @@ test-cpu-prof-dir-worker: PASS, FLAKY # https://github.com/nodejs/node/issues/44898 test-watch-mode: PASS, FLAKY test-watch-mode-inspect: PASS, FLAKY +# https://github.com/nodejs/node/issues/47409 +test-http2-large-file: PASS, FLAKY [$system==win32] # https://github.com/nodejs/node/issues/47116 @@ -47,8 +49,9 @@ test-watch-mode-inspect: SKIP # https://github.com/nodejs/node/issues/41286 test-performance-eventloopdelay: PASS, FLAKY -[$system==ppc || $system==ppc64] +[$system==linux && $arch==ppc64] # https://github.com/nodejs/node/issues/50740 +test-single-executable-application-disable-experimental-sea-warning: PASS, FLAKY test-single-executable-application-empty: PASS, FLAKY test-single-executable-application-snapshot-and-code-cache: PASS, FLAKY test-single-executable-application-snapshot: PASS, FLAKY diff --git a/test/sequential/test-async-wrap-getasyncid.js b/test/sequential/test-async-wrap-getasyncid.js index 7fc7c820d06cc9..cd5957de11e157 100644 --- a/test/sequential/test-async-wrap-getasyncid.js +++ b/test/sequential/test-async-wrap-getasyncid.js @@ -47,8 +47,6 @@ const { getSystemErrorName } = require('util'); delete providers.WORKER; // TODO(danbev): Test for these delete providers.JSUDPWRAP; - if (!common.isMainThread) - delete providers.INSPECTORJSBINDING; delete providers.KEYPAIRGENREQUEST; delete providers.KEYGENREQUEST; delete providers.KEYEXPORTREQUEST; @@ -316,13 +314,6 @@ if (common.hasCrypto) { // eslint-disable-line node-core/crypto-check testInitialized(req, 'SendWrap'); } -if (process.features.inspector && common.isMainThread) { - const binding = internalBinding('inspector'); - const handle = new binding.Connection(() => {}); - testInitialized(handle, 'Connection'); - handle.disconnect(); -} - // PROVIDER_HEAPDUMP { v8.getHeapSnapshot().destroy(); diff --git a/tools/dep_updaters/update-root-certs.mjs b/tools/dep_updaters/update-root-certs.mjs index a5516860248603..64f3c88b851b7f 100644 --- a/tools/dep_updaters/update-root-certs.mjs +++ b/tools/dep_updaters/update-root-certs.mjs @@ -28,7 +28,7 @@ const getCertdataURL = (version) => { return certdataURL; }; -const normalizeTD = (text) => { +const normalizeTD = (text = '') => { // Remove whitespace and any HTML tags. 
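Returning to the util.parseEnv() additions above, a brief sketch of its behaviour as pinned down by the new util-parse-env.js test; the duplicate-key and empty-input cases are taken directly from that test, while the quote and comment handling is assumed from the valid.env expectations.

const util = require('node:util');
const assert = require('node:assert');

// Empty input yields an empty object; later keys win on duplicates.
assert.deepStrictEqual(util.parseEnv(''), {});
assert.deepStrictEqual(util.parseEnv('FOO=bar\nFOO=baz\n'), { FOO: 'baz' });

// Quotes are stripped and full-line comments are ignored (assumed).
assert.deepStrictEqual(
  util.parseEnv('# comment\nBASIC=basic\nDOUBLE_QUOTES="double_quotes"\n'),
  { BASIC: 'basic', DOUBLE_QUOTES: 'double_quotes' },
);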
return text?.trim().replace(/<.*?>/g, ''); }; diff --git a/tools/inspector_protocol/code_generator.py b/tools/inspector_protocol/code_generator.py index 0b8baea0ae710e..a6e163c6fe911f 100755 --- a/tools/inspector_protocol/code_generator.py +++ b/tools/inspector_protocol/code_generator.py @@ -638,7 +638,6 @@ def main(): "Object_h.template", "ValueConversions_h.template", "Maybe_h.template", - "Array_h.template", "DispatcherBase_h.template", "Parser_h.template", "encoding_h.template", diff --git a/tools/inspector_protocol/inspector_protocol.gni b/tools/inspector_protocol/inspector_protocol.gni index d612fb6aebb52c..3e934526b82ae1 100644 --- a/tools/inspector_protocol/inspector_protocol.gni +++ b/tools/inspector_protocol/inspector_protocol.gni @@ -36,7 +36,6 @@ template("inspector_protocol_generate") { "$inspector_protocol_dir/lib/encoding_h.template", "$inspector_protocol_dir/lib/encoding_cpp.template", "$inspector_protocol_dir/lib/Allocator_h.template", - "$inspector_protocol_dir/lib/Array_h.template", "$inspector_protocol_dir/lib/DispatcherBase_cpp.template", "$inspector_protocol_dir/lib/DispatcherBase_h.template", "$inspector_protocol_dir/lib/ErrorSupport_cpp.template", diff --git a/tools/inspector_protocol/inspector_protocol.gypi b/tools/inspector_protocol/inspector_protocol.gypi index d614474e69c32e..c11386dc05174e 100644 --- a/tools/inspector_protocol/inspector_protocol.gypi +++ b/tools/inspector_protocol/inspector_protocol.gypi @@ -8,7 +8,6 @@ 'lib/encoding_h.template', 'lib/encoding_cpp.template', 'lib/Allocator_h.template', - 'lib/Array_h.template', 'lib/DispatcherBase_cpp.template', 'lib/DispatcherBase_h.template', 'lib/ErrorSupport_cpp.template', diff --git a/tools/inspector_protocol/lib/Array_h.template b/tools/inspector_protocol/lib/Array_h.template deleted file mode 100644 index c420a0f7e9650a..00000000000000 --- a/tools/inspector_protocol/lib/Array_h.template +++ /dev/null @@ -1,138 +0,0 @@ -// This file is generated by Array_h.template. - -// Copyright 2016 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#ifndef {{"_".join(config.protocol.namespace)}}_Array_h -#define {{"_".join(config.protocol.namespace)}}_Array_h - -//#include "ErrorSupport.h" -//#include "Forward.h" -//#include "ValueConversions.h" -//#include "Values.h" - -{% for namespace in config.protocol.namespace %} -namespace {{namespace}} { -{% endfor %} - -template -class Array { -public: - static std::unique_ptr> create() - { - return std::unique_ptr>(new Array()); - } - - static std::unique_ptr> fromValue(protocol::Value* value, ErrorSupport* errors) - { - protocol::ListValue* array = ListValue::cast(value); - if (!array) { - errors->addError("array expected"); - return nullptr; - } - std::unique_ptr> result(new Array()); - errors->push(); - for (size_t i = 0; i < array->size(); ++i) { - errors->setName(StringUtil::fromInteger(i)); - std::unique_ptr item = ValueConversions::fromValue(array->at(i), errors); - result->m_vector.push_back(std::move(item)); - } - errors->pop(); - if (errors->hasErrors()) - return nullptr; - return result; - } - - void addItem(std::unique_ptr value) - { - m_vector.push_back(std::move(value)); - } - - size_t length() - { - return m_vector.size(); - } - - T* get(size_t index) - { - return m_vector[index].get(); - } - - std::unique_ptr toValue() - { - std::unique_ptr result = ListValue::create(); - for (auto& item : m_vector) - result->pushValue(ValueConversions::toValue(item)); - return result; - } - -private: - std::vector> m_vector; -}; - -template -class ArrayBase { -public: - static std::unique_ptr> create() - { - return std::unique_ptr>(new Array()); - } - - static std::unique_ptr> fromValue(protocol::Value* value, ErrorSupport* errors) - { - protocol::ListValue* array = ListValue::cast(value); - if (!array) { - errors->addError("array expected"); - return nullptr; - } - errors->push(); - std::unique_ptr> result(new Array()); - for (size_t i = 0; i < array->size(); ++i) { - errors->setName(StringUtil::fromInteger(i)); - T item = ValueConversions::fromValue(array->at(i), errors); - result->m_vector.push_back(item); - } - errors->pop(); - if (errors->hasErrors()) - return nullptr; - return result; - } - - void addItem(const T& value) - { - m_vector.push_back(value); - } - - size_t length() - { - return m_vector.size(); - } - - T get(size_t index) - { - return m_vector[index]; - } - - std::unique_ptr toValue() - { - std::unique_ptr result = ListValue::create(); - for (auto& item : m_vector) - result->pushValue(ValueConversions::toValue(item)); - return result; - } - -private: - std::vector m_vector; -}; - -template<> class Array : public ArrayBase {}; -template<> class Array : public ArrayBase {}; -template<> class Array : public ArrayBase {}; -template<> class Array : public ArrayBase {}; - -{% for namespace in config.protocol.namespace %} -} // namespace {{namespace}} -{% endfor %} - -#endif // !defined({{"_".join(config.protocol.namespace)}}_Array_h) diff --git a/tools/inspector_protocol/lib/Forward_h.template b/tools/inspector_protocol/lib/Forward_h.template index 746ba20bba15f4..a6c4b12feca013 100644 --- a/tools/inspector_protocol/lib/Forward_h.template +++ b/tools/inspector_protocol/lib/Forward_h.template @@ -22,7 +22,6 @@ namespace {{namespace}} { {% endfor %} -template class Array; class DictionaryValue; class DispatchResponse; class ErrorSupport; @@ -35,6 +34,26 @@ class StringValue; class UberDispatcher; class Value; +namespace detail { +template +struct ArrayTypedef { typedef std::vector> type; }; + +template <> +struct ArrayTypedef { typedef std::vector type; }; + +template <> 
+struct ArrayTypedef { typedef std::vector type; }; + +template <> +struct ArrayTypedef { typedef std::vector type; }; + +template <> +struct ArrayTypedef { typedef std::vector type; }; +} // namespace detail + +template +using Array = typename detail::ArrayTypedef::type; + {% for namespace in config.protocol.namespace %} } // namespace {{namespace}} {% endfor %} diff --git a/tools/inspector_protocol/lib/ValueConversions_h.template b/tools/inspector_protocol/lib/ValueConversions_h.template index 2ee5b724545a33..63baf689c6e11c 100644 --- a/tools/inspector_protocol/lib/ValueConversions_h.template +++ b/tools/inspector_protocol/lib/ValueConversions_h.template @@ -128,6 +128,72 @@ struct ValueConversions { } }; +template +struct ValueConversions>> { + static std::unique_ptr>> fromValue(protocol::Value* value, ErrorSupport* errors) { + protocol::ListValue* array = ListValue::cast(value); + if (!array) { + errors->addError("array expected"); + return nullptr; + } + errors->push(); + std::unique_ptr>> result( + new std::vector>()); + result->reserve(array->size()); + for (size_t i = 0; i < array->size(); ++i) { + errors->setName(StringUtil::fromInteger(i)); + auto item = ValueConversions::fromValue(array->at(i), errors); + result->emplace_back(std::move(item)); + } + errors->pop(); + if (errors->hasErrors()) + return nullptr; + return result; + } + + static std::unique_ptr toValue(std::vector>* v) + { + std::unique_ptr result = ListValue::create(); + result->reserve(v->size()); + for (auto& item : *v) + result->pushValue(ValueConversions::toValue(item.get())); + return result; + } + +}; + +template +struct ValueConversions> { + static std::unique_ptr> fromValue(protocol::Value* value, ErrorSupport* errors) { + protocol::ListValue* array = ListValue::cast(value); + if (!array) { + errors->addError("array expected"); + return nullptr; + } + errors->push(); + std::unique_ptr> result(new std::vector()); + result->reserve(array->size()); + for (size_t i = 0; i < array->size(); ++i) { + errors->setName(StringUtil::fromInteger(i)); + auto item = ValueConversions::fromValue(array->at(i), errors); + result->emplace_back(std::move(item)); + } + errors->pop(); + if (errors->hasErrors()) + return nullptr; + return result; + } + + static std::unique_ptr toValue(std::vector* v) + { + std::unique_ptr result = ListValue::create(); + result->reserve(v->size()); + for (auto& item : *v) + result->pushValue(ValueConversions::toValue(item)); + return result; + } +}; + template<> struct ValueConversions { static std::unique_ptr fromValue(protocol::Value* value, ErrorSupport* errors) diff --git a/tools/inspector_protocol/lib/Values_h.template b/tools/inspector_protocol/lib/Values_h.template index 4a2e58f4cd6850..4d6fde07d4df2c 100644 --- a/tools/inspector_protocol/lib/Values_h.template +++ b/tools/inspector_protocol/lib/Values_h.template @@ -271,6 +271,7 @@ public: Value* at(size_t index); size_t size() const { return m_data.size(); } + void reserve(size_t capacity) { m_data.reserve(capacity); } private: ListValue(); diff --git a/tools/lint-md/package-lock.json b/tools/lint-md/package-lock.json index 711b67857a541e..0365b39686666c 100644 --- a/tools/lint-md/package-lock.json +++ b/tools/lint-md/package-lock.json @@ -18,7 +18,7 @@ "devDependencies": { "@rollup/plugin-commonjs": "^25.0.7", "@rollup/plugin-node-resolve": "^15.2.3", - "rollup": "^4.9.4", + "rollup": "^4.9.5", "rollup-plugin-cleanup": "^3.2.1" } }, @@ -101,9 +101,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.9.4", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.9.4.tgz", - "integrity": "sha512-ub/SN3yWqIv5CWiAZPHVS1DloyZsJbtXmX4HxUTIpS0BHm9pW5iYBo2mIZi+hE3AeiTzHz33blwSnhdUo+9NpA==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.9.5.tgz", + "integrity": "sha512-idWaG8xeSRCfRq9KpRysDHJ/rEHBEXcHuJ82XY0yYFIWnLMjZv9vF/7DOq8djQ2n3Lk6+3qfSH8AqlmHlmi1MA==", "cpu": [ "arm" ], @@ -114,9 +114,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.9.4.tgz", - "integrity": "sha512-ehcBrOR5XTl0W0t2WxfTyHCR/3Cq2jfb+I4W+Ch8Y9b5G+vbAecVv0Fx/J1QKktOrgUYsIKxWAKgIpvw56IFNA==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.9.5.tgz", + "integrity": "sha512-f14d7uhAMtsCGjAYwZGv6TwuS3IFaM4ZnGMUn3aCBgkcHAYErhV1Ad97WzBvS2o0aaDv4mVz+syiN0ElMyfBPg==", "cpu": [ "arm64" ], @@ -127,9 +127,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.9.4.tgz", - "integrity": "sha512-1fzh1lWExwSTWy8vJPnNbNM02WZDS8AW3McEOb7wW+nPChLKf3WG2aG7fhaUmfX5FKw9zhsF5+MBwArGyNM7NA==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.9.5.tgz", + "integrity": "sha512-ndoXeLx455FffL68OIUrVr89Xu1WLzAG4n65R8roDlCoYiQcGGg6MALvs2Ap9zs7AHg8mpHtMpwC8jBBjZrT/w==", "cpu": [ "arm64" ], @@ -140,9 +140,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.9.4.tgz", - "integrity": "sha512-Gc6cukkF38RcYQ6uPdiXi70JB0f29CwcQ7+r4QpfNpQFVHXRd0DfWFidoGxjSx1DwOETM97JPz1RXL5ISSB0pA==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.9.5.tgz", + "integrity": "sha512-UmElV1OY2m/1KEEqTlIjieKfVwRg0Zwg4PLgNf0s3glAHXBN99KLpw5A5lrSYCa1Kp63czTpVll2MAqbZYIHoA==", "cpu": [ "x64" ], @@ -153,9 +153,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.9.4.tgz", - "integrity": "sha512-g21RTeFzoTl8GxosHbnQZ0/JkuFIB13C3T7Y0HtKzOXmoHhewLbVTFBQZu+z5m9STH6FZ7L/oPgU4Nm5ErN2fw==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.9.5.tgz", + "integrity": "sha512-Q0LcU61v92tQB6ae+udZvOyZ0wfpGojtAKrrpAaIqmJ7+psq4cMIhT/9lfV6UQIpeItnq/2QDROhNLo00lOD1g==", "cpu": [ "arm" ], @@ -166,9 +166,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.9.4.tgz", - "integrity": "sha512-TVYVWD/SYwWzGGnbfTkrNpdE4HON46orgMNHCivlXmlsSGQOx/OHHYiQcMIOx38/GWgwr/po2LBn7wypkWw/Mg==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.9.5.tgz", + "integrity": "sha512-dkRscpM+RrR2Ee3eOQmRWFjmV/payHEOrjyq1VZegRUa5OrZJ2MAxBNs05bZuY0YCtpqETDy1Ix4i/hRqX98cA==", "cpu": [ "arm64" ], @@ -179,9 +179,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.9.4", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.9.4.tgz", - "integrity": "sha512-XcKvuendwizYYhFxpvQ3xVpzje2HHImzg33wL9zvxtj77HvPStbSGI9czrdbfrf8DGMcNNReH9pVZv8qejAQ5A==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.9.5.tgz", + "integrity": "sha512-QaKFVOzzST2xzY4MAmiDmURagWLFh+zZtttuEnuNn19AiZ0T3fhPyjPPGwLNdiDT82ZE91hnfJsUiDwF9DClIQ==", "cpu": [ "arm64" ], @@ -192,9 +192,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.9.4.tgz", - "integrity": "sha512-LFHS/8Q+I9YA0yVETyjonMJ3UA+DczeBd/MqNEzsGSTdNvSJa1OJZcSH8GiXLvcizgp9AlHs2walqRcqzjOi3A==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.9.5.tgz", + "integrity": "sha512-HeGqmRJuyVg6/X6MpE2ur7GbymBPS8Np0S/vQFHDmocfORT+Zt76qu+69NUoxXzGqVP1pzaY6QIi0FJWLC3OPA==", "cpu": [ "riscv64" ], @@ -205,9 +205,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.9.4.tgz", - "integrity": "sha512-dIYgo+j1+yfy81i0YVU5KnQrIJZE8ERomx17ReU4GREjGtDW4X+nvkBak2xAUpyqLs4eleDSj3RrV72fQos7zw==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.9.5.tgz", + "integrity": "sha512-Dq1bqBdLaZ1Gb/l2e5/+o3B18+8TI9ANlA1SkejZqDgdU/jK/ThYaMPMJpVMMXy2uRHvGKbkz9vheVGdq3cJfA==", "cpu": [ "x64" ], @@ -218,9 +218,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.9.4.tgz", - "integrity": "sha512-RoaYxjdHQ5TPjaPrLsfKqR3pakMr3JGqZ+jZM0zP2IkDtsGa4CqYaWSfQmZVgFUCgLrTnzX+cnHS3nfl+kB6ZQ==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.9.5.tgz", + "integrity": "sha512-ezyFUOwldYpj7AbkwyW9AJ203peub81CaAIVvckdkyH8EvhEIoKzaMFJj0G4qYJ5sw3BpqhFrsCc30t54HV8vg==", "cpu": [ "x64" ], @@ -231,9 +231,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.9.4.tgz", - "integrity": "sha512-T8Q3XHV+Jjf5e49B4EAaLKV74BbX7/qYBRQ8Wop/+TyyU0k+vSjiLVSHNWdVd1goMjZcbhDmYZUYW5RFqkBNHQ==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.9.5.tgz", + "integrity": "sha512-aHSsMnUw+0UETB0Hlv7B/ZHOGY5bQdwMKJSzGfDfvyhnpmVxLMGnQPGNE9wgqkLUs3+gbG1Qx02S2LLfJ5GaRQ==", "cpu": [ "arm64" ], @@ -244,9 +244,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.9.4.tgz", - "integrity": "sha512-z+JQ7JirDUHAsMecVydnBPWLwJjbppU+7LZjffGf+Jvrxq+dVjIE7By163Sc9DKc3ADSU50qPVw0KonBS+a+HQ==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.9.5.tgz", + "integrity": "sha512-AiqiLkb9KSf7Lj/o1U3SEP9Zn+5NuVKgFdRIZkvd4N0+bYrTOovVd0+LmYCPQGbocT4kvFyK+LXCDiXPBF3fyA==", "cpu": [ "ia32" ], @@ -257,9 +257,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.9.4", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.9.4.tgz", - "integrity": "sha512-LfdGXCV9rdEify1oxlN9eamvDSjv9md9ZVMAbNHA87xqIfFCxImxan9qZ8+Un54iK2nnqPlbnSi4R54ONtbWBw==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.9.5.tgz", + "integrity": "sha512-1q+mykKE3Vot1kaFJIDoUFv5TuW+QQVaf2FmTT9krg86pQrGStOSJJ0Zil7CFagyxDuouTepzt5Y5TVzyajOdQ==", "cpu": [ "x64" ], @@ -5791,9 +5791,9 @@ } }, "node_modules/rollup": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.9.4.tgz", - "integrity": "sha512-2ztU7pY/lrQyXSCnnoU4ICjT/tCG9cdH3/G25ERqE3Lst6vl2BCM5hL2Nw+sslAvAf+ccKsAq1SkKQALyqhR7g==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.9.5.tgz", + "integrity": "sha512-E4vQW0H/mbNMw2yLSqJyjtkHY9dslf/p0zuT1xehNRqUTBOFMqEjguDvqhXr7N7r/4ttb2jr4T41d3dncmIgbQ==", "dev": true, "dependencies": { "@types/estree": "1.0.5" @@ -5806,19 +5806,19 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.9.4", - "@rollup/rollup-android-arm64": "4.9.4", - "@rollup/rollup-darwin-arm64": "4.9.4", - "@rollup/rollup-darwin-x64": "4.9.4", - "@rollup/rollup-linux-arm-gnueabihf": "4.9.4", - "@rollup/rollup-linux-arm64-gnu": "4.9.4", - "@rollup/rollup-linux-arm64-musl": "4.9.4", - "@rollup/rollup-linux-riscv64-gnu": "4.9.4", - "@rollup/rollup-linux-x64-gnu": "4.9.4", - "@rollup/rollup-linux-x64-musl": "4.9.4", - "@rollup/rollup-win32-arm64-msvc": "4.9.4", - "@rollup/rollup-win32-ia32-msvc": "4.9.4", - "@rollup/rollup-win32-x64-msvc": "4.9.4", + "@rollup/rollup-android-arm-eabi": "4.9.5", + "@rollup/rollup-android-arm64": "4.9.5", + "@rollup/rollup-darwin-arm64": "4.9.5", + "@rollup/rollup-darwin-x64": "4.9.5", + "@rollup/rollup-linux-arm-gnueabihf": "4.9.5", + "@rollup/rollup-linux-arm64-gnu": "4.9.5", + "@rollup/rollup-linux-arm64-musl": "4.9.5", + "@rollup/rollup-linux-riscv64-gnu": "4.9.5", + "@rollup/rollup-linux-x64-gnu": "4.9.5", + "@rollup/rollup-linux-x64-musl": "4.9.5", + "@rollup/rollup-win32-arm64-msvc": "4.9.5", + "@rollup/rollup-win32-ia32-msvc": "4.9.5", + "@rollup/rollup-win32-x64-msvc": "4.9.5", "fsevents": "~2.3.2" } }, diff --git a/tools/lint-md/package.json b/tools/lint-md/package.json index 2a7ac9a8942372..75a5dbaec6f908 100644 --- a/tools/lint-md/package.json +++ b/tools/lint-md/package.json @@ -16,7 +16,7 @@ "devDependencies": { "@rollup/plugin-commonjs": "^25.0.7", "@rollup/plugin-node-resolve": "^15.2.3", - "rollup": "^4.9.4", + "rollup": "^4.9.5", "rollup-plugin-cleanup": "^3.2.1" } } diff --git a/tools/make-v8.sh b/tools/make-v8.sh index 62cabc70a6eac7..f51623be9eaeda 100755 --- a/tools/make-v8.sh +++ b/tools/make-v8.sh @@ -9,6 +9,11 @@ cd deps/v8 || exit find . -type d -name .git -print0 | xargs -0 rm -rf ../../tools/v8/fetch_deps.py . 
+JOBS_ARG= +if [ "${JOBS}" ]; then + JOBS_ARG="-j ${JOBS}" +fi + ARCH=$(arch) if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then TARGET_ARCH=$ARCH @@ -46,10 +51,10 @@ if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then gcc --version export PKG_CONFIG_PATH=$BUILD_TOOLS/pkg-config gn gen -v "out.gn/$BUILD_ARCH_TYPE" --args="is_component_build=false is_debug=false use_goma=false goma_dir=\"None\" use_custom_libcxx=false v8_target_cpu=\"$TARGET_ARCH\" target_cpu=\"$TARGET_ARCH\" v8_enable_backtrace=true $CC_WRAPPER" - ninja -v -C "out.gn/$BUILD_ARCH_TYPE" d8 cctest inspector-test + ninja -v -C "out.gn/$BUILD_ARCH_TYPE" "${JOBS_ARG}" d8 cctest inspector-test else DEPOT_TOOLS_DIR="$(cd _depot_tools && pwd)" # shellcheck disable=SC2086 PATH="$DEPOT_TOOLS_DIR":$PATH tools/dev/v8gen.py "$BUILD_ARCH_TYPE" --no-goma $V8_BUILD_OPTIONS - PATH="$DEPOT_TOOLS_DIR":$PATH ninja -C "out.gn/$BUILD_ARCH_TYPE/" d8 cctest inspector-test + PATH="$DEPOT_TOOLS_DIR":$PATH ninja -C "out.gn/$BUILD_ARCH_TYPE/" "${JOBS_ARG}" d8 cctest inspector-test fi diff --git a/tools/v8_gypfiles/v8.gyp b/tools/v8_gypfiles/v8.gyp index 4d69a59fcbc5b5..cd9e76bda44b15 100644 --- a/tools/v8_gypfiles/v8.gyp +++ b/tools/v8_gypfiles/v8.gyp @@ -1024,6 +1024,18 @@ 'sources': [ '; } diff --git a/vcbuild.bat b/vcbuild.bat index d093138b31c6a4..c3852c89cc5e18 100644 --- a/vcbuild.bat +++ b/vcbuild.bat @@ -337,7 +337,8 @@ if errorlevel 1 ( if "%target%" == "Clean" goto exit :after-build -rd %config% +:: Check existence of %config% before removing it. +if exist %config% rd %config% if errorlevel 1 echo "Old build output exists at 'out\%config%'. Please remove." & exit /B :: Use /J because /D (symlink) requires special permissions. if EXIST out\%config% mklink /J %config% out\%config%
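For readers following the make-v8.sh hunk above: the new JOBS_ARG variable simply turns an optional JOBS environment variable into an extra -j argument for ninja. Below is a minimal sketch of that pattern in isolation; the JOBS=8 make v8 entry point and the echo placeholder are assumptions added for illustration, not part of the patch.

    #!/bin/sh
    # Optional-flag pattern from the make-v8.sh change: JOBS is only forwarded
    # when the caller sets it, e.g. JOBS=8 make v8 (assumed entry point).
    JOBS_ARG=
    if [ "${JOBS}" ]; then
      # Build the extra argument only when a specific job count was requested.
      JOBS_ARG="-j ${JOBS}"
    fi
    # Placeholder standing in for the ninja invocation in the diff; it only
    # prints the argument that would be appended.
    echo "extra ninja argument: '${JOBS_ARG}'"

When JOBS is unset, JOBS_ARG stays empty and ninja falls back to its own default parallelism; when JOBS is set, the equivalent of -j ${JOBS} is passed through to the d8/cctest/inspector-test build.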