From 288416a76463891b911fcdd97ca21a7986c370a5 Mon Sep 17 00:00:00 2001
From: npm CLI robot
Date: Sat, 23 Nov 2024 23:55:06 -0800
Subject: [PATCH] deps: upgrade npm to 10.9.1
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
PR-URL: https://github.com/nodejs/node/pull/55951
Reviewed-By: Luigi Pinca
Reviewed-By: Michaël Zasso
---
deps/npm/docs/content/commands/npm-ls.md | 2 +-
deps/npm/docs/content/commands/npm.md | 2 +-
deps/npm/docs/output/commands/npm-access.html | 4 +-
.../npm/docs/output/commands/npm-adduser.html | 4 +-
deps/npm/docs/output/commands/npm-audit.html | 4 +-
deps/npm/docs/output/commands/npm-bugs.html | 4 +-
deps/npm/docs/output/commands/npm-cache.html | 4 +-
deps/npm/docs/output/commands/npm-ci.html | 4 +-
.../docs/output/commands/npm-completion.html | 4 +-
deps/npm/docs/output/commands/npm-config.html | 4 +-
deps/npm/docs/output/commands/npm-dedupe.html | 4 +-
.../docs/output/commands/npm-deprecate.html | 4 +-
deps/npm/docs/output/commands/npm-diff.html | 4 +-
.../docs/output/commands/npm-dist-tag.html | 4 +-
deps/npm/docs/output/commands/npm-docs.html | 4 +-
deps/npm/docs/output/commands/npm-doctor.html | 4 +-
deps/npm/docs/output/commands/npm-edit.html | 4 +-
deps/npm/docs/output/commands/npm-exec.html | 4 +-
.../npm/docs/output/commands/npm-explain.html | 4 +-
.../npm/docs/output/commands/npm-explore.html | 4 +-
.../docs/output/commands/npm-find-dupes.html | 4 +-
deps/npm/docs/output/commands/npm-fund.html | 4 +-
.../docs/output/commands/npm-help-search.html | 4 +-
deps/npm/docs/output/commands/npm-help.html | 4 +-
deps/npm/docs/output/commands/npm-hook.html | 4 +-
deps/npm/docs/output/commands/npm-init.html | 4 +-
.../output/commands/npm-install-ci-test.html | 4 +-
.../output/commands/npm-install-test.html | 4 +-
.../npm/docs/output/commands/npm-install.html | 4 +-
deps/npm/docs/output/commands/npm-link.html | 4 +-
deps/npm/docs/output/commands/npm-login.html | 4 +-
deps/npm/docs/output/commands/npm-logout.html | 4 +-
deps/npm/docs/output/commands/npm-ls.html | 6 +-
deps/npm/docs/output/commands/npm-org.html | 4 +-
.../docs/output/commands/npm-outdated.html | 4 +-
deps/npm/docs/output/commands/npm-owner.html | 4 +-
deps/npm/docs/output/commands/npm-pack.html | 4 +-
deps/npm/docs/output/commands/npm-ping.html | 4 +-
deps/npm/docs/output/commands/npm-pkg.html | 4 +-
deps/npm/docs/output/commands/npm-prefix.html | 4 +-
.../npm/docs/output/commands/npm-profile.html | 4 +-
deps/npm/docs/output/commands/npm-prune.html | 4 +-
.../npm/docs/output/commands/npm-publish.html | 4 +-
deps/npm/docs/output/commands/npm-query.html | 4 +-
.../npm/docs/output/commands/npm-rebuild.html | 4 +-
deps/npm/docs/output/commands/npm-repo.html | 4 +-
.../npm/docs/output/commands/npm-restart.html | 4 +-
deps/npm/docs/output/commands/npm-root.html | 4 +-
.../docs/output/commands/npm-run-script.html | 4 +-
deps/npm/docs/output/commands/npm-sbom.html | 4 +-
deps/npm/docs/output/commands/npm-search.html | 4 +-
.../docs/output/commands/npm-shrinkwrap.html | 4 +-
deps/npm/docs/output/commands/npm-star.html | 4 +-
deps/npm/docs/output/commands/npm-stars.html | 4 +-
deps/npm/docs/output/commands/npm-start.html | 4 +-
deps/npm/docs/output/commands/npm-stop.html | 4 +-
deps/npm/docs/output/commands/npm-team.html | 4 +-
deps/npm/docs/output/commands/npm-test.html | 4 +-
deps/npm/docs/output/commands/npm-token.html | 4 +-
.../docs/output/commands/npm-uninstall.html | 4 +-
.../docs/output/commands/npm-unpublish.html | 4 +-
deps/npm/docs/output/commands/npm-unstar.html | 4 +-
deps/npm/docs/output/commands/npm-update.html | 4 +-
.../npm/docs/output/commands/npm-version.html | 4 +-
deps/npm/docs/output/commands/npm-view.html | 4 +-
deps/npm/docs/output/commands/npm-whoami.html | 4 +-
deps/npm/docs/output/commands/npm.html | 6 +-
deps/npm/docs/output/commands/npx.html | 4 +-
.../docs/output/configuring-npm/folders.html | 4 +-
.../docs/output/configuring-npm/install.html | 4 +-
.../output/configuring-npm/npm-global.html | 4 +-
.../docs/output/configuring-npm/npm-json.html | 4 +-
.../configuring-npm/npm-shrinkwrap-json.html | 4 +-
.../docs/output/configuring-npm/npmrc.html | 4 +-
.../output/configuring-npm/package-json.html | 4 +-
.../configuring-npm/package-lock-json.html | 4 +-
deps/npm/docs/output/using-npm/config.html | 4 +-
.../using-npm/dependency-selectors.html | 4 +-
.../npm/docs/output/using-npm/developers.html | 4 +-
deps/npm/docs/output/using-npm/logging.html | 4 +-
deps/npm/docs/output/using-npm/orgs.html | 4 +-
.../docs/output/using-npm/package-spec.html | 4 +-
deps/npm/docs/output/using-npm/registry.html | 4 +-
deps/npm/docs/output/using-npm/removal.html | 4 +-
deps/npm/docs/output/using-npm/scope.html | 4 +-
deps/npm/docs/output/using-npm/scripts.html | 4 +-
.../npm/docs/output/using-npm/workspaces.html | 4 +-
deps/npm/lib/cli.js | 8 +
deps/npm/man/man1/npm-access.1 | 2 +-
deps/npm/man/man1/npm-adduser.1 | 2 +-
deps/npm/man/man1/npm-audit.1 | 2 +-
deps/npm/man/man1/npm-bugs.1 | 2 +-
deps/npm/man/man1/npm-cache.1 | 2 +-
deps/npm/man/man1/npm-ci.1 | 2 +-
deps/npm/man/man1/npm-completion.1 | 2 +-
deps/npm/man/man1/npm-config.1 | 2 +-
deps/npm/man/man1/npm-dedupe.1 | 2 +-
deps/npm/man/man1/npm-deprecate.1 | 2 +-
deps/npm/man/man1/npm-diff.1 | 2 +-
deps/npm/man/man1/npm-dist-tag.1 | 2 +-
deps/npm/man/man1/npm-docs.1 | 2 +-
deps/npm/man/man1/npm-doctor.1 | 2 +-
deps/npm/man/man1/npm-edit.1 | 2 +-
deps/npm/man/man1/npm-exec.1 | 2 +-
deps/npm/man/man1/npm-explain.1 | 2 +-
deps/npm/man/man1/npm-explore.1 | 2 +-
deps/npm/man/man1/npm-find-dupes.1 | 2 +-
deps/npm/man/man1/npm-fund.1 | 2 +-
deps/npm/man/man1/npm-help-search.1 | 2 +-
deps/npm/man/man1/npm-help.1 | 2 +-
deps/npm/man/man1/npm-hook.1 | 2 +-
deps/npm/man/man1/npm-init.1 | 2 +-
deps/npm/man/man1/npm-install-ci-test.1 | 2 +-
deps/npm/man/man1/npm-install-test.1 | 2 +-
deps/npm/man/man1/npm-install.1 | 2 +-
deps/npm/man/man1/npm-link.1 | 2 +-
deps/npm/man/man1/npm-login.1 | 2 +-
deps/npm/man/man1/npm-logout.1 | 2 +-
deps/npm/man/man1/npm-ls.1 | 4 +-
deps/npm/man/man1/npm-org.1 | 2 +-
deps/npm/man/man1/npm-outdated.1 | 2 +-
deps/npm/man/man1/npm-owner.1 | 2 +-
deps/npm/man/man1/npm-pack.1 | 2 +-
deps/npm/man/man1/npm-ping.1 | 2 +-
deps/npm/man/man1/npm-pkg.1 | 2 +-
deps/npm/man/man1/npm-prefix.1 | 2 +-
deps/npm/man/man1/npm-profile.1 | 2 +-
deps/npm/man/man1/npm-prune.1 | 2 +-
deps/npm/man/man1/npm-publish.1 | 2 +-
deps/npm/man/man1/npm-query.1 | 2 +-
deps/npm/man/man1/npm-rebuild.1 | 2 +-
deps/npm/man/man1/npm-repo.1 | 2 +-
deps/npm/man/man1/npm-restart.1 | 2 +-
deps/npm/man/man1/npm-root.1 | 2 +-
deps/npm/man/man1/npm-run-script.1 | 2 +-
deps/npm/man/man1/npm-sbom.1 | 2 +-
deps/npm/man/man1/npm-search.1 | 2 +-
deps/npm/man/man1/npm-shrinkwrap.1 | 2 +-
deps/npm/man/man1/npm-star.1 | 2 +-
deps/npm/man/man1/npm-stars.1 | 2 +-
deps/npm/man/man1/npm-start.1 | 2 +-
deps/npm/man/man1/npm-stop.1 | 2 +-
deps/npm/man/man1/npm-team.1 | 2 +-
deps/npm/man/man1/npm-test.1 | 2 +-
deps/npm/man/man1/npm-token.1 | 2 +-
deps/npm/man/man1/npm-uninstall.1 | 2 +-
deps/npm/man/man1/npm-unpublish.1 | 2 +-
deps/npm/man/man1/npm-unstar.1 | 2 +-
deps/npm/man/man1/npm-update.1 | 2 +-
deps/npm/man/man1/npm-version.1 | 2 +-
deps/npm/man/man1/npm-view.1 | 2 +-
deps/npm/man/man1/npm-whoami.1 | 2 +-
deps/npm/man/man1/npm.1 | 4 +-
deps/npm/man/man1/npx.1 | 2 +-
deps/npm/man/man5/folders.5 | 2 +-
deps/npm/man/man5/install.5 | 2 +-
deps/npm/man/man5/npm-global.5 | 2 +-
deps/npm/man/man5/npm-json.5 | 2 +-
deps/npm/man/man5/npm-shrinkwrap-json.5 | 2 +-
deps/npm/man/man5/npmrc.5 | 2 +-
deps/npm/man/man5/package-json.5 | 2 +-
deps/npm/man/man5/package-lock-json.5 | 2 +-
deps/npm/man/man7/config.7 | 2 +-
deps/npm/man/man7/dependency-selectors.7 | 2 +-
deps/npm/man/man7/developers.7 | 2 +-
deps/npm/man/man7/logging.7 | 2 +-
deps/npm/man/man7/orgs.7 | 2 +-
deps/npm/man/man7/package-spec.7 | 2 +-
deps/npm/man/man7/registry.7 | 2 +-
deps/npm/man/man7/removal.7 | 2 +-
deps/npm/man/man7/scope.7 | 2 +-
deps/npm/man/man7/scripts.7 | 2 +-
deps/npm/man/man7/workspaces.7 | 2 +-
.../cliui/node_modules/ansi-regex/index.js | 6 +-
.../node_modules/ansi-regex/package.json | 9 +-
.../node_modules/pacote}/LICENSE | 2 +-
.../node_modules/pacote/README.md | 283 +++++++++
.../node_modules/pacote/bin/index.js | 158 +++++
.../node_modules/pacote/lib/dir.js | 105 ++++
.../node_modules/pacote/lib/fetcher.js | 497 +++++++++++++++
.../node_modules/pacote/lib/file.js | 94 +++
.../node_modules/pacote/lib/git.js | 317 ++++++++++
.../node_modules/pacote/lib/index.js | 23 +
.../node_modules/pacote/lib/registry.js | 369 +++++++++++
.../node_modules/pacote/lib/remote.js | 89 +++
.../pacote/lib/util/add-git-sha.js | 15 +
.../node_modules/pacote/lib/util/cache-dir.js | 15 +
.../pacote/lib/util/is-package-bin.js | 25 +
.../node_modules/pacote/lib/util/npm.js | 14 +
.../node_modules/pacote/lib/util/protected.js | 5 +
.../pacote/lib/util/tar-create-options.js | 31 +
.../pacote/lib/util/trailing-slashes.js | 10 +
.../node_modules/pacote/package.json | 79 +++
.../@npmcli/metavuln-calculator/package.json | 4 +-
.../@npmcli/promise-spawn/lib/index.js | 16 +-
.../@npmcli/promise-spawn/package.json | 6 +-
.../node_modules/@npmcli/agent/lib/agents.js | 206 -------
.../node_modules/@npmcli/agent/lib/dns.js | 53 --
.../node_modules/@npmcli/agent/lib/errors.js | 61 --
.../node_modules/@npmcli/agent/lib/index.js | 56 --
.../node_modules/@npmcli/agent/lib/options.js | 86 ---
.../node_modules/@npmcli/agent/lib/proxy.js | 88 ---
.../node_modules/@npmcli/agent/package.json | 60 --
.../sign/node_modules/@npmcli/fs/LICENSE.md | 20 -
.../@npmcli/fs/lib/common/get-options.js | 20 -
.../@npmcli/fs/lib/common/node.js | 9 -
.../node_modules/@npmcli/fs/lib/cp/LICENSE | 15 -
.../node_modules/@npmcli/fs/lib/cp/errors.js | 129 ----
.../node_modules/@npmcli/fs/lib/cp/index.js | 22 -
.../@npmcli/fs/lib/cp/polyfill.js | 428 -------------
.../sign/node_modules/@npmcli/fs/lib/index.js | 13 -
.../node_modules/@npmcli/fs/lib/move-file.js | 78 ---
.../@npmcli/fs/lib/readdir-scoped.js | 20 -
.../@npmcli/fs/lib/with-temp-dir.js | 39 --
.../sign/node_modules/@npmcli/fs/package.json | 52 --
.../sign/node_modules/cacache/LICENSE.md | 16 -
.../node_modules/cacache/lib/content/path.js | 29 -
.../node_modules/cacache/lib/content/read.js | 165 -----
.../node_modules/cacache/lib/content/rm.js | 18 -
.../node_modules/cacache/lib/content/write.js | 206 -------
.../node_modules/cacache/lib/entry-index.js | 336 ----------
.../sign/node_modules/cacache/lib/get.js | 170 -----
.../sign/node_modules/cacache/lib/index.js | 42 --
.../node_modules/cacache/lib/memoization.js | 72 ---
.../sign/node_modules/cacache/lib/put.js | 80 ---
.../sign/node_modules/cacache/lib/rm.js | 31 -
.../node_modules/cacache/lib/util/glob.js | 7 -
.../cacache/lib/util/hash-to-segments.js | 7 -
.../sign/node_modules/cacache/lib/util/tmp.js | 26 -
.../sign/node_modules/cacache/lib/verify.js | 257 --------
.../sign/node_modules/cacache/package.json | 82 ---
.../node_modules/make-fetch-happen/LICENSE | 16 -
.../make-fetch-happen/lib/cache/entry.js | 471 --------------
.../make-fetch-happen/lib/cache/errors.js | 11 -
.../make-fetch-happen/lib/cache/index.js | 49 --
.../make-fetch-happen/lib/cache/key.js | 17 -
.../make-fetch-happen/lib/cache/policy.js | 161 -----
.../make-fetch-happen/lib/fetch.js | 118 ----
.../make-fetch-happen/lib/index.js | 41 --
.../make-fetch-happen/lib/options.js | 54 --
.../make-fetch-happen/lib/pipeline.js | 41 --
.../make-fetch-happen/lib/remote.js | 131 ----
.../make-fetch-happen/package.json | 75 ---
.../sign/node_modules/minipass-fetch/LICENSE | 28 -
.../minipass-fetch/lib/abort-error.js | 17 -
.../node_modules/minipass-fetch/lib/blob.js | 97 ---
.../node_modules/minipass-fetch/lib/body.js | 350 -----------
.../minipass-fetch/lib/fetch-error.js | 32 -
.../minipass-fetch/lib/headers.js | 267 --------
.../node_modules/minipass-fetch/lib/index.js | 377 ------------
.../minipass-fetch/lib/request.js | 282 ---------
.../minipass-fetch/lib/response.js | 90 ---
.../node_modules/minipass-fetch/package.json | 69 ---
.../sign/node_modules/proc-log/LICENSE | 15 -
.../sign/node_modules/proc-log/lib/index.js | 153 -----
.../sign/node_modules/proc-log/package.json | 45 --
.../sign/node_modules/ssri/LICENSE.md | 16 -
.../sign/node_modules/ssri/lib/index.js | 580 ------------------
.../sign/node_modules/ssri/package.json | 65 --
.../sign/node_modules/unique-filename/LICENSE | 5 -
.../node_modules/unique-filename/lib/index.js | 7 -
.../node_modules/unique-filename/package.json | 51 --
.../sign/node_modules/unique-slug/LICENSE | 15 -
.../node_modules/unique-slug/lib/index.js | 11 -
.../node_modules/unique-slug/package.json | 47 --
.../@sigstore/tuf/dist/appdata.js | 3 +-
.../node_modules/@sigstore/tuf/dist/client.js | 1 -
.../node_modules/@sigstore/tuf/dist/index.js | 6 +-
.../node_modules/@sigstore/tuf/dist/target.js | 3 +-
.../node_modules/@sigstore/tuf/package.json | 8 +-
.../npm/node_modules/@sigstore/tuf/seeds.json | 2 +-
deps/npm/node_modules/ci-info/index.js | 80 ++-
deps/npm/node_modules/ci-info/package.json | 28 +-
deps/npm/node_modules/ci-info/vendors.json | 16 +-
.../node_modules/cross-spawn/lib/enoent.js | 2 +-
.../cross-spawn/lib/util/escape.js | 6 +-
.../npm/node_modules/cross-spawn/package.json | 4 +-
.../debug/node_modules/ms/index.js | 162 -----
.../debug/node_modules/ms/license.md | 21 -
.../debug/node_modules/ms/package.json | 37 --
deps/npm/node_modules/debug/package.json | 4 +-
.../node_modules/hosted-git-info/lib/hosts.js | 6 +-
.../node_modules/hosted-git-info/package.json | 6 +-
.../node_modules/libnpmpublish/lib/publish.js | 2 +-
.../node_modules/libnpmpublish/package.json | 4 +-
.../make-fetch-happen/lib/options.js | 7 +-
.../make-fetch-happen/lib/remote.js | 3 +-
.../node_modules/negotiator/HISTORY.md | 114 ++++
.../node_modules/negotiator/LICENSE | 24 +
.../node_modules/negotiator/index.js | 83 +++
.../node_modules/negotiator/lib/charset.js | 169 +++++
.../node_modules/negotiator/lib/encoding.js | 205 +++++++
.../node_modules/negotiator/lib/language.js | 179 ++++++
.../node_modules/negotiator/lib/mediaType.js | 294 +++++++++
.../node_modules/negotiator/package.json | 43 ++
.../make-fetch-happen/package.json | 8 +-
deps/npm/node_modules/negotiator/HISTORY.md | 5 +
deps/npm/node_modules/negotiator/index.js | 8 +-
.../node_modules/negotiator/lib/encoding.js | 31 +-
.../node_modules/negotiator/lib/mediaType.js | 6 +-
deps/npm/node_modules/negotiator/package.json | 2 +-
.../npm-install-checks/lib/current-env.js | 36 +-
.../npm-install-checks/package.json | 6 +-
.../npm-registry-fetch/lib/check-response.js | 16 +-
.../npm-registry-fetch/package.json | 8 +-
.../dist/commonjs/index.js | 12 +-
.../package-json-from-dist/dist/esm/index.js | 12 +-
.../package-json-from-dist/package.json | 4 +-
deps/npm/node_modules/pacote/lib/dir.js | 2 +
deps/npm/node_modules/pacote/lib/fetcher.js | 10 +-
deps/npm/node_modules/pacote/package.json | 4 +-
.../promise-call-limit/dist/commonjs/index.js | 4 +-
.../promise-call-limit/dist/esm/index.js | 4 +-
.../promise-call-limit/package.json | 18 +-
deps/npm/node_modules/sigstore/dist/config.js | 14 +-
.../node_modules/sigstore/dist/sigstore.js | 9 +-
.../node_modules}/@sigstore/bundle/LICENSE | 0
.../@sigstore/bundle/dist/build.js | 31 +-
.../@sigstore/bundle/dist/bundle.js | 10 +-
.../@sigstore/bundle/dist/error.js | 0
.../@sigstore/bundle/dist/index.js | 0
.../@sigstore/bundle/dist/serialized.js | 0
.../@sigstore/bundle/dist/utility.js | 0
.../@sigstore/bundle/dist/validate.js | 12 +-
.../@sigstore/bundle/package.json | 4 +-
.../node_modules}/@sigstore/core/LICENSE | 0
.../@sigstore/core/dist/asn1/error.js | 0
.../@sigstore/core/dist/asn1/index.js | 0
.../@sigstore/core/dist/asn1/length.js | 5 +-
.../@sigstore/core/dist/asn1/obj.js | 0
.../@sigstore/core/dist/asn1/parse.js | 13 +-
.../@sigstore/core/dist/asn1/tag.js | 0
.../@sigstore/core/dist/crypto.js | 19 +-
.../node_modules}/@sigstore/core/dist/dsse.js | 3 +-
.../@sigstore/core/dist/encoding.js | 5 +-
.../@sigstore/core/dist/index.js | 0
.../node_modules}/@sigstore/core/dist/json.js | 3 +-
.../node_modules}/@sigstore/core/dist/oid.js | 0
.../node_modules}/@sigstore/core/dist/pem.js | 5 +-
.../@sigstore/core/dist/rfc3161/error.js | 0
.../@sigstore/core/dist/rfc3161/index.js | 0
.../@sigstore/core/dist/rfc3161/timestamp.js | 0
.../@sigstore/core/dist/rfc3161/tstinfo.js | 0
.../@sigstore/core/dist/stream.js | 0
.../@sigstore/core/dist/x509/cert.js | 10 +-
.../@sigstore/core/dist/x509/ext.js | 0
.../@sigstore/core/dist/x509/index.js | 0
.../@sigstore/core/dist/x509/sct.js | 0
.../node_modules}/@sigstore/core/package.json | 4 +-
.../node_modules}/@sigstore/sign/LICENSE | 0
.../@sigstore/sign/dist/bundler/base.js | 0
.../@sigstore/sign/dist/bundler/bundle.js | 12 +-
.../@sigstore/sign/dist/bundler/dsse.js | 4 +-
.../@sigstore/sign/dist/bundler/index.js | 0
.../@sigstore/sign/dist/bundler/message.js | 0
.../@sigstore/sign/dist/error.js | 4 +-
.../@sigstore/sign/dist/external/error.js | 0
.../@sigstore/sign/dist/external/fetch.js | 7 +-
.../@sigstore/sign/dist/external/fulcio.js | 0
.../@sigstore/sign/dist/external/rekor.js | 0
.../@sigstore/sign/dist/external/tsa.js | 0
.../@sigstore/sign/dist/identity/ci.js | 0
.../@sigstore/sign/dist/identity/index.js | 0
.../@sigstore/sign/dist/identity/provider.js | 0
.../@sigstore/sign/dist/index.js | 0
.../@sigstore/sign/dist/signer/fulcio/ca.js | 1 -
.../sign/dist/signer/fulcio/ephemeral.js | 0
.../sign/dist/signer/fulcio/index.js | 0
.../@sigstore/sign/dist/signer/index.js | 0
.../@sigstore/sign/dist/signer/signer.js | 0
.../@sigstore/sign/dist/types/fetch.js | 0
.../@sigstore/sign/dist/util/index.js | 0
.../@sigstore/sign/dist/util/oidc.js | 3 +-
.../@sigstore/sign/dist/util/ua.js | 1 -
.../@sigstore/sign/dist/witness/index.js | 0
.../sign/dist/witness/tlog/client.js | 0
.../@sigstore/sign/dist/witness/tlog/entry.js | 28 +-
.../@sigstore/sign/dist/witness/tlog/index.js | 0
.../@sigstore/sign/dist/witness/tsa/client.js | 7 +-
.../@sigstore/sign/dist/witness/tsa/index.js | 0
.../@sigstore/sign/dist/witness/witness.js | 0
.../node_modules}/@sigstore/sign/package.json | 16 +-
.../@sigstore/verify/dist/bundle/dsse.js | 2 +-
.../@sigstore/verify/dist/bundle/index.js | 5 +-
.../@sigstore/verify/dist/bundle/message.js | 0
.../@sigstore/verify/dist/error.js | 0
.../@sigstore/verify/dist/index.js | 0
.../@sigstore/verify/dist/key/certificate.js | 4 +-
.../@sigstore/verify/dist/key/index.js | 6 +-
.../@sigstore/verify/dist/key/sct.js | 5 +-
.../@sigstore/verify/dist/policy.js | 5 +-
.../@sigstore/verify/dist/shared.types.js | 0
.../verify/dist/timestamp/checkpoint.js | 3 +-
.../@sigstore/verify/dist/timestamp/index.js | 5 +-
.../@sigstore/verify/dist/timestamp/merkle.js | 7 +-
.../@sigstore/verify/dist/timestamp/set.js | 3 +-
.../@sigstore/verify/dist/timestamp/tsa.js | 3 +-
.../@sigstore/verify/dist/tlog/dsse.js | 3 +-
.../verify/dist/tlog/hashedrekord.js | 3 +-
.../@sigstore/verify/dist/tlog/index.js | 3 +-
.../@sigstore/verify/dist/tlog/intoto.js | 3 +-
.../@sigstore/verify/dist/trust/filter.js | 5 +-
.../@sigstore/verify/dist/trust/index.js | 6 +-
.../verify/dist/trust/trust.types.js | 0
.../@sigstore/verify/dist/verifier.js | 0
.../@sigstore/verify/package.json | 8 +-
deps/npm/node_modules/sigstore/package.json | 20 +-
.../spdx-license-ids/deprecated.json | 1 +
.../node_modules/spdx-license-ids/index.json | 8 +-
.../spdx-license-ids/package.json | 2 +-
deps/npm/node_modules/tuf-js/dist/config.js | 2 +-
deps/npm/node_modules/tuf-js/dist/updater.js | 19 +-
.../npm/node_modules/tuf-js/dist/utils/url.js | 3 +-
.../node_modules/@npmcli/agent/lib/agents.js | 206 -------
.../node_modules/@npmcli/agent/lib/dns.js | 53 --
.../node_modules/@npmcli/agent/lib/errors.js | 61 --
.../node_modules/@npmcli/agent/lib/index.js | 56 --
.../node_modules/@npmcli/agent/lib/options.js | 86 ---
.../node_modules/@npmcli/agent/lib/proxy.js | 88 ---
.../node_modules/@npmcli/agent/package.json | 60 --
.../tuf-js/node_modules/@npmcli/fs/LICENSE.md | 20 -
.../@npmcli/fs/lib/common/get-options.js | 20 -
.../@npmcli/fs/lib/common/node.js | 9 -
.../node_modules/@npmcli/fs/lib/cp/LICENSE | 15 -
.../node_modules/@npmcli/fs/lib/cp/errors.js | 129 ----
.../node_modules/@npmcli/fs/lib/cp/index.js | 22 -
.../@npmcli/fs/lib/cp/polyfill.js | 428 -------------
.../node_modules/@npmcli/fs/lib/index.js | 13 -
.../node_modules/@npmcli/fs/lib/move-file.js | 78 ---
.../@npmcli/fs/lib/readdir-scoped.js | 20 -
.../@npmcli/fs/lib/with-temp-dir.js | 39 --
.../node_modules/@npmcli/fs/package.json | 52 --
.../node_modules}/@tufjs/models/LICENSE | 0
.../node_modules}/@tufjs/models/dist/base.js | 23 +-
.../@tufjs/models/dist/delegations.js | 0
.../node_modules}/@tufjs/models/dist/error.js | 0
.../node_modules}/@tufjs/models/dist/file.js | 0
.../node_modules}/@tufjs/models/dist/index.js | 0
.../node_modules}/@tufjs/models/dist/key.js | 0
.../@tufjs/models/dist/metadata.js | 22 +-
.../node_modules}/@tufjs/models/dist/role.js | 0
.../node_modules}/@tufjs/models/dist/root.js | 0
.../@tufjs/models/dist/signature.js | 0
.../@tufjs/models/dist/snapshot.js | 0
.../@tufjs/models/dist/targets.js | 0
.../@tufjs/models/dist/timestamp.js | 0
.../@tufjs/models/dist/utils/guard.js | 13 +-
.../@tufjs/models/dist/utils/index.js | 0
.../@tufjs/models/dist/utils/key.js | 3 +-
.../@tufjs/models/dist/utils/oid.js | 3 +-
.../@tufjs/models/dist/utils/types.js | 0
.../@tufjs/models/dist/utils/verify.js | 0
.../node_modules}/@tufjs/models/package.json | 6 +-
.../tuf-js/node_modules/cacache/LICENSE.md | 16 -
.../node_modules/cacache/lib/content/path.js | 29 -
.../node_modules/cacache/lib/content/read.js | 165 -----
.../node_modules/cacache/lib/content/rm.js | 18 -
.../node_modules/cacache/lib/content/write.js | 206 -------
.../node_modules/cacache/lib/entry-index.js | 336 ----------
.../tuf-js/node_modules/cacache/lib/get.js | 170 -----
.../tuf-js/node_modules/cacache/lib/index.js | 42 --
.../node_modules/cacache/lib/memoization.js | 72 ---
.../tuf-js/node_modules/cacache/lib/put.js | 80 ---
.../tuf-js/node_modules/cacache/lib/rm.js | 31 -
.../node_modules/cacache/lib/util/glob.js | 7 -
.../cacache/lib/util/hash-to-segments.js | 7 -
.../node_modules/cacache/lib/util/tmp.js | 26 -
.../tuf-js/node_modules/cacache/lib/verify.js | 257 --------
.../tuf-js/node_modules/cacache/package.json | 82 ---
.../node_modules/make-fetch-happen/LICENSE | 16 -
.../make-fetch-happen/lib/cache/entry.js | 471 --------------
.../make-fetch-happen/lib/cache/errors.js | 11 -
.../make-fetch-happen/lib/cache/index.js | 49 --
.../make-fetch-happen/lib/cache/key.js | 17 -
.../make-fetch-happen/lib/cache/policy.js | 161 -----
.../make-fetch-happen/lib/fetch.js | 118 ----
.../make-fetch-happen/lib/index.js | 41 --
.../make-fetch-happen/lib/options.js | 54 --
.../make-fetch-happen/lib/pipeline.js | 41 --
.../make-fetch-happen/lib/remote.js | 131 ----
.../make-fetch-happen/package.json | 75 ---
.../node_modules/minipass-fetch/LICENSE | 28 -
.../minipass-fetch/lib/abort-error.js | 17 -
.../node_modules/minipass-fetch/lib/blob.js | 97 ---
.../node_modules/minipass-fetch/lib/body.js | 350 -----------
.../minipass-fetch/lib/fetch-error.js | 32 -
.../minipass-fetch/lib/headers.js | 267 --------
.../node_modules/minipass-fetch/lib/index.js | 377 ------------
.../minipass-fetch/lib/request.js | 282 ---------
.../minipass-fetch/lib/response.js | 90 ---
.../node_modules/minipass-fetch/package.json | 69 ---
.../tuf-js/node_modules/proc-log/lib/index.js | 153 -----
.../tuf-js/node_modules/proc-log/package.json | 45 --
.../tuf-js/node_modules/ssri/LICENSE.md | 16 -
.../tuf-js/node_modules/ssri/lib/index.js | 580 ------------------
.../tuf-js/node_modules/ssri/package.json | 65 --
.../node_modules/unique-filename/LICENSE | 5 -
.../node_modules/unique-filename/lib/index.js | 7 -
.../node_modules/unique-filename/package.json | 51 --
.../tuf-js/node_modules/unique-slug/LICENSE | 15 -
.../node_modules/unique-slug/lib/index.js | 11 -
.../node_modules/unique-slug/package.json | 47 --
deps/npm/node_modules/tuf-js/package.json | 14 +-
.../node_modules/ansi-regex/index.js | 6 +-
.../node_modules/ansi-regex/package.json | 9 +-
deps/npm/package.json | 20 +-
506 files changed, 4044 insertions(+), 14450 deletions(-)
rename deps/npm/node_modules/{tuf-js/node_modules/proc-log => @npmcli/metavuln-calculator/node_modules/pacote}/LICENSE (90%)
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/README.md
create mode 100755 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/bin/index.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/protected.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js
create mode 100644 deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/cacache/package.json
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/fetch-error.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/package.json
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/ssri/package.json
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/package.json
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js
delete mode 100644 deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/package.json
delete mode 100644 deps/npm/node_modules/debug/node_modules/ms/index.js
delete mode 100644 deps/npm/node_modules/debug/node_modules/ms/license.md
delete mode 100644 deps/npm/node_modules/debug/node_modules/ms/package.json
create mode 100644 deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md
create mode 100644 deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/LICENSE
create mode 100644 deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/index.js
create mode 100644 deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js
create mode 100644 deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js
create mode 100644 deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js
create mode 100644 deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js
create mode 100644 deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/package.json
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/bundle/LICENSE (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/bundle/dist/build.js (87%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/bundle/dist/bundle.js (79%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/bundle/dist/error.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/bundle/dist/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/bundle/dist/serialized.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/bundle/dist/utility.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/bundle/dist/validate.js (98%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/bundle/package.json (92%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/LICENSE (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/asn1/error.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/asn1/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/asn1/length.js (97%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/asn1/obj.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/asn1/parse.js (96%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/asn1/tag.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/crypto.js (83%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/dsse.js (96%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/encoding.js (94%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/json.js (98%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/oid.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/pem.js (97%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/rfc3161/error.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/rfc3161/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/rfc3161/timestamp.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/rfc3161/tstinfo.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/stream.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/x509/cert.js (96%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/x509/ext.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/x509/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/dist/x509/sct.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/core/package.json (92%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/LICENSE (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/bundler/base.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/bundler/bundle.js (93%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/bundler/dsse.js (93%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/bundler/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/bundler/message.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/error.js (95%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/external/error.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/external/fetch.js (95%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/external/fulcio.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/external/rekor.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/external/tsa.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/identity/ci.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/identity/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/identity/provider.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/signer/fulcio/ca.js (96%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/signer/fulcio/ephemeral.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/signer/fulcio/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/signer/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/signer/signer.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/types/fetch.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/util/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/util/oidc.js (96%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/util/ua.js (95%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/witness/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/witness/tlog/client.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/witness/tlog/entry.js (87%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/witness/tlog/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/witness/tsa/client.js (86%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/witness/tsa/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/dist/witness/witness.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/sign/package.json (78%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/bundle/dsse.js (93%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/bundle/index.js (97%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/bundle/message.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/error.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/index.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/key/certificate.js (99%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/key/index.js (97%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/key/sct.js (97%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/policy.js (93%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/shared.types.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/timestamp/checkpoint.js (99%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/timestamp/index.js (96%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/timestamp/merkle.js (95%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/timestamp/set.js (98%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/timestamp/tsa.js (98%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/tlog/dsse.js (98%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/tlog/hashedrekord.js (97%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/tlog/index.js (98%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/tlog/intoto.js (98%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/trust/filter.js (93%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/trust/index.js (95%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/trust/trust.types.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/dist/verifier.js (100%)
rename deps/npm/node_modules/{ => sigstore/node_modules}/@sigstore/verify/package.json (86%)
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/package.json
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/package.json
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/LICENSE (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/base.js (80%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/delegations.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/error.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/file.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/index.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/key.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/metadata.js (91%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/role.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/root.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/signature.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/snapshot.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/targets.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/timestamp.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/utils/guard.js (88%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/utils/index.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/utils/key.js (99%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/utils/oid.js (96%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/utils/types.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/utils/verify.js (100%)
rename deps/npm/node_modules/{ => tuf-js/node_modules}/@tufjs/models/package.json (90%)
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/LICENSE.md
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/path.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/read.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/write.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/get.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/index.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/memoization.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/put.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/rm.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/lib/verify.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/cacache/package.json
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/package.json
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/fetch-error.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/package.json
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/proc-log/lib/index.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/proc-log/package.json
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/ssri/LICENSE.md
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/ssri/lib/index.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/ssri/package.json
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/unique-filename/LICENSE
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/unique-filename/lib/index.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/unique-filename/package.json
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/unique-slug/LICENSE
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/unique-slug/lib/index.js
delete mode 100644 deps/npm/node_modules/tuf-js/node_modules/unique-slug/package.json
diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md
index 3abedf9cd8e4e6..2dbb408ac77e45 100644
--- a/deps/npm/docs/content/commands/npm-ls.md
+++ b/deps/npm/docs/content/commands/npm-ls.md
@@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages. For
example, running `npm ls promzard` in npm's source tree will show:
```bash
-npm@10.9.0 /path/to/npm
+npm@10.9.1 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
```
diff --git a/deps/npm/docs/content/commands/npm.md b/deps/npm/docs/content/commands/npm.md
index d92892ec544eb9..d5a92b439c6020 100644
--- a/deps/npm/docs/content/commands/npm.md
+++ b/deps/npm/docs/content/commands/npm.md
@@ -14,7 +14,7 @@ Note: This command is unaware of workspaces.
### Version
-10.9.0
+10.9.1
### Description
diff --git a/deps/npm/docs/output/commands/npm-access.html b/deps/npm/docs/output/commands/npm-access.html
index 0a839663fcb016..c965b4703b57c8 100644
--- a/deps/npm/docs/output/commands/npm-access.html
+++ b/deps/npm/docs/output/commands/npm-access.html
@@ -141,9 +141,9 @@
npm-access
- @10.9.0
+ @10.9.1
Set access level on published packages
diff --git a/deps/npm/docs/output/commands/npm-adduser.html b/deps/npm/docs/output/commands/npm-adduser.html
index 2ac615ad13978d..425b0d5d23a3f5 100644
--- a/deps/npm/docs/output/commands/npm-adduser.html
+++ b/deps/npm/docs/output/commands/npm-adduser.html
@@ -141,9 +141,9 @@
npm-adduser
- @10.9.0
+ @10.9.1
Add a registry user account
diff --git a/deps/npm/docs/output/commands/npm-audit.html b/deps/npm/docs/output/commands/npm-audit.html
index d7374ccfa66089..be436a29cbc817 100644
--- a/deps/npm/docs/output/commands/npm-audit.html
+++ b/deps/npm/docs/output/commands/npm-audit.html
@@ -141,9 +141,9 @@
npm-audit
- @10.9.0
+ @10.9.1
Run a security audit
diff --git a/deps/npm/docs/output/commands/npm-bugs.html b/deps/npm/docs/output/commands/npm-bugs.html
index 9186bc100ae8f6..48d109440deff5 100644
--- a/deps/npm/docs/output/commands/npm-bugs.html
+++ b/deps/npm/docs/output/commands/npm-bugs.html
@@ -141,9 +141,9 @@
npm-bugs
- @10.9.0
+ @10.9.1
Report bugs for a package in a web browser
diff --git a/deps/npm/docs/output/commands/npm-cache.html b/deps/npm/docs/output/commands/npm-cache.html
index 55cec217dbdfb7..a82e9a8e8a0d63 100644
--- a/deps/npm/docs/output/commands/npm-cache.html
+++ b/deps/npm/docs/output/commands/npm-cache.html
@@ -141,9 +141,9 @@
npm-cache
- @10.9.0
+ @10.9.1
Manipulates packages cache
diff --git a/deps/npm/docs/output/commands/npm-ci.html b/deps/npm/docs/output/commands/npm-ci.html
index efe858777565a9..64d5d130273724 100644
--- a/deps/npm/docs/output/commands/npm-ci.html
+++ b/deps/npm/docs/output/commands/npm-ci.html
@@ -141,9 +141,9 @@
npm-ci
- @10.9.0
+ @10.9.1
Clean install a project
diff --git a/deps/npm/docs/output/commands/npm-completion.html b/deps/npm/docs/output/commands/npm-completion.html
index 4a91a94498f33b..776720e250b2be 100644
--- a/deps/npm/docs/output/commands/npm-completion.html
+++ b/deps/npm/docs/output/commands/npm-completion.html
@@ -141,9 +141,9 @@
npm-completion
- @10.9.0
+ @10.9.1
Tab Completion for npm
diff --git a/deps/npm/docs/output/commands/npm-config.html b/deps/npm/docs/output/commands/npm-config.html
index d18998fea8471d..074a2eb2c8606b 100644
--- a/deps/npm/docs/output/commands/npm-config.html
+++ b/deps/npm/docs/output/commands/npm-config.html
@@ -141,9 +141,9 @@
npm-config
- @10.9.0
+ @10.9.1
Manage the npm configuration files
diff --git a/deps/npm/docs/output/commands/npm-dedupe.html b/deps/npm/docs/output/commands/npm-dedupe.html
index 194ea085383df3..a21182385e4e27 100644
--- a/deps/npm/docs/output/commands/npm-dedupe.html
+++ b/deps/npm/docs/output/commands/npm-dedupe.html
@@ -141,9 +141,9 @@
npm-dedupe
- @10.9.0
+ @10.9.1
Reduce duplication in the package tree
diff --git a/deps/npm/docs/output/commands/npm-deprecate.html b/deps/npm/docs/output/commands/npm-deprecate.html
index ae40adfbbab051..f690a38413e51c 100644
--- a/deps/npm/docs/output/commands/npm-deprecate.html
+++ b/deps/npm/docs/output/commands/npm-deprecate.html
@@ -141,9 +141,9 @@
npm-deprecate
- @10.9.0
+ @10.9.1
Deprecate a version of a package
diff --git a/deps/npm/docs/output/commands/npm-diff.html b/deps/npm/docs/output/commands/npm-diff.html
index 257b1c21572083..8f3de9df3769b2 100644
--- a/deps/npm/docs/output/commands/npm-diff.html
+++ b/deps/npm/docs/output/commands/npm-diff.html
@@ -141,9 +141,9 @@
npm-diff
- @10.9.0
+ @10.9.1
The registry diff command
diff --git a/deps/npm/docs/output/commands/npm-dist-tag.html b/deps/npm/docs/output/commands/npm-dist-tag.html
index 08e7770a0c1745..8c9cc759da6cea 100644
--- a/deps/npm/docs/output/commands/npm-dist-tag.html
+++ b/deps/npm/docs/output/commands/npm-dist-tag.html
@@ -141,9 +141,9 @@
npm-dist-tag
- @10.9.0
+ @10.9.1
Modify package distribution tags
diff --git a/deps/npm/docs/output/commands/npm-docs.html b/deps/npm/docs/output/commands/npm-docs.html
index 8b647251f36740..f187ddef642bcb 100644
--- a/deps/npm/docs/output/commands/npm-docs.html
+++ b/deps/npm/docs/output/commands/npm-docs.html
@@ -141,9 +141,9 @@
npm-docs
- @10.9.0
+ @10.9.1
Open documentation for a package in a web browser
diff --git a/deps/npm/docs/output/commands/npm-doctor.html b/deps/npm/docs/output/commands/npm-doctor.html
index 110ec2f9b20bf0..ca25439e6d7c20 100644
--- a/deps/npm/docs/output/commands/npm-doctor.html
+++ b/deps/npm/docs/output/commands/npm-doctor.html
@@ -141,9 +141,9 @@
npm-doctor
- @10.9.0
+ @10.9.1
Check the health of your npm environment
diff --git a/deps/npm/docs/output/commands/npm-edit.html b/deps/npm/docs/output/commands/npm-edit.html
index 9e4f7b361a5cb6..350b438b1230e5 100644
--- a/deps/npm/docs/output/commands/npm-edit.html
+++ b/deps/npm/docs/output/commands/npm-edit.html
@@ -141,9 +141,9 @@
npm-edit
- @10.9.0
+ @10.9.1
Edit an installed package
diff --git a/deps/npm/docs/output/commands/npm-exec.html b/deps/npm/docs/output/commands/npm-exec.html
index 695fa35ab825c2..500ba9cbc77b2f 100644
--- a/deps/npm/docs/output/commands/npm-exec.html
+++ b/deps/npm/docs/output/commands/npm-exec.html
@@ -141,9 +141,9 @@
-
+
npm-exec
- @10.9.0
+ @10.9.1
Run a command from a local or remote npm package
diff --git a/deps/npm/docs/output/commands/npm-explain.html b/deps/npm/docs/output/commands/npm-explain.html
index e79255ff5fa6c6..0fa690ab34f280 100644
--- a/deps/npm/docs/output/commands/npm-explain.html
+++ b/deps/npm/docs/output/commands/npm-explain.html
@@ -141,9 +141,9 @@
-
+
npm-explain
- @10.9.0
+ @10.9.1
Explain installed packages
diff --git a/deps/npm/docs/output/commands/npm-explore.html b/deps/npm/docs/output/commands/npm-explore.html
index e296f4146aff98..93e2f9a837c91b 100644
--- a/deps/npm/docs/output/commands/npm-explore.html
+++ b/deps/npm/docs/output/commands/npm-explore.html
@@ -141,9 +141,9 @@
-
+
npm-explore
- @10.9.0
+ @10.9.1
Browse an installed package
diff --git a/deps/npm/docs/output/commands/npm-find-dupes.html b/deps/npm/docs/output/commands/npm-find-dupes.html
index a8c914a0dd3d44..bca82e42914386 100644
--- a/deps/npm/docs/output/commands/npm-find-dupes.html
+++ b/deps/npm/docs/output/commands/npm-find-dupes.html
@@ -141,9 +141,9 @@
-
+
npm-find-dupes
- @10.9.0
+ @10.9.1
Find duplication in the package tree
diff --git a/deps/npm/docs/output/commands/npm-fund.html b/deps/npm/docs/output/commands/npm-fund.html
index 36a63253439b60..64b3b9839647b9 100644
--- a/deps/npm/docs/output/commands/npm-fund.html
+++ b/deps/npm/docs/output/commands/npm-fund.html
@@ -141,9 +141,9 @@
-
+
npm-fund
- @10.9.0
+ @10.9.1
Retrieve funding information
diff --git a/deps/npm/docs/output/commands/npm-help-search.html b/deps/npm/docs/output/commands/npm-help-search.html
index 76dea45d852e75..d0ff1a4c555502 100644
--- a/deps/npm/docs/output/commands/npm-help-search.html
+++ b/deps/npm/docs/output/commands/npm-help-search.html
@@ -141,9 +141,9 @@
-
+
npm-help-search
- @10.9.0
+ @10.9.1
Search npm help documentation
diff --git a/deps/npm/docs/output/commands/npm-help.html b/deps/npm/docs/output/commands/npm-help.html
index e6b14af2f9ec6c..1a773e36f31d31 100644
--- a/deps/npm/docs/output/commands/npm-help.html
+++ b/deps/npm/docs/output/commands/npm-help.html
@@ -141,9 +141,9 @@
-
+
npm-help
- @10.9.0
+ @10.9.1
Get help on npm
diff --git a/deps/npm/docs/output/commands/npm-hook.html b/deps/npm/docs/output/commands/npm-hook.html
index 393700a9a7165a..6688593a06533e 100644
--- a/deps/npm/docs/output/commands/npm-hook.html
+++ b/deps/npm/docs/output/commands/npm-hook.html
@@ -141,9 +141,9 @@
-
+
npm-hook
- @10.9.0
+ @10.9.1
Manage registry hooks
diff --git a/deps/npm/docs/output/commands/npm-init.html b/deps/npm/docs/output/commands/npm-init.html
index 8ff01b2f7a76d0..3260f015ec7aa6 100644
--- a/deps/npm/docs/output/commands/npm-init.html
+++ b/deps/npm/docs/output/commands/npm-init.html
@@ -141,9 +141,9 @@
-
+
npm-init
- @10.9.0
+ @10.9.1
Create a package.json file
diff --git a/deps/npm/docs/output/commands/npm-install-ci-test.html b/deps/npm/docs/output/commands/npm-install-ci-test.html
index b0f9d237ed8e98..a7cdbbf0213afc 100644
--- a/deps/npm/docs/output/commands/npm-install-ci-test.html
+++ b/deps/npm/docs/output/commands/npm-install-ci-test.html
@@ -141,9 +141,9 @@
-
+
npm-install-ci-test
- @10.9.0
+ @10.9.1
Install a project with a clean slate and run tests
diff --git a/deps/npm/docs/output/commands/npm-install-test.html b/deps/npm/docs/output/commands/npm-install-test.html
index b0fbd63887fff5..972e0c4b9b55f9 100644
--- a/deps/npm/docs/output/commands/npm-install-test.html
+++ b/deps/npm/docs/output/commands/npm-install-test.html
@@ -141,9 +141,9 @@
-
+
npm-install-test
- @10.9.0
+ @10.9.1
Install package(s) and run tests
diff --git a/deps/npm/docs/output/commands/npm-install.html b/deps/npm/docs/output/commands/npm-install.html
index fa57e02eaf9ad5..685f6bff0fcfe3 100644
--- a/deps/npm/docs/output/commands/npm-install.html
+++ b/deps/npm/docs/output/commands/npm-install.html
@@ -141,9 +141,9 @@
-
+
npm-install
- @10.9.0
+ @10.9.1
Install a package
diff --git a/deps/npm/docs/output/commands/npm-link.html b/deps/npm/docs/output/commands/npm-link.html
index 4e461ebefafd42..11b7ad9ec3c23a 100644
--- a/deps/npm/docs/output/commands/npm-link.html
+++ b/deps/npm/docs/output/commands/npm-link.html
@@ -141,9 +141,9 @@
-
+
npm-link
- @10.9.0
+ @10.9.1
Symlink a package folder
diff --git a/deps/npm/docs/output/commands/npm-login.html b/deps/npm/docs/output/commands/npm-login.html
index 9c1584ca36bc41..e9f1e0b08bb1d5 100644
--- a/deps/npm/docs/output/commands/npm-login.html
+++ b/deps/npm/docs/output/commands/npm-login.html
@@ -141,9 +141,9 @@
-
+
npm-login
- @10.9.0
+ @10.9.1
Login to a registry user account
diff --git a/deps/npm/docs/output/commands/npm-logout.html b/deps/npm/docs/output/commands/npm-logout.html
index 8908b329395254..d24c73d9a09f25 100644
--- a/deps/npm/docs/output/commands/npm-logout.html
+++ b/deps/npm/docs/output/commands/npm-logout.html
@@ -141,9 +141,9 @@
-
+
npm-logout
- @10.9.0
+ @10.9.1
Log out of the registry
diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html
index 2615a492a75e38..bdbd451e2794ab 100644
--- a/deps/npm/docs/output/commands/npm-ls.html
+++ b/deps/npm/docs/output/commands/npm-ls.html
@@ -141,9 +141,9 @@
-
+
npm-ls
- @10.9.0
+ @10.9.1
List installed packages
@@ -168,7 +168,7 @@ Description
the results to only the paths to the packages named. Note that nested
packages will also show the paths to the specified packages. For
example, running npm ls promzard in npm's source tree will show:
-npm@10.9.0 /path/to/npm
+npm@10.9.1 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
diff --git a/deps/npm/docs/output/commands/npm-org.html b/deps/npm/docs/output/commands/npm-org.html
index 66d823f7f3c2de..4cbb0828d998ba 100644
--- a/deps/npm/docs/output/commands/npm-org.html
+++ b/deps/npm/docs/output/commands/npm-org.html
@@ -141,9 +141,9 @@
-
+
npm-org
- @10.9.0
+ @10.9.1
Manage orgs
diff --git a/deps/npm/docs/output/commands/npm-outdated.html b/deps/npm/docs/output/commands/npm-outdated.html
index 6002ff1818da2c..0611cc7d46e5e3 100644
--- a/deps/npm/docs/output/commands/npm-outdated.html
+++ b/deps/npm/docs/output/commands/npm-outdated.html
@@ -141,9 +141,9 @@
-
+
npm-outdated
- @10.9.0
+ @10.9.1
Check for outdated packages
diff --git a/deps/npm/docs/output/commands/npm-owner.html b/deps/npm/docs/output/commands/npm-owner.html
index e8b7a05274aa91..9c674c67479b27 100644
--- a/deps/npm/docs/output/commands/npm-owner.html
+++ b/deps/npm/docs/output/commands/npm-owner.html
@@ -141,9 +141,9 @@
-
+
npm-owner
- @10.9.0
+ @10.9.1
Manage package owners
diff --git a/deps/npm/docs/output/commands/npm-pack.html b/deps/npm/docs/output/commands/npm-pack.html
index 596ba9a35e3ac5..5e9f1fc13fa0e1 100644
--- a/deps/npm/docs/output/commands/npm-pack.html
+++ b/deps/npm/docs/output/commands/npm-pack.html
@@ -141,9 +141,9 @@
-
+
npm-pack
- @10.9.0
+ @10.9.1
Create a tarball from a package
diff --git a/deps/npm/docs/output/commands/npm-ping.html b/deps/npm/docs/output/commands/npm-ping.html
index de393dbd0b5d71..198d820b58c3ba 100644
--- a/deps/npm/docs/output/commands/npm-ping.html
+++ b/deps/npm/docs/output/commands/npm-ping.html
@@ -141,9 +141,9 @@
-
+
npm-ping
- @10.9.0
+ @10.9.1
Ping npm registry
diff --git a/deps/npm/docs/output/commands/npm-pkg.html b/deps/npm/docs/output/commands/npm-pkg.html
index ffb153cc440ce4..0a4fcd384f55fb 100644
--- a/deps/npm/docs/output/commands/npm-pkg.html
+++ b/deps/npm/docs/output/commands/npm-pkg.html
@@ -141,9 +141,9 @@
-
+
npm-pkg
- @10.9.0
+ @10.9.1
Manages your package.json
diff --git a/deps/npm/docs/output/commands/npm-prefix.html b/deps/npm/docs/output/commands/npm-prefix.html
index da63644f2df42a..62651d94535ee6 100644
--- a/deps/npm/docs/output/commands/npm-prefix.html
+++ b/deps/npm/docs/output/commands/npm-prefix.html
@@ -141,9 +141,9 @@
-
+
npm-prefix
- @10.9.0
+ @10.9.1
Display prefix
diff --git a/deps/npm/docs/output/commands/npm-profile.html b/deps/npm/docs/output/commands/npm-profile.html
index c3679196a85bc4..ff616498acb136 100644
--- a/deps/npm/docs/output/commands/npm-profile.html
+++ b/deps/npm/docs/output/commands/npm-profile.html
@@ -141,9 +141,9 @@
-
+
npm-profile
- @10.9.0
+ @10.9.1
Change settings on your registry profile
diff --git a/deps/npm/docs/output/commands/npm-prune.html b/deps/npm/docs/output/commands/npm-prune.html
index 4e844ab9f02cbb..0bf046badbd199 100644
--- a/deps/npm/docs/output/commands/npm-prune.html
+++ b/deps/npm/docs/output/commands/npm-prune.html
@@ -141,9 +141,9 @@
-
+
npm-prune
- @10.9.0
+ @10.9.1
Remove extraneous packages
diff --git a/deps/npm/docs/output/commands/npm-publish.html b/deps/npm/docs/output/commands/npm-publish.html
index b808cc29a15744..b69cb033afa39c 100644
--- a/deps/npm/docs/output/commands/npm-publish.html
+++ b/deps/npm/docs/output/commands/npm-publish.html
@@ -141,9 +141,9 @@
-
+
npm-publish
- @10.9.0
+ @10.9.1
Publish a package
diff --git a/deps/npm/docs/output/commands/npm-query.html b/deps/npm/docs/output/commands/npm-query.html
index d85a2f6c27ff79..04359b6954b82f 100644
--- a/deps/npm/docs/output/commands/npm-query.html
+++ b/deps/npm/docs/output/commands/npm-query.html
@@ -141,9 +141,9 @@
-
+
npm-query
- @10.9.0
+ @10.9.1
Dependency selector query
diff --git a/deps/npm/docs/output/commands/npm-rebuild.html b/deps/npm/docs/output/commands/npm-rebuild.html
index ff28b35b86bced..ce696a013d7792 100644
--- a/deps/npm/docs/output/commands/npm-rebuild.html
+++ b/deps/npm/docs/output/commands/npm-rebuild.html
@@ -141,9 +141,9 @@
-
+
npm-rebuild
- @10.9.0
+ @10.9.1
Rebuild a package
diff --git a/deps/npm/docs/output/commands/npm-repo.html b/deps/npm/docs/output/commands/npm-repo.html
index 8ee7e9d6d2d074..6ec545571e0cf8 100644
--- a/deps/npm/docs/output/commands/npm-repo.html
+++ b/deps/npm/docs/output/commands/npm-repo.html
@@ -141,9 +141,9 @@
-
+
npm-repo
- @10.9.0
+ @10.9.1
Open package repository page in the browser
diff --git a/deps/npm/docs/output/commands/npm-restart.html b/deps/npm/docs/output/commands/npm-restart.html
index e3171bf280910b..aa6907f7cf0e9b 100644
--- a/deps/npm/docs/output/commands/npm-restart.html
+++ b/deps/npm/docs/output/commands/npm-restart.html
@@ -141,9 +141,9 @@
-
+
npm-restart
- @10.9.0
+ @10.9.1
Restart a package
diff --git a/deps/npm/docs/output/commands/npm-root.html b/deps/npm/docs/output/commands/npm-root.html
index 4e2e82e5bb259c..139504648861b0 100644
--- a/deps/npm/docs/output/commands/npm-root.html
+++ b/deps/npm/docs/output/commands/npm-root.html
@@ -141,9 +141,9 @@
-
+
npm-root
- @10.9.0
+ @10.9.1
Display npm root
diff --git a/deps/npm/docs/output/commands/npm-run-script.html b/deps/npm/docs/output/commands/npm-run-script.html
index 4673e733f49fcd..bcb21c48052eff 100644
--- a/deps/npm/docs/output/commands/npm-run-script.html
+++ b/deps/npm/docs/output/commands/npm-run-script.html
@@ -141,9 +141,9 @@
-
+
npm-run-script
- @10.9.0
+ @10.9.1
Run arbitrary package scripts
diff --git a/deps/npm/docs/output/commands/npm-sbom.html b/deps/npm/docs/output/commands/npm-sbom.html
index 00508eca6cd91a..64fa00b09d8648 100644
--- a/deps/npm/docs/output/commands/npm-sbom.html
+++ b/deps/npm/docs/output/commands/npm-sbom.html
@@ -141,9 +141,9 @@
-
+
npm-sbom
- @10.9.0
+ @10.9.1
Generate a Software Bill of Materials (SBOM)
diff --git a/deps/npm/docs/output/commands/npm-search.html b/deps/npm/docs/output/commands/npm-search.html
index edf4e437e65400..19130178365691 100644
--- a/deps/npm/docs/output/commands/npm-search.html
+++ b/deps/npm/docs/output/commands/npm-search.html
@@ -141,9 +141,9 @@
-
+
npm-search
- @10.9.0
+ @10.9.1
Search for packages
diff --git a/deps/npm/docs/output/commands/npm-shrinkwrap.html b/deps/npm/docs/output/commands/npm-shrinkwrap.html
index f225abaed9218f..318e80890e12c9 100644
--- a/deps/npm/docs/output/commands/npm-shrinkwrap.html
+++ b/deps/npm/docs/output/commands/npm-shrinkwrap.html
@@ -141,9 +141,9 @@
-
+
npm-shrinkwrap
- @10.9.0
+ @10.9.1
Lock down dependency versions for publication
diff --git a/deps/npm/docs/output/commands/npm-star.html b/deps/npm/docs/output/commands/npm-star.html
index 04b36628ad2176..4db54c52ce041a 100644
--- a/deps/npm/docs/output/commands/npm-star.html
+++ b/deps/npm/docs/output/commands/npm-star.html
@@ -141,9 +141,9 @@
-
+
npm-star
- @10.9.0
+ @10.9.1
Mark your favorite packages
diff --git a/deps/npm/docs/output/commands/npm-stars.html b/deps/npm/docs/output/commands/npm-stars.html
index 3183aa047f1f89..23598971506f12 100644
--- a/deps/npm/docs/output/commands/npm-stars.html
+++ b/deps/npm/docs/output/commands/npm-stars.html
@@ -141,9 +141,9 @@
-
+
npm-stars
- @10.9.0
+ @10.9.1
View packages marked as favorites
diff --git a/deps/npm/docs/output/commands/npm-start.html b/deps/npm/docs/output/commands/npm-start.html
index b81caefe0a4431..a8b7460c5503b5 100644
--- a/deps/npm/docs/output/commands/npm-start.html
+++ b/deps/npm/docs/output/commands/npm-start.html
@@ -141,9 +141,9 @@
-
+
npm-start
- @10.9.0
+ @10.9.1
Start a package
diff --git a/deps/npm/docs/output/commands/npm-stop.html b/deps/npm/docs/output/commands/npm-stop.html
index 85d4c782a736e7..05c59988ce5b02 100644
--- a/deps/npm/docs/output/commands/npm-stop.html
+++ b/deps/npm/docs/output/commands/npm-stop.html
@@ -141,9 +141,9 @@
-
+
npm-stop
- @10.9.0
+ @10.9.1
Stop a package
diff --git a/deps/npm/docs/output/commands/npm-team.html b/deps/npm/docs/output/commands/npm-team.html
index 6ad869fa3bed9e..596e81ca07571e 100644
--- a/deps/npm/docs/output/commands/npm-team.html
+++ b/deps/npm/docs/output/commands/npm-team.html
@@ -141,9 +141,9 @@
-
+
npm-team
- @10.9.0
+ @10.9.1
Manage organization teams and team memberships
diff --git a/deps/npm/docs/output/commands/npm-test.html b/deps/npm/docs/output/commands/npm-test.html
index bce16c92e4f087..a67145f8007675 100644
--- a/deps/npm/docs/output/commands/npm-test.html
+++ b/deps/npm/docs/output/commands/npm-test.html
@@ -141,9 +141,9 @@
-
+
npm-test
- @10.9.0
+ @10.9.1
Test a package
diff --git a/deps/npm/docs/output/commands/npm-token.html b/deps/npm/docs/output/commands/npm-token.html
index 99fcafd9b8f53b..43a3d9e8c11824 100644
--- a/deps/npm/docs/output/commands/npm-token.html
+++ b/deps/npm/docs/output/commands/npm-token.html
@@ -141,9 +141,9 @@
-
+
npm-token
- @10.9.0
+ @10.9.1
Manage your authentication tokens
diff --git a/deps/npm/docs/output/commands/npm-uninstall.html b/deps/npm/docs/output/commands/npm-uninstall.html
index dd8e7a2234604e..2be118b69200f7 100644
--- a/deps/npm/docs/output/commands/npm-uninstall.html
+++ b/deps/npm/docs/output/commands/npm-uninstall.html
@@ -141,9 +141,9 @@
-
+
npm-uninstall
- @10.9.0
+ @10.9.1
Remove a package
diff --git a/deps/npm/docs/output/commands/npm-unpublish.html b/deps/npm/docs/output/commands/npm-unpublish.html
index 56e2c59f62addb..7ffcfc5687253d 100644
--- a/deps/npm/docs/output/commands/npm-unpublish.html
+++ b/deps/npm/docs/output/commands/npm-unpublish.html
@@ -141,9 +141,9 @@
-
+
npm-unpublish
- @10.9.0
+ @10.9.1
Remove a package from the registry
diff --git a/deps/npm/docs/output/commands/npm-unstar.html b/deps/npm/docs/output/commands/npm-unstar.html
index 710d8a947d665f..80e858ca870d86 100644
--- a/deps/npm/docs/output/commands/npm-unstar.html
+++ b/deps/npm/docs/output/commands/npm-unstar.html
@@ -141,9 +141,9 @@
-
+
npm-unstar
- @10.9.0
+ @10.9.1
Remove an item from your favorite packages
diff --git a/deps/npm/docs/output/commands/npm-update.html b/deps/npm/docs/output/commands/npm-update.html
index e587ec92f8614a..1a6ae021eb91d7 100644
--- a/deps/npm/docs/output/commands/npm-update.html
+++ b/deps/npm/docs/output/commands/npm-update.html
@@ -141,9 +141,9 @@
-
+
npm-update
- @10.9.0
+ @10.9.1
Update packages
diff --git a/deps/npm/docs/output/commands/npm-version.html b/deps/npm/docs/output/commands/npm-version.html
index 196a0236093673..f86ea98c3c92bb 100644
--- a/deps/npm/docs/output/commands/npm-version.html
+++ b/deps/npm/docs/output/commands/npm-version.html
@@ -141,9 +141,9 @@
-
+
npm-version
- @10.9.0
+ @10.9.1
Bump a package version
diff --git a/deps/npm/docs/output/commands/npm-view.html b/deps/npm/docs/output/commands/npm-view.html
index 0eebee3037748d..054ab0f5929683 100644
--- a/deps/npm/docs/output/commands/npm-view.html
+++ b/deps/npm/docs/output/commands/npm-view.html
@@ -141,9 +141,9 @@
-
+
npm-view
- @10.9.0
+ @10.9.1
View registry info
diff --git a/deps/npm/docs/output/commands/npm-whoami.html b/deps/npm/docs/output/commands/npm-whoami.html
index 0d1fa5ea9b87d7..7103f29354b398 100644
--- a/deps/npm/docs/output/commands/npm-whoami.html
+++ b/deps/npm/docs/output/commands/npm-whoami.html
@@ -141,9 +141,9 @@
-
+
npm-whoami
- @10.9.0
+ @10.9.1
Display npm username
diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html
index 005009c1dba277..79d2e5b0ec9a75 100644
--- a/deps/npm/docs/output/commands/npm.html
+++ b/deps/npm/docs/output/commands/npm.html
@@ -141,9 +141,9 @@
-
+
npm
- @10.9.0
+ @10.9.1
javascript package manager
@@ -158,7 +158,7 @@ Table of contents
Note: This command is unaware of workspaces.
Version
-10.9.0
+10.9.1
Description
npm is the package manager for the Node JavaScript platform. It puts
modules in place so that node can find them, and manages dependency
diff --git a/deps/npm/docs/output/commands/npx.html b/deps/npm/docs/output/commands/npx.html
index d3239ed320bb70..e722d396d2fc2f 100644
--- a/deps/npm/docs/output/commands/npx.html
+++ b/deps/npm/docs/output/commands/npx.html
@@ -141,9 +141,9 @@
-
+
npx
- @10.9.0
+ @10.9.1
Run a command from a local or remote npm package
diff --git a/deps/npm/docs/output/configuring-npm/folders.html b/deps/npm/docs/output/configuring-npm/folders.html
index f23aef6354f73d..278258f62d3109 100644
--- a/deps/npm/docs/output/configuring-npm/folders.html
+++ b/deps/npm/docs/output/configuring-npm/folders.html
@@ -141,9 +141,9 @@
-
+
folders
- @10.9.0
+ @10.9.1
Folder Structures Used by npm
diff --git a/deps/npm/docs/output/configuring-npm/install.html b/deps/npm/docs/output/configuring-npm/install.html
index cddbcaae2de8be..61faffa4666219 100644
--- a/deps/npm/docs/output/configuring-npm/install.html
+++ b/deps/npm/docs/output/configuring-npm/install.html
@@ -141,9 +141,9 @@
-
+
install
- @10.9.0
+ @10.9.1
Download and install node and npm
diff --git a/deps/npm/docs/output/configuring-npm/npm-global.html b/deps/npm/docs/output/configuring-npm/npm-global.html
index f23aef6354f73d..278258f62d3109 100644
--- a/deps/npm/docs/output/configuring-npm/npm-global.html
+++ b/deps/npm/docs/output/configuring-npm/npm-global.html
@@ -141,9 +141,9 @@
-
+
folders
- @10.9.0
+ @10.9.1
Folder Structures Used by npm
diff --git a/deps/npm/docs/output/configuring-npm/npm-json.html b/deps/npm/docs/output/configuring-npm/npm-json.html
index 0cd27f8d82fdd8..a13d96ed88c66e 100644
--- a/deps/npm/docs/output/configuring-npm/npm-json.html
+++ b/deps/npm/docs/output/configuring-npm/npm-json.html
@@ -141,9 +141,9 @@
-
+
package.json
- @10.9.0
+ @10.9.1
Specifics of npm's package.json handling
diff --git a/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html b/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html
index 5f20e99541a7a7..27034d851c3f13 100644
--- a/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html
+++ b/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html
@@ -141,9 +141,9 @@
-
+
npm-shrinkwrap.json
- @10.9.0
+ @10.9.1
A publishable lockfile
diff --git a/deps/npm/docs/output/configuring-npm/npmrc.html b/deps/npm/docs/output/configuring-npm/npmrc.html
index a20c3d807d3d83..ad3a7436cd9fde 100644
--- a/deps/npm/docs/output/configuring-npm/npmrc.html
+++ b/deps/npm/docs/output/configuring-npm/npmrc.html
@@ -141,9 +141,9 @@
-
+
npmrc
- @10.9.0
+ @10.9.1
The npm config files
diff --git a/deps/npm/docs/output/configuring-npm/package-json.html b/deps/npm/docs/output/configuring-npm/package-json.html
index 0cd27f8d82fdd8..a13d96ed88c66e 100644
--- a/deps/npm/docs/output/configuring-npm/package-json.html
+++ b/deps/npm/docs/output/configuring-npm/package-json.html
@@ -141,9 +141,9 @@
-
+
package.json
- @10.9.0
+ @10.9.1
Specifics of npm's package.json handling
diff --git a/deps/npm/docs/output/configuring-npm/package-lock-json.html b/deps/npm/docs/output/configuring-npm/package-lock-json.html
index 58598df7ae4fbc..9cf1f7413394f3 100644
--- a/deps/npm/docs/output/configuring-npm/package-lock-json.html
+++ b/deps/npm/docs/output/configuring-npm/package-lock-json.html
@@ -141,9 +141,9 @@
-
+
package-lock.json
- @10.9.0
+ @10.9.1
A manifestation of the manifest
diff --git a/deps/npm/docs/output/using-npm/config.html b/deps/npm/docs/output/using-npm/config.html
index 5d70bb7e8e803e..a3e521919798fc 100644
--- a/deps/npm/docs/output/using-npm/config.html
+++ b/deps/npm/docs/output/using-npm/config.html
@@ -141,9 +141,9 @@
-
+
config
- @10.9.0
+ @10.9.1
More than you probably want to know about npm configuration
diff --git a/deps/npm/docs/output/using-npm/dependency-selectors.html b/deps/npm/docs/output/using-npm/dependency-selectors.html
index 008377b3ab55ff..9ab66b16a2b588 100644
--- a/deps/npm/docs/output/using-npm/dependency-selectors.html
+++ b/deps/npm/docs/output/using-npm/dependency-selectors.html
@@ -141,9 +141,9 @@
-
+
Dependency Selector Syntax & Querying
- @10.9.0
+ @10.9.1
Dependency Selector Syntax & Querying
diff --git a/deps/npm/docs/output/using-npm/developers.html b/deps/npm/docs/output/using-npm/developers.html
index e75223cd623a54..d2e03c36046a7a 100644
--- a/deps/npm/docs/output/using-npm/developers.html
+++ b/deps/npm/docs/output/using-npm/developers.html
@@ -141,9 +141,9 @@
-
+
developers
- @10.9.0
+ @10.9.1
Developer Guide
diff --git a/deps/npm/docs/output/using-npm/logging.html b/deps/npm/docs/output/using-npm/logging.html
index 5499bd53e285f2..2d5b25ec33a9eb 100644
--- a/deps/npm/docs/output/using-npm/logging.html
+++ b/deps/npm/docs/output/using-npm/logging.html
@@ -141,9 +141,9 @@
-
+
Logging
- @10.9.0
+ @10.9.1
Why, What & How We Log
diff --git a/deps/npm/docs/output/using-npm/orgs.html b/deps/npm/docs/output/using-npm/orgs.html
index c9ed80f6421c5e..30378d391994c6 100644
--- a/deps/npm/docs/output/using-npm/orgs.html
+++ b/deps/npm/docs/output/using-npm/orgs.html
@@ -141,9 +141,9 @@
-
+
orgs
- @10.9.0
+ @10.9.1
Working with Teams & Orgs
diff --git a/deps/npm/docs/output/using-npm/package-spec.html b/deps/npm/docs/output/using-npm/package-spec.html
index a370699bb555bd..998257f5841b17 100644
--- a/deps/npm/docs/output/using-npm/package-spec.html
+++ b/deps/npm/docs/output/using-npm/package-spec.html
@@ -141,9 +141,9 @@
-
+
package-spec
- @10.9.0
+ @10.9.1
Package name specifier
diff --git a/deps/npm/docs/output/using-npm/registry.html b/deps/npm/docs/output/using-npm/registry.html
index 9d9f8a539333c9..323aa7c776f822 100644
--- a/deps/npm/docs/output/using-npm/registry.html
+++ b/deps/npm/docs/output/using-npm/registry.html
@@ -141,9 +141,9 @@
-
+
registry
- @10.9.0
+ @10.9.1
The JavaScript Package Registry
diff --git a/deps/npm/docs/output/using-npm/removal.html b/deps/npm/docs/output/using-npm/removal.html
index 2668dd9dd8a440..8011ad107cebc8 100644
--- a/deps/npm/docs/output/using-npm/removal.html
+++ b/deps/npm/docs/output/using-npm/removal.html
@@ -141,9 +141,9 @@
-
+
removal
- @10.9.0
+ @10.9.1
Cleaning the Slate
diff --git a/deps/npm/docs/output/using-npm/scope.html b/deps/npm/docs/output/using-npm/scope.html
index 0441f8ef703e95..f8d0cbbbfbb5bc 100644
--- a/deps/npm/docs/output/using-npm/scope.html
+++ b/deps/npm/docs/output/using-npm/scope.html
@@ -141,9 +141,9 @@
-
+
scope
- @10.9.0
+ @10.9.1
Scoped packages
diff --git a/deps/npm/docs/output/using-npm/scripts.html b/deps/npm/docs/output/using-npm/scripts.html
index ff37c2ede18062..fbf65a2593c240 100644
--- a/deps/npm/docs/output/using-npm/scripts.html
+++ b/deps/npm/docs/output/using-npm/scripts.html
@@ -141,9 +141,9 @@
-
+
scripts
- @10.9.0
+ @10.9.1
How npm handles the "scripts" field
diff --git a/deps/npm/docs/output/using-npm/workspaces.html b/deps/npm/docs/output/using-npm/workspaces.html
index 19195cc868db4b..09e6d401fd75aa 100644
--- a/deps/npm/docs/output/using-npm/workspaces.html
+++ b/deps/npm/docs/output/using-npm/workspaces.html
@@ -141,9 +141,9 @@
-
+
workspaces
- @10.9.0
+ @10.9.1
Working with workspaces
diff --git a/deps/npm/lib/cli.js b/deps/npm/lib/cli.js
index e11729fe3205b9..00b4fc0bd7fb72 100644
--- a/deps/npm/lib/cli.js
+++ b/deps/npm/lib/cli.js
@@ -1,3 +1,11 @@
+try {
+  const { enableCompileCache } = require('node:module')
+  /* istanbul ignore next */
+  if (enableCompileCache) {
+    enableCompileCache()
+  }
+} catch (e) { /* istanbul ignore next */ }
+
const validateEngines = require('./cli/validate-engines.js')
const cliEntry = require('node:path').resolve(__dirname, 'cli/entry.js')
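
The hunk above opts npm's CLI into Node's on-disk compile cache at startup. module.enableCompileCache() only exists in newer Node.js releases (it landed in Node 22.1.0), so the destructured binding is truthiness-checked and the whole block is wrapped in try/catch to stay a no-op on older runtimes. A minimal standalone sketch of the same defensive pattern follows; the typeof guard and the final log line are illustrative additions, not part of the npm change:

'use strict'

// Enable V8's on-disk compile cache when the runtime supports it,
// and fall back to doing nothing on older Node.js versions.
try {
  const { enableCompileCache } = require('node:module')
  if (typeof enableCompileCache === 'function') {
    // Persists compiled bytecode so later invocations start faster.
    enableCompileCache()
  }
} catch {
  // Resolution or the call itself failed: run without the cache.
}

console.log('startup continues with or without the compile cache')

Guarding both the lookup and the call keeps a single cli.js entry point compatible with every Node version npm supports, rather than branching on process.version.
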
diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1
index 602193f3b8e3e8..2e0b979a061918 100644
--- a/deps/npm/man/man1/npm-access.1
+++ b/deps/npm/man/man1/npm-access.1
@@ -1,4 +1,4 @@
-.TH "NPM-ACCESS" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-ACCESS" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-access\fR - Set access level on published packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1
index 467384ec0ec40e..c1f786f595b062 100644
--- a/deps/npm/man/man1/npm-adduser.1
+++ b/deps/npm/man/man1/npm-adduser.1
@@ -1,4 +1,4 @@
-.TH "NPM-ADDUSER" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-ADDUSER" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-adduser\fR - Add a registry user account
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1
index 64bf5d5461a157..908d8d2ddc5cb0 100644
--- a/deps/npm/man/man1/npm-audit.1
+++ b/deps/npm/man/man1/npm-audit.1
@@ -1,4 +1,4 @@
-.TH "NPM-AUDIT" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-AUDIT" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-audit\fR - Run a security audit
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1
index c25b04b6814137..17eb1d09ae4c55 100644
--- a/deps/npm/man/man1/npm-bugs.1
+++ b/deps/npm/man/man1/npm-bugs.1
@@ -1,4 +1,4 @@
-.TH "NPM-BUGS" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-BUGS" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-bugs\fR - Report bugs for a package in a web browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1
index e9da044a5b4479..2b06a6e5d9d471 100644
--- a/deps/npm/man/man1/npm-cache.1
+++ b/deps/npm/man/man1/npm-cache.1
@@ -1,4 +1,4 @@
-.TH "NPM-CACHE" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-CACHE" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-cache\fR - Manipulates packages cache
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1
index 1ad1419cb1f98f..d021d55d0f34cc 100644
--- a/deps/npm/man/man1/npm-ci.1
+++ b/deps/npm/man/man1/npm-ci.1
@@ -1,4 +1,4 @@
-.TH "NPM-CI" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-CI" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-ci\fR - Clean install a project
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1
index 6f246c436cead3..e0a454d49109d2 100644
--- a/deps/npm/man/man1/npm-completion.1
+++ b/deps/npm/man/man1/npm-completion.1
@@ -1,4 +1,4 @@
-.TH "NPM-COMPLETION" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-COMPLETION" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-completion\fR - Tab Completion for npm
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1
index 1bef58e188b68c..b6c5e5dd331fb1 100644
--- a/deps/npm/man/man1/npm-config.1
+++ b/deps/npm/man/man1/npm-config.1
@@ -1,4 +1,4 @@
-.TH "NPM-CONFIG" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-CONFIG" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-config\fR - Manage the npm configuration files
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1
index b1e21f0478ed4f..3e0b5d91d4e0b5 100644
--- a/deps/npm/man/man1/npm-dedupe.1
+++ b/deps/npm/man/man1/npm-dedupe.1
@@ -1,4 +1,4 @@
-.TH "NPM-DEDUPE" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-DEDUPE" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-dedupe\fR - Reduce duplication in the package tree
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1
index 31e9e56cd95382..73d802434859f3 100644
--- a/deps/npm/man/man1/npm-deprecate.1
+++ b/deps/npm/man/man1/npm-deprecate.1
@@ -1,4 +1,4 @@
-.TH "NPM-DEPRECATE" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-DEPRECATE" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-deprecate\fR - Deprecate a version of a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-diff.1 b/deps/npm/man/man1/npm-diff.1
index c29ae3266d1543..52fb6989ae79f9 100644
--- a/deps/npm/man/man1/npm-diff.1
+++ b/deps/npm/man/man1/npm-diff.1
@@ -1,4 +1,4 @@
-.TH "NPM-DIFF" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-DIFF" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-diff\fR - The registry diff command
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1
index 46e5cf8ac3492a..272018226ae78b 100644
--- a/deps/npm/man/man1/npm-dist-tag.1
+++ b/deps/npm/man/man1/npm-dist-tag.1
@@ -1,4 +1,4 @@
-.TH "NPM-DIST-TAG" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-DIST-TAG" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-dist-tag\fR - Modify package distribution tags
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1
index 6cda2d87abe163..86d32afda3cbeb 100644
--- a/deps/npm/man/man1/npm-docs.1
+++ b/deps/npm/man/man1/npm-docs.1
@@ -1,4 +1,4 @@
-.TH "NPM-DOCS" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-DOCS" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-docs\fR - Open documentation for a package in a web browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1
index 7a67ed1e6b32b1..32c190ba11b9cd 100644
--- a/deps/npm/man/man1/npm-doctor.1
+++ b/deps/npm/man/man1/npm-doctor.1
@@ -1,4 +1,4 @@
-.TH "NPM-DOCTOR" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-DOCTOR" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-doctor\fR - Check the health of your npm environment
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1
index 5ae7e24b67d60e..d1f5db8b127da1 100644
--- a/deps/npm/man/man1/npm-edit.1
+++ b/deps/npm/man/man1/npm-edit.1
@@ -1,4 +1,4 @@
-.TH "NPM-EDIT" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-EDIT" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-edit\fR - Edit an installed package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-exec.1 b/deps/npm/man/man1/npm-exec.1
index 20c21e67919d65..db7631141e97cb 100644
--- a/deps/npm/man/man1/npm-exec.1
+++ b/deps/npm/man/man1/npm-exec.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXEC" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-EXEC" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-exec\fR - Run a command from a local or remote npm package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-explain.1 b/deps/npm/man/man1/npm-explain.1
index 65ef0e086e2ce0..92602d31ec27ad 100644
--- a/deps/npm/man/man1/npm-explain.1
+++ b/deps/npm/man/man1/npm-explain.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXPLAIN" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-EXPLAIN" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-explain\fR - Explain installed packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1
index 6b92a81c2abc0d..f57981af0e84e6 100644
--- a/deps/npm/man/man1/npm-explore.1
+++ b/deps/npm/man/man1/npm-explore.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXPLORE" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-EXPLORE" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-explore\fR - Browse an installed package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1
index 261313e1b23ad1..d86fd9b54f7a2d 100644
--- a/deps/npm/man/man1/npm-find-dupes.1
+++ b/deps/npm/man/man1/npm-find-dupes.1
@@ -1,4 +1,4 @@
-.TH "NPM-FIND-DUPES" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-FIND-DUPES" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-find-dupes\fR - Find duplication in the package tree
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-fund.1 b/deps/npm/man/man1/npm-fund.1
index b87d57d20c0f50..d1c07a8785e7f6 100644
--- a/deps/npm/man/man1/npm-fund.1
+++ b/deps/npm/man/man1/npm-fund.1
@@ -1,4 +1,4 @@
-.TH "NPM-FUND" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-FUND" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-fund\fR - Retrieve funding information
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1
index 9660ca29697e9f..9b61ca2bacbe62 100644
--- a/deps/npm/man/man1/npm-help-search.1
+++ b/deps/npm/man/man1/npm-help-search.1
@@ -1,4 +1,4 @@
-.TH "NPM-HELP-SEARCH" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-HELP-SEARCH" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-help-search\fR - Search npm help documentation
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1
index a8c70fdc88a9ae..1a9cacd9992837 100644
--- a/deps/npm/man/man1/npm-help.1
+++ b/deps/npm/man/man1/npm-help.1
@@ -1,4 +1,4 @@
-.TH "NPM-HELP" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-HELP" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-help\fR - Get help on npm
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-hook.1 b/deps/npm/man/man1/npm-hook.1
index 2c07a5afadbb64..121633a214df82 100644
--- a/deps/npm/man/man1/npm-hook.1
+++ b/deps/npm/man/man1/npm-hook.1
@@ -1,4 +1,4 @@
-.TH "NPM-HOOK" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-HOOK" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-hook\fR - Manage registry hooks
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1
index cb7339dc0f4ff0..e3f38fa0718afd 100644
--- a/deps/npm/man/man1/npm-init.1
+++ b/deps/npm/man/man1/npm-init.1
@@ -1,4 +1,4 @@
-.TH "NPM-INIT" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-INIT" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-init\fR - Create a package.json file
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-install-ci-test.1 b/deps/npm/man/man1/npm-install-ci-test.1
index 52defde693a506..f9521016c9fe1c 100644
--- a/deps/npm/man/man1/npm-install-ci-test.1
+++ b/deps/npm/man/man1/npm-install-ci-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL-CI-TEST" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-INSTALL-CI-TEST" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-install-ci-test\fR - Install a project with a clean slate and run tests
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1
index e2023e1a985474..a32baa859c8a75 100644
--- a/deps/npm/man/man1/npm-install-test.1
+++ b/deps/npm/man/man1/npm-install-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL-TEST" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-INSTALL-TEST" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-install-test\fR - Install package(s) and run tests
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1
index 59b3a8c81ac73f..510736aecbe21e 100644
--- a/deps/npm/man/man1/npm-install.1
+++ b/deps/npm/man/man1/npm-install.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-INSTALL" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-install\fR - Install a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1
index e6106495400fdc..00378601d85fec 100644
--- a/deps/npm/man/man1/npm-link.1
+++ b/deps/npm/man/man1/npm-link.1
@@ -1,4 +1,4 @@
-.TH "NPM-LINK" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-LINK" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-link\fR - Symlink a package folder
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-login.1 b/deps/npm/man/man1/npm-login.1
index 36f2bf2d0f35dc..dd76470ae39984 100644
--- a/deps/npm/man/man1/npm-login.1
+++ b/deps/npm/man/man1/npm-login.1
@@ -1,4 +1,4 @@
-.TH "NPM-LOGIN" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-LOGIN" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-login\fR - Login to a registry user account
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1
index 78cba39ccd85c9..b8f5117faf5f17 100644
--- a/deps/npm/man/man1/npm-logout.1
+++ b/deps/npm/man/man1/npm-logout.1
@@ -1,4 +1,4 @@
-.TH "NPM-LOGOUT" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-LOGOUT" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-logout\fR - Log out of the registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1
index b0364fe1531753..70f25355090dee 100644
--- a/deps/npm/man/man1/npm-ls.1
+++ b/deps/npm/man/man1/npm-ls.1
@@ -1,4 +1,4 @@
-.TH "NPM-LS" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-LS" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-ls\fR - List installed packages
.SS "Synopsis"
@@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit
.P
.RS 2
.nf
-npm@10.9.0 /path/to/npm
+npm@10.9.1 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
.fi
diff --git a/deps/npm/man/man1/npm-org.1 b/deps/npm/man/man1/npm-org.1
index 0465a6b5bddb54..96cd8f8188792b 100644
--- a/deps/npm/man/man1/npm-org.1
+++ b/deps/npm/man/man1/npm-org.1
@@ -1,4 +1,4 @@
-.TH "NPM-ORG" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-ORG" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-org\fR - Manage orgs
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1
index ad1cf07823630f..190a362f9db0e2 100644
--- a/deps/npm/man/man1/npm-outdated.1
+++ b/deps/npm/man/man1/npm-outdated.1
@@ -1,4 +1,4 @@
-.TH "NPM-OUTDATED" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-OUTDATED" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-outdated\fR - Check for outdated packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1
index 5842e5a51d125f..48306a70964528 100644
--- a/deps/npm/man/man1/npm-owner.1
+++ b/deps/npm/man/man1/npm-owner.1
@@ -1,4 +1,4 @@
-.TH "NPM-OWNER" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-OWNER" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-owner\fR - Manage package owners
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1
index 6cc2acd4b580f7..b482c24822da3a 100644
--- a/deps/npm/man/man1/npm-pack.1
+++ b/deps/npm/man/man1/npm-pack.1
@@ -1,4 +1,4 @@
-.TH "NPM-PACK" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-PACK" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-pack\fR - Create a tarball from a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1
index ad03cbbb6a1398..136348a2d95d7d 100644
--- a/deps/npm/man/man1/npm-ping.1
+++ b/deps/npm/man/man1/npm-ping.1
@@ -1,4 +1,4 @@
-.TH "NPM-PING" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-PING" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-ping\fR - Ping npm registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-pkg.1 b/deps/npm/man/man1/npm-pkg.1
index 2f45876234ec3d..be9998d4157760 100644
--- a/deps/npm/man/man1/npm-pkg.1
+++ b/deps/npm/man/man1/npm-pkg.1
@@ -1,4 +1,4 @@
-.TH "NPM-PKG" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-PKG" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-pkg\fR - Manages your package.json
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1
index cac44344c71d79..44f138d72ee25e 100644
--- a/deps/npm/man/man1/npm-prefix.1
+++ b/deps/npm/man/man1/npm-prefix.1
@@ -1,4 +1,4 @@
-.TH "NPM-PREFIX" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-PREFIX" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-prefix\fR - Display prefix
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-profile.1 b/deps/npm/man/man1/npm-profile.1
index 28e3f41244ace1..cb8c012d8b51b3 100644
--- a/deps/npm/man/man1/npm-profile.1
+++ b/deps/npm/man/man1/npm-profile.1
@@ -1,4 +1,4 @@
-.TH "NPM-PROFILE" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-PROFILE" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-profile\fR - Change settings on your registry profile
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1
index 7634c75f0901fa..5ce07fb8b0b197 100644
--- a/deps/npm/man/man1/npm-prune.1
+++ b/deps/npm/man/man1/npm-prune.1
@@ -1,4 +1,4 @@
-.TH "NPM-PRUNE" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-PRUNE" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-prune\fR - Remove extraneous packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1
index c6724b6fe2ce51..2d30658334dd0e 100644
--- a/deps/npm/man/man1/npm-publish.1
+++ b/deps/npm/man/man1/npm-publish.1
@@ -1,4 +1,4 @@
-.TH "NPM-PUBLISH" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-PUBLISH" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-publish\fR - Publish a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-query.1 b/deps/npm/man/man1/npm-query.1
index 0a3c6e665e7ecf..b4d6019ae216d3 100644
--- a/deps/npm/man/man1/npm-query.1
+++ b/deps/npm/man/man1/npm-query.1
@@ -1,4 +1,4 @@
-.TH "NPM-QUERY" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-QUERY" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-query\fR - Dependency selector query
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1
index e537cec373e3c6..a2fa8cae922e05 100644
--- a/deps/npm/man/man1/npm-rebuild.1
+++ b/deps/npm/man/man1/npm-rebuild.1
@@ -1,4 +1,4 @@
-.TH "NPM-REBUILD" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-REBUILD" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-rebuild\fR - Rebuild a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1
index 869541f6faaa51..3b3d714f4e255a 100644
--- a/deps/npm/man/man1/npm-repo.1
+++ b/deps/npm/man/man1/npm-repo.1
@@ -1,4 +1,4 @@
-.TH "NPM-REPO" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-REPO" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-repo\fR - Open package repository page in the browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1
index b0e3b0deead7ef..dbdf51cfacb73d 100644
--- a/deps/npm/man/man1/npm-restart.1
+++ b/deps/npm/man/man1/npm-restart.1
@@ -1,4 +1,4 @@
-.TH "NPM-RESTART" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-RESTART" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-restart\fR - Restart a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1
index 94a132557e3979..f1d47ad60d2844 100644
--- a/deps/npm/man/man1/npm-root.1
+++ b/deps/npm/man/man1/npm-root.1
@@ -1,4 +1,4 @@
-.TH "NPM-ROOT" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-ROOT" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-root\fR - Display npm root
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1
index 424c733d6a0523..a34eed7ad0a0bd 100644
--- a/deps/npm/man/man1/npm-run-script.1
+++ b/deps/npm/man/man1/npm-run-script.1
@@ -1,4 +1,4 @@
-.TH "NPM-RUN-SCRIPT" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-RUN-SCRIPT" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-run-script\fR - Run arbitrary package scripts
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-sbom.1 b/deps/npm/man/man1/npm-sbom.1
index 7bb8199c8b11c8..85b9ecff24b6ac 100644
--- a/deps/npm/man/man1/npm-sbom.1
+++ b/deps/npm/man/man1/npm-sbom.1
@@ -1,4 +1,4 @@
-.TH "NPM-SBOM" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-SBOM" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-sbom\fR - Generate a Software Bill of Materials (SBOM)
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1
index 4fe67890bc949a..3526282de54112 100644
--- a/deps/npm/man/man1/npm-search.1
+++ b/deps/npm/man/man1/npm-search.1
@@ -1,4 +1,4 @@
-.TH "NPM-SEARCH" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-SEARCH" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-search\fR - Search for packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1
index ed3c0cb03ccca7..454e2719fbb91d 100644
--- a/deps/npm/man/man1/npm-shrinkwrap.1
+++ b/deps/npm/man/man1/npm-shrinkwrap.1
@@ -1,4 +1,4 @@
-.TH "NPM-SHRINKWRAP" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-SHRINKWRAP" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-shrinkwrap\fR - Lock down dependency versions for publication
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1
index 83d0ff230ec1bb..a848760bcfda03 100644
--- a/deps/npm/man/man1/npm-star.1
+++ b/deps/npm/man/man1/npm-star.1
@@ -1,4 +1,4 @@
-.TH "NPM-STAR" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-STAR" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-star\fR - Mark your favorite packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1
index bdeffec7a3b1b5..7a1deaae083530 100644
--- a/deps/npm/man/man1/npm-stars.1
+++ b/deps/npm/man/man1/npm-stars.1
@@ -1,4 +1,4 @@
-.TH "NPM-STARS" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-STARS" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-stars\fR - View packages marked as favorites
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1
index c3cb48c6f72576..d2ecf94226cc75 100644
--- a/deps/npm/man/man1/npm-start.1
+++ b/deps/npm/man/man1/npm-start.1
@@ -1,4 +1,4 @@
-.TH "NPM-START" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-START" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-start\fR - Start a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1
index 36ef105209b7f0..8b125a42495e95 100644
--- a/deps/npm/man/man1/npm-stop.1
+++ b/deps/npm/man/man1/npm-stop.1
@@ -1,4 +1,4 @@
-.TH "NPM-STOP" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-STOP" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-stop\fR - Stop a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1
index 6712c8f92b23c8..10b05e3250360d 100644
--- a/deps/npm/man/man1/npm-team.1
+++ b/deps/npm/man/man1/npm-team.1
@@ -1,4 +1,4 @@
-.TH "NPM-TEAM" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-TEAM" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-team\fR - Manage organization teams and team memberships
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1
index 5e63e9c6d76ab3..ab418a7842c014 100644
--- a/deps/npm/man/man1/npm-test.1
+++ b/deps/npm/man/man1/npm-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-TEST" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-TEST" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-test\fR - Test a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1
index 29a2c361574121..dda637ff7337fc 100644
--- a/deps/npm/man/man1/npm-token.1
+++ b/deps/npm/man/man1/npm-token.1
@@ -1,4 +1,4 @@
-.TH "NPM-TOKEN" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-TOKEN" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-token\fR - Manage your authentication tokens
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1
index 0c10a60c1f07e7..cdcf6e3821a384 100644
--- a/deps/npm/man/man1/npm-uninstall.1
+++ b/deps/npm/man/man1/npm-uninstall.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNINSTALL" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-UNINSTALL" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-uninstall\fR - Remove a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1
index 098af2e59310bc..118e362d07e850 100644
--- a/deps/npm/man/man1/npm-unpublish.1
+++ b/deps/npm/man/man1/npm-unpublish.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNPUBLISH" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-UNPUBLISH" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-unpublish\fR - Remove a package from the registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-unstar.1 b/deps/npm/man/man1/npm-unstar.1
index 7326984aa8b4d2..a03b7936ecd133 100644
--- a/deps/npm/man/man1/npm-unstar.1
+++ b/deps/npm/man/man1/npm-unstar.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNSTAR" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-UNSTAR" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-unstar\fR - Remove an item from your favorite packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1
index 797a9e601cf3d2..db8153d5ec5cc1 100644
--- a/deps/npm/man/man1/npm-update.1
+++ b/deps/npm/man/man1/npm-update.1
@@ -1,4 +1,4 @@
-.TH "NPM-UPDATE" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-UPDATE" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-update\fR - Update packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1
index d0a7dcee6fea65..26e496ffea90af 100644
--- a/deps/npm/man/man1/npm-version.1
+++ b/deps/npm/man/man1/npm-version.1
@@ -1,4 +1,4 @@
-.TH "NPM-VERSION" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-VERSION" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-version\fR - Bump a package version
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1
index 2925f8d7d2cffe..66f23dea09cf36 100644
--- a/deps/npm/man/man1/npm-view.1
+++ b/deps/npm/man/man1/npm-view.1
@@ -1,4 +1,4 @@
-.TH "NPM-VIEW" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-VIEW" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-view\fR - View registry info
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1
index f283db5c9d247a..5065d9439b7476 100644
--- a/deps/npm/man/man1/npm-whoami.1
+++ b/deps/npm/man/man1/npm-whoami.1
@@ -1,4 +1,4 @@
-.TH "NPM-WHOAMI" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-WHOAMI" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-whoami\fR - Display npm username
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1
index b2034bc50fa5e3..465bd98994480c 100644
--- a/deps/npm/man/man1/npm.1
+++ b/deps/npm/man/man1/npm.1
@@ -1,4 +1,4 @@
-.TH "NPM" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPM" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm\fR - javascript package manager
.SS "Synopsis"
@@ -12,7 +12,7 @@ npm
Note: This command is unaware of workspaces.
.SS "Version"
.P
-10.9.0
+10.9.1
.SS "Description"
.P
npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently.
diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1
index a1000443cc665e..d2b9ca19f0b07b 100644
--- a/deps/npm/man/man1/npx.1
+++ b/deps/npm/man/man1/npx.1
@@ -1,4 +1,4 @@
-.TH "NPX" "1" "October 2024" "NPM@10.9.0" ""
+.TH "NPX" "1" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpx\fR - Run a command from a local or remote npm package
.SS "Synopsis"
diff --git a/deps/npm/man/man5/folders.5 b/deps/npm/man/man5/folders.5
index 6e3045729c46ca..4ec7f9e421229b 100644
--- a/deps/npm/man/man5/folders.5
+++ b/deps/npm/man/man5/folders.5
@@ -1,4 +1,4 @@
-.TH "FOLDERS" "5" "October 2024" "NPM@10.9.0" ""
+.TH "FOLDERS" "5" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBfolders\fR - Folder Structures Used by npm
.SS "Description"
diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5
index f5fabf02d7431b..8d2ba04de228df 100644
--- a/deps/npm/man/man5/install.5
+++ b/deps/npm/man/man5/install.5
@@ -1,4 +1,4 @@
-.TH "INSTALL" "5" "October 2024" "NPM@10.9.0" ""
+.TH "INSTALL" "5" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBinstall\fR - Download and install node and npm
.SS "Description"
diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5
index 6e3045729c46ca..4ec7f9e421229b 100644
--- a/deps/npm/man/man5/npm-global.5
+++ b/deps/npm/man/man5/npm-global.5
@@ -1,4 +1,4 @@
-.TH "FOLDERS" "5" "October 2024" "NPM@10.9.0" ""
+.TH "FOLDERS" "5" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBfolders\fR - Folder Structures Used by npm
.SS "Description"
diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5
index 49773dda01d706..4dfb85cf0ec290 100644
--- a/deps/npm/man/man5/npm-json.5
+++ b/deps/npm/man/man5/npm-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE.JSON" "5" "October 2024" "NPM@10.9.0" ""
+.TH "PACKAGE.JSON" "5" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBpackage.json\fR - Specifics of npm's package.json handling
.SS "Description"
diff --git a/deps/npm/man/man5/npm-shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5
index d1f394d23aa1ce..18d600846e61e0 100644
--- a/deps/npm/man/man5/npm-shrinkwrap-json.5
+++ b/deps/npm/man/man5/npm-shrinkwrap-json.5
@@ -1,4 +1,4 @@
-.TH "NPM-SHRINKWRAP.JSON" "5" "October 2024" "NPM@10.9.0" ""
+.TH "NPM-SHRINKWRAP.JSON" "5" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpm-shrinkwrap.json\fR - A publishable lockfile
.SS "Description"
diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5
index 3b78f989c23545..c0aea346446c26 100644
--- a/deps/npm/man/man5/npmrc.5
+++ b/deps/npm/man/man5/npmrc.5
@@ -1,4 +1,4 @@
-.TH "NPMRC" "5" "October 2024" "NPM@10.9.0" ""
+.TH "NPMRC" "5" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBnpmrc\fR - The npm config files
.SS "Description"
diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5
index 49773dda01d706..4dfb85cf0ec290 100644
--- a/deps/npm/man/man5/package-json.5
+++ b/deps/npm/man/man5/package-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE.JSON" "5" "October 2024" "NPM@10.9.0" ""
+.TH "PACKAGE.JSON" "5" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBpackage.json\fR - Specifics of npm's package.json handling
.SS "Description"
diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5
index df3dcca1c4fa75..a8e005d6eacd95 100644
--- a/deps/npm/man/man5/package-lock-json.5
+++ b/deps/npm/man/man5/package-lock-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE-LOCK.JSON" "5" "October 2024" "NPM@10.9.0" ""
+.TH "PACKAGE-LOCK.JSON" "5" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBpackage-lock.json\fR - A manifestation of the manifest
.SS "Description"
diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7
index ee166c9d4ccee9..91182bebb7bb58 100644
--- a/deps/npm/man/man7/config.7
+++ b/deps/npm/man/man7/config.7
@@ -1,4 +1,4 @@
-.TH "CONFIG" "7" "October 2024" "NPM@10.9.0" ""
+.TH "CONFIG" "7" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBconfig\fR - More than you probably want to know about npm configuration
.SS "Description"
diff --git a/deps/npm/man/man7/dependency-selectors.7 b/deps/npm/man/man7/dependency-selectors.7
index 54d80fee4dc58d..9bad66a7133656 100644
--- a/deps/npm/man/man7/dependency-selectors.7
+++ b/deps/npm/man/man7/dependency-selectors.7
@@ -1,4 +1,4 @@
-.TH "QUERYING" "7" "October 2024" "NPM@10.9.0" ""
+.TH "QUERYING" "7" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBQuerying\fR - Dependency Selector Syntax & Querying
.SS "Description"
diff --git a/deps/npm/man/man7/developers.7 b/deps/npm/man/man7/developers.7
index aa907f3f51deda..45e5e7f88ef717 100644
--- a/deps/npm/man/man7/developers.7
+++ b/deps/npm/man/man7/developers.7
@@ -1,4 +1,4 @@
-.TH "DEVELOPERS" "7" "October 2024" "NPM@10.9.0" ""
+.TH "DEVELOPERS" "7" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBdevelopers\fR - Developer Guide
.SS "Description"
diff --git a/deps/npm/man/man7/logging.7 b/deps/npm/man/man7/logging.7
index f36ae6e4703048..82995205531df6 100644
--- a/deps/npm/man/man7/logging.7
+++ b/deps/npm/man/man7/logging.7
@@ -1,4 +1,4 @@
-.TH "LOGGING" "7" "October 2024" "NPM@10.9.0" ""
+.TH "LOGGING" "7" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBLogging\fR - Why, What & How We Log
.SS "Description"
diff --git a/deps/npm/man/man7/orgs.7 b/deps/npm/man/man7/orgs.7
index 2866d7dd9800a5..a8fb5afa8fca2d 100644
--- a/deps/npm/man/man7/orgs.7
+++ b/deps/npm/man/man7/orgs.7
@@ -1,4 +1,4 @@
-.TH "ORGS" "7" "October 2024" "NPM@10.9.0" ""
+.TH "ORGS" "7" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBorgs\fR - Working with Teams & Orgs
.SS "Description"
diff --git a/deps/npm/man/man7/package-spec.7 b/deps/npm/man/man7/package-spec.7
index 0d76a5f3017253..25e65529ecd110 100644
--- a/deps/npm/man/man7/package-spec.7
+++ b/deps/npm/man/man7/package-spec.7
@@ -1,4 +1,4 @@
-.TH "PACKAGE-SPEC" "7" "October 2024" "NPM@10.9.0" ""
+.TH "PACKAGE-SPEC" "7" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBpackage-spec\fR - Package name specifier
.SS "Description"
diff --git a/deps/npm/man/man7/registry.7 b/deps/npm/man/man7/registry.7
index 61ad2702f7b235..4183b46aca6022 100644
--- a/deps/npm/man/man7/registry.7
+++ b/deps/npm/man/man7/registry.7
@@ -1,4 +1,4 @@
-.TH "REGISTRY" "7" "October 2024" "NPM@10.9.0" ""
+.TH "REGISTRY" "7" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBregistry\fR - The JavaScript Package Registry
.SS "Description"
diff --git a/deps/npm/man/man7/removal.7 b/deps/npm/man/man7/removal.7
index a76b743553c304..2323ea863ee840 100644
--- a/deps/npm/man/man7/removal.7
+++ b/deps/npm/man/man7/removal.7
@@ -1,4 +1,4 @@
-.TH "REMOVAL" "7" "October 2024" "NPM@10.9.0" ""
+.TH "REMOVAL" "7" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBremoval\fR - Cleaning the Slate
.SS "Synopsis"
diff --git a/deps/npm/man/man7/scope.7 b/deps/npm/man/man7/scope.7
index 94c22ccbd9f8fa..89631de3d8d8ca 100644
--- a/deps/npm/man/man7/scope.7
+++ b/deps/npm/man/man7/scope.7
@@ -1,4 +1,4 @@
-.TH "SCOPE" "7" "October 2024" "NPM@10.9.0" ""
+.TH "SCOPE" "7" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBscope\fR - Scoped packages
.SS "Description"
diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7
index 925d3181dc9174..dffe4dacc6357e 100644
--- a/deps/npm/man/man7/scripts.7
+++ b/deps/npm/man/man7/scripts.7
@@ -1,4 +1,4 @@
-.TH "SCRIPTS" "7" "October 2024" "NPM@10.9.0" ""
+.TH "SCRIPTS" "7" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBscripts\fR - How npm handles the "scripts" field
.SS "Description"
diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7
index 1812eb73eb7ece..f68b418b73f994 100644
--- a/deps/npm/man/man7/workspaces.7
+++ b/deps/npm/man/man7/workspaces.7
@@ -1,4 +1,4 @@
-.TH "WORKSPACES" "7" "October 2024" "NPM@10.9.0" ""
+.TH "WORKSPACES" "7" "November 2024" "NPM@10.9.1" ""
.SH "NAME"
\fBworkspaces\fR - Working with workspaces
.SS "Description"
diff --git a/deps/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js b/deps/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js
index 130a0929b8ce8c..ddfdba39a783a4 100644
--- a/deps/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js
+++ b/deps/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js
@@ -1,7 +1,9 @@
export default function ansiRegex({onlyFirst = false} = {}) {
+ // Valid string terminator sequences are BEL, ESC\, and 0x9c
+ const ST = '(?:\\u0007|\\u001B\\u005C|\\u009C)';
const pattern = [
- '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
- '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
+ `[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?${ST})`,
+ '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))',
].join('|');
return new RegExp(pattern, onlyFirst ? undefined : 'g');
diff --git a/deps/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json b/deps/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json
index 7bbb563bf2a70a..49f3f61021512b 100644
--- a/deps/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json
+++ b/deps/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json
@@ -1,6 +1,6 @@
{
"name": "ansi-regex",
- "version": "6.0.1",
+ "version": "6.1.0",
"description": "Regular expression for matching ANSI escape codes",
"license": "MIT",
"repository": "chalk/ansi-regex",
@@ -12,6 +12,8 @@
},
"type": "module",
"exports": "./index.js",
+ "types": "./index.d.ts",
+ "sideEffects": false,
"engines": {
"node": ">=12"
},
@@ -51,8 +53,9 @@
"pattern"
],
"devDependencies": {
+ "ansi-escapes": "^5.0.0",
"ava": "^3.15.0",
- "tsd": "^0.14.0",
- "xo": "^0.38.2"
+ "tsd": "^0.21.0",
+ "xo": "^0.54.2"
}
}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/proc-log/LICENSE b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
similarity index 90%
rename from deps/npm/node_modules/tuf-js/node_modules/proc-log/LICENSE
rename to deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
index 83837797202b70..a03cd0ed0b338b 100644
--- a/deps/npm/node_modules/tuf-js/node_modules/proc-log/LICENSE
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
@@ -1,6 +1,6 @@
The ISC License
-Copyright (c) GitHub, Inc.
+Copyright (c) Isaac Z. Schlueter, Kat Marchán, npm, Inc., and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/README.md b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/README.md
new file mode 100644
index 00000000000000..dbb0051de23a4d
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/README.md
@@ -0,0 +1,283 @@
+# pacote
+
+Fetches package manifests and tarballs from the npm registry.
+
+## USAGE
+
+```js
+const pacote = require('pacote')
+
+// get a package manifest
+pacote.manifest('foo@1.x').then(manifest => console.log('got it', manifest))
+
+// extract a package into a folder
+pacote.extract('github:npm/cli', 'some/path', options)
+ .then(({from, resolved, integrity}) => {
+ console.log('extracted!', from, resolved, integrity)
+ })
+
+pacote.tarball('https://server.com/package.tgz').then(data => {
+ console.log('got ' + data.length + ' bytes of tarball data')
+})
+```
+
+`pacote` works with any kind of package specifier that npm can install. If
+you can pass it to the npm CLI, you can pass it to pacote. (In fact, that's
+exactly what the npm CLI does.)
+
+Anything that you can do with one kind of package, you can do with another.
+
+Data that isn't relevant (like a packument for a tarball) will be
+simulated.
+
+`prepare` scripts will be run when generating tarballs from `git` and
+`directory` locations, to simulate what _would_ be published to the
+registry, so that you get a working package instead of just raw source
+code that might need to be transpiled.
+
+## CLI
+
+This module exports a command line interface that can do most of what is
+described below. Run `pacote -h` to learn more.
+
+```
+Pacote - The JavaScript Package Handler, v10.1.1
+
+Usage:
+
+ pacote resolve <spec>
+ Resolve a specifier and output the fully resolved target
+ Returns integrity and from if '--long' flag is set.
+
+ pacote manifest <spec>
+ Fetch a manifest and print to stdout
+
+ pacote packument <spec>
+ Fetch a full packument and print to stdout
+
+ pacote tarball <spec> [<filename>]
+ Fetch a package tarball and save to <filename>
+ If <filename> is missing or '-', the tarball will be streamed to stdout.
+
+ pacote extract <spec> <folder>
+ Extract a package to the destination folder.
+
+Configuration values all match the names of configs passed to npm, or
+options passed to Pacote. Additional flags for this executable:
+
+ --long Print an object from 'resolve', including integrity and spec.
+ --json Print result objects as JSON rather than node's default.
+ (This is the default if stdout is not a TTY.)
+ --help -h Print this helpful text.
+
+For example '--cache=/path/to/folder' will use that folder as the cache.
+```
+
+## API
+
+The `spec` refers to any kind of package specifier that npm can install.
+If you can pass it to the npm CLI, you can pass it to pacote. (In fact,
+that's exactly what the npm CLI does.)
+
+See below for valid `opts` values.
+
+* `pacote.resolve(spec, opts)` Resolve a specifier like `foo@latest` or
+ `github:user/project` all the way to a tarball url, tarball file, or git
+ repo with commit hash.
+
+* `pacote.extract(spec, dest, opts)` Extract a package's tarball into a
+ destination folder. Returns a promise that resolves to the
+ `{from,resolved,integrity}` of the extracted package.
+
+* `pacote.manifest(spec, opts)` Fetch (or simulate) a package's manifest
+ (basically, the `package.json` file, plus a bit of metadata).
+ See below for more on manifests and packuments. Returns a Promise that
+ resolves to the manifest object.
+
+* `pacote.packument(spec, opts)` Fetch (or simulate) a package's packument
+ (basically, the top-level package document listing all the manifests that
+ the registry returns). See below for more on manifests and packuments.
+ Returns a Promise that resolves to the packument object.
+
+* `pacote.tarball(spec, opts)` Get a package's tarball data as a buffer in
+ memory. Returns a Promise that resolves to the tarball data Buffer, with
+ `from`, `resolved`, and `integrity` fields attached.
+
+* `pacote.tarball.file(spec, dest, opts)` Save a package's tarball data to
+ a file on disk. Returns a Promise that resolves to
+ `{from,integrity,resolved}` of the fetched tarball.
+
+* `pacote.tarball.stream(spec, streamHandler, opts)` Fetch a tarball and
+ make the stream available to the `streamHandler` function.
+
+ This is mostly an internal function, but it is exposed because it does
+ provide some functionality that may be difficult to achieve otherwise.
+
+ The `streamHandler` function MUST return a Promise that resolves when
+ the stream (and all associated work) is ended, or rejects if the stream
+ has an error.
+
+ The `streamHandler` function MAY be called multiple times, as Pacote
+ retries requests in some scenarios, such as cache corruption or
+ retriable network failures. (A usage sketch follows this list.)
+
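+A minimal usage sketch for `tarball.stream` (the `./foo.tgz` destination is
+hypothetical); the handler returns a promise that settles when the write
+finishes, so pacote's retry logic can observe failures:
+
+```js
+const fs = require('node:fs')
+const pacote = require('pacote')
+
+pacote.tarball.stream('foo@1.x', stream => new Promise((res, rej) => {
+  const out = fs.createWriteStream('./foo.tgz')
+  // reject on either stream so a retry (or failure) can be triggered
+  stream.on('error', rej)
+  out.on('error', rej)
+  out.on('close', res)
+  stream.pipe(out)
+})).then(() => console.log('tarball written'))
+```
+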
+### Options
+
+Options are passed to
+[`npm-registry-fetch`](http://npm.im/npm-registry-fetch) and
+[`cacache`](http://npm.im/cacache), so in addition to these, anything for
+those modules can be given to pacote as well.
+
+The options object is cloned internally, and mutated along the way to add
+`integrity`, `resolved`, and other properties as they are determined.
+
+* `cache` Where to store cache entries and temp files. Passed to
+ [`cacache`](http://npm.im/cacache). Defaults to the same cache directory
+ that npm will use by default, based on platform and environment.
+* `where` Base folder for resolving relative `file:` dependencies.
+* `resolved` Shortcut for looking up resolved values. Should be specified
+ if known.
+* `integrity` Expected integrity of fetched package tarball. If specified,
+ tarballs with mismatched integrity values will raise an `EINTEGRITY`
+ error.
+* `umask` Permission mode mask for extracted files and directories.
+ Defaults to `0o22`. See "Extracted File Modes" below.
+* `fmode` Minimum permission mode for extracted files. Defaults to
+ `0o666`. See "Extracted File Modes" below.
+* `dmode` Minimum permission mode for extracted directories. Defaults to
+ `0o777`. See "Extracted File Modes" below.
+* `preferOnline` Prefer to revalidate cache entries, even when it would not
+ be strictly necessary. Default `false`.
+* `before` When picking a manifest from a packument, only consider
+ packages published before the specified date. Default `null`.
+* `defaultTag` The default `dist-tag` to use when choosing a manifest from a
+ packument. Defaults to `latest`.
+* `registry` The npm registry to use by default. Defaults to
+ `https://registry.npmjs.org/`.
+* `fullMetadata` Fetch the full metadata from the registry for packuments,
+ including information not strictly required for installation (author,
+ description, etc.) Defaults to `true` when `before` is set, since the
+ version publish time is part of the extended packument metadata.
+* `fullReadJson` Use the slower `read-package-json` package instead of
+ `read-package-json-fast` in order to include extra fields like "readme" in
+ the manifest. Defaults to `false`.
+* `packumentCache` For registry packuments only, you may provide a `Map`
+ object which will be used to cache packument requests between pacote
+ calls. This allows you to easily avoid hitting the registry multiple
+ times (even just to validate the cache) for a given packument, since it
+ is unlikely to change in the span of a single command.
+* `verifySignatures` A boolean that will make pacote verify the
+ integrity signature of a manifest, if present. There must be a
+ configured `_keys` entry in the config that is scoped to the
+ registry the manifest is being fetched from.
+* `verifyAttestations` A boolean that will make pacote verify Sigstore
+ attestations, if present. There must be a configured `_keys` entry in the
+ config that is scoped to the registry the manifest is being fetched from.
+* `tufCache` Where to store metadata/target files when retrieving the package
+ attestation key material via TUF. Defaults to the same cache directory that
+ npm will use by default, based on platform and environment.
+
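+As a sketch, several of these options combined in one call (`foo` is a
+hypothetical package; the registry shown is just the default):
+
+```js
+const pacote = require('pacote')
+
+// one shared cache avoids re-fetching the same packument in a command
+const packumentCache = new Map()
+
+const opts = {
+  registry: 'https://registry.npmjs.org',
+  defaultTag: 'latest',
+  before: new Date('2024-01-01'), // implies fullMetadata: true
+  packumentCache,
+}
+
+pacote.manifest('foo@^1.0.0', opts)
+  .then(mani => console.log(mani.name, mani.version))
+```
+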
+### Advanced API
+
+Each fetcher type is exposed for more advanced usage, such as calling
+helper methods on these classes:
+
+* `DirFetcher`
+* `FileFetcher`
+* `GitFetcher`
+* `RegistryFetcher`
+* `RemoteFetcher`
+
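+For example, a fetcher can be constructed directly (a sketch; `./my-pkg` is
+a hypothetical directory, and the Arborist constructor must be supplied by
+the caller if the directory is to be packed):
+
+```js
+const { DirFetcher } = require('pacote')
+const Arborist = require('@npmcli/arborist')
+
+const fetcher = new DirFetcher('file:./my-pkg', { Arborist })
+fetcher.manifest().then(mani => {
+  // tarCreateOptions is exposed as a static helper on DirFetcher
+  console.log(DirFetcher.tarCreateOptions(mani))
+})
+```
+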
+## Extracted File Modes
+
+Files are extracted with a mode matching the following formula:
+
+```
+( (tarball entry mode value) | (minimum mode option) ) & ~(umask)
+```
+
+This prevents unreadable files or unlistable directories from cluttering
+a project's `node_modules` folder, even if the package tarball specifies
+that a file should be inaccessible.
+
+It also prevents files from being group- or world-writable without explicit
+opt-in by the user, because all file and directory modes are masked against
+the `umask` value.
+
+So, a file which is `0o771` in the tarball, using the default `fmode` of
+`0o666` and `umask` of `0o22`, will result in a file mode of `0o755`:
+
+```
+(0o771 | 0o666) => 0o777
+(0o777 & ~0o22) => 0o755
+```
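+
+The same arithmetic as a quick JavaScript check (a sketch mirroring the
+numbers above):
+
+```js
+const entry = 0o771  // mode of the tarball entry
+const fmode = 0o666  // minimum file mode option
+const umask = 0o22   // mask applied last
+
+const mode = (entry | fmode) & ~umask
+console.log((mode & 0o777).toString(8)) // => '755'
+```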
+
+In almost every case, the defaults are appropriate. To respect exactly
+what is in the package tarball (even if this makes an unusable system), set
+both `dmode` and `fmode` options to `0`. Otherwise, the `umask` config
+should be used in most cases where file mode modifications are required,
+and this functions more or less the same as the `umask` value in most Unix
+systems.
+
+## Extracted File Ownership
+
+When running as `root` on Unix systems, all extracted files and folders
+will have their owning `uid` and `gid` values set to match the ownership
+of the containing folder.
+
+This prevents `root`-owned files showing up in a project's `node_modules`
+folder when a user runs `sudo npm install`.
+
+## Manifests
+
+A `manifest` is similar to a `package.json` file. However, it has a few
+pieces of extra metadata, and sometimes lacks metadata that is inessential
+to package installation.
+
+In addition to the common `package.json` fields, manifests include:
+
+* `manifest._resolved` The tarball url or file path where the package
+ artifact can be found.
+* `manifest._from` A normalized form of the spec passed in as an argument.
+* `manifest._integrity` The integrity value for the package artifact.
+* `manifest._id` The canonical spec of this package version: name@version.
+* `manifest.dist` Registry manifests (those included in a packument) have a
+ `dist` object. Only `tarball` is required, though at least one of
+ `shasum` or `integrity` is almost always present.
+
+ * `tarball` The url to the associated package artifact. (Copied by
+ Pacote to `manifest._resolved`.)
+ * `integrity` The integrity SRI string for the artifact. This may not
+ be present for older packages on the npm registry. (Copied by Pacote
+ to `manifest._integrity`.)
+ * `shasum` Legacy integrity value. Hexadecimal-encoded sha1 hash.
+ (Converted to an SRI string and copied by Pacote to
+ `manifest._integrity` when `dist.integrity` is not present.)
+ * `fileCount` Number of files in the tarball.
+ * `unpackedSize` Size on disk of the package when unpacked.
+ * `signatures` Signatures of the shasum. Includes the `keyid` that
+ correlates to a [key from the npm registry](https://registry.npmjs.org/-/npm/v1/keys).
+
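+Put together, a registry manifest looks roughly like this (a sketch with
+illustrative values):
+
+```js
+const manifest = {
+  name: 'foo',
+  version: '1.2.3',
+  _id: 'foo@1.2.3',
+  _from: 'foo@^1.0.0',
+  _resolved: 'https://registry.npmjs.org/foo/-/foo-1.2.3.tgz',
+  _integrity: 'sha512-deadbeef', // illustrative, not a real SRI hash
+  dist: {
+    tarball: 'https://registry.npmjs.org/foo/-/foo-1.2.3.tgz',
+    integrity: 'sha512-deadbeef', // copied by pacote to _integrity
+  },
+}
+```
+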
+## Packuments
+
+A packument is the top-level package document that lists the set of
+manifests for all available versions of a package.
+
+When a packument is fetched with `accept:
+application/vnd.npm.install-v1+json` in the HTTP headers, only the
+minimum necessary metadata is returned. Additional metadata is returned
+when fetched with only `accept: application/json`.
+
+For Pacote's purposes, the following fields are relevant:
+
+* `versions` An object where each key is a version, and each value is the
+ manifest for that version.
+* `dist-tags` An object mapping dist-tags to version numbers. This is how
+ `foo@latest` gets turned into `foo@1.2.3`.
+* `time` In the full packument, an object mapping version numbers to
+ publication times, for the `opts.before` functionality.
+
+Pacote adds the following field, regardless of the accept header:
+
+* `_contentLength` The size of the packument.
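+
+A packument, reduced to the fields listed above, might look like this (a
+sketch with illustrative values; `time` appears only in the full packument):
+
+```js
+const packument = {
+  name: 'foo',
+  'dist-tags': { latest: '1.2.3' },
+  versions: {
+    '1.2.3': { name: 'foo', version: '1.2.3', dist: { /* ... */ } },
+  },
+  // full packument only; used by the `opts.before` functionality
+  time: { '1.2.3': '2024-01-02T03:04:05.000Z' },
+  _contentLength: 1234, // added by pacote
+}
+```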
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/bin/index.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/bin/index.js
new file mode 100755
index 00000000000000..f35b62ca71a537
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/bin/index.js
@@ -0,0 +1,158 @@
+#!/usr/bin/env node
+
+const run = conf => {
+ const pacote = require('../')
+ switch (conf._[0]) {
+ case 'resolve':
+ case 'manifest':
+ case 'packument':
+ if (conf._[0] === 'resolve' && conf.long) {
+ return pacote.manifest(conf._[1], conf).then(mani => ({
+ resolved: mani._resolved,
+ integrity: mani._integrity,
+ from: mani._from,
+ }))
+ }
+ return pacote[conf._[0]](conf._[1], conf)
+
+ case 'tarball':
+ if (!conf._[2] || conf._[2] === '-') {
+ return pacote.tarball.stream(conf._[1], stream => {
+ stream.pipe(
+ conf.testStdout ||
+ /* istanbul ignore next */
+ process.stdout
+ )
+ // make sure it resolves something falsey
+ return stream.promise().then(() => {
+ return false
+ })
+ }, conf)
+ } else {
+ return pacote.tarball.file(conf._[1], conf._[2], conf)
+ }
+
+ case 'extract':
+ return pacote.extract(conf._[1], conf._[2], conf)
+
+ default: /* istanbul ignore next */ {
+ throw new Error(`bad command: ${conf._[0]}`)
+ }
+ }
+}
+
+const version = require('../package.json').version
+const usage = () =>
+`Pacote - The JavaScript Package Handler, v${version}
+
+Usage:
+
+ pacote resolve <spec>
+ Resolve a specifier and output the fully resolved target
+ Returns integrity and from if '--long' flag is set.
+
+ pacote manifest <spec>
+ Fetch a manifest and print to stdout
+
+ pacote packument <spec>
+ Fetch a full packument and print to stdout
+
+ pacote tarball <spec> [<filename>]
+ Fetch a package tarball and save to <filename>
+ If <filename> is missing or '-', the tarball will be streamed to stdout.
+
+ pacote extract <spec> <folder>
+ Extract a package to the destination folder.
+
+Configuration values all match the names of configs passed to npm, or
+options passed to Pacote. Additional flags for this executable:
+
+ --long Print an object from 'resolve', including integrity and spec.
+ --json Print result objects as JSON rather than node's default.
+ (This is the default if stdout is not a TTY.)
+ --help -h Print this helpful text.
+
+For example '--cache=/path/to/folder' will use that folder as the cache.
+`
+
+const shouldJSON = (conf, result) =>
+ conf.json ||
+ !process.stdout.isTTY &&
+ conf.json === undefined &&
+ result &&
+ typeof result === 'object'
+
+const pretty = (conf, result) =>
+ shouldJSON(conf, result) ? JSON.stringify(result, 0, 2) : result
+
+let addedLogListener = false
+const main = args => {
+ const conf = parse(args)
+ if (conf.help || conf.h) {
+ return console.log(usage())
+ }
+
+ if (!addedLogListener) {
+ process.on('log', console.error)
+ addedLogListener = true
+ }
+
+ try {
+ return run(conf)
+ .then(result => result && console.log(pretty(conf, result)))
+ .catch(er => {
+ console.error(er)
+ process.exit(1)
+ })
+ } catch (er) {
+ console.error(er.message)
+ console.error(usage())
+ }
+}
+
+const parseArg = arg => {
+ const split = arg.slice(2).split('=')
+ const k = split.shift()
+ const v = split.join('=')
+ const no = /^no-/.test(k) && !v
+ const key = (no ? k.slice(3) : k)
+ .replace(/^tag$/, 'defaultTag')
+ .replace(/-([a-z])/g, (_, c) => c.toUpperCase())
+ const value = v ? v.replace(/^~/, process.env.HOME) : !no
+ return { key, value }
+}
+
+const parse = args => {
+ const conf = {
+ _: [],
+ cache: process.env.HOME + '/.npm/_cacache',
+ }
+ let dashdash = false
+ args.forEach(arg => {
+ if (dashdash) {
+ conf._.push(arg)
+ } else if (arg === '--') {
+ dashdash = true
+ } else if (arg === '-h') {
+ conf.help = true
+ } else if (/^--/.test(arg)) {
+ const { key, value } = parseArg(arg)
+ conf[key] = value
+ } else {
+ conf._.push(arg)
+ }
+ })
+ return conf
+}
+
+if (module === require.main) {
+ main(process.argv.slice(2))
+} else {
+ module.exports = {
+ main,
+ run,
+ usage,
+ parseArg,
+ parse,
+ }
+}
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js
new file mode 100644
index 00000000000000..04846eb8a6e221
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js
@@ -0,0 +1,105 @@
+const { resolve } = require('node:path')
+const packlist = require('npm-packlist')
+const runScript = require('@npmcli/run-script')
+const tar = require('tar')
+const { Minipass } = require('minipass')
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const _ = require('./util/protected.js')
+const tarCreateOptions = require('./util/tar-create-options.js')
+
+class DirFetcher extends Fetcher {
+ constructor (spec, opts) {
+ super(spec, opts)
+ // just the fully resolved filename
+ this.resolved = this.spec.fetchSpec
+
+ this.tree = opts.tree || null
+ this.Arborist = opts.Arborist || null
+ }
+
+ // exposes tarCreateOptions as public API
+ static tarCreateOptions (manifest) {
+ return tarCreateOptions(manifest)
+ }
+
+ get types () {
+ return ['directory']
+ }
+
+ #prepareDir () {
+ return this.manifest().then(mani => {
+ if (!mani.scripts || !mani.scripts.prepare) {
+ return
+ }
+ if (this.opts.ignoreScripts) {
+ return
+ }
+
+ // we *only* run prepare.
+ // pre/post-pack is run by the npm CLI for publish and pack,
+ // but this function is *also* run when installing git deps
+ const stdio = this.opts.foregroundScripts ? 'inherit' : 'pipe'
+
+ return runScript({
+ // this || undefined is because runScript will be unhappy with the default null value
+ scriptShell: this.opts.scriptShell || undefined,
+ pkg: mani,
+ event: 'prepare',
+ path: this.resolved,
+ stdio,
+ env: {
+ npm_package_resolved: this.resolved,
+ npm_package_integrity: this.integrity,
+ npm_package_json: resolve(this.resolved, 'package.json'),
+ },
+ })
+ })
+ }
+
+ [_.tarballFromResolved] () {
+ if (!this.tree && !this.Arborist) {
+ throw new Error('DirFetcher requires either a tree or an Arborist constructor to pack')
+ }
+
+ const stream = new Minipass()
+ stream.resolved = this.resolved
+ stream.integrity = this.integrity
+
+ const { prefix, workspaces } = this.opts
+
+ // run the prepare script, get the list of files, and tar it up
+ // pipe to the stream, and proxy errors up the chain.
+ this.#prepareDir()
+ .then(async () => {
+ if (!this.tree) {
+ const arb = new this.Arborist({ path: this.resolved })
+ this.tree = await arb.loadActual()
+ }
+ return packlist(this.tree, { path: this.resolved, prefix, workspaces })
+ })
+ .then(files => tar.c(tarCreateOptions(this.package), files)
+ .on('error', er => stream.emit('error', er)).pipe(stream))
+ .catch(er => stream.emit('error', er))
+ return stream
+ }
+
+ manifest () {
+ if (this.package) {
+ return Promise.resolve(this.package)
+ }
+
+ return this[_.readPackageJson](this.resolved)
+ .then(mani => this.package = {
+ ...mani,
+ _integrity: this.integrity && String(this.integrity),
+ _resolved: this.resolved,
+ _from: this.from,
+ })
+ }
+
+ packument () {
+ return FileFetcher.prototype.packument.apply(this)
+ }
+}
+module.exports = DirFetcher
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js
new file mode 100644
index 00000000000000..f2ac97619d3af1
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js
@@ -0,0 +1,497 @@
+// This is the base class that the other fetcher types in lib
+// all descend from.
+// It handles the unpacking and retry logic that is shared among
+// all of the other Fetcher types.
+
+const { basename, dirname } = require('node:path')
+const { rm, mkdir } = require('node:fs/promises')
+const PackageJson = require('@npmcli/package-json')
+const cacache = require('cacache')
+const fsm = require('fs-minipass')
+const getContents = require('@npmcli/installed-package-contents')
+const npa = require('npm-package-arg')
+const retry = require('promise-retry')
+const ssri = require('ssri')
+const tar = require('tar')
+const { Minipass } = require('minipass')
+const { log } = require('proc-log')
+const _ = require('./util/protected.js')
+const cacheDir = require('./util/cache-dir.js')
+const isPackageBin = require('./util/is-package-bin.js')
+const removeTrailingSlashes = require('./util/trailing-slashes.js')
+
+// Pacote is only concerned with the package.json contents
+const packageJsonPrepare = (p) => PackageJson.prepare(p).then(pkg => pkg.content)
+const packageJsonNormalize = (p) => PackageJson.normalize(p).then(pkg => pkg.content)
+
+class FetcherBase {
+ constructor (spec, opts) {
+ if (!opts || typeof opts !== 'object') {
+ throw new TypeError('options object is required')
+ }
+ this.spec = npa(spec, opts.where)
+
+ this.allowGitIgnore = !!opts.allowGitIgnore
+
+ // a bit redundant because presumably the caller already knows this,
+ // but it makes it easier to not have to keep track of the requested
+ // spec when we're dispatching thousands of these at once, and normalizing
+ // is nice. saveSpec is preferred if set, because it turns stuff like
+ // x/y#committish into github:x/y#committish. use name@rawSpec for
+ // registry deps so that we turn xyz and xyz@ -> xyz@
+ this.from = this.spec.registry
+ ? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec
+
+ this.#assertType()
+ // clone the opts object so that others aren't upset when we mutate it
+ // by adding/modifying the integrity value.
+ this.opts = { ...opts }
+
+ this.cache = opts.cache || cacheDir().cacache
+ this.tufCache = opts.tufCache || cacheDir().tufcache
+ this.resolved = opts.resolved || null
+
+ // default to caching/verifying with sha512, since that's what we usually
+ // have. We'll need to change this default, or start overriding it, when
+ // sha512 is no longer strong enough.
+ this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512'
+
+ if (typeof opts.integrity === 'string') {
+ this.opts.integrity = ssri.parse(opts.integrity)
+ }
+
+ this.package = null
+ this.type = this.constructor.name
+ this.fmode = opts.fmode || 0o666
+ this.dmode = opts.dmode || 0o777
+ // we don't need a default umask, because we don't chmod files coming
+ // out of package tarballs. they're forced to have a mode that is
+ // valid, regardless of what's in the tarball entry, and then we let
+ // the process's umask setting do its job. but if configured, we do
+ // respect it.
+ this.umask = opts.umask || 0
+
+ this.preferOnline = !!opts.preferOnline
+ this.preferOffline = !!opts.preferOffline
+ this.offline = !!opts.offline
+
+ this.before = opts.before
+ this.fullMetadata = this.before ? true : !!opts.fullMetadata
+ this.fullReadJson = !!opts.fullReadJson
+ this[_.readPackageJson] = this.fullReadJson
+ ? packageJsonPrepare
+ : packageJsonNormalize
+
+ // rrh is a registry hostname or 'never' or 'always'
+ // defaults to registry.npmjs.org
+ this.replaceRegistryHost = (!opts.replaceRegistryHost || opts.replaceRegistryHost === 'npmjs') ?
+ 'registry.npmjs.org' : opts.replaceRegistryHost
+
+ this.defaultTag = opts.defaultTag || 'latest'
+ this.registry = removeTrailingSlashes(opts.registry || 'https://registry.npmjs.org')
+
+ // command to run 'prepare' scripts on directories and git dirs
+ // To use pacote with yarn, for example, set npmBin to 'yarn'
+ // and npmCliConfig with yarn's equivalents.
+ this.npmBin = opts.npmBin || 'npm'
+
+ // command to install deps for preparing
+ this.npmInstallCmd = opts.npmInstallCmd || ['install', '--force']
+
+ // XXX fill more of this in based on what we know from this.opts
+ // we explicitly DO NOT fill in --tag, though, since we are often
+ // going to be packing in the context of a publish, which may set
+ // a dist-tag, but certainly wants to keep defaulting to latest.
+ this.npmCliConfig = opts.npmCliConfig || [
+ `--cache=${dirname(this.cache)}`,
+ `--prefer-offline=${!!this.preferOffline}`,
+ `--prefer-online=${!!this.preferOnline}`,
+ `--offline=${!!this.offline}`,
+ ...(this.before ? [`--before=${this.before.toISOString()}`] : []),
+ '--no-progress',
+ '--no-save',
+ '--no-audit',
+ // override any omit settings from the environment
+ '--include=dev',
+ '--include=peer',
+ '--include=optional',
+ // we need the actual things, not just the lockfile
+ '--no-package-lock-only',
+ '--no-dry-run',
+ ]
+ }
+
+ get integrity () {
+ return this.opts.integrity || null
+ }
+
+ set integrity (i) {
+ if (!i) {
+ return
+ }
+
+ i = ssri.parse(i)
+ const current = this.opts.integrity
+
+ // do not ever update an existing hash value, but do
+ // merge in NEW algos and hashes that we don't already have.
+ if (current) {
+ current.merge(i)
+ } else {
+ this.opts.integrity = i
+ }
+ }
+
+ get notImplementedError () {
+ return new Error('not implemented in this fetcher type: ' + this.type)
+ }
+
+ // override in child classes
+ // Returns a Promise that resolves to this.resolved string value
+ resolve () {
+ return this.resolved ? Promise.resolve(this.resolved)
+ : Promise.reject(this.notImplementedError)
+ }
+
+ packument () {
+ return Promise.reject(this.notImplementedError)
+ }
+
+ // override in child class
+ // returns a manifest containing:
+ // - name
+ // - version
+ // - _resolved
+ // - _integrity
+ // - plus whatever else was in there (corgi, full metadata, or pj file)
+ manifest () {
+ return Promise.reject(this.notImplementedError)
+ }
+
+ // private, should be overridden.
+ // Note that overrides should *not* calculate or check integrity or the
+ // cache, but *just* return the raw tarball data stream.
+ [_.tarballFromResolved] () {
+ throw this.notImplementedError
+ }
+
+ // public, should not be overridden
+ tarball () {
+ return this.tarballStream(stream => stream.concat().then(data => {
+ data.integrity = this.integrity && String(this.integrity)
+ data.resolved = this.resolved
+ data.from = this.from
+ return data
+ }))
+ }
+
+ // private
+ // Note: cacache will raise an EINTEGRITY error if the integrity doesn't match
+ #tarballFromCache () {
+ const startTime = Date.now()
+ const stream = cacache.get.stream.byDigest(this.cache, this.integrity, this.opts)
+ const elapsedTime = Date.now() - startTime
+ // cache is good, so log it as a hit in particular since there was no fetch logged
+ log.http(
+ 'cache',
+ `${this.spec} ${elapsedTime}ms (cache hit)`
+ )
+ return stream
+ }
+
+ get [_.cacheFetches] () {
+ return true
+ }
+
+ #istream (stream) {
+ // if not caching this, just return it
+ if (!this.opts.cache || !this[_.cacheFetches]) {
+ // instead of creating a new integrity stream, we only piggyback on the
+ // provided stream's events
+ if (stream.hasIntegrityEmitter) {
+ stream.on('integrity', i => this.integrity = i)
+ return stream
+ }
+
+ const istream = ssri.integrityStream(this.opts)
+ istream.on('integrity', i => this.integrity = i)
+ stream.on('error', err => istream.emit('error', err))
+ return stream.pipe(istream)
+ }
+
+ // we have to return a stream that gets ALL the data, and proxies errors,
+ // but then pipe from the original tarball stream into the cache as well.
+ // To do this without losing any data, and since the cacache put stream
+ // is not a passthrough, we have to pipe from the original stream into
+ // the cache AFTER we pipe into the middleStream. Since the cache stream
+ // has an asynchronous flush to write its contents to disk, we need to
+ // defer the middleStream end until the cache stream ends.
+ const middleStream = new Minipass()
+ stream.on('error', err => middleStream.emit('error', err))
+ stream.pipe(middleStream, { end: false })
+ const cstream = cacache.put.stream(
+ this.opts.cache,
+ `pacote:tarball:${this.from}`,
+ this.opts
+ )
+ cstream.on('integrity', i => this.integrity = i)
+ cstream.on('error', err => stream.emit('error', err))
+ stream.pipe(cstream)
+
+ // eslint-disable-next-line promise/catch-or-return
+ cstream.promise().catch(() => {}).then(() => middleStream.end())
+ return middleStream
+ }
+
+ pickIntegrityAlgorithm () {
+ return this.integrity ? this.integrity.pickAlgorithm(this.opts)
+ : this.defaultIntegrityAlgorithm
+ }
+
+ // TODO: check error class, once those are rolled out to our deps
+ isDataCorruptionError (er) {
+ return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR'
+ }
+
+ // overridden by child classes to declare which spec types they support
+ get types () {
+ return false
+ }
+
+ #assertType () {
+ if (this.types && !this.types.includes(this.spec.type)) {
+ throw new TypeError(`Wrong spec type (${
+ this.spec.type
+ }) for ${
+ this.constructor.name
+ }. Supported types: ${this.types.join(', ')}`)
+ }
+ }
+
+ // We allow ENOENTs from cacache, but not anywhere else.
+ // An ENOENT trying to read a tgz file, for example, is Right Out.
+ isRetriableError (er) {
+ // TODO: check error class, once those are rolled out to our deps
+ return this.isDataCorruptionError(er) ||
+ er.code === 'ENOENT' ||
+ er.code === 'EISDIR'
+ }
+
+ // Mostly internal, but has some uses
+ // Pass in a function which returns a promise
+ // Function will be called 1 or more times with streams that may fail.
+ // Retries:
+ // Function MUST handle errors on the stream by rejecting the promise,
+ // so that retry logic can pick it up and either retry or fail whatever
+ // promise it was making (ie, failing extraction, etc.)
+ //
+ // The return value of this method is a Promise that resolves the same
+ // as whatever the streamHandler resolves to.
+ //
+ // This should never be overridden by child classes, but it is public.
+ tarballStream (streamHandler) {
+ // Only short-circuit via cache if we have everything else we'll need,
+ // and the user has not expressed a preference for checking online.
+
+ const fromCache = (
+ !this.preferOnline &&
+ this.integrity &&
+ this.resolved
+ ) ? streamHandler(this.#tarballFromCache()).catch(er => {
+ if (this.isDataCorruptionError(er)) {
+ log.warn('tarball', `cached data for ${
+ this.spec
+ } (${this.integrity}) seems to be corrupted. Refreshing cache.`)
+ return this.cleanupCached().then(() => {
+ throw er
+ })
+ } else {
+ throw er
+ }
+ }) : null
+
+ const fromResolved = er => {
+ if (er) {
+ if (!this.isRetriableError(er)) {
+ throw er
+ }
+ log.silly('tarball', `no local data for ${
+ this.spec
+ }. Extracting by manifest.`)
+ }
+ return this.resolve().then(() => retry(tryAgain =>
+ streamHandler(this.#istream(this[_.tarballFromResolved]()))
+ .catch(streamErr => {
+ // Most likely data integrity. A cache ENOENT error is unlikely
+ // here, since we're definitely not reading from the cache, but it
+ // IS possible that the fetch subsystem accessed the cache, and the
+ // entry got blown away or something. Try one more time to be sure.
+ if (this.isRetriableError(streamErr)) {
+ log.warn('tarball', `tarball data for ${
+ this.spec
+ } (${this.integrity}) seems to be corrupted. Trying again.`)
+ return this.cleanupCached().then(() => tryAgain(streamErr))
+ }
+ throw streamErr
+ }), { retries: 1, minTimeout: 0, maxTimeout: 0 }))
+ }
+
+ return fromCache ? fromCache.catch(fromResolved) : fromResolved()
+ }
+
+ cleanupCached () {
+ return cacache.rm.content(this.cache, this.integrity, this.opts)
+ }
+
+ #empty (path) {
+ return getContents({ path, depth: 1 }).then(contents => Promise.all(
+ contents.map(entry => rm(entry, { recursive: true, force: true }))))
+ }
+
+ async #mkdir (dest) {
+ await this.#empty(dest)
+ return await mkdir(dest, { recursive: true })
+ }
+
+ // extraction is always the same. the only difference is where
+ // the tarball comes from.
+ async extract (dest) {
+ await this.#mkdir(dest)
+ return this.tarballStream((tarball) => this.#extract(dest, tarball))
+ }
+
+ #toFile (dest) {
+ return this.tarballStream(str => new Promise((res, rej) => {
+ const writer = new fsm.WriteStream(dest)
+ str.on('error', er => writer.emit('error', er))
+ writer.on('error', er => rej(er))
+ writer.on('close', () => res({
+ integrity: this.integrity && String(this.integrity),
+ resolved: this.resolved,
+ from: this.from,
+ }))
+ str.pipe(writer)
+ }))
+ }
+
+ // don't use this.#mkdir because we don't want to rimraf anything
+ async tarballFile (dest) {
+ const dir = dirname(dest)
+ await mkdir(dir, { recursive: true })
+ return this.#toFile(dest)
+ }
+
+ #extract (dest, tarball) {
+ const extractor = tar.x(this.#tarxOptions({ cwd: dest }))
+ const p = new Promise((resolve, reject) => {
+ extractor.on('end', () => {
+ resolve({
+ resolved: this.resolved,
+ integrity: this.integrity && String(this.integrity),
+ from: this.from,
+ })
+ })
+
+ extractor.on('error', er => {
+ log.warn('tar', er.message)
+ log.silly('tar', er)
+ reject(er)
+ })
+
+ tarball.on('error', er => reject(er))
+ })
+
+ tarball.pipe(extractor)
+ return p
+ }
+
+ // always ensure that entries are at least as permissive as our configured
+ // dmode/fmode, but never more permissive than the umask allows.
+ #entryMode (path, mode, type) {
+ const m = /Directory|GNUDumpDir/.test(type) ? this.dmode
+ : /File$/.test(type) ? this.fmode
+ : /* istanbul ignore next - should never happen in a pkg */ 0
+
+ // make sure package bins are executable
+ const exe = isPackageBin(this.package, path) ? 0o111 : 0
+ // always ensure that files are read/writable by the owner
+ return ((mode | m) & ~this.umask) | exe | 0o600
+ }
+
+ #tarxOptions ({ cwd }) {
+ const sawIgnores = new Set()
+ return {
+ cwd,
+ noChmod: true,
+ noMtime: true,
+ filter: (name, entry) => {
+ if (/Link$/.test(entry.type)) {
+ return false
+ }
+ entry.mode = this.#entryMode(entry.path, entry.mode, entry.type)
+ // this replicates the npm pack behavior where .gitignore files
+ // are treated like .npmignore files, but only if a .npmignore
+ // file is not present.
+ if (/File$/.test(entry.type)) {
+ const base = basename(entry.path)
+ if (base === '.npmignore') {
+ sawIgnores.add(entry.path)
+ } else if (base === '.gitignore' && !this.allowGitIgnore) {
+ // rename, but only if there's not already a .npmignore
+ const ni = entry.path.replace(/\.gitignore$/, '.npmignore')
+ if (sawIgnores.has(ni)) {
+ return false
+ }
+ entry.path = ni
+ }
+ return true
+ }
+ },
+ strip: 1,
+ onwarn: /* istanbul ignore next - we can trust that tar logs */
+ (code, msg, data) => {
+ log.warn('tar', code, msg)
+ log.silly('tar', code, msg, data)
+ },
+ umask: this.umask,
+ // always ignore ownership info from tarball metadata
+ preserveOwner: false,
+ }
+ }
+}
+
+module.exports = FetcherBase
+
+// Child classes
+const GitFetcher = require('./git.js')
+const RegistryFetcher = require('./registry.js')
+const FileFetcher = require('./file.js')
+const DirFetcher = require('./dir.js')
+const RemoteFetcher = require('./remote.js')
+
+// Get an appropriate fetcher object from a spec and options
+FetcherBase.get = (rawSpec, opts = {}) => {
+ const spec = npa(rawSpec, opts.where)
+ switch (spec.type) {
+ case 'git':
+ return new GitFetcher(spec, opts)
+
+ case 'remote':
+ return new RemoteFetcher(spec, opts)
+
+ case 'version':
+ case 'range':
+ case 'tag':
+ case 'alias':
+ return new RegistryFetcher(spec.subSpec || spec, opts)
+
+ case 'file':
+ return new FileFetcher(spec, opts)
+
+ case 'directory':
+ return new DirFetcher(spec, opts)
+
+ default:
+ throw new TypeError('Unknown spec type: ' + spec.type)
+ }
+}
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js
new file mode 100644
index 00000000000000..2021325085e4f0
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js
@@ -0,0 +1,94 @@
+const { resolve } = require('node:path')
+const { stat, chmod } = require('node:fs/promises')
+const cacache = require('cacache')
+const fsm = require('fs-minipass')
+const Fetcher = require('./fetcher.js')
+const _ = require('./util/protected.js')
+
+class FileFetcher extends Fetcher {
+ constructor (spec, opts) {
+ super(spec, opts)
+ // just the fully resolved filename
+ this.resolved = this.spec.fetchSpec
+ }
+
+ get types () {
+ return ['file']
+ }
+
+ manifest () {
+ if (this.package) {
+ return Promise.resolve(this.package)
+ }
+
+ // have to unpack the tarball for this.
+ return cacache.tmp.withTmp(this.cache, this.opts, dir =>
+ this.extract(dir)
+ .then(() => this[_.readPackageJson](dir))
+ .then(mani => this.package = {
+ ...mani,
+ _integrity: this.integrity && String(this.integrity),
+ _resolved: this.resolved,
+ _from: this.from,
+ }))
+ }
+
+ #exeBins (pkg, dest) {
+ if (!pkg.bin) {
+ return Promise.resolve()
+ }
+
+ return Promise.all(Object.keys(pkg.bin).map(async k => {
+ const script = resolve(dest, pkg.bin[k])
+ // Best effort. Ignore errors here, the only result is that
+ // a bin script is not executable. But if it's missing or
+ // something, we just leave it for a later stage to trip over
+ // when we can provide a more useful contextual error.
+ try {
+ const st = await stat(script)
+ const mode = st.mode | 0o111
+ if (mode === st.mode) {
+ return
+ }
+ await chmod(script, mode)
+ } catch {
+ // Ignore errors here
+ }
+ }))
+ }
+
+ extract (dest) {
+ // if we've already loaded the manifest, then the super got it.
+ // but if not, read the unpacked manifest and chmod properly.
+ return super.extract(dest)
+ .then(result => this.package ? result
+ : this[_.readPackageJson](dest).then(pkg =>
+ this.#exeBins(pkg, dest)).then(() => result))
+ }
+
+ [_.tarballFromResolved] () {
+ // create a read stream and return it
+ return new fsm.ReadStream(this.resolved)
+ }
+
+ packument () {
+ // simulate based on manifest
+ return this.manifest().then(mani => ({
+ name: mani.name,
+ 'dist-tags': {
+ [this.defaultTag]: mani.version,
+ },
+ versions: {
+ [mani.version]: {
+ ...mani,
+ dist: {
+ tarball: `file:${this.resolved}`,
+ integrity: this.integrity && String(this.integrity),
+ },
+ },
+ },
+ }))
+ }
+}
+
+module.exports = FileFetcher
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js
new file mode 100644
index 00000000000000..077193a86f026f
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js
@@ -0,0 +1,317 @@
+const cacache = require('cacache')
+const git = require('@npmcli/git')
+const npa = require('npm-package-arg')
+const pickManifest = require('npm-pick-manifest')
+const { Minipass } = require('minipass')
+const { log } = require('proc-log')
+const DirFetcher = require('./dir.js')
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const RemoteFetcher = require('./remote.js')
+const _ = require('./util/protected.js')
+const addGitSha = require('./util/add-git-sha.js')
+const npm = require('./util/npm.js')
+
+const hashre = /^[a-f0-9]{40}$/
+
+// get the repository url.
+// prefer https if there's auth, since ssh will drop that.
+// otherwise, prefer ssh if available (more secure).
+// We have to add the git+ back because npa suppresses it.
+const repoUrl = (h, opts) =>
+ h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) ||
+ h.https && addGitPlus(h.https(opts))
+
+// add git+ to the url, but only one time.
+const addGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+')
+
+class GitFetcher extends Fetcher {
+ constructor (spec, opts) {
+ super(spec, opts)
+
+ // we never want to compare integrity for git dependencies: npm/rfcs#525
+ if (this.opts.integrity) {
+ delete this.opts.integrity
+ log.warn(`skipping integrity check for git dependency ${this.spec.fetchSpec}`)
+ }
+
+ this.resolvedRef = null
+ if (this.spec.hosted) {
+ this.from = this.spec.hosted.shortcut({ noCommittish: false })
+ }
+
+ // shortcut: avoid full clone when we can go straight to the tgz
+ // if we have the full sha and it's a hosted git platform
+ if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) {
+ this.resolvedSha = this.spec.gitCommittish
+ // use hosted.tarball() when we shell to RemoteFetcher later
+ this.resolved = this.spec.hosted
+ ? repoUrl(this.spec.hosted, { noCommittish: false })
+ : this.spec.rawSpec
+ } else {
+ this.resolvedSha = ''
+ }
+
+ this.Arborist = opts.Arborist || null
+ }
+
+ // just exposed to make it easier to test all the combinations
+ static repoUrl (hosted, opts) {
+ return repoUrl(hosted, opts)
+ }
+
+ get types () {
+ return ['git']
+ }
+
+ resolve () {
+ // likely a hosted git repo with a sha, so get the tarball url
+ // but in general, no reason to resolve() more than necessary!
+ if (this.resolved) {
+ return super.resolve()
+ }
+
+ // fetch the git repo and then look at the current hash
+ const h = this.spec.hosted
+ // try to use https first, falling back to ssh.
+ return h
+ ? this.#resolvedFromHosted(h)
+ : this.#resolvedFromRepo(this.spec.fetchSpec)
+ }
+
+ // first try https, since that's faster and passphrase-less for
+ // public repos, and supports private repos when auth is provided.
+ // Fall back to SSH to support private repos
+ // NB: we always store the https url in resolved field if auth
+ // is present, otherwise ssh if the hosted type provides it
+ #resolvedFromHosted (hosted) {
+ return this.#resolvedFromRepo(hosted.https && hosted.https()).catch(er => {
+ // Throw early since we know pathspec errors will fail again if retried
+ if (er instanceof git.errors.GitPathspecError) {
+ throw er
+ }
+ const ssh = hosted.sshurl && hosted.sshurl()
+ // no fallthrough if we can't fall through or have https auth
+ if (!ssh || hosted.auth) {
+ throw er
+ }
+ return this.#resolvedFromRepo(ssh)
+ })
+ }
+
+ #resolvedFromRepo (gitRemote) {
+ // XXX make this a custom error class
+ if (!gitRemote) {
+ return Promise.reject(new Error(`No git url for ${this.spec}`))
+ }
+ const gitRange = this.spec.gitRange
+ const name = this.spec.name
+ return git.revs(gitRemote, this.opts).then(remoteRefs => {
+ return gitRange ? pickManifest({
+ versions: remoteRefs.versions,
+ 'dist-tags': remoteRefs['dist-tags'],
+ name,
+ }, gitRange, this.opts)
+ : this.spec.gitCommittish ?
+ remoteRefs.refs[this.spec.gitCommittish] ||
+ remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]]
+ : remoteRefs.refs.HEAD // no git committish, get default head
+ }).then(revDoc => {
+ // the committish provided isn't in the rev list
+ // things like HEAD~3 or @yesterday can land here.
+ if (!revDoc || !revDoc.sha) {
+ return this.#resolvedFromClone()
+ }
+
+ this.resolvedRef = revDoc
+ this.resolvedSha = revDoc.sha
+ this.#addGitSha(revDoc.sha)
+ return this.resolved
+ })
+ }
+
+ #setResolvedWithSha (withSha) {
+ // we haven't cloned, so a tgz download is still faster
+ // of course, if it's not a known host, we can't do that.
+ this.resolved = !this.spec.hosted ? withSha
+ : repoUrl(npa(withSha).hosted, { noCommittish: false })
+ }
+
+ // when we get the git sha, we affix it to our spec to build up
+ // either a git url with a hash, or a tarball download URL
+ #addGitSha (sha) {
+ this.#setResolvedWithSha(addGitSha(this.spec, sha))
+ }
+
+ #resolvedFromClone () {
+ // do a full or shallow clone, then look at the HEAD
+ // kind of wasteful, but no other option, really
+ return this.#clone(() => this.resolved)
+ }
+
+ #prepareDir (dir) {
+ return this[_.readPackageJson](dir).then(mani => {
+ // no need if we aren't going to do any preparation.
+ const scripts = mani.scripts
+ if (!mani.workspaces && (!scripts || !(
+ scripts.postinstall ||
+ scripts.build ||
+ scripts.preinstall ||
+ scripts.install ||
+ scripts.prepack ||
+ scripts.prepare))) {
+ return
+ }
+
+ // to avoid cases where we have a cycle of git deps that depend
+ // on one another, we only ever do preparation for one instance
+ // of a given git dep along the chain of installations.
+ // Note that this does mean that a dependency MAY in theory end up
+ // trying to run its prepare script using a dependency that has not
+ // been properly prepared itself, but that edge case is smaller
+ // and less hazardous than a fork bomb of npm and git commands.
+ const noPrepare = !process.env._PACOTE_NO_PREPARE_ ? []
+ : process.env._PACOTE_NO_PREPARE_.split('\n')
+ if (noPrepare.includes(this.resolved)) {
+ log.info('prepare', 'skip prepare, already seen', this.resolved)
+ return
+ }
+ noPrepare.push(this.resolved)
+
+ // the DirFetcher will do its own preparation to run the prepare scripts
+ // All we have to do is put the deps in place so that it can succeed.
+ return npm(
+ this.npmBin,
+ [].concat(this.npmInstallCmd).concat(this.npmCliConfig),
+ dir,
+ { ...process.env, _PACOTE_NO_PREPARE_: noPrepare.join('\n') },
+ { message: 'git dep preparation failed' }
+ )
+ })
+ }
+
+ [_.tarballFromResolved] () {
+ const stream = new Minipass()
+ stream.resolved = this.resolved
+ stream.from = this.from
+
+ // check it out and then shell out to the DirFetcher tarball packer
+ this.#clone(dir => this.#prepareDir(dir)
+ .then(() => new Promise((res, rej) => {
+ if (!this.Arborist) {
+ throw new Error('GitFetcher requires an Arborist constructor to pack a tarball')
+ }
+ const df = new DirFetcher(`file:${dir}`, {
+ ...this.opts,
+ Arborist: this.Arborist,
+ resolved: null,
+ integrity: null,
+ })
+ const dirStream = df[_.tarballFromResolved]()
+ dirStream.on('error', rej)
+ dirStream.on('end', res)
+ dirStream.pipe(stream)
+ }))).catch(
+ /* istanbul ignore next: very unlikely and hard to test */
+ er => stream.emit('error', er)
+ )
+ return stream
+ }
+
+ // clone a git repo into a temp folder (or fetch and unpack if possible)
+ // handler accepts a directory, and returns a promise that resolves
+ // when we're done with it, at which point, cacache deletes it
+ //
+ // TODO: after cloning, create a tarball of the folder, and add to the cache
+ // with cacache.put.stream(), using a key that's deterministic based on the
+ // spec and repo, so that we don't ever clone the same thing multiple times.
+ #clone (handler, tarballOk = true) {
+ const o = { tmpPrefix: 'git-clone' }
+ const ref = this.resolvedSha || this.spec.gitCommittish
+ const h = this.spec.hosted
+ const resolved = this.resolved
+
+ // can be set manually to false to fall back to actual git clone
+ tarballOk = tarballOk &&
+ h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball
+
+ return cacache.tmp.withTmp(this.cache, o, async tmp => {
+ // if we're resolved, and have a tarball url, shell out to RemoteFetcher
+ if (tarballOk) {
+ const nameat = this.spec.name ? `${this.spec.name}@` : ''
+ return new RemoteFetcher(h.tarball({ noCommittish: false }), {
+ ...this.opts,
+ allowGitIgnore: true,
+ pkgid: `git:${nameat}${this.resolved}`,
+ resolved: this.resolved,
+ integrity: null, // it'll always be different, if we have one
+ }).extract(tmp).then(() => handler(tmp), er => {
+ // fall back to ssh download if tarball fails
+ if (er.constructor.name.match(/^Http/)) {
+ return this.#clone(handler, false)
+ } else {
+ throw er
+ }
+ })
+ }
+
+ const sha = await (
+ h ? this.#cloneHosted(ref, tmp)
+ : this.#cloneRepo(this.spec.fetchSpec, ref, tmp)
+ )
+ this.resolvedSha = sha
+ if (!this.resolved) {
+ await this.#addGitSha(sha)
+ }
+ return handler(tmp)
+ })
+ }
+
+ // first try https, since that's faster and passphrase-less for
+ // public repos, and supports private repos when auth is provided.
+ // Fall back to SSH to support private repos
+ // NB: we always store the https url in resolved field if auth
+ // is present, otherwise ssh if the hosted type provides it
+ #cloneHosted (ref, tmp) {
+ const hosted = this.spec.hosted
+ return this.#cloneRepo(hosted.https({ noCommittish: true }), ref, tmp)
+ .catch(er => {
+ // Throw early since we know pathspec errors will fail again if retried
+ if (er instanceof git.errors.GitPathspecError) {
+ throw er
+ }
+ const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true })
+ // no fallthrough if we can't fall through or have https auth
+ if (!ssh || hosted.auth) {
+ throw er
+ }
+ return this.#cloneRepo(ssh, ref, tmp)
+ })
+ }
+
+ #cloneRepo (repo, ref, tmp) {
+ const { opts, spec } = this
+ return git.clone(repo, ref, tmp, { ...opts, spec })
+ }
+
+ manifest () {
+ if (this.package) {
+ return Promise.resolve(this.package)
+ }
+
+ return this.spec.hosted && this.resolved
+ ? FileFetcher.prototype.manifest.apply(this)
+ : this.#clone(dir =>
+ this[_.readPackageJson](dir)
+ .then(mani => this.package = {
+ ...mani,
+ _resolved: this.resolved,
+ _from: this.from,
+ }))
+ }
+
+ packument () {
+ return FileFetcher.prototype.packument.apply(this)
+ }
+}
+module.exports = GitFetcher
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js
new file mode 100644
index 00000000000000..f35314d275d5fd
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js
@@ -0,0 +1,23 @@
+const { get } = require('./fetcher.js')
+const GitFetcher = require('./git.js')
+const RegistryFetcher = require('./registry.js')
+const FileFetcher = require('./file.js')
+const DirFetcher = require('./dir.js')
+const RemoteFetcher = require('./remote.js')
+
+const tarball = (spec, opts) => get(spec, opts).tarball()
+tarball.stream = (spec, handler, opts) => get(spec, opts).tarballStream(handler)
+tarball.file = (spec, dest, opts) => get(spec, opts).tarballFile(dest)
+
+module.exports = {
+ GitFetcher,
+ RegistryFetcher,
+ FileFetcher,
+ DirFetcher,
+ RemoteFetcher,
+ resolve: (spec, opts) => get(spec, opts).resolve(),
+ extract: (spec, dest, opts) => get(spec, opts).extract(dest),
+ manifest: (spec, opts) => get(spec, opts).manifest(),
+ packument: (spec, opts) => get(spec, opts).packument(),
+ tarball,
+}
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js
new file mode 100644
index 00000000000000..1ecf4ee1773499
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js
@@ -0,0 +1,369 @@
+const crypto = require('node:crypto')
+const PackageJson = require('@npmcli/package-json')
+const pickManifest = require('npm-pick-manifest')
+const ssri = require('ssri')
+const npa = require('npm-package-arg')
+const sigstore = require('sigstore')
+const fetch = require('npm-registry-fetch')
+const Fetcher = require('./fetcher.js')
+const RemoteFetcher = require('./remote.js')
+const pacoteVersion = require('../package.json').version
+const removeTrailingSlashes = require('./util/trailing-slashes.js')
+const _ = require('./util/protected.js')
+
+// Corgis are cute. 🐕🐶
+const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
+const fullDoc = 'application/json'
+
+// Some really old packages have no time field in their packument so we need a
+// cutoff date.
+const MISSING_TIME_CUTOFF = '2015-01-01T00:00:00.000Z'
+
+class RegistryFetcher extends Fetcher {
+ #cacheKey
+ constructor (spec, opts) {
+ super(spec, opts)
+
+ // you usually don't want to fetch the same packument multiple times in
+ // the span of a given script or command, no matter how many pacote calls
+ // are made, so this lets us avoid doing that. It's only relevant for
+ // registry fetchers, because other types simulate their packument from
+ // the manifest, which they memoize on this.package, so it's very cheap
+ // already.
+ this.packumentCache = this.opts.packumentCache || null
+
+ this.registry = fetch.pickRegistry(spec, opts)
+ this.packumentUrl = `${removeTrailingSlashes(this.registry)}/${this.spec.escapedName}`
+ this.#cacheKey = `${this.fullMetadata ? 'full' : 'corgi'}:${this.packumentUrl}`
+
+ const parsed = new URL(this.registry)
+ const regKey = `//${parsed.host}${parsed.pathname}`
+ // unlike the nerf-darted auth keys, this one does *not* allow a mismatch
+ // of trailing slashes. It must match exactly.
+ if (this.opts[`${regKey}:_keys`]) {
+ this.registryKeys = this.opts[`${regKey}:_keys`]
+ }
+
+ // XXX pacote <=9 has some logic to ignore opts.resolved if
+ // the resolved URL doesn't go to the same registry.
+ // Consider reproducing that here, to throw away this.resolved
+ // in that case.
+ }
+
+ async resolve () {
+ // fetching the manifest sets resolved and (if present) integrity
+ await this.manifest()
+ if (!this.resolved) {
+ throw Object.assign(
+ new Error('Invalid package manifest: no `dist.tarball` field'),
+ { package: this.spec.toString() }
+ )
+ }
+ return this.resolved
+ }
+
+ #headers () {
+ return {
+ // npm will override UA, but ensure that we always send *something*
+ 'user-agent': this.opts.userAgent ||
+ `pacote/${pacoteVersion} node/${process.version}`,
+ ...(this.opts.headers || {}),
+ 'pacote-version': pacoteVersion,
+ 'pacote-req-type': 'packument',
+ 'pacote-pkg-id': `registry:${this.spec.name}`,
+ accept: this.fullMetadata ? fullDoc : corgiDoc,
+ }
+ }
+
+ async packument () {
+ // note this might be either an in-flight promise for a request,
+ // or the actual packument, but we never want to make more than
+ // one request at a time for the same thing regardless.
+ if (this.packumentCache?.has(this.#cacheKey)) {
+ return this.packumentCache.get(this.#cacheKey)
+ }
+
+ // npm-registry-fetch the packument
+ // set the appropriate header for corgis if fullMetadata isn't set
+ // return the res.json() promise
+ try {
+ const res = await fetch(this.packumentUrl, {
+ ...this.opts,
+ headers: this.#headers(),
+ spec: this.spec,
+
+ // never check integrity for packuments themselves
+ integrity: null,
+ })
+ const packument = await res.json()
+ const contentLength = res.headers.get('content-length')
+ if (contentLength) {
+ packument._contentLength = Number(contentLength)
+ }
+ this.packumentCache?.set(this.#cacheKey, packument)
+ return packument
+ } catch (err) {
+ this.packumentCache?.delete(this.#cacheKey)
+ if (err.code !== 'E404' || this.fullMetadata) {
+ throw err
+ }
+ // possible that corgis are not supported by this registry
+ this.fullMetadata = true
+ return this.packument()
+ }
+ }
+
+ async manifest () {
+ if (this.package) {
+ return this.package
+ }
+
+ // When verifying signatures, we need to fetch the full/uncompressed
+ // packument to get publish time as this is not included in the
+ // corgi/compressed packument.
+ if (this.opts.verifySignatures) {
+ this.fullMetadata = true
+ }
+
+ const packument = await this.packument()
+ const steps = PackageJson.normalizeSteps.filter(s => s !== '_attributes')
+ const mani = await new PackageJson().fromContent(pickManifest(packument, this.spec.fetchSpec, {
+ ...this.opts,
+ defaultTag: this.defaultTag,
+ before: this.before,
+ })).normalize({ steps }).then(p => p.content)
+
+ /* XXX add ETARGET and E403 revalidation of cached packuments here */
+
+ // add _time from packument if fetched with fullMetadata
+ const time = packument.time?.[mani.version]
+ if (time) {
+ mani._time = time
+ }
+
+ // add _resolved and _integrity from dist object
+ const { dist } = mani
+ if (dist) {
+ this.resolved = mani._resolved = dist.tarball
+ mani._from = this.from
+ const distIntegrity = dist.integrity ? ssri.parse(dist.integrity)
+ : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', { ...this.opts })
+ : null
+ if (distIntegrity) {
+ if (this.integrity && !this.integrity.match(distIntegrity)) {
+ // only fail if the two integrity values share an algorithm.
+ // otherwise we would break when a sha512 was previously saved
+ // for the tarball but the manifest only provides a sha1, which
+ // is possible for older publishes. If they do share an algorithm
+ // and still disagree, this is almost certainly a case of holding
+ // it wrong, and will result in weird or insecure behavior later
+ // on when building the package tree.
+ for (const algo of Object.keys(this.integrity)) {
+ if (distIntegrity[algo]) {
+ throw Object.assign(new Error(
+ `Integrity checksum failed when using ${algo}: ` +
+ `wanted ${this.integrity} but got ${distIntegrity}.`
+ ), { code: 'EINTEGRITY' })
+ }
+ }
+ }
+ // made it this far, the integrity is worthwhile. accept it.
+ // the setter here will take care of merging it into what we already
+ // had.
+ this.integrity = distIntegrity
+ }
+ }
+ if (this.integrity) {
+ mani._integrity = String(this.integrity)
+ if (dist.signatures) {
+ if (this.opts.verifySignatures) {
+ // validate and throw on error, then set _signatures
+ const message = `${mani._id}:${mani._integrity}`
+ for (const signature of dist.signatures) {
+ const publicKey = this.registryKeys &&
+ this.registryKeys.find(key => key.keyid === signature.keyid)
+ if (!publicKey) {
+ throw Object.assign(new Error(
+ `${mani._id} has a registry signature with keyid: ${signature.keyid} ` +
+ 'but no corresponding public key can be found'
+ ), { code: 'EMISSINGSIGNATUREKEY' })
+ }
+
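+ // packuments that predate the registry's time field fall back to the 2015 cutoff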
+ const publishedTime = Date.parse(mani._time || MISSING_TIME_CUTOFF)
+ const validPublicKey = !publicKey.expires ||
+ publishedTime < Date.parse(publicKey.expires)
+ if (!validPublicKey) {
+ throw Object.assign(new Error(
+ `${mani._id} has a registry signature with keyid: ${signature.keyid} ` +
+ `but the corresponding public key has expired ${publicKey.expires}`
+ ), { code: 'EEXPIREDSIGNATUREKEY' })
+ }
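+ // verify the signature over the "<id>:<integrity>" message with the published PEM key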
+ const verifier = crypto.createVerify('SHA256')
+ verifier.write(message)
+ verifier.end()
+ const valid = verifier.verify(
+ publicKey.pemkey,
+ signature.sig,
+ 'base64'
+ )
+ if (!valid) {
+ throw Object.assign(new Error(
+ `${mani._id} has an invalid registry signature with ` +
+ `keyid: ${publicKey.keyid} and signature: ${signature.sig}`
+ ), {
+ code: 'EINTEGRITYSIGNATURE',
+ keyid: publicKey.keyid,
+ signature: signature.sig,
+ resolved: mani._resolved,
+ integrity: mani._integrity,
+ })
+ }
+ }
+ mani._signatures = dist.signatures
+ } else {
+ mani._signatures = dist.signatures
+ }
+ }
+
+ if (dist.attestations) {
+ if (this.opts.verifyAttestations) {
+ // Always fetch attestations from the current registry host
+ const attestationsPath = new URL(dist.attestations.url).pathname
+ const attestationsUrl = removeTrailingSlashes(this.registry) + attestationsPath
+ const res = await fetch(attestationsUrl, {
+ ...this.opts,
+ // disable integrity check for attestations json payload, we check the
+ // integrity in the verification steps below
+ integrity: null,
+ })
+ const { attestations } = await res.json()
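+ // each attestation is a DSSE envelope whose base64 payload is an
+ // in-toto statement binding the package subject to a sha512 digest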
+ const bundles = attestations.map(({ predicateType, bundle }) => {
+ const statement = JSON.parse(
+ Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8')
+ )
+ const keyid = bundle.dsseEnvelope.signatures[0].keyid
+ const signature = bundle.dsseEnvelope.signatures[0].sig
+
+ return {
+ predicateType,
+ bundle,
+ statement,
+ keyid,
+ signature,
+ }
+ })
+
+ const attestationKeyIds = bundles.map((b) => b.keyid).filter((k) => !!k)
+ const attestationRegistryKeys = (this.registryKeys || [])
+ .filter(key => attestationKeyIds.includes(key.keyid))
+ if (!attestationRegistryKeys.length) {
+ throw Object.assign(new Error(
+ `${mani._id} has attestations but no corresponding public key(s) can be found`
+ ), { code: 'EMISSINGSIGNATUREKEY' })
+ }
+
+ for (const { predicateType, bundle, keyid, signature, statement } of bundles) {
+ const publicKey = attestationRegistryKeys.find(key => key.keyid === keyid)
+ // Publish attestations have a keyid set and a valid public key must be found
+ if (keyid) {
+ if (!publicKey) {
+ throw Object.assign(new Error(
+ `${mani._id} has attestations with keyid: ${keyid} ` +
+ 'but no corresponding public key can be found'
+ ), { code: 'EMISSINGSIGNATUREKEY' })
+ }
+
+ const integratedTime = new Date(
+ Number(
+ bundle.verificationMaterial.tlogEntries[0].integratedTime
+ ) * 1000
+ )
+ const validPublicKey = !publicKey.expires ||
+ (integratedTime < Date.parse(publicKey.expires))
+ if (!validPublicKey) {
+ throw Object.assign(new Error(
+ `${mani._id} has attestations with keyid: ${keyid} ` +
+ `but the corresponding public key has expired ${publicKey.expires}`
+ ), { code: 'EEXPIREDSIGNATUREKEY' })
+ }
+ }
+
+ const subject = {
+ name: statement.subject[0].name,
+ sha512: statement.subject[0].digest.sha512,
+ }
+
+ // Only type 'version' can be turned into a PURL
+ const purl = this.spec.type === 'version' ? npa.toPurl(this.spec) : this.spec
+ // Verify the statement subject matches the package, version
+ if (subject.name !== purl) {
+ throw Object.assign(new Error(
+ `${mani._id} package name and version (PURL): ${purl} ` +
+ `doesn't match what was signed: ${subject.name}`
+ ), { code: 'EATTESTATIONSUBJECT' })
+ }
+
+ // Verify the statement subject matches the tarball integrity
+ const integrityHexDigest = ssri.parse(this.integrity).hexDigest()
+ if (subject.sha512 !== integrityHexDigest) {
+ throw Object.assign(new Error(
+ `${mani._id} package integrity (hex digest): ` +
+ `${integrityHexDigest} ` +
+ `doesn't match what was signed: ${subject.sha512}`
+ ), { code: 'EATTESTATIONSUBJECT' })
+ }
+
+ try {
+ // Provenance attestations are signed with a signing certificate
+ // (including the key) so we don't need to return a public key.
+ //
+ // Publish attestations are signed with a keyid so we need to
+ // specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys`
+ const options = {
+ tufCachePath: this.tufCache,
+ tufForceCache: true,
+ keySelector: publicKey ? () => publicKey.pemkey : undefined,
+ }
+ await sigstore.verify(bundle, options)
+ } catch (e) {
+ throw Object.assign(new Error(
+ `${mani._id} failed to verify attestation: ${e.message}`
+ ), {
+ code: 'EATTESTATIONVERIFY',
+ predicateType,
+ keyid,
+ signature,
+ resolved: mani._resolved,
+ integrity: mani._integrity,
+ })
+ }
+ }
+ mani._attestations = dist.attestations
+ } else {
+ mani._attestations = dist.attestations
+ }
+ }
+ }
+
+ this.package = mani
+ return this.package
+ }
+
+ [_.tarballFromResolved] () {
+ // we use a RemoteFetcher to get the actual tarball stream
+ return new RemoteFetcher(this.resolved, {
+ ...this.opts,
+ resolved: this.resolved,
+ pkgid: `registry:${this.spec.name}@${this.resolved}`,
+ })[_.tarballFromResolved]()
+ }
+
+ get types () {
+ return [
+ 'tag',
+ 'version',
+ 'range',
+ ]
+ }
+}
+module.exports = RegistryFetcher
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js
new file mode 100644
index 00000000000000..bd321e65a1f18a
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js
@@ -0,0 +1,89 @@
+const fetch = require('npm-registry-fetch')
+const { Minipass } = require('minipass')
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const _ = require('./util/protected.js')
+const pacoteVersion = require('../package.json').version
+
+class RemoteFetcher extends Fetcher {
+ constructor (spec, opts) {
+ super(spec, opts)
+ this.resolved = this.spec.fetchSpec
+ const resolvedURL = new URL(this.resolved)
+ if (this.replaceRegistryHost !== 'never'
+ && (this.replaceRegistryHost === 'always'
+ || this.replaceRegistryHost === resolvedURL.host)) {
+ this.resolved = new URL(resolvedURL.pathname, this.registry).href
+ }
+
+ // nam is a fermented pork sausage that is good to eat; nameat here is
+ // just the "name@" prefix for the pkgid, used when the spec has a name
+ const nameat = this.spec.name ? `${this.spec.name}@` : ''
+ this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}`
+ }
+
+ // Don't need to cache tarball fetches in pacote, because make-fetch-happen
+ // will write into cacache anyway.
+ get [_.cacheFetches] () {
+ return false
+ }
+
+ [_.tarballFromResolved] () {
+ const stream = new Minipass()
+ stream.hasIntegrityEmitter = true
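+ // tells the base fetcher this stream emits its own 'integrity' event,
+ // so no extra integrity-hashing wrapper is needed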
+
+ const fetchOpts = {
+ ...this.opts,
+ headers: this.#headers(),
+ spec: this.spec,
+ integrity: this.integrity,
+ algorithms: [this.pickIntegrityAlgorithm()],
+ }
+
+ // eslint-disable-next-line promise/always-return
+ fetch(this.resolved, fetchOpts).then(res => {
+ res.body.on('error',
+ /* istanbul ignore next - exceedingly rare and hard to simulate */
+ er => stream.emit('error', er)
+ )
+
+ res.body.on('integrity', i => {
+ this.integrity = i
+ stream.emit('integrity', i)
+ })
+
+ res.body.pipe(stream)
+ }).catch(er => stream.emit('error', er))
+
+ return stream
+ }
+
+ #headers () {
+ return {
+ // npm will override this, but ensure that we always send *something*
+ 'user-agent': this.opts.userAgent ||
+ `pacote/${pacoteVersion} node/${process.version}`,
+ ...(this.opts.headers || {}),
+ 'pacote-version': pacoteVersion,
+ 'pacote-req-type': 'tarball',
+ 'pacote-pkg-id': this.pkgid,
+ ...(this.integrity ? { 'pacote-integrity': String(this.integrity) }
+ : {}),
+ }
+ }
+
+ get types () {
+ return ['remote']
+ }
+
+ // getting a packument and/or manifest is the same as with a file: spec.
+ // unpack the tarball stream, and then read from the package.json file.
+ packument () {
+ return FileFetcher.prototype.packument.apply(this)
+ }
+
+ manifest () {
+ return FileFetcher.prototype.manifest.apply(this)
+ }
+}
+module.exports = RemoteFetcher
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js
new file mode 100644
index 00000000000000..843fe5b600cafa
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js
@@ -0,0 +1,15 @@
+// add a sha to a git remote url spec
+const addGitSha = (spec, sha) => {
+ if (spec.hosted) {
+ const h = spec.hosted
+ const opt = { noCommittish: true }
+ const base = h.https && h.auth ? h.https(opt) : h.shortcut(opt)
+
+ return `${base}#${sha}`
+ } else {
+ // don't use new URL for this, because it doesn't handle scp urls
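+ // (e.g. git@github.com:npm/cli.git)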
+ return spec.rawSpec.replace(/#.*$/, '') + `#${sha}`
+ }
+}
+
+module.exports = addGitSha
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js
new file mode 100644
index 00000000000000..ba5683a7bb5bf3
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js
@@ -0,0 +1,15 @@
+const { resolve } = require('node:path')
+const { tmpdir, homedir } = require('node:os')
+
+module.exports = (fakePlatform = false) => {
+ const temp = tmpdir()
+ const uidOrPid = process.getuid ? process.getuid() : process.pid
+ const home = homedir() || resolve(temp, 'npm-' + uidOrPid)
+ const platform = fakePlatform || process.platform
+ const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm'
+ const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home
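+ // e.g. ~/.npm/_cacache on posix, %LOCALAPPDATA%\npm-cache\_cacache on windows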
+ return {
+ cacache: resolve(cacheRoot, cacheExtra, '_cacache'),
+ tufcache: resolve(cacheRoot, cacheExtra, '_tuf'),
+ }
+}
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js
new file mode 100644
index 00000000000000..49a3f73f537ce9
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js
@@ -0,0 +1,25 @@
+// Function to determine whether a path is in the package.bin set.
+// Used to prevent issues when people publish a package from a
+// windows machine, and then install with --no-bin-links.
+//
+// Note: this is not possible in remote or file fetchers, since
+// we don't have the manifest until AFTER we've unpacked. But the
+// main use case is registry fetching, with git a distant second,
+// so that's an acceptable edge case to not handle.
+
+const binObj = (name, bin) =>
+ typeof bin === 'string' ? { [name]: bin } : bin
+
+const hasBin = (pkg, path) => {
+ const bin = binObj(pkg.name, pkg.bin)
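+ // strip the tarball's top-level directory (usually 'package/') so the
+ // entry path lines up with the bin values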
+ const p = path.replace(/^[^\\/]*\//, '')
+ return Object.values(bin).includes(p)
+}
+
+module.exports = (pkg, path) =>
+ pkg && pkg.bin ? hasBin(pkg, path) : false
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js
new file mode 100644
index 00000000000000..a3005c255565fb
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js
@@ -0,0 +1,14 @@
+// run an npm command
+const spawn = require('@npmcli/promise-spawn')
+
+module.exports = (npmBin, npmCommand, cwd, env, extra) => {
+ const isJS = npmBin.endsWith('.js')
+ const cmd = isJS ? process.execPath : npmBin
+ const args = (isJS ? [npmBin] : []).concat(npmCommand)
+ // when installing to run the `prepare` script for a git dep, the caller
+ // passes an env that guards against a cycle of checking out packages in
+ // temp directories, which lets us link previously-seen repos that are
+ // also being prepared.
+
+ return spawn(cmd, args, { cwd, env }, extra)
+}
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/protected.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/protected.js
new file mode 100644
index 00000000000000..e05203b481e6aa
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/protected.js
@@ -0,0 +1,5 @@
+module.exports = {
+ cacheFetches: Symbol.for('pacote.Fetcher._cacheFetches'),
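+ // note: the 'package.' prefix below is historical; Symbol.for() keys are
+ // global, so it must stay identical to the key other pacote copies use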
+ readPackageJson: Symbol.for('package.Fetcher._readPackageJson'),
+ tarballFromResolved: Symbol.for('pacote.Fetcher._tarballFromResolved'),
+}
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js
new file mode 100644
index 00000000000000..d070f0f7ba2d4e
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js
@@ -0,0 +1,31 @@
+const isPackageBin = require('./is-package-bin.js')
+
+const tarCreateOptions = manifest => ({
+ cwd: manifest._resolved,
+ prefix: 'package/',
+ portable: true,
+ gzip: {
+ // forcing the level to 9 seems to avoid some
+ // platform specific optimizations that cause
+ // integrity mismatch errors due to differing
+ // end results after compression
+ level: 9,
+ },
+
+ // ensure that package bins are always executable
+ // Note that npm-packlist is already filtering out
+ // anything that is not a regular file, ignored by
+ // .npmignore or package.json "files", etc.
+ filter: (path, stat) => {
+ if (isPackageBin(manifest, path)) {
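+ // set the execute bits for owner, group, and other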
+ stat.mode |= 0o111
+ }
+ return true
+ },
+
+ // Provide a specific date in the 1980s for the benefit of zip,
+ // which is confounded by files dated at the Unix epoch 0.
+ mtime: new Date('1985-10-26T08:15:00.000Z'),
+})
+
+module.exports = tarCreateOptions
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js
new file mode 100644
index 00000000000000..c50cb6173b92eb
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js
@@ -0,0 +1,10 @@
+const removeTrailingSlashes = (input) => {
+ // strip with a loop rather than a regexp, so there is no ReDoS-prone pattern
+ let output = input
+ while (output.endsWith('/')) {
+ output = output.slice(0, -1)
+ }
+ return output
+}
+
+module.exports = removeTrailingSlashes
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json
new file mode 100644
index 00000000000000..335c7a6c87bd3c
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json
@@ -0,0 +1,79 @@
+{
+ "name": "pacote",
+ "version": "20.0.0",
+ "description": "JavaScript package downloader",
+ "author": "GitHub Inc.",
+ "bin": {
+ "pacote": "bin/index.js"
+ },
+ "license": "ISC",
+ "main": "lib/index.js",
+ "scripts": {
+ "test": "tap",
+ "snap": "tap",
+ "lint": "npm run eslint",
+ "postlint": "template-oss-check",
+ "lintfix": "npm run eslint -- --fix",
+ "posttest": "npm run lint",
+ "template-oss-apply": "template-oss-apply --force",
+ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+ },
+ "tap": {
+ "timeout": 300,
+ "nyc-arg": [
+ "--exclude",
+ "tap-snapshots/**"
+ ]
+ },
+ "devDependencies": {
+ "@npmcli/arborist": "^7.1.0",
+ "@npmcli/eslint-config": "^5.0.0",
+ "@npmcli/template-oss": "4.23.3",
+ "hosted-git-info": "^8.0.0",
+ "mutate-fs": "^2.1.1",
+ "nock": "^13.2.4",
+ "npm-registry-mock": "^1.3.2",
+ "tap": "^16.0.1"
+ },
+ "files": [
+ "bin/",
+ "lib/"
+ ],
+ "keywords": [
+ "packages",
+ "npm",
+ "git"
+ ],
+ "dependencies": {
+ "@npmcli/git": "^6.0.0",
+ "@npmcli/installed-package-contents": "^3.0.0",
+ "@npmcli/package-json": "^6.0.0",
+ "@npmcli/promise-spawn": "^8.0.0",
+ "@npmcli/run-script": "^9.0.0",
+ "cacache": "^19.0.0",
+ "fs-minipass": "^3.0.0",
+ "minipass": "^7.0.2",
+ "npm-package-arg": "^12.0.0",
+ "npm-packlist": "^9.0.0",
+ "npm-pick-manifest": "^10.0.0",
+ "npm-registry-fetch": "^18.0.0",
+ "proc-log": "^5.0.0",
+ "promise-retry": "^2.0.1",
+ "sigstore": "^3.0.0",
+ "ssri": "^12.0.0",
+ "tar": "^6.1.11"
+ },
+ "engines": {
+ "node": "^18.17.0 || >=20.5.0"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/pacote.git"
+ },
+ "templateOSS": {
+ "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+ "version": "4.23.3",
+ "windowsCI": false,
+ "publish": "true"
+ }
+}
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json b/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json
index d4c3cf54d83ea7..df0b8f2f4faf1c 100644
--- a/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/metavuln-calculator",
- "version": "8.0.0",
+ "version": "8.0.1",
"main": "lib/index.js",
"files": [
"bin/",
@@ -41,7 +41,7 @@
"dependencies": {
"cacache": "^19.0.0",
"json-parse-even-better-errors": "^4.0.0",
- "pacote": "^19.0.0",
+ "pacote": "^20.0.0",
"proc-log": "^5.0.0",
"semver": "^7.3.5"
},
diff --git a/deps/npm/node_modules/@npmcli/promise-spawn/lib/index.js b/deps/npm/node_modules/@npmcli/promise-spawn/lib/index.js
index e147cb8f9c746f..aa7b55d8f038d4 100644
--- a/deps/npm/node_modules/@npmcli/promise-spawn/lib/index.js
+++ b/deps/npm/node_modules/@npmcli/promise-spawn/lib/index.js
@@ -131,9 +131,19 @@ const open = (_args, opts = {}, extra = {}) => {
let platform = process.platform
// process.platform === 'linux' may actually indicate WSL, if that's the case
- // we want to treat things as win32 anyway so the host can open the argument
+ // we open the argument with sensible-browser, which is pre-installed and uses $BROWSER
+ // In WSL, set the default browser using, for example,
+ // export BROWSER="/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe"
+ // or
+ // export BROWSER="/mnt/c/Program Files (x86)/Microsoft/Edge/Application/msedge.exe"
+ // To permanently set the default browser, add the appropriate entry to your shell's
+ // RC file, e.g. .bashrc or .zshrc.
if (platform === 'linux' && os.release().toLowerCase().includes('microsoft')) {
- platform = 'win32'
+ platform = 'wsl'
+ if (!process.env.BROWSER) {
+ return Promise.reject(
+ new Error('Set the BROWSER environment variable to your desired browser.'))
+ }
}
let command = options.command
@@ -146,6 +156,8 @@ const open = (_args, opts = {}, extra = {}) => {
// accidentally interpret the first arg as the title, we stick an empty
// string immediately after the start command
command = 'start ""'
+ } else if (platform === 'wsl') {
+ command = 'sensible-browser'
} else if (platform === 'darwin') {
command = 'open'
} else {
diff --git a/deps/npm/node_modules/@npmcli/promise-spawn/package.json b/deps/npm/node_modules/@npmcli/promise-spawn/package.json
index 9914063f85156d..f5fb026be50e85 100644
--- a/deps/npm/node_modules/@npmcli/promise-spawn/package.json
+++ b/deps/npm/node_modules/@npmcli/promise-spawn/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/promise-spawn",
- "version": "8.0.1",
+ "version": "8.0.2",
"files": [
"bin/",
"lib/"
@@ -33,7 +33,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.23.4",
"spawk": "^1.7.1",
"tap": "^16.0.1"
},
@@ -42,7 +42,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.23.4",
"publish": true
},
"dependencies": {
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js
deleted file mode 100644
index c541b93001517e..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js
+++ /dev/null
@@ -1,206 +0,0 @@
-'use strict'
-
-const net = require('net')
-const tls = require('tls')
-const { once } = require('events')
-const timers = require('timers/promises')
-const { normalizeOptions, cacheOptions } = require('./options')
-const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js')
-const Errors = require('./errors.js')
-const { Agent: AgentBase } = require('agent-base')
-
-module.exports = class Agent extends AgentBase {
- #options
- #timeouts
- #proxy
- #noProxy
- #ProxyAgent
-
- constructor (options = {}) {
- const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options)
-
- super(normalizedOptions)
-
- this.#options = normalizedOptions
- this.#timeouts = timeouts
-
- if (proxy) {
- this.#proxy = new URL(proxy)
- this.#noProxy = noProxy
- this.#ProxyAgent = getProxyAgent(proxy)
- }
- }
-
- get proxy () {
- return this.#proxy ? { url: this.#proxy } : {}
- }
-
- #getProxy (options) {
- if (!this.#proxy) {
- return
- }
-
- const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, {
- proxy: this.#proxy,
- noProxy: this.#noProxy,
- })
-
- if (!proxy) {
- return
- }
-
- const cacheKey = cacheOptions({
- ...options,
- ...this.#options,
- timeouts: this.#timeouts,
- proxy,
- })
-
- if (proxyCache.has(cacheKey)) {
- return proxyCache.get(cacheKey)
- }
-
- let ProxyAgent = this.#ProxyAgent
- if (Array.isArray(ProxyAgent)) {
- ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0]
- }
-
- const proxyAgent = new ProxyAgent(proxy, {
- ...this.#options,
- socketOptions: { family: this.#options.family },
- })
- proxyCache.set(cacheKey, proxyAgent)
-
- return proxyAgent
- }
-
- // takes an array of promises and races them against the connection timeout
- // which will throw the necessary error if it is hit. This will return the
- // result of the promise race.
- async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) {
- if (timeout) {
- const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal })
- .then(() => {
- throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`)
- }).catch((err) => {
- if (err.name === 'AbortError') {
- return
- }
- throw err
- })
- promises.push(connectionTimeout)
- }
-
- let result
- try {
- result = await Promise.race(promises)
- ac.abort()
- } catch (err) {
- ac.abort()
- throw err
- }
- return result
- }
-
- async connect (request, options) {
- // if the connection does not have its own lookup function
- // set, then use the one from our options
- options.lookup ??= this.#options.lookup
-
- let socket
- let timeout = this.#timeouts.connection
- const isSecureEndpoint = this.isSecureEndpoint(options)
-
- const proxy = this.#getProxy(options)
- if (proxy) {
- // some of the proxies will wait for the socket to fully connect before
- // returning so we have to await this while also racing it against the
- // connection timeout.
- const start = Date.now()
- socket = await this.#timeoutConnection({
- options,
- timeout,
- promises: [proxy.connect(request, options)],
- })
- // see how much time proxy.connect took and subtract it from
- // the timeout
- if (timeout) {
- timeout = timeout - (Date.now() - start)
- }
- } else {
- socket = (isSecureEndpoint ? tls : net).connect(options)
- }
-
- socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs)
- socket.setNoDelay(this.keepAlive)
-
- const abortController = new AbortController()
- const { signal } = abortController
-
- const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting']
- ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal })
- : Promise.resolve()
-
- await this.#timeoutConnection({
- options,
- timeout,
- promises: [
- connectPromise,
- once(socket, 'error', { signal }).then((err) => {
- throw err[0]
- }),
- ],
- }, abortController)
-
- if (this.#timeouts.idle) {
- socket.setTimeout(this.#timeouts.idle, () => {
- socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`))
- })
- }
-
- return socket
- }
-
- addRequest (request, options) {
- const proxy = this.#getProxy(options)
- // it would be better to call proxy.addRequest here but this causes the
- // http-proxy-agent to call its super.addRequest which causes the request
- // to be added to the agent twice. since we only support 3 agents
- // currently (see the required agents in proxy.js) we have manually
- // checked that the only public methods we need to call are called in the
- // next block. this could change in the future and presumably we would get
- // failing tests until we have properly called the necessary methods on
- // each of our proxy agents
- if (proxy?.setRequestProps) {
- proxy.setRequestProps(request, options)
- }
-
- request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close')
-
- if (this.#timeouts.response) {
- let responseTimeout
- request.once('finish', () => {
- setTimeout(() => {
- request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy))
- }, this.#timeouts.response)
- })
- request.once('response', () => {
- clearTimeout(responseTimeout)
- })
- }
-
- if (this.#timeouts.transfer) {
- let transferTimeout
- request.once('response', (res) => {
- setTimeout(() => {
- res.destroy(new Errors.TransferTimeoutError(request, this.#proxy))
- }, this.#timeouts.transfer)
- res.once('close', () => {
- clearTimeout(transferTimeout)
- })
- })
- }
-
- return super.addRequest(request, options)
- }
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js
deleted file mode 100644
index 3c6946c566d736..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js
+++ /dev/null
@@ -1,53 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-const dns = require('dns')
-
-// this is a factory so that each request can have its own opts (i.e. ttl)
-// while still sharing the cache across all requests
-const cache = new LRUCache({ max: 50 })
-
-const getOptions = ({
- family = 0,
- hints = dns.ADDRCONFIG,
- all = false,
- verbatim = undefined,
- ttl = 5 * 60 * 1000,
- lookup = dns.lookup,
-}) => ({
- // hints and lookup are returned since both are top level properties to (net|tls).connect
- hints,
- lookup: (hostname, ...args) => {
- const callback = args.pop() // callback is always last arg
- const lookupOptions = args[0] ?? {}
-
- const options = {
- family,
- hints,
- all,
- verbatim,
- ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions),
- }
-
- const key = JSON.stringify({ hostname, ...options })
-
- if (cache.has(key)) {
- const cached = cache.get(key)
- return process.nextTick(callback, null, ...cached)
- }
-
- lookup(hostname, options, (err, ...result) => {
- if (err) {
- return callback(err)
- }
-
- cache.set(key, result, { ttl })
- return callback(null, ...result)
- })
- },
-})
-
-module.exports = {
- cache,
- getOptions,
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js
deleted file mode 100644
index 70475aec8eb357..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js
+++ /dev/null
@@ -1,61 +0,0 @@
-'use strict'
-
-class InvalidProxyProtocolError extends Error {
- constructor (url) {
- super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
- this.code = 'EINVALIDPROXY'
- this.proxy = url
- }
-}
-
-class ConnectionTimeoutError extends Error {
- constructor (host) {
- super(`Timeout connecting to host \`${host}\``)
- this.code = 'ECONNECTIONTIMEOUT'
- this.host = host
- }
-}
-
-class IdleTimeoutError extends Error {
- constructor (host) {
- super(`Idle timeout reached for host \`${host}\``)
- this.code = 'EIDLETIMEOUT'
- this.host = host
- }
-}
-
-class ResponseTimeoutError extends Error {
- constructor (request, proxy) {
- let msg = 'Response timeout '
- if (proxy) {
- msg += `from proxy \`${proxy.host}\` `
- }
- msg += `connecting to host \`${request.host}\``
- super(msg)
- this.code = 'ERESPONSETIMEOUT'
- this.proxy = proxy
- this.request = request
- }
-}
-
-class TransferTimeoutError extends Error {
- constructor (request, proxy) {
- let msg = 'Transfer timeout '
- if (proxy) {
- msg += `from proxy \`${proxy.host}\` `
- }
- msg += `for \`${request.host}\``
- super(msg)
- this.code = 'ETRANSFERTIMEOUT'
- this.proxy = proxy
- this.request = request
- }
-}
-
-module.exports = {
- InvalidProxyProtocolError,
- ConnectionTimeoutError,
- IdleTimeoutError,
- ResponseTimeoutError,
- TransferTimeoutError,
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js
deleted file mode 100644
index b33d6eaef07a21..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js
+++ /dev/null
@@ -1,56 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-const { normalizeOptions, cacheOptions } = require('./options')
-const { getProxy, proxyCache } = require('./proxy.js')
-const dns = require('./dns.js')
-const Agent = require('./agents.js')
-
-const agentCache = new LRUCache({ max: 20 })
-
-const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => {
- // false has meaning so this can't be a simple truthiness check
- if (agent != null) {
- return agent
- }
-
- url = new URL(url)
-
- const proxyForUrl = getProxy(url, { proxy, noProxy })
- const normalizedOptions = {
- ...normalizeOptions(options),
- proxy: proxyForUrl,
- }
-
- const cacheKey = cacheOptions({
- ...normalizedOptions,
- secureEndpoint: url.protocol === 'https:',
- })
-
- if (agentCache.has(cacheKey)) {
- return agentCache.get(cacheKey)
- }
-
- const newAgent = new Agent(normalizedOptions)
- agentCache.set(cacheKey, newAgent)
-
- return newAgent
-}
-
-module.exports = {
- getAgent,
- Agent,
- // these are exported for backwards compatability
- HttpAgent: Agent,
- HttpsAgent: Agent,
- cache: {
- proxy: proxyCache,
- agent: agentCache,
- dns: dns.cache,
- clear: () => {
- proxyCache.clear()
- agentCache.clear()
- dns.cache.clear()
- },
- },
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js
deleted file mode 100644
index 0bf53f725f0846..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js
+++ /dev/null
@@ -1,86 +0,0 @@
-'use strict'
-
-const dns = require('./dns')
-
-const normalizeOptions = (opts) => {
- const family = parseInt(opts.family ?? '0', 10)
- const keepAlive = opts.keepAlive ?? true
-
- const normalized = {
- // nodejs http agent options. these are all the defaults
- // but kept here to increase the likelihood of cache hits
- // https://nodejs.org/api/http.html#new-agentoptions
- keepAliveMsecs: keepAlive ? 1000 : undefined,
- maxSockets: opts.maxSockets ?? 15,
- maxTotalSockets: Infinity,
- maxFreeSockets: keepAlive ? 256 : undefined,
- scheduling: 'fifo',
- // then spread the rest of the options
- ...opts,
- // we already set these to their defaults that we want
- family,
- keepAlive,
- // our custom timeout options
- timeouts: {
- // the standard timeout option is mapped to our idle timeout
- // and then deleted below
- idle: opts.timeout ?? 0,
- connection: 0,
- response: 0,
- transfer: 0,
- ...opts.timeouts,
- },
- // get the dns options that go at the top level of socket connection
- ...dns.getOptions({ family, ...opts.dns }),
- }
-
- // remove timeout since we already used it to set our own idle timeout
- delete normalized.timeout
-
- return normalized
-}
-
-const createKey = (obj) => {
- let key = ''
- const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0])
- for (let [k, v] of sorted) {
- if (v == null) {
- v = 'null'
- } else if (v instanceof URL) {
- v = v.toString()
- } else if (typeof v === 'object') {
- v = createKey(v)
- }
- key += `${k}:${v}:`
- }
- return key
-}
-
-const cacheOptions = ({ secureEndpoint, ...options }) => createKey({
- secureEndpoint: !!secureEndpoint,
- // socket connect options
- family: options.family,
- hints: options.hints,
- localAddress: options.localAddress,
- // tls specific connect options
- strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false,
- ca: secureEndpoint ? options.ca : null,
- cert: secureEndpoint ? options.cert : null,
- key: secureEndpoint ? options.key : null,
- // http agent options
- keepAlive: options.keepAlive,
- keepAliveMsecs: options.keepAliveMsecs,
- maxSockets: options.maxSockets,
- maxTotalSockets: options.maxTotalSockets,
- maxFreeSockets: options.maxFreeSockets,
- scheduling: options.scheduling,
- // timeout options
- timeouts: options.timeouts,
- // proxy
- proxy: options.proxy,
-})
-
-module.exports = {
- normalizeOptions,
- cacheOptions,
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js
deleted file mode 100644
index 6272e929e57bcf..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js
+++ /dev/null
@@ -1,88 +0,0 @@
-'use strict'
-
-const { HttpProxyAgent } = require('http-proxy-agent')
-const { HttpsProxyAgent } = require('https-proxy-agent')
-const { SocksProxyAgent } = require('socks-proxy-agent')
-const { LRUCache } = require('lru-cache')
-const { InvalidProxyProtocolError } = require('./errors.js')
-
-const PROXY_CACHE = new LRUCache({ max: 20 })
-
-const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols)
-
-const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy'])
-
-const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => {
- key = key.toLowerCase()
- if (PROXY_ENV_KEYS.has(key)) {
- acc[key] = value
- }
- return acc
-}, {})
-
-const getProxyAgent = (url) => {
- url = new URL(url)
-
- const protocol = url.protocol.slice(0, -1)
- if (SOCKS_PROTOCOLS.has(protocol)) {
- return SocksProxyAgent
- }
- if (protocol === 'https' || protocol === 'http') {
- return [HttpProxyAgent, HttpsProxyAgent]
- }
-
- throw new InvalidProxyProtocolError(url)
-}
-
-const isNoProxy = (url, noProxy) => {
- if (typeof noProxy === 'string') {
- noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean)
- }
-
- if (!noProxy || !noProxy.length) {
- return false
- }
-
- const hostSegments = url.hostname.split('.').reverse()
-
- return noProxy.some((no) => {
- const noSegments = no.split('.').filter(Boolean).reverse()
- if (!noSegments.length) {
- return false
- }
-
- for (let i = 0; i < noSegments.length; i++) {
- if (hostSegments[i] !== noSegments[i]) {
- return false
- }
- }
-
- return true
- })
-}
-
-const getProxy = (url, { proxy, noProxy }) => {
- url = new URL(url)
-
- if (!proxy) {
- proxy = url.protocol === 'https:'
- ? PROXY_ENV.https_proxy
- : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy
- }
-
- if (!noProxy) {
- noProxy = PROXY_ENV.no_proxy
- }
-
- if (!proxy || isNoProxy(url, noProxy)) {
- return null
- }
-
- return new URL(proxy)
-}
-
-module.exports = {
- getProxyAgent,
- getProxy,
- proxyCache: PROXY_CACHE,
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json
deleted file mode 100644
index ef5b4e3228cc46..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json
+++ /dev/null
@@ -1,60 +0,0 @@
-{
- "name": "@npmcli/agent",
- "version": "2.2.2",
- "description": "the http/https agent used by the npm cli",
- "main": "lib/index.js",
- "scripts": {
- "gencerts": "bash scripts/create-cert.sh",
- "test": "tap",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "postlint": "template-oss-check",
- "template-oss-apply": "template-oss-apply --force",
- "lintfix": "npm run lint -- --fix",
- "snap": "tap",
- "posttest": "npm run lint"
- },
- "author": "GitHub Inc.",
- "license": "ISC",
- "bugs": {
- "url": "https://github.com/npm/agent/issues"
- },
- "homepage": "https://github.com/npm/agent#readme",
- "files": [
- "bin/",
- "lib/"
- ],
- "engines": {
- "node": "^16.14.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.21.3",
- "publish": "true"
- },
- "dependencies": {
- "agent-base": "^7.1.0",
- "http-proxy-agent": "^7.0.0",
- "https-proxy-agent": "^7.0.1",
- "lru-cache": "^10.0.1",
- "socks-proxy-agent": "^8.0.3"
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.21.3",
- "minipass-fetch": "^3.0.3",
- "nock": "^13.2.7",
- "semver": "^7.5.4",
- "simple-socks": "^3.1.0",
- "tap": "^16.3.0"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/npm/agent.git"
- },
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- }
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md
deleted file mode 100644
index 5fc208ff122e08..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-ISC License
-
-Copyright npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this
-software for any purpose with or without fee is hereby
-granted, provided that the above copyright notice and this
-permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
-WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
-EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js
deleted file mode 100644
index cb5982f79077ac..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// given an input that may or may not be an object, return an object that has
-// a copy of every defined property listed in 'copy'. if the input is not an
-// object, assign it to the property named by 'wrap'
-const getOptions = (input, { copy, wrap }) => {
- const result = {}
-
- if (input && typeof input === 'object') {
- for (const prop of copy) {
- if (input[prop] !== undefined) {
- result[prop] = input[prop]
- }
- }
- } else {
- result[wrap] = input
- }
-
- return result
-}
-
-module.exports = getOptions
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js
deleted file mode 100644
index 4d13bc037359d7..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js
+++ /dev/null
@@ -1,9 +0,0 @@
-const semver = require('semver')
-
-const satisfies = (range) => {
- return semver.satisfies(process.version, range, { includePrerelease: true })
-}
-
-module.exports = {
- satisfies,
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE
deleted file mode 100644
index 93546dfb7655bf..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-(The MIT License)
-
-Copyright (c) 2011-2017 JP Richardson
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
-(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
- merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
- ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js
deleted file mode 100644
index 1cd1e05d0c533d..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js
+++ /dev/null
@@ -1,129 +0,0 @@
-'use strict'
-const { inspect } = require('util')
-
-// adapted from node's internal/errors
-// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js
-
-// close copy of node's internal SystemError class.
-class SystemError {
- constructor (code, prefix, context) {
- // XXX context.code is undefined in all constructors used in cp/polyfill
- // that may be a bug copied from node, maybe the constructor should use
- // `code` not `errno`? nodejs/node#41104
- let message = `${prefix}: ${context.syscall} returned ` +
- `${context.code} (${context.message})`
-
- if (context.path !== undefined) {
- message += ` ${context.path}`
- }
- if (context.dest !== undefined) {
- message += ` => ${context.dest}`
- }
-
- this.code = code
- Object.defineProperties(this, {
- name: {
- value: 'SystemError',
- enumerable: false,
- writable: true,
- configurable: true,
- },
- message: {
- value: message,
- enumerable: false,
- writable: true,
- configurable: true,
- },
- info: {
- value: context,
- enumerable: true,
- configurable: true,
- writable: false,
- },
- errno: {
- get () {
- return context.errno
- },
- set (value) {
- context.errno = value
- },
- enumerable: true,
- configurable: true,
- },
- syscall: {
- get () {
- return context.syscall
- },
- set (value) {
- context.syscall = value
- },
- enumerable: true,
- configurable: true,
- },
- })
-
- if (context.path !== undefined) {
- Object.defineProperty(this, 'path', {
- get () {
- return context.path
- },
- set (value) {
- context.path = value
- },
- enumerable: true,
- configurable: true,
- })
- }
-
- if (context.dest !== undefined) {
- Object.defineProperty(this, 'dest', {
- get () {
- return context.dest
- },
- set (value) {
- context.dest = value
- },
- enumerable: true,
- configurable: true,
- })
- }
- }
-
- toString () {
- return `${this.name} [${this.code}]: ${this.message}`
- }
-
- [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) {
- return inspect(this, {
- ...ctx,
- getters: true,
- customInspect: false,
- })
- }
-}
-
-function E (code, message) {
- module.exports[code] = class NodeError extends SystemError {
- constructor (ctx) {
- super(code, message, ctx)
- }
- }
-}
-
-E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory')
-E('ERR_FS_CP_EEXIST', 'Target already exists')
-E('ERR_FS_CP_EINVAL', 'Invalid src or dest')
-E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe')
-E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory')
-E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file')
-E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self')
-E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type')
-E('ERR_FS_EISDIR', 'Path is a directory')
-
-module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error {
- constructor (name, expected, actual) {
- super()
- this.code = 'ERR_INVALID_ARG_TYPE'
- this.message = `The ${name} argument must be ${expected}. Received ${typeof actual}`
- }
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js
deleted file mode 100644
index 972ce7aa12abef..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js
+++ /dev/null
@@ -1,22 +0,0 @@
-const fs = require('fs/promises')
-const getOptions = require('../common/get-options.js')
-const node = require('../common/node.js')
-const polyfill = require('./polyfill.js')
-
-// node 16.7.0 added fs.cp
-const useNative = node.satisfies('>=16.7.0')
-
-const cp = async (src, dest, opts) => {
- const options = getOptions(opts, {
- copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'],
- })
-
- // the polyfill is tested separately from this module, no need to hack
- // process.version to try to trigger it just for coverage
- // istanbul ignore next
- return useNative
- ? fs.cp(src, dest, options)
- : polyfill(src, dest, options)
-}
-
-module.exports = cp
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js
deleted file mode 100644
index 80eb10de971918..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js
+++ /dev/null
@@ -1,428 +0,0 @@
-// this file is a modified version of the code in node 17.2.0
-// which is, in turn, a modified version of the fs-extra module on npm
-// node core changes:
-// - Use of the assert module has been replaced with core's error system.
-// - All code related to the glob dependency has been removed.
-// - Bring your own custom fs module is not currently supported.
-// - Some basic code cleanup.
-// changes here:
-// - remove all callback related code
-// - drop sync support
-// - change assertions back to non-internal methods (see options.js)
-// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows
-'use strict'
-
-const {
- ERR_FS_CP_DIR_TO_NON_DIR,
- ERR_FS_CP_EEXIST,
- ERR_FS_CP_EINVAL,
- ERR_FS_CP_FIFO_PIPE,
- ERR_FS_CP_NON_DIR_TO_DIR,
- ERR_FS_CP_SOCKET,
- ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY,
- ERR_FS_CP_UNKNOWN,
- ERR_FS_EISDIR,
- ERR_INVALID_ARG_TYPE,
-} = require('./errors.js')
-const {
- constants: {
- errno: {
- EEXIST,
- EISDIR,
- EINVAL,
- ENOTDIR,
- },
- },
-} = require('os')
-const {
- chmod,
- copyFile,
- lstat,
- mkdir,
- readdir,
- readlink,
- stat,
- symlink,
- unlink,
- utimes,
-} = require('fs/promises')
-const {
- dirname,
- isAbsolute,
- join,
- parse,
- resolve,
- sep,
- toNamespacedPath,
-} = require('path')
-const { fileURLToPath } = require('url')
-
-const defaultOptions = {
- dereference: false,
- errorOnExist: false,
- filter: undefined,
- force: true,
- preserveTimestamps: false,
- recursive: false,
-}
-
-async function cp (src, dest, opts) {
- if (opts != null && typeof opts !== 'object') {
- throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts)
- }
- return cpFn(
- toNamespacedPath(getValidatedPath(src)),
- toNamespacedPath(getValidatedPath(dest)),
- { ...defaultOptions, ...opts })
-}
-
-function getValidatedPath (fileURLOrPath) {
- const path = fileURLOrPath != null && fileURLOrPath.href
- && fileURLOrPath.origin
- ? fileURLToPath(fileURLOrPath)
- : fileURLOrPath
- return path
-}
-
-async function cpFn (src, dest, opts) {
- // Warn about using preserveTimestamps on 32-bit node
- // istanbul ignore next
- if (opts.preserveTimestamps && process.arch === 'ia32') {
- const warning = 'Using the preserveTimestamps option in 32-bit ' +
- 'node is not recommended'
- process.emitWarning(warning, 'TimestampPrecisionWarning')
- }
- const stats = await checkPaths(src, dest, opts)
- const { srcStat, destStat } = stats
- await checkParentPaths(src, srcStat, dest)
- if (opts.filter) {
- return handleFilter(checkParentDir, destStat, src, dest, opts)
- }
- return checkParentDir(destStat, src, dest, opts)
-}
-
-async function checkPaths (src, dest, opts) {
- const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts)
- if (destStat) {
- if (areIdentical(srcStat, destStat)) {
- throw new ERR_FS_CP_EINVAL({
- message: 'src and dest cannot be the same',
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- }
- if (srcStat.isDirectory() && !destStat.isDirectory()) {
- throw new ERR_FS_CP_DIR_TO_NON_DIR({
- message: `cannot overwrite directory ${src} ` +
- `with non-directory ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: EISDIR,
- })
- }
- if (!srcStat.isDirectory() && destStat.isDirectory()) {
- throw new ERR_FS_CP_NON_DIR_TO_DIR({
- message: `cannot overwrite non-directory ${src} ` +
- `with directory ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: ENOTDIR,
- })
- }
- }
-
- if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
- throw new ERR_FS_CP_EINVAL({
- message: `cannot copy ${src} to a subdirectory of self ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- }
- return { srcStat, destStat }
-}
-
-function areIdentical (srcStat, destStat) {
- return destStat.ino && destStat.dev && destStat.ino === srcStat.ino &&
- destStat.dev === srcStat.dev
-}
-
-function getStats (src, dest, opts) {
- const statFunc = opts.dereference ?
- (file) => stat(file, { bigint: true }) :
- (file) => lstat(file, { bigint: true })
- return Promise.all([
- statFunc(src),
- statFunc(dest).catch((err) => {
- // istanbul ignore next: unsure how to cover.
- if (err.code === 'ENOENT') {
- return null
- }
- // istanbul ignore next: unsure how to cover.
- throw err
- }),
- ])
-}
-
-async function checkParentDir (destStat, src, dest, opts) {
- const destParent = dirname(dest)
- const dirExists = await pathExists(destParent)
- if (dirExists) {
- return getStatsForCopy(destStat, src, dest, opts)
- }
- await mkdir(destParent, { recursive: true })
- return getStatsForCopy(destStat, src, dest, opts)
-}
-
-function pathExists (dest) {
- return stat(dest).then(
- () => true,
- // istanbul ignore next: not sure when this would occur
- (err) => (err.code === 'ENOENT' ? false : Promise.reject(err)))
-}
-
-// Recursively check if dest parent is a subdirectory of src.
-// It works for all file types including symlinks since it
-// checks the src and dest inodes. It starts from the deepest
-// parent and stops once it reaches the src parent or the root path.
-async function checkParentPaths (src, srcStat, dest) {
- const srcParent = resolve(dirname(src))
- const destParent = resolve(dirname(dest))
- if (destParent === srcParent || destParent === parse(destParent).root) {
- return
- }
- let destStat
- try {
- destStat = await stat(destParent, { bigint: true })
- } catch (err) {
- // istanbul ignore else: not sure when this would occur
- if (err.code === 'ENOENT') {
- return
- }
- // istanbul ignore next: not sure when this would occur
- throw err
- }
- if (areIdentical(srcStat, destStat)) {
- throw new ERR_FS_CP_EINVAL({
- message: `cannot copy ${src} to a subdirectory of self ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- }
- return checkParentPaths(src, srcStat, destParent)
-}
-
-const normalizePathToArray = (path) =>
- resolve(path).split(sep).filter(Boolean)
-
-// Return true if dest is a subdir of src, otherwise false.
-// It only checks the path strings.
-function isSrcSubdir (src, dest) {
- const srcArr = normalizePathToArray(src)
- const destArr = normalizePathToArray(dest)
- return srcArr.every((cur, i) => destArr[i] === cur)
-}
-
-async function handleFilter (onInclude, destStat, src, dest, opts, cb) {
- const include = await opts.filter(src, dest)
- if (include) {
- return onInclude(destStat, src, dest, opts, cb)
- }
-}
-
-function startCopy (destStat, src, dest, opts) {
- if (opts.filter) {
- return handleFilter(getStatsForCopy, destStat, src, dest, opts)
- }
- return getStatsForCopy(destStat, src, dest, opts)
-}
-
-async function getStatsForCopy (destStat, src, dest, opts) {
- const statFn = opts.dereference ? stat : lstat
- const srcStat = await statFn(src)
- // istanbul ignore else: can't portably test FIFO
- if (srcStat.isDirectory() && opts.recursive) {
- return onDir(srcStat, destStat, src, dest, opts)
- } else if (srcStat.isDirectory()) {
- throw new ERR_FS_EISDIR({
- message: `${src} is a directory (not copied)`,
- path: src,
- syscall: 'cp',
- errno: EINVAL,
- })
- } else if (srcStat.isFile() ||
- srcStat.isCharacterDevice() ||
- srcStat.isBlockDevice()) {
- return onFile(srcStat, destStat, src, dest, opts)
- } else if (srcStat.isSymbolicLink()) {
- return onLink(destStat, src, dest)
- } else if (srcStat.isSocket()) {
- throw new ERR_FS_CP_SOCKET({
- message: `cannot copy a socket file: ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- } else if (srcStat.isFIFO()) {
- throw new ERR_FS_CP_FIFO_PIPE({
- message: `cannot copy a FIFO pipe: ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- }
- // istanbul ignore next: should be unreachable
- throw new ERR_FS_CP_UNKNOWN({
- message: `cannot copy an unknown file type: ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
-}
-
-function onFile (srcStat, destStat, src, dest, opts) {
- if (!destStat) {
- return _copyFile(srcStat, src, dest, opts)
- }
- return mayCopyFile(srcStat, src, dest, opts)
-}
-
-async function mayCopyFile (srcStat, src, dest, opts) {
- if (opts.force) {
- await unlink(dest)
- return _copyFile(srcStat, src, dest, opts)
- } else if (opts.errorOnExist) {
- throw new ERR_FS_CP_EEXIST({
- message: `${dest} already exists`,
- path: dest,
- syscall: 'cp',
- errno: EEXIST,
- })
- }
-}
-
-async function _copyFile (srcStat, src, dest, opts) {
- await copyFile(src, dest)
- if (opts.preserveTimestamps) {
- return handleTimestampsAndMode(srcStat.mode, src, dest)
- }
- return setDestMode(dest, srcStat.mode)
-}
-
-async function handleTimestampsAndMode (srcMode, src, dest) {
- // Make sure the file is writable before setting the timestamp
- // otherwise open fails with EPERM when invoked with 'r+'
- // (through utimes call)
- if (fileIsNotWritable(srcMode)) {
- await makeFileWritable(dest, srcMode)
- return setDestTimestampsAndMode(srcMode, src, dest)
- }
- return setDestTimestampsAndMode(srcMode, src, dest)
-}
-
-function fileIsNotWritable (srcMode) {
- return (srcMode & 0o200) === 0
-}
-
-function makeFileWritable (dest, srcMode) {
- return setDestMode(dest, srcMode | 0o200)
-}
-
-async function setDestTimestampsAndMode (srcMode, src, dest) {
- await setDestTimestamps(src, dest)
- return setDestMode(dest, srcMode)
-}
-
-function setDestMode (dest, srcMode) {
- return chmod(dest, srcMode)
-}
-
-async function setDestTimestamps (src, dest) {
- // The initial srcStat.atime cannot be trusted
- // because it is modified by the read(2) system call
- // (See https://nodejs.org/api/fs.html#fs_stat_time_values)
- const updatedSrcStat = await stat(src)
- return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
-}
-
-function onDir (srcStat, destStat, src, dest, opts) {
- if (!destStat) {
- return mkDirAndCopy(srcStat.mode, src, dest, opts)
- }
- return copyDir(src, dest, opts)
-}
-
-async function mkDirAndCopy (srcMode, src, dest, opts) {
- await mkdir(dest)
- await copyDir(src, dest, opts)
- return setDestMode(dest, srcMode)
-}
-
-async function copyDir (src, dest, opts) {
- const dir = await readdir(src)
- for (let i = 0; i < dir.length; i++) {
- const item = dir[i]
- const srcItem = join(src, item)
- const destItem = join(dest, item)
- const { destStat } = await checkPaths(srcItem, destItem, opts)
- await startCopy(destStat, srcItem, destItem, opts)
- }
-}
-
-async function onLink (destStat, src, dest) {
- let resolvedSrc = await readlink(src)
- if (!isAbsolute(resolvedSrc)) {
- resolvedSrc = resolve(dirname(src), resolvedSrc)
- }
- if (!destStat) {
- return symlink(resolvedSrc, dest)
- }
- let resolvedDest
- try {
- resolvedDest = await readlink(dest)
- } catch (err) {
- // Dest exists and is a regular file or directory;
- // Windows may throw an UNKNOWN error. If dest already exists,
- // fs throws an error anyway, so there is no need to guard against it here.
- // istanbul ignore next: can only test on windows
- if (err.code === 'EINVAL' || err.code === 'UNKNOWN') {
- return symlink(resolvedSrc, dest)
- }
- // istanbul ignore next: should not be possible
- throw err
- }
- if (!isAbsolute(resolvedDest)) {
- resolvedDest = resolve(dirname(dest), resolvedDest)
- }
- if (isSrcSubdir(resolvedSrc, resolvedDest)) {
- throw new ERR_FS_CP_EINVAL({
- message: `cannot copy ${resolvedSrc} to a subdirectory of self ` +
- `${resolvedDest}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- }
- // Do not copy if src is a subdir of dest since unlinking
- // dest in this case would result in removing src contents
- // and therefore a broken symlink would be created.
- const srcStat = await stat(src)
- if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) {
- throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({
- message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- }
- return copyLink(resolvedSrc, dest)
-}
-
-async function copyLink (resolvedSrc, dest) {
- await unlink(dest)
- return symlink(resolvedSrc, dest)
-}
-
-module.exports = cp
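
// [Editor's sketch, not part of the deleted file] The path-string
// subdirectory guard above (normalizePathToArray/isSrcSubdir) is what makes
// cp(src, dest) refuse to copy a directory into itself. A standalone
// restatement with hypothetical sample paths:
const { resolve: resolvePath, sep: pathSep } = require('path')
const norm = (p) => resolvePath(p).split(pathSep).filter(Boolean)
const isSubdir = (src, dest) => norm(src).every((cur, i) => norm(dest)[i] === cur)
console.log(isSubdir('/a/b', '/a/b/c')) // true  -> cp() throws ERR_FS_CP_EINVAL
console.log(isSubdir('/a/b', '/a/bc'))  // false -> 'b' !== 'bc', copy allowed
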
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js
deleted file mode 100644
index 81c746304cc428..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js
+++ /dev/null
@@ -1,13 +0,0 @@
-'use strict'
-
-const cp = require('./cp/index.js')
-const withTempDir = require('./with-temp-dir.js')
-const readdirScoped = require('./readdir-scoped.js')
-const moveFile = require('./move-file.js')
-
-module.exports = {
- cp,
- withTempDir,
- readdirScoped,
- moveFile,
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js
deleted file mode 100644
index d56e06d384659a..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const { dirname, join, resolve, relative, isAbsolute } = require('path')
-const fs = require('fs/promises')
-
-const pathExists = async path => {
- try {
- await fs.access(path)
- return true
- } catch (er) {
- return er.code !== 'ENOENT'
- }
-}
-
-const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => {
- if (!source || !destination) {
- throw new TypeError('`source` and `destination` file required')
- }
-
- options = {
- overwrite: true,
- ...options,
- }
-
- if (!options.overwrite && await pathExists(destination)) {
- throw new Error(`The destination file exists: ${destination}`)
- }
-
- await fs.mkdir(dirname(destination), { recursive: true })
-
- try {
- await fs.rename(source, destination)
- } catch (error) {
- if (error.code === 'EXDEV' || error.code === 'EPERM') {
- const sourceStat = await fs.lstat(source)
- if (sourceStat.isDirectory()) {
- const files = await fs.readdir(source)
- await Promise.all(files.map((file) =>
- moveFile(join(source, file), join(destination, file), options, false, symlinks)
- ))
- } else if (sourceStat.isSymbolicLink()) {
- symlinks.push({ source, destination })
- } else {
- await fs.copyFile(source, destination)
- }
- } else {
- throw error
- }
- }
-
- if (root) {
- await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => {
- let target = await fs.readlink(symSource)
- // junction symlinks in windows will be absolute paths, so we need to
- // make sure they point to the symlink destination
- if (isAbsolute(target)) {
- target = resolve(symDestination, relative(symSource, target))
- }
- // try to determine what the actual file is so we can create the correct
- // type of symlink in windows
- let targetStat = 'file'
- try {
- targetStat = await fs.stat(resolve(dirname(symSource), target))
- if (targetStat.isDirectory()) {
- targetStat = 'junction'
- }
- } catch {
- // targetStat remains 'file'
- }
- await fs.symlink(
- target,
- symDestination,
- targetStat
- )
- }))
- await fs.rm(source, { recursive: true, force: true })
- }
-}
-
-module.exports = moveFile
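
// [Editor's sketch, not part of the deleted file] Hypothetical usage of the
// moveFile helper above: rename() is tried first, and on EXDEV/EPERM it
// falls back to copy + rm, re-creating any symlinks last. Paths here are
// placeholders.
const moveFile = require('./move-file.js')
moveFile('/tmp/staging/pkg', '/var/cache/pkg', { overwrite: false })
  .catch((err) => {
    // with overwrite: false an existing destination rejects with
    // "The destination file exists: /var/cache/pkg"
    console.error(err.message)
  })
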
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js
deleted file mode 100644
index cd601dfbe7486b..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js
+++ /dev/null
@@ -1,20 +0,0 @@
-const { readdir } = require('fs/promises')
-const { join } = require('path')
-
-const readdirScoped = async (dir) => {
- const results = []
-
- for (const item of await readdir(dir)) {
- if (item.startsWith('@')) {
- for (const scopedItem of await readdir(join(dir, item))) {
- results.push(join(item, scopedItem))
- }
- } else {
- results.push(item)
- }
- }
-
- return results
-}
-
-module.exports = readdirScoped
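
// [Editor's sketch, not part of the deleted file] Hypothetical output of
// readdirScoped on a node_modules layout; the directory names are made up.
// Entries under an @scope directory come back as joined "scope/name" paths
// (platform separator); everything else is returned as-is.
const readdirScoped = require('./readdir-scoped.js')
// e.g. node_modules/{ @npmcli/fs, @npmcli/git, semver } resolves to
// ['@npmcli/fs', '@npmcli/git', 'semver'] on POSIX systems
readdirScoped('./node_modules').then(console.log)
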
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js
deleted file mode 100644
index 0738ac4f29e1be..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js
+++ /dev/null
@@ -1,39 +0,0 @@
-const { join, sep } = require('path')
-
-const getOptions = require('./common/get-options.js')
-const { mkdir, mkdtemp, rm } = require('fs/promises')
-
-// create a temp directory, ensure its permissions match its parent, then call
-// the supplied function passing it the path to the directory. clean up after
-// the function finishes, whether it throws or not
-const withTempDir = async (root, fn, opts) => {
- const options = getOptions(opts, {
- copy: ['tmpPrefix'],
- })
- // create the directory
- await mkdir(root, { recursive: true })
-
- const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || ''))
- let err
- let result
-
- try {
- result = await fn(target)
- } catch (_err) {
- err = _err
- }
-
- try {
- await rm(target, { force: true, recursive: true })
- } catch {
- // ignore errors
- }
-
- if (err) {
- throw err
- }
-
- return result
-}
-
-module.exports = withTempDir
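
// [Editor's sketch, not part of the deleted file] Hypothetical usage of
// withTempDir above: the temp directory is removed whether fn throws or
// resolves, and fn's result (or error) is propagated to the caller.
const withTempDir = require('./with-temp-dir.js')
withTempDir('/tmp/npm-cache', async (dir) => {
  // dir is something like /tmp/npm-cache/demo-abc123
  return dir
}, { tmpPrefix: 'demo-' }).then((dir) => console.log('worked in', dir))
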
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json
deleted file mode 100644
index 5261a11b78000e..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json
+++ /dev/null
@@ -1,52 +0,0 @@
-{
- "name": "@npmcli/fs",
- "version": "3.1.1",
- "description": "filesystem utilities for the npm cli",
- "main": "lib/index.js",
- "files": [
- "bin/",
- "lib/"
- ],
- "scripts": {
- "snap": "tap",
- "test": "tap",
- "npmclilint": "npmcli-lint",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "lintfix": "npm run lint -- --fix",
- "posttest": "npm run lint",
- "postsnap": "npm run lintfix --",
- "postlint": "template-oss-check",
- "template-oss-apply": "template-oss-apply --force"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/fs.git"
- },
- "keywords": [
- "npm",
- "oss"
- ],
- "author": "GitHub Inc.",
- "license": "ISC",
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.22.0",
- "tap": "^16.0.1"
- },
- "dependencies": {
- "semver": "^7.3.5"
- },
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.22.0"
- },
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- }
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md
deleted file mode 100644
index 8d28acf866d932..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js
deleted file mode 100644
index ad5a76a4f73f26..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js
+++ /dev/null
@@ -1,29 +0,0 @@
-'use strict'
-
-const contentVer = require('../../package.json')['cache-version'].content
-const hashToSegments = require('../util/hash-to-segments')
-const path = require('path')
-const ssri = require('ssri')
-
-// Current format of content file path:
-//
-// sha512-BaSE64Hex= ->
-// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
-//
-module.exports = contentPath
-
-function contentPath (cache, integrity) {
- const sri = ssri.parse(integrity, { single: true })
- // contentPath is the *strongest* algo given
- return path.join(
- contentDir(cache),
- sri.algorithm,
- ...hashToSegments(sri.hexDigest())
- )
-}
-
-module.exports.contentDir = contentDir
-
-function contentDir (cache) {
- return path.join(cache, `content-v${contentVer}`)
-}
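
// [Editor's sketch, not part of the deleted file] A standalone restatement
// of the layout contentPath builds, using a made-up digest: the hex digest
// is split into 2/2/rest segments (see hash-to-segments.js below) to keep
// any one directory from growing too large.
const { join } = require('path')
const hashToSegments = (hash) => [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
console.log(join('content-v2', 'sha512', ...hashToSegments('bada55deadbeefc0ffee')))
// -> content-v2/sha512/ba/da/55deadbeefc0ffee (matches the comment above)
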
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js
deleted file mode 100644
index 5f6192c3cec566..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js
+++ /dev/null
@@ -1,165 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const fsm = require('fs-minipass')
-const ssri = require('ssri')
-const contentPath = require('./path')
-const Pipeline = require('minipass-pipeline')
-
-module.exports = read
-
-const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
-async function read (cache, integrity, opts = {}) {
- const { size } = opts
- const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
- // get size
- const stat = size ? { size } : await fs.stat(cpath)
- return { stat, cpath, sri }
- })
-
- if (stat.size > MAX_SINGLE_READ_SIZE) {
- return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
- }
-
- const data = await fs.readFile(cpath, { encoding: null })
-
- if (stat.size !== data.length) {
- throw sizeError(stat.size, data.length)
- }
-
- if (!ssri.checkData(data, sri)) {
- throw integrityError(sri, cpath)
- }
-
- return data
-}
-
-const readPipeline = (cpath, size, sri, stream) => {
- stream.push(
- new fsm.ReadStream(cpath, {
- size,
- readSize: MAX_SINGLE_READ_SIZE,
- }),
- ssri.integrityStream({
- integrity: sri,
- size,
- })
- )
- return stream
-}
-
-module.exports.stream = readStream
-module.exports.readStream = readStream
-
-function readStream (cache, integrity, opts = {}) {
- const { size } = opts
- const stream = new Pipeline()
- // Set all this up to run on the stream and then just return the stream
- Promise.resolve().then(async () => {
- const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
- // get size
- const stat = size ? { size } : await fs.stat(cpath)
- return { stat, cpath, sri }
- })
-
- return readPipeline(cpath, stat.size, sri, stream)
- }).catch(err => stream.emit('error', err))
-
- return stream
-}
-
-module.exports.copy = copy
-
-function copy (cache, integrity, dest) {
- return withContentSri(cache, integrity, (cpath) => {
- return fs.copyFile(cpath, dest)
- })
-}
-
-module.exports.hasContent = hasContent
-
-async function hasContent (cache, integrity) {
- if (!integrity) {
- return false
- }
-
- try {
- return await withContentSri(cache, integrity, async (cpath, sri) => {
- const stat = await fs.stat(cpath)
- return { size: stat.size, sri, stat }
- })
- } catch (err) {
- if (err.code === 'ENOENT') {
- return false
- }
-
- if (err.code === 'EPERM') {
- /* istanbul ignore else */
- if (process.platform !== 'win32') {
- throw err
- } else {
- return false
- }
- }
- }
-}
-
-async function withContentSri (cache, integrity, fn) {
- const sri = ssri.parse(integrity)
- // If `integrity` has multiple entries, pick the first digest
- // with available local data.
- const algo = sri.pickAlgorithm()
- const digests = sri[algo]
-
- if (digests.length <= 1) {
- const cpath = contentPath(cache, digests[0])
- return fn(cpath, digests[0])
- } else {
- // Can't use race here because a generic error can happen before
- // a ENOENT error, and can happen before a valid result
- const results = await Promise.all(digests.map(async (meta) => {
- try {
- return await withContentSri(cache, meta, fn)
- } catch (err) {
- if (err.code === 'ENOENT') {
- return Object.assign(
- new Error('No matching content found for ' + sri.toString()),
- { code: 'ENOENT' }
- )
- }
- return err
- }
- }))
- // Return the first non error if it is found
- const result = results.find((r) => !(r instanceof Error))
- if (result) {
- return result
- }
-
- // Throw the No matching content found error
- const enoentError = results.find((r) => r.code === 'ENOENT')
- if (enoentError) {
- throw enoentError
- }
-
- // Throw generic error
- throw results.find((r) => r instanceof Error)
- }
-}
-
-function sizeError (expected, found) {
- /* eslint-disable-next-line max-len */
- const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
- err.expected = expected
- err.found = found
- err.code = 'EBADSIZE'
- return err
-}
-
-function integrityError (sri, path) {
- const err = new Error(`Integrity verification failed for ${sri} (${path})`)
- err.code = 'EINTEGRITY'
- err.sri = sri
- err.path = path
- return err
-}
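
// [Editor's sketch, not part of the deleted file] Hypothetical use of the
// read API above; the cache path and integrity string are placeholders.
// read() re-checks both size and ssri integrity, rejecting with
// EBADSIZE/EINTEGRITY on mismatch; hasContent() resolves false on a miss.
const read = require('./read')
const cache = '/path/to/cache'
const sri = 'sha512-deadbeef=='
read.hasContent(cache, sri)
  .then((info) => (info ? read(cache, sri, { size: info.size }) : null))
  .then((data) => data && console.log(data.length, 'verified bytes'))
  .catch(console.error)
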
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js
deleted file mode 100644
index ce58d679e4cb25..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js
+++ /dev/null
@@ -1,18 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const contentPath = require('./path')
-const { hasContent } = require('./read')
-
-module.exports = rm
-
-async function rm (cache, integrity) {
- const content = await hasContent(cache, integrity)
- // ~pretty~ sure we can't end up with a content lacking sri, but be safe
- if (content && content.sri) {
- await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
- return true
- } else {
- return false
- }
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js
deleted file mode 100644
index e7187abca8788a..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js
+++ /dev/null
@@ -1,206 +0,0 @@
-'use strict'
-
-const events = require('events')
-
-const contentPath = require('./path')
-const fs = require('fs/promises')
-const { moveFile } = require('@npmcli/fs')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-const Flush = require('minipass-flush')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-const fsm = require('fs-minipass')
-
-module.exports = write
-
- // Cache of in-progress move operations so we don't duplicate them
-const moveOperations = new Map()
-
-async function write (cache, data, opts = {}) {
- const { algorithms, size, integrity } = opts
-
- if (typeof size === 'number' && data.length !== size) {
- throw sizeError(size, data.length)
- }
-
- const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
- if (integrity && !ssri.checkData(data, integrity, opts)) {
- throw checksumError(integrity, sri)
- }
-
- for (const algo in sri) {
- const tmp = await makeTmp(cache, opts)
- const hash = sri[algo].toString()
- try {
- await fs.writeFile(tmp.target, data, { flag: 'wx' })
- await moveToDestination(tmp, cache, hash, opts)
- } finally {
- if (!tmp.moved) {
- await fs.rm(tmp.target, { recursive: true, force: true })
- }
- }
- }
- return { integrity: sri, size: data.length }
-}
-
-module.exports.stream = writeStream
-
- // Writes are proxied to the 'inputStream' that is passed to the Promise;
- // 'end' is deferred until the content has been handled.
-class CacacheWriteStream extends Flush {
- constructor (cache, opts) {
- super()
- this.opts = opts
- this.cache = cache
- this.inputStream = new Minipass()
- this.inputStream.on('error', er => this.emit('error', er))
- this.inputStream.on('drain', () => this.emit('drain'))
- this.handleContentP = null
- }
-
- write (chunk, encoding, cb) {
- if (!this.handleContentP) {
- this.handleContentP = handleContent(
- this.inputStream,
- this.cache,
- this.opts
- )
- this.handleContentP.catch(error => this.emit('error', error))
- }
- return this.inputStream.write(chunk, encoding, cb)
- }
-
- flush (cb) {
- this.inputStream.end(() => {
- if (!this.handleContentP) {
- const e = new Error('Cache input stream was empty')
- e.code = 'ENODATA'
- // empty streams are probably emitting end right away.
- // defer this one tick by rejecting a promise on it.
- return Promise.reject(e).catch(cb)
- }
- // eslint-disable-next-line promise/catch-or-return
- this.handleContentP.then(
- (res) => {
- res.integrity && this.emit('integrity', res.integrity)
- // eslint-disable-next-line promise/always-return
- res.size !== null && this.emit('size', res.size)
- cb()
- },
- (er) => cb(er)
- )
- })
- }
-}
-
-function writeStream (cache, opts = {}) {
- return new CacacheWriteStream(cache, opts)
-}
-
-async function handleContent (inputStream, cache, opts) {
- const tmp = await makeTmp(cache, opts)
- try {
- const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
- await moveToDestination(
- tmp,
- cache,
- res.integrity,
- opts
- )
- return res
- } finally {
- if (!tmp.moved) {
- await fs.rm(tmp.target, { recursive: true, force: true })
- }
- }
-}
-
-async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
- const outStream = new fsm.WriteStream(tmpTarget, {
- flags: 'wx',
- })
-
- if (opts.integrityEmitter) {
- // we need to create these all simultaneously since they can fire in any order
- const [integrity, size] = await Promise.all([
- events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
- events.once(opts.integrityEmitter, 'size').then(res => res[0]),
- new Pipeline(inputStream, outStream).promise(),
- ])
- return { integrity, size }
- }
-
- let integrity
- let size
- const hashStream = ssri.integrityStream({
- integrity: opts.integrity,
- algorithms: opts.algorithms,
- size: opts.size,
- })
- hashStream.on('integrity', i => {
- integrity = i
- })
- hashStream.on('size', s => {
- size = s
- })
-
- const pipeline = new Pipeline(inputStream, hashStream, outStream)
- await pipeline.promise()
- return { integrity, size }
-}
-
-async function makeTmp (cache, opts) {
- const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
- await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
- return {
- target: tmpTarget,
- moved: false,
- }
-}
-
-async function moveToDestination (tmp, cache, sri) {
- const destination = contentPath(cache, sri)
- const destDir = path.dirname(destination)
- if (moveOperations.has(destination)) {
- return moveOperations.get(destination)
- }
- moveOperations.set(
- destination,
- fs.mkdir(destDir, { recursive: true })
- .then(async () => {
- await moveFile(tmp.target, destination, { overwrite: false })
- tmp.moved = true
- return tmp.moved
- })
- .catch(err => {
- if (!err.message.startsWith('The destination file exists')) {
- throw Object.assign(err, { code: 'EEXIST' })
- }
- }).finally(() => {
- moveOperations.delete(destination)
- })
-
- )
- return moveOperations.get(destination)
-}
-
-function sizeError (expected, found) {
- /* eslint-disable-next-line max-len */
- const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
- err.expected = expected
- err.found = found
- err.code = 'EBADSIZE'
- return err
-}
-
-function checksumError (expected, found) {
- const err = new Error(`Integrity check failed:
- Wanted: ${expected}
- Found: ${found}`)
- err.code = 'EINTEGRITY'
- err.expected = expected
- err.found = found
- return err
-}
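
// [Editor's sketch, not part of the deleted file] Hypothetical use of the
// stream interface above; the cache path and payload are placeholders. The
// 'integrity' and 'size' events fire from flush(), once the content has
// been hashed and moved into its final content path.
const write = require('./write')
const ws = write.stream('/path/to/cache', { algorithms: ['sha512'] })
ws.on('integrity', (sri) => console.log('stored as', sri.toString()))
ws.on('size', (n) => console.log(n, 'bytes'))
ws.end('hello, cache')
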
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js
deleted file mode 100644
index 89c28f2f257d48..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js
+++ /dev/null
@@ -1,336 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const {
- appendFile,
- mkdir,
- readFile,
- readdir,
- rm,
- writeFile,
-} = require('fs/promises')
-const { Minipass } = require('minipass')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-
-const contentPath = require('./content/path')
-const hashToSegments = require('./util/hash-to-segments')
-const indexV = require('../package.json')['cache-version'].index
-const { moveFile } = require('@npmcli/fs')
-
-const pMap = require('p-map')
-const lsStreamConcurrency = 5
-
-module.exports.NotFoundError = class NotFoundError extends Error {
- constructor (cache, key) {
- super(`No cache entry for ${key} found in ${cache}`)
- this.code = 'ENOENT'
- this.cache = cache
- this.key = key
- }
-}
-
-module.exports.compact = compact
-
-async function compact (cache, key, matchFn, opts = {}) {
- const bucket = bucketPath(cache, key)
- const entries = await bucketEntries(bucket)
- const newEntries = []
- // we loop backwards because the bottom-most result is the newest
- // since we add new entries with appendFile
- for (let i = entries.length - 1; i >= 0; --i) {
- const entry = entries[i]
- // a null integrity could mean either a delete was appended
- // or the user has simply stored an index that does not map
- // to any content. we determine if the user wants to keep the
- // null integrity based on the validateEntry function passed in options.
- // if the integrity is null and no validateEntry is provided, we break
- // as we consider the null integrity to be a deletion of everything
- // that came before it.
- if (entry.integrity === null && !opts.validateEntry) {
- break
- }
-
- // if this entry is valid, and it is either the first entry or
- // the newEntries array doesn't already include an entry that
- // matches this one based on the provided matchFn, then we add
- // it to the beginning of our list
- if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
- (newEntries.length === 0 ||
- !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
- newEntries.unshift(entry)
- }
- }
-
- const newIndex = '\n' + newEntries.map((entry) => {
- const stringified = JSON.stringify(entry)
- const hash = hashEntry(stringified)
- return `${hash}\t${stringified}`
- }).join('\n')
-
- const setup = async () => {
- const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
- await mkdir(path.dirname(target), { recursive: true })
- return {
- target,
- moved: false,
- }
- }
-
- const teardown = async (tmp) => {
- if (!tmp.moved) {
- return rm(tmp.target, { recursive: true, force: true })
- }
- }
-
- const write = async (tmp) => {
- await writeFile(tmp.target, newIndex, { flag: 'wx' })
- await mkdir(path.dirname(bucket), { recursive: true })
- // we use @npmcli/move-file directly here because we
- // want to overwrite the existing file
- await moveFile(tmp.target, bucket)
- tmp.moved = true
- }
-
- // write the file atomically
- const tmp = await setup()
- try {
- await write(tmp)
- } finally {
- await teardown(tmp)
- }
-
- // we reverse the list we generated such that the newest
- // entries come first in order to make looping through them easier
- // the true passed to formatEntry tells it to keep null
- // integrity values, if they made it this far it's because
- // validateEntry returned true, and as such we should return it
- return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
-}
-
-module.exports.insert = insert
-
-async function insert (cache, key, integrity, opts = {}) {
- const { metadata, size, time } = opts
- const bucket = bucketPath(cache, key)
- const entry = {
- key,
- integrity: integrity && ssri.stringify(integrity),
- time: time || Date.now(),
- size,
- metadata,
- }
- try {
- await mkdir(path.dirname(bucket), { recursive: true })
- const stringified = JSON.stringify(entry)
- // NOTE - Cleverness ahoy!
- //
- // This works because it's tremendously unlikely for an entry to corrupt
- // another while still preserving the string length of the JSON in
- // question. So, we just slap the length in there and verify it on read.
- //
- // Thanks to @isaacs for the whiteboarding session that ended up with
- // this.
- await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
- } catch (err) {
- if (err.code === 'ENOENT') {
- return undefined
- }
-
- throw err
- }
- return formatEntry(cache, entry)
-}
-
-module.exports.find = find
-
-async function find (cache, key) {
- const bucket = bucketPath(cache, key)
- try {
- const entries = await bucketEntries(bucket)
- return entries.reduce((latest, next) => {
- if (next && next.key === key) {
- return formatEntry(cache, next)
- } else {
- return latest
- }
- }, null)
- } catch (err) {
- if (err.code === 'ENOENT') {
- return null
- } else {
- throw err
- }
- }
-}
-
-module.exports.delete = del
-
-function del (cache, key, opts = {}) {
- if (!opts.removeFully) {
- return insert(cache, key, null, opts)
- }
-
- const bucket = bucketPath(cache, key)
- return rm(bucket, { recursive: true, force: true })
-}
-
-module.exports.lsStream = lsStream
-
-function lsStream (cache) {
- const indexDir = bucketDir(cache)
- const stream = new Minipass({ objectMode: true })
-
- // Set all this up to run on the stream and then just return the stream
- Promise.resolve().then(async () => {
- const buckets = await readdirOrEmpty(indexDir)
- await pMap(buckets, async (bucket) => {
- const bucketPath = path.join(indexDir, bucket)
- const subbuckets = await readdirOrEmpty(bucketPath)
- await pMap(subbuckets, async (subbucket) => {
- const subbucketPath = path.join(bucketPath, subbucket)
-
- // "/cachename//./*"
- const subbucketEntries = await readdirOrEmpty(subbucketPath)
- await pMap(subbucketEntries, async (entry) => {
- const entryPath = path.join(subbucketPath, entry)
- try {
- const entries = await bucketEntries(entryPath)
- // using a Map here prevents duplicate keys from showing up
- // twice; later entries for the same key overwrite earlier ones
- const reduced = entries.reduce((acc, entry) => {
- acc.set(entry.key, entry)
- return acc
- }, new Map())
- // reduced is a map of key => entry
- for (const entry of reduced.values()) {
- const formatted = formatEntry(cache, entry)
- if (formatted) {
- stream.write(formatted)
- }
- }
- } catch (err) {
- if (err.code === 'ENOENT') {
- return undefined
- }
- throw err
- }
- },
- { concurrency: lsStreamConcurrency })
- },
- { concurrency: lsStreamConcurrency })
- },
- { concurrency: lsStreamConcurrency })
- stream.end()
- return stream
- }).catch(err => stream.emit('error', err))
-
- return stream
-}
-
-module.exports.ls = ls
-
-async function ls (cache) {
- const entries = await lsStream(cache).collect()
- return entries.reduce((acc, xs) => {
- acc[xs.key] = xs
- return acc
- }, {})
-}
-
-module.exports.bucketEntries = bucketEntries
-
-async function bucketEntries (bucket, filter) {
- const data = await readFile(bucket, 'utf8')
- return _bucketEntries(data, filter)
-}
-
-function _bucketEntries (data) {
- const entries = []
- data.split('\n').forEach((entry) => {
- if (!entry) {
- return
- }
-
- const pieces = entry.split('\t')
- if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
- // Hash is no good! Corruption or malice? Doesn't matter!
- // EJECT EJECT
- return
- }
- let obj
- try {
- obj = JSON.parse(pieces[1])
- } catch (_) {
- // eslint-ignore-next-line no-empty-block
- }
- // coverage disabled here, no need to test with an entry that parses to something falsy
- // istanbul ignore else
- if (obj) {
- entries.push(obj)
- }
- })
- return entries
-}
-
-module.exports.bucketDir = bucketDir
-
-function bucketDir (cache) {
- return path.join(cache, `index-v${indexV}`)
-}
-
-module.exports.bucketPath = bucketPath
-
-function bucketPath (cache, key) {
- const hashed = hashKey(key)
- return path.join.apply(
- path,
- [bucketDir(cache)].concat(hashToSegments(hashed))
- )
-}
-
-module.exports.hashKey = hashKey
-
-function hashKey (key) {
- return hash(key, 'sha256')
-}
-
-module.exports.hashEntry = hashEntry
-
-function hashEntry (str) {
- return hash(str, 'sha1')
-}
-
-function hash (str, digest) {
- return crypto
- .createHash(digest)
- .update(str)
- .digest('hex')
-}
-
-function formatEntry (cache, entry, keepAll) {
- // Treat null digests as deletions. They'll shadow any previous entries.
- if (!entry.integrity && !keepAll) {
- return null
- }
-
- return {
- key: entry.key,
- integrity: entry.integrity,
- path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
- size: entry.size,
- time: entry.time,
- metadata: entry.metadata,
- }
-}
-
-function readdirOrEmpty (dir) {
- return readdir(dir).catch((err) => {
- if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
- return []
- }
-
- throw err
- })
-}
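
// [Editor's sketch, not part of the deleted file] A standalone restatement
// of the bucket line format that insert()/bucketEntries() above agree on:
// each appended line is "<sha1-of-json>\t<json>", so a truncated or
// tampered line simply fails the hash check and is skipped on read.
const crypto = require('crypto')
const entry = { key: 'my-key', integrity: 'sha512-deadbeef==', time: Date.now() }
const stringified = JSON.stringify(entry)
const lineHash = crypto.createHash('sha1').update(stringified).digest('hex')
console.log(`\n${lineHash}\t${stringified}`) // the exact bytes appendFile writes
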
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js
deleted file mode 100644
index 80ec206c7ecaaa..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js
+++ /dev/null
@@ -1,170 +0,0 @@
-'use strict'
-
-const Collect = require('minipass-collect')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const read = require('./content/read')
-
-async function getData (cache, key, opts = {}) {
- const { integrity, memoize, size } = opts
- const memoized = memo.get(cache, key, opts)
- if (memoized && memoize !== false) {
- return {
- metadata: memoized.entry.metadata,
- data: memoized.data,
- integrity: memoized.entry.integrity,
- size: memoized.entry.size,
- }
- }
-
- const entry = await index.find(cache, key, opts)
- if (!entry) {
- throw new index.NotFoundError(cache, key)
- }
- const data = await read(cache, entry.integrity, { integrity, size })
- if (memoize) {
- memo.put(cache, entry, data, opts)
- }
-
- return {
- data,
- metadata: entry.metadata,
- size: entry.size,
- integrity: entry.integrity,
- }
-}
-module.exports = getData
-
-async function getDataByDigest (cache, key, opts = {}) {
- const { integrity, memoize, size } = opts
- const memoized = memo.get.byDigest(cache, key, opts)
- if (memoized && memoize !== false) {
- return memoized
- }
-
- const res = await read(cache, key, { integrity, size })
- if (memoize) {
- memo.put.byDigest(cache, key, res, opts)
- }
- return res
-}
-module.exports.byDigest = getDataByDigest
-
-const getMemoizedStream = (memoized) => {
- const stream = new Minipass()
- stream.on('newListener', function (ev, cb) {
- ev === 'metadata' && cb(memoized.entry.metadata)
- ev === 'integrity' && cb(memoized.entry.integrity)
- ev === 'size' && cb(memoized.entry.size)
- })
- stream.end(memoized.data)
- return stream
-}
-
-function getStream (cache, key, opts = {}) {
- const { memoize, size } = opts
- const memoized = memo.get(cache, key, opts)
- if (memoized && memoize !== false) {
- return getMemoizedStream(memoized)
- }
-
- const stream = new Pipeline()
- // Set all this up to run on the stream and then just return the stream
- Promise.resolve().then(async () => {
- const entry = await index.find(cache, key)
- if (!entry) {
- throw new index.NotFoundError(cache, key)
- }
-
- stream.emit('metadata', entry.metadata)
- stream.emit('integrity', entry.integrity)
- stream.emit('size', entry.size)
- stream.on('newListener', function (ev, cb) {
- ev === 'metadata' && cb(entry.metadata)
- ev === 'integrity' && cb(entry.integrity)
- ev === 'size' && cb(entry.size)
- })
-
- const src = read.readStream(
- cache,
- entry.integrity,
- { ...opts, size: typeof size !== 'number' ? entry.size : size }
- )
-
- if (memoize) {
- const memoStream = new Collect.PassThrough()
- memoStream.on('collect', data => memo.put(cache, entry, data, opts))
- stream.unshift(memoStream)
- }
- stream.unshift(src)
- return stream
- }).catch((err) => stream.emit('error', err))
-
- return stream
-}
-
-module.exports.stream = getStream
-
-function getStreamDigest (cache, integrity, opts = {}) {
- const { memoize } = opts
- const memoized = memo.get.byDigest(cache, integrity, opts)
- if (memoized && memoize !== false) {
- const stream = new Minipass()
- stream.end(memoized)
- return stream
- } else {
- const stream = read.readStream(cache, integrity, opts)
- if (!memoize) {
- return stream
- }
-
- const memoStream = new Collect.PassThrough()
- memoStream.on('collect', data => memo.put.byDigest(
- cache,
- integrity,
- data,
- opts
- ))
- return new Pipeline(stream, memoStream)
- }
-}
-
-module.exports.stream.byDigest = getStreamDigest
-
-function info (cache, key, opts = {}) {
- const { memoize } = opts
- const memoized = memo.get(cache, key, opts)
- if (memoized && memoize !== false) {
- return Promise.resolve(memoized.entry)
- } else {
- return index.find(cache, key)
- }
-}
-module.exports.info = info
-
-async function copy (cache, key, dest, opts = {}) {
- const entry = await index.find(cache, key, opts)
- if (!entry) {
- throw new index.NotFoundError(cache, key)
- }
- await read.copy(cache, entry.integrity, dest, opts)
- return {
- metadata: entry.metadata,
- size: entry.size,
- integrity: entry.integrity,
- }
-}
-
-module.exports.copy = copy
-
-async function copyByDigest (cache, key, dest, opts = {}) {
- await read.copy(cache, key, dest, opts)
- return key
-}
-
-module.exports.copy.byDigest = copyByDigest
-
-module.exports.hasContent = read.hasContent
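
// [Editor's sketch, not part of the deleted file] Hypothetical use of the
// get API above; the cache path and key are placeholders. getData resolves
// { data, metadata, size, integrity } and rejects with NotFoundError
// (code ENOENT) when the key has no index entry.
const get = require('./get.js')
get('/path/to/cache', 'registry:my-pkg')
  .then(({ data, integrity }) => console.log(data.length, 'bytes,', integrity))
  .catch((err) => {
    if (err.code !== 'ENOENT') {
      throw err
    }
    console.log('cache miss')
  })
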
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js
deleted file mode 100644
index c9b0da5f3a271b..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js
+++ /dev/null
@@ -1,42 +0,0 @@
-'use strict'
-
-const get = require('./get.js')
-const put = require('./put.js')
-const rm = require('./rm.js')
-const verify = require('./verify.js')
-const { clearMemoized } = require('./memoization.js')
-const tmp = require('./util/tmp.js')
-const index = require('./entry-index.js')
-
-module.exports.index = {}
-module.exports.index.compact = index.compact
-module.exports.index.insert = index.insert
-
-module.exports.ls = index.ls
-module.exports.ls.stream = index.lsStream
-
-module.exports.get = get
-module.exports.get.byDigest = get.byDigest
-module.exports.get.stream = get.stream
-module.exports.get.stream.byDigest = get.stream.byDigest
-module.exports.get.copy = get.copy
-module.exports.get.copy.byDigest = get.copy.byDigest
-module.exports.get.info = get.info
-module.exports.get.hasContent = get.hasContent
-
-module.exports.put = put
-module.exports.put.stream = put.stream
-
-module.exports.rm = rm.entry
-module.exports.rm.all = rm.all
-module.exports.rm.entry = module.exports.rm
-module.exports.rm.content = rm.content
-
-module.exports.clearMemoized = clearMemoized
-
-module.exports.tmp = {}
-module.exports.tmp.mkdir = tmp.mkdir
-module.exports.tmp.withTmp = tmp.withTmp
-
-module.exports.verify = verify
-module.exports.verify.lastRun = verify.lastRun
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js
deleted file mode 100644
index 2ecc60912e4563..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js
+++ /dev/null
@@ -1,72 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-
-const MEMOIZED = new LRUCache({
- max: 500,
- maxSize: 50 * 1024 * 1024, // 50MB
- ttl: 3 * 60 * 1000, // 3 minutes
- sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
-})
-
-module.exports.clearMemoized = clearMemoized
-
-function clearMemoized () {
- const old = {}
- MEMOIZED.forEach((v, k) => {
- old[k] = v
- })
- MEMOIZED.clear()
- return old
-}
-
-module.exports.put = put
-
-function put (cache, entry, data, opts) {
- pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
- putDigest(cache, entry.integrity, data, opts)
-}
-
-module.exports.put.byDigest = putDigest
-
-function putDigest (cache, integrity, data, opts) {
- pickMem(opts).set(`digest:${cache}:${integrity}`, data)
-}
-
-module.exports.get = get
-
-function get (cache, key, opts) {
- return pickMem(opts).get(`key:${cache}:${key}`)
-}
-
-module.exports.get.byDigest = getDigest
-
-function getDigest (cache, integrity, opts) {
- return pickMem(opts).get(`digest:${cache}:${integrity}`)
-}
-
-class ObjProxy {
- constructor (obj) {
- this.obj = obj
- }
-
- get (key) {
- return this.obj[key]
- }
-
- set (key, val) {
- this.obj[key] = val
- }
-}
-
-function pickMem (opts) {
- if (!opts || !opts.memoize) {
- return MEMOIZED
- } else if (opts.memoize.get && opts.memoize.set) {
- return opts.memoize
- } else if (typeof opts.memoize === 'object') {
- return new ObjProxy(opts.memoize)
- } else {
- return MEMOIZED
- }
-}
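
// [Editor's sketch, not part of the deleted file] The opts.memoize contract
// handled by pickMem above: anything with get/set methods is used directly,
// a plain object is wrapped in ObjProxy, and anything else falls back to
// the shared LRU. The cache path and entry here are hypothetical.
const memo = require('./memoization')
const store = {} // plain object -> ObjProxy
memo.put('/cache', { key: 'k', integrity: 'sha512-abc==' },
  Buffer.from('data'), { memoize: store })
console.log(Object.keys(store)) // ['key:/cache:k', 'digest:/cache:sha512-abc==']
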
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js
deleted file mode 100644
index 9fc932d5f6dec5..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js
+++ /dev/null
@@ -1,80 +0,0 @@
-'use strict'
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const write = require('./content/write')
-const Flush = require('minipass-flush')
-const { PassThrough } = require('minipass-collect')
-const Pipeline = require('minipass-pipeline')
-
-const putOpts = (opts) => ({
- algorithms: ['sha512'],
- ...opts,
-})
-
-module.exports = putData
-
-async function putData (cache, key, data, opts = {}) {
- const { memoize } = opts
- opts = putOpts(opts)
- const res = await write(cache, data, opts)
- const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
- if (memoize) {
- memo.put(cache, entry, data, opts)
- }
-
- return res.integrity
-}
-
-module.exports.stream = putStream
-
-function putStream (cache, key, opts = {}) {
- const { memoize } = opts
- opts = putOpts(opts)
- let integrity
- let size
- let error
-
- let memoData
- const pipeline = new Pipeline()
- // first item in the pipeline is the memoizer, because we need
- // that to end first and get the collected data.
- if (memoize) {
- const memoizer = new PassThrough().on('collect', data => {
- memoData = data
- })
- pipeline.push(memoizer)
- }
-
- // contentStream is write-only, not a passthrough;
- // no data comes out of it.
- const contentStream = write.stream(cache, opts)
- .on('integrity', (int) => {
- integrity = int
- })
- .on('size', (s) => {
- size = s
- })
- .on('error', (err) => {
- error = err
- })
-
- pipeline.push(contentStream)
-
- // last but not least, we write the index and emit hash and size,
- // and memoize if we're doing that
- pipeline.push(new Flush({
- async flush () {
- if (!error) {
- const entry = await index.insert(cache, key, integrity, { ...opts, size })
- if (memoize && memoData) {
- memo.put(cache, entry, memoData, opts)
- }
- pipeline.emit('integrity', integrity)
- pipeline.emit('size', size)
- }
- },
- }))
-
- return pipeline
-}
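
// [Editor's sketch, not part of the deleted file] Hypothetical use of
// put.stream above; the cache path and key are placeholders. Data piped in
// is hashed and written to content storage, then the index entry is
// inserted by the trailing Flush step, which also re-emits 'integrity' and
// 'size'.
const put = require('./put.js')
const ps = put.stream('/path/to/cache', 'registry:my-pkg')
ps.on('integrity', (sri) => console.log('indexed as', sri))
ps.end(JSON.stringify({ hello: 'world' }))
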
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js
deleted file mode 100644
index a94760c7cf2430..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js
+++ /dev/null
@@ -1,31 +0,0 @@
-'use strict'
-
-const { rm } = require('fs/promises')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const memo = require('./memoization')
-const path = require('path')
-const rmContent = require('./content/rm')
-
-module.exports = entry
-module.exports.entry = entry
-
-function entry (cache, key, opts) {
- memo.clearMemoized()
- return index.delete(cache, key, opts)
-}
-
-module.exports.content = content
-
-function content (cache, integrity) {
- memo.clearMemoized()
- return rmContent(cache, integrity)
-}
-
-module.exports.all = all
-
-async function all (cache) {
- memo.clearMemoized()
- const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
- return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js
deleted file mode 100644
index 8500c1c16a429f..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-const { glob } = require('glob')
-const path = require('path')
-
-const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
-module.exports = (path, options) => glob(globify(path), options)
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js
deleted file mode 100644
index 445599b5038088..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-module.exports = hashToSegments
-
-function hashToSegments (hash) {
- return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js
deleted file mode 100644
index 0bf5302136ebeb..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const { withTempDir } = require('@npmcli/fs')
-const fs = require('fs/promises')
-const path = require('path')
-
-module.exports.mkdir = mktmpdir
-
-async function mktmpdir (cache, opts = {}) {
- const { tmpPrefix } = opts
- const tmpDir = path.join(cache, 'tmp')
- await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
- // do not use path.join(); it drops the trailing / if tmpPrefix is unset
- const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
- return fs.mkdtemp(target, { owner: 'inherit' })
-}
-
-module.exports.withTmp = withTmp
-
-function withTmp (cache, opts, cb) {
- if (!cb) {
- cb = opts
- opts = {}
- }
- return withTempDir(path.join(cache, 'tmp'), cb, opts)
-}
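
// [Editor's sketch, not part of the deleted file] Hypothetical use of
// withTmp above; the cache path is a placeholder. opts is optional, so the
// callback may be passed second; cleanup is delegated to @npmcli/fs's
// withTempDir.
const tmp = require('./tmp.js')
tmp.withTmp('/path/to/cache', async (dir) => {
  // dir lives under /path/to/cache/tmp and is removed afterwards
  return dir
}).then((dir) => console.log('worked in', dir))
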
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js
deleted file mode 100644
index d7423da1295b68..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js
+++ /dev/null
@@ -1,257 +0,0 @@
-'use strict'
-
-const {
- mkdir,
- readFile,
- rm,
- stat,
- truncate,
- writeFile,
-} = require('fs/promises')
-const pMap = require('p-map')
-const contentPath = require('./content/path')
-const fsm = require('fs-minipass')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const path = require('path')
-const ssri = require('ssri')
-
-const hasOwnProperty = (obj, key) =>
- Object.prototype.hasOwnProperty.call(obj, key)
-
-const verifyOpts = (opts) => ({
- concurrency: 20,
- log: { silly () {} },
- ...opts,
-})
-
-module.exports = verify
-
-async function verify (cache, opts) {
- opts = verifyOpts(opts)
- opts.log.silly('verify', 'verifying cache at', cache)
-
- const steps = [
- markStartTime,
- fixPerms,
- garbageCollect,
- rebuildIndex,
- cleanTmp,
- writeVerifile,
- markEndTime,
- ]
-
- const stats = {}
- for (const step of steps) {
- const label = step.name
- const start = new Date()
- const s = await step(cache, opts)
- if (s) {
- Object.keys(s).forEach((k) => {
- stats[k] = s[k]
- })
- }
- const end = new Date()
- if (!stats.runTime) {
- stats.runTime = {}
- }
- stats.runTime[label] = end - start
- }
- stats.runTime.total = stats.endTime - stats.startTime
- opts.log.silly(
- 'verify',
- 'verification finished for',
- cache,
- 'in',
- `${stats.runTime.total}ms`
- )
- return stats
-}
-
-async function markStartTime () {
- return { startTime: new Date() }
-}
-
-async function markEndTime () {
- return { endTime: new Date() }
-}
-
-async function fixPerms (cache, opts) {
- opts.log.silly('verify', 'fixing cache permissions')
- await mkdir(cache, { recursive: true })
- return null
-}
-
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rm it.
-//
-async function garbageCollect (cache, opts) {
- opts.log.silly('verify', 'garbage collecting content')
- const indexStream = index.lsStream(cache)
- const liveContent = new Set()
- indexStream.on('data', (entry) => {
- if (opts.filter && !opts.filter(entry)) {
- return
- }
-
- // integrity is stringified, re-parse it so we can get each hash
- const integrity = ssri.parse(entry.integrity)
- for (const algo in integrity) {
- liveContent.add(integrity[algo].toString())
- }
- })
- await new Promise((resolve, reject) => {
- indexStream.on('end', resolve).on('error', reject)
- })
- const contentDir = contentPath.contentDir(cache)
- const files = await glob(path.join(contentDir, '**'), {
- follow: false,
- nodir: true,
- nosort: true,
- })
- const stats = {
- verifiedContent: 0,
- reclaimedCount: 0,
- reclaimedSize: 0,
- badContentCount: 0,
- keptSize: 0,
- }
- await pMap(
- files,
- async (f) => {
- const split = f.split(/[/\\]/)
- const digest = split.slice(split.length - 3).join('')
- const algo = split[split.length - 4]
- const integrity = ssri.fromHex(digest, algo)
- if (liveContent.has(integrity.toString())) {
- const info = await verifyContent(f, integrity)
- if (!info.valid) {
- stats.reclaimedCount++
- stats.badContentCount++
- stats.reclaimedSize += info.size
- } else {
- stats.verifiedContent++
- stats.keptSize += info.size
- }
- } else {
- // No entries refer to this content. We can delete.
- stats.reclaimedCount++
- const s = await stat(f)
- await rm(f, { recursive: true, force: true })
- stats.reclaimedSize += s.size
- }
- return stats
- },
- { concurrency: opts.concurrency }
- )
- return stats
-}
-
-async function verifyContent (filepath, sri) {
- const contentInfo = {}
- try {
- const { size } = await stat(filepath)
- contentInfo.size = size
- contentInfo.valid = true
- await ssri.checkStream(new fsm.ReadStream(filepath), sri)
- } catch (err) {
- if (err.code === 'ENOENT') {
- return { size: 0, valid: false }
- }
- if (err.code !== 'EINTEGRITY') {
- throw err
- }
-
- await rm(filepath, { recursive: true, force: true })
- contentInfo.valid = false
- }
- return contentInfo
-}
-
-async function rebuildIndex (cache, opts) {
- opts.log.silly('verify', 'rebuilding index')
- const entries = await index.ls(cache)
- const stats = {
- missingContent: 0,
- rejectedEntries: 0,
- totalEntries: 0,
- }
- const buckets = {}
- for (const k in entries) {
- /* istanbul ignore else */
- if (hasOwnProperty(entries, k)) {
- const hashed = index.hashKey(k)
- const entry = entries[k]
- const excluded = opts.filter && !opts.filter(entry)
- excluded && stats.rejectedEntries++
- if (buckets[hashed] && !excluded) {
- buckets[hashed].push(entry)
- } else if (buckets[hashed] && excluded) {
- // skip
- } else if (excluded) {
- buckets[hashed] = []
- buckets[hashed]._path = index.bucketPath(cache, k)
- } else {
- buckets[hashed] = [entry]
- buckets[hashed]._path = index.bucketPath(cache, k)
- }
- }
- }
- await pMap(
- Object.keys(buckets),
- (key) => {
- return rebuildBucket(cache, buckets[key], stats, opts)
- },
- { concurrency: opts.concurrency }
- )
- return stats
-}
-
-async function rebuildBucket (cache, bucket, stats) {
- await truncate(bucket._path)
- // This needs to be serialized because cacache explicitly
- // lets very racy bucket conflicts clobber each other.
- for (const entry of bucket) {
- const content = contentPath(cache, entry.integrity)
- try {
- await stat(content)
- await index.insert(cache, entry.key, entry.integrity, {
- metadata: entry.metadata,
- size: entry.size,
- time: entry.time,
- })
- stats.totalEntries++
- } catch (err) {
- if (err.code === 'ENOENT') {
- stats.rejectedEntries++
- stats.missingContent++
- } else {
- throw err
- }
- }
- }
-}
-
-function cleanTmp (cache, opts) {
- opts.log.silly('verify', 'cleaning tmp directory')
- return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
-}
-
-async function writeVerifile (cache, opts) {
- const verifile = path.join(cache, '_lastverified')
- opts.log.silly('verify', 'writing verifile to ' + verifile)
- return writeFile(verifile, `${Date.now()}`)
-}
-
-module.exports.lastRun = lastRun
-
-async function lastRun (cache) {
- const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
- return new Date(+data)
-}
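
// [Editor's sketch, not part of the deleted file] Hypothetical run of the
// verify pipeline above; the cache path is a placeholder. The returned
// stats merge each step's output, and lastRun() reads back the
// _lastverified timestamp written by writeVerifile.
const verify = require('./verify.js')
verify('/path/to/cache', { concurrency: 10 })
  .then((stats) => console.log('reclaimed', stats.reclaimedCount, 'items'))
  .then(() => verify.lastRun('/path/to/cache'))
  .then((when) => console.log('last verified', when.toISOString()))
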
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/package.json
deleted file mode 100644
index 6e6219158ed759..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
- "name": "cacache",
- "version": "18.0.4",
- "cache-version": {
- "content": "2",
- "index": "5"
- },
- "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
- "main": "lib/index.js",
- "files": [
- "bin/",
- "lib/"
- ],
- "scripts": {
- "test": "tap",
- "snap": "tap",
- "coverage": "tap",
- "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "npmclilint": "npmcli-lint",
- "lintfix": "npm run lint -- --fix",
- "postsnap": "npm run lintfix --",
- "postlint": "template-oss-check",
- "posttest": "npm run lint",
- "template-oss-apply": "template-oss-apply --force"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/cacache.git"
- },
- "keywords": [
- "cache",
- "caching",
- "content-addressable",
- "sri",
- "sri hash",
- "subresource integrity",
- "cache",
- "storage",
- "store",
- "file store",
- "filesystem",
- "disk cache",
- "disk storage"
- ],
- "license": "ISC",
- "dependencies": {
- "@npmcli/fs": "^3.1.0",
- "fs-minipass": "^3.0.0",
- "glob": "^10.2.2",
- "lru-cache": "^10.0.1",
- "minipass": "^7.0.3",
- "minipass-collect": "^2.0.1",
- "minipass-flush": "^1.0.5",
- "minipass-pipeline": "^1.2.4",
- "p-map": "^4.0.0",
- "ssri": "^10.0.0",
- "tar": "^6.1.11",
- "unique-filename": "^3.0.0"
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.22.0",
- "tap": "^16.0.0"
- },
- "engines": {
- "node": "^16.14.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "windowsCI": false,
- "version": "4.22.0",
- "publish": "true"
- },
- "author": "GitHub Inc.",
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- }
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE
deleted file mode 100644
index 1808eb2844231c..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright 2017-2022 (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js
deleted file mode 100644
index bfcfacbcc95e18..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js
+++ /dev/null
@@ -1,471 +0,0 @@
-const { Request, Response } = require('minipass-fetch')
-const { Minipass } = require('minipass')
-const MinipassFlush = require('minipass-flush')
-const cacache = require('cacache')
-const url = require('url')
-
-const CachingMinipassPipeline = require('../pipeline.js')
-const CachePolicy = require('./policy.js')
-const cacheKey = require('./key.js')
-const remote = require('../remote.js')
-
-const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
-
-// allow list for request headers that will be written to the cache index
-// note: we will also store any request headers
-// that are named in a response's vary header
-const KEEP_REQUEST_HEADERS = [
- 'accept-charset',
- 'accept-encoding',
- 'accept-language',
- 'accept',
- 'cache-control',
-]
-
-// allow list for response headers that will be written to the cache index
-// note: we must not store the real response's age header, or when we load
-// a cache policy based on the metadata it will think the cached response
-// is always stale
-const KEEP_RESPONSE_HEADERS = [
- 'cache-control',
- 'content-encoding',
- 'content-language',
- 'content-type',
- 'date',
- 'etag',
- 'expires',
- 'last-modified',
- 'link',
- 'location',
- 'pragma',
- 'vary',
-]
-
-// return an object containing all metadata to be written to the index
-const getMetadata = (request, response, options) => {
- const metadata = {
- time: Date.now(),
- url: request.url,
- reqHeaders: {},
- resHeaders: {},
-
- // options on which we must match the request and vary the response
- options: {
- compress: options.compress != null ? options.compress : request.compress,
- },
- }
-
- // only save the status if it's not a 200 or 304
- if (response.status !== 200 && response.status !== 304) {
- metadata.status = response.status
- }
-
- for (const name of KEEP_REQUEST_HEADERS) {
- if (request.headers.has(name)) {
- metadata.reqHeaders[name] = request.headers.get(name)
- }
- }
-
- // if the request's host header differs from the host in the url
- // we need to keep it, otherwise it's just noise and we ignore it
- const host = request.headers.get('host')
- const parsedUrl = new url.URL(request.url)
- if (host && parsedUrl.host !== host) {
- metadata.reqHeaders.host = host
- }
-
- // if the response has a vary header, make sure
- // we store the relevant request headers too
- if (response.headers.has('vary')) {
- const vary = response.headers.get('vary')
- // a vary of "*" means every header causes a different response.
- // in that scenario, we do not include any additional headers
- // as the freshness check will always fail anyway and we don't
- // want to bloat the cache indexes
- if (vary !== '*') {
- // copy any other request headers that will vary the response
- const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
- for (const name of varyHeaders) {
- if (request.headers.has(name)) {
- metadata.reqHeaders[name] = request.headers.get(name)
- }
- }
- }
- }
-
- for (const name of KEEP_RESPONSE_HEADERS) {
- if (response.headers.has(name)) {
- metadata.resHeaders[name] = response.headers.get(name)
- }
- }
-
- for (const name of options.cacheAdditionalHeaders) {
- if (response.headers.has(name)) {
- metadata.resHeaders[name] = response.headers.get(name)
- }
- }
-
- return metadata
-}
-
-// symbols used to hide objects that may be lazily evaluated in a getter
-const _request = Symbol('request')
-const _response = Symbol('response')
-const _policy = Symbol('policy')
-
-class CacheEntry {
- constructor ({ entry, request, response, options }) {
- if (entry) {
- this.key = entry.key
- this.entry = entry
- // previous versions of this module didn't write an explicit timestamp in
- // the metadata, so fall back to the entry's timestamp. we can't use the
- // entry timestamp to determine staleness because cacache will update it
- // when it verifies its data
- this.entry.metadata.time = this.entry.metadata.time || this.entry.time
- } else {
- this.key = cacheKey(request)
- }
-
- this.options = options
-
- // these properties are behind getters that lazily evaluate
- this[_request] = request
- this[_response] = response
- this[_policy] = null
- }
-
- // returns a CacheEntry instance that satisfies the given request
- // or undefined if no existing entry satisfies
- static async find (request, options) {
- try {
- // compacts the index and returns an array of unique entries
- var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
- const entryA = new CacheEntry({ entry: A, options })
- const entryB = new CacheEntry({ entry: B, options })
- return entryA.policy.satisfies(entryB.request)
- }, {
- validateEntry: (entry) => {
- // clean out entries with a buggy content-encoding value
- if (entry.metadata &&
- entry.metadata.resHeaders &&
- entry.metadata.resHeaders['content-encoding'] === null) {
- return false
- }
-
- // if an integrity is null, it needs to have a status specified
- if (entry.integrity === null) {
- return !!(entry.metadata && entry.metadata.status)
- }
-
- return true
- },
- })
- } catch (err) {
- // if the compact request fails, ignore the error and return
- return
- }
-
- // a cache mode of 'reload' means to behave as though we have no cache
- // on the way to the network. return undefined to allow cacheFetch to
- // create a brand new request no matter what.
- if (options.cache === 'reload') {
- return
- }
-
- // find the specific entry that satisfies the request
- let match
- for (const entry of matches) {
- const _entry = new CacheEntry({
- entry,
- options,
- })
-
- if (_entry.policy.satisfies(request)) {
- match = _entry
- break
- }
- }
-
- return match
- }
-
- // if the user made a PUT/POST/PATCH then we invalidate our
- // cache for the same url by deleting the index entirely
- static async invalidate (request, options) {
- const key = cacheKey(request)
- try {
- await cacache.rm.entry(options.cachePath, key, { removeFully: true })
- } catch (err) {
- // ignore errors
- }
- }
-
- get request () {
- if (!this[_request]) {
- this[_request] = new Request(this.entry.metadata.url, {
- method: 'GET',
- headers: this.entry.metadata.reqHeaders,
- ...this.entry.metadata.options,
- })
- }
-
- return this[_request]
- }
-
- get response () {
- if (!this[_response]) {
- this[_response] = new Response(null, {
- url: this.entry.metadata.url,
- counter: this.options.counter,
- status: this.entry.metadata.status || 200,
- headers: {
- ...this.entry.metadata.resHeaders,
- 'content-length': this.entry.size,
- },
- })
- }
-
- return this[_response]
- }
-
- get policy () {
- if (!this[_policy]) {
- this[_policy] = new CachePolicy({
- entry: this.entry,
- request: this.request,
- response: this.response,
- options: this.options,
- })
- }
-
- return this[_policy]
- }
-
- // wraps the response in a pipeline that stores the data
- // in the cache while the user consumes it
- async store (status) {
- // if we got a status other than 200, 301, or 308,
- // or the CachePolicy forbid storage, append the
- // cache status header and return it untouched
- if (
- this.request.method !== 'GET' ||
- ![200, 301, 308].includes(this.response.status) ||
- !this.policy.storable()
- ) {
- this.response.headers.set('x-local-cache-status', 'skip')
- return this.response
- }
-
- const size = this.response.headers.get('content-length')
- const cacheOpts = {
- algorithms: this.options.algorithms,
- metadata: getMetadata(this.request, this.response, this.options),
- size,
- integrity: this.options.integrity,
- integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
- }
-
- let body = null
- // we only set a body if the status is a 200, redirects are
- // stored as metadata only
- if (this.response.status === 200) {
- let cacheWriteResolve, cacheWriteReject
- const cacheWritePromise = new Promise((resolve, reject) => {
- cacheWriteResolve = resolve
- cacheWriteReject = reject
- }).catch((err) => {
- body.emit('error', err)
- })
-
- body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
- flush () {
- return cacheWritePromise
- },
- }))
- // this is always true since if we aren't reusing the one from the remote fetch, we
- // are using the one from cacache
- body.hasIntegrityEmitter = true
-
- const onResume = () => {
- const tee = new Minipass()
- const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
- // re-emit the integrity and size events on our new response body so they can be reused
- cacheStream.on('integrity', i => body.emit('integrity', i))
- cacheStream.on('size', s => body.emit('size', s))
- // stick a flag on here so downstream users will know if they can expect integrity events
- tee.pipe(cacheStream)
- // TODO if the cache write fails, log a warning but return the response anyway
- // eslint-disable-next-line promise/catch-or-return
- cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
- body.unshift(tee)
- body.unshift(this.response.body)
- }
-
- body.once('resume', onResume)
- body.once('end', () => body.removeListener('resume', onResume))
- } else {
- await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
- }
-
- // note: we do not set the x-local-cache-hash header because we do not know
- // the hash value until after the write to the cache completes, which doesn't
- // happen until after the response has been sent and it's too late to write
- // the header anyway
- this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
- this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
- this.response.headers.set('x-local-cache-mode', 'stream')
- this.response.headers.set('x-local-cache-status', status)
- this.response.headers.set('x-local-cache-time', new Date().toISOString())
- const newResponse = new Response(body, {
- url: this.response.url,
- status: this.response.status,
- headers: this.response.headers,
- counter: this.options.counter,
- })
- return newResponse
- }
-
- // use the cached data to create a response and return it
- async respond (method, options, status) {
- let response
- if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
- // if the request is a HEAD, or the response is a redirect,
- // then the metadata in the entry already includes everything
- // we need to build a response
- response = this.response
- } else {
- // we're responding with a full cached response, so create a body
- // that reads from cacache and attach it to a new Response
- const body = new Minipass()
- const headers = { ...this.policy.responseHeaders() }
-
- const onResume = () => {
- const cacheStream = cacache.get.stream.byDigest(
- this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
- )
- cacheStream.on('error', async (err) => {
- cacheStream.pause()
- if (err.code === 'EINTEGRITY') {
- await cacache.rm.content(
- this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
- )
- }
- if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
- await CacheEntry.invalidate(this.request, this.options)
- }
- body.emit('error', err)
- cacheStream.resume()
- })
- // emit the integrity and size events based on our metadata so we're consistent
- body.emit('integrity', this.entry.integrity)
- body.emit('size', Number(headers['content-length']))
- cacheStream.pipe(body)
- }
-
- body.once('resume', onResume)
- body.once('end', () => body.removeListener('resume', onResume))
- response = new Response(body, {
- url: this.entry.metadata.url,
- counter: options.counter,
- status: 200,
- headers,
- })
- }
-
- response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
- response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
- response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
- response.headers.set('x-local-cache-mode', 'stream')
- response.headers.set('x-local-cache-status', status)
- response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
- return response
- }
-
- // use the provided request along with this cache entry to
- // revalidate the stored response. returns a response, either
- // from the cache or from the update
- async revalidate (request, options) {
- const revalidateRequest = new Request(request, {
- headers: this.policy.revalidationHeaders(request),
- })
-
- try {
- // NOTE: be sure to remove the headers property from the
- // user supplied options, since we have already defined
- // them on the new request object. if they're still in the
- // options then those will overwrite the ones from the policy
- var response = await remote(revalidateRequest, {
- ...options,
- headers: undefined,
- })
- } catch (err) {
- // if the network fetch fails, return the stale
- // cached response unless it has a cache-control
- // of 'must-revalidate'
- if (!this.policy.mustRevalidate) {
- return this.respond(request.method, options, 'stale')
- }
-
- throw err
- }
-
- if (this.policy.revalidated(revalidateRequest, response)) {
- // we got a 304, write a new index to the cache and respond from cache
- const metadata = getMetadata(request, response, options)
- // 304 responses do not include headers that are specific to the response data
- // since they do not include a body, so we copy values for headers that were
- // in the old cache entry to the new one, if the new metadata does not already
- // include that header
- for (const name of KEEP_RESPONSE_HEADERS) {
- if (
- !hasOwnProperty(metadata.resHeaders, name) &&
- hasOwnProperty(this.entry.metadata.resHeaders, name)
- ) {
- metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
- }
- }
-
- for (const name of options.cacheAdditionalHeaders) {
- const inMeta = hasOwnProperty(metadata.resHeaders, name)
- const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
- const inPolicy = hasOwnProperty(this.policy.response.headers, name)
-
- // if the header is in the existing entry, but it is not in the metadata
- // then we need to write it to the metadata as this will refresh the on-disk cache
- if (!inMeta && inEntry) {
- metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
- }
- // if the header is in the metadata, but not in the policy, then we need to set
- // it in the policy so that it's included in the immediate response. future
- // responses will load a new cache entry, so we don't need to change that
- if (!inPolicy && inMeta) {
- this.policy.response.headers[name] = metadata.resHeaders[name]
- }
- }
-
- try {
- await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
- size: this.entry.size,
- metadata,
- })
- } catch (err) {
- // if updating the cache index fails, we ignore it and
- // respond anyway
- }
- return this.respond(request.method, options, 'revalidated')
- }
-
- // if we got a modified response, create a new entry based on it
- const newEntry = new CacheEntry({
- request,
- response,
- options,
- })
-
- // respond with the new entry while writing it to the cache
- return newEntry.store('updated')
- }
-}
-
-module.exports = CacheEntry
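
As a usage-level sketch (not part of the patch): the cache statuses recorded by `store()`, `respond()`, and `revalidate()` above surface on responses as `x-local-cache-*` headers. The URL and cache path below are placeholders:

    const fetch = require('make-fetch-happen')

    async function demo () {
      const res = await fetch('https://registry.npmjs.org/-/ping', {
        cachePath: '/tmp/example-cache', // hypothetical cache directory
      })
      // 'miss' on the first request; 'hit', 'stale', or 'revalidated' after
      console.log(res.headers.get('x-local-cache-status'))
      console.log(res.headers.get('x-local-cache-key'))
    }
    demo()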
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js
deleted file mode 100644
index 67a66573bebe66..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js
+++ /dev/null
@@ -1,11 +0,0 @@
-class NotCachedError extends Error {
- constructor (url) {
- /* eslint-disable-next-line max-len */
- super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
- this.code = 'ENOTCACHED'
- }
-}
-
-module.exports = {
- NotCachedError,
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js
deleted file mode 100644
index 0de49d23fb9336..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const { NotCachedError } = require('./errors.js')
-const CacheEntry = require('./entry.js')
-const remote = require('../remote.js')
-
-// do whatever is necessary to get a Response and return it
-const cacheFetch = async (request, options) => {
- // try to find a cached entry that satisfies this request
- const entry = await CacheEntry.find(request, options)
- if (!entry) {
- // no cached result, if the cache mode is 'only-if-cached' that's a failure
- if (options.cache === 'only-if-cached') {
- throw new NotCachedError(request.url)
- }
-
- // otherwise, we make a request, store it and return it
- const response = await remote(request, options)
- const newEntry = new CacheEntry({ request, response, options })
- return newEntry.store('miss')
- }
-
- // we have a cached response that satisfies this request, however if the cache
- // mode is 'no-cache' then we send the revalidation request no matter what
- if (options.cache === 'no-cache') {
- return entry.revalidate(request, options)
- }
-
- // if the cached entry is not stale, or if the cache mode is 'force-cache' or
- // 'only-if-cached' we can respond with the cached entry. set the status
- // based on the result of needsRevalidation and respond
- const _needsRevalidation = entry.policy.needsRevalidation(request)
- if (options.cache === 'force-cache' ||
- options.cache === 'only-if-cached' ||
- !_needsRevalidation) {
- return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
- }
-
- // if we got here, the cache entry is stale so revalidate it
- return entry.revalidate(request, options)
-}
-
-cacheFetch.invalidate = async (request, options) => {
- if (!options.cachePath) {
- return
- }
-
- return CacheEntry.invalidate(request, options)
-}
-
-module.exports = cacheFetch
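
A short sketch of the fetch-style cache modes the dispatcher above distinguishes, with placeholder URL and cache path:

    const fetch = require('make-fetch-happen')

    async function demo (url) {
      const opts = { cachePath: '/tmp/example-cache' } // hypothetical
      await fetch(url, { ...opts, cache: 'no-cache' }) // always revalidates
      await fetch(url, { ...opts, cache: 'force-cache' }) // serves even stale entries
      await fetch(url, { ...opts, cache: 'only-if-cached' }) // ENOTCACHED on a miss
    }
    demo('https://registry.npmjs.org/-/ping')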
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js
deleted file mode 100644
index f7684d562b7fae..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js
+++ /dev/null
@@ -1,17 +0,0 @@
-const { URL, format } = require('url')
-
-// options passed to url.format() when generating a key
-const formatOptions = {
- auth: false,
- fragment: false,
- search: true,
- unicode: false,
-}
-
-// returns a string to be used as the cache key for the Request
-const cacheKey = (request) => {
- const parsed = new URL(request.url)
- return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
-}
-
-module.exports = cacheKey
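
The key scheme above is easy to reproduce standalone; a sketch showing how auth and fragment are dropped while the query string is kept:

    const { URL, format } = require('url')

    const formatOptions = { auth: false, fragment: false, search: true, unicode: false }
    const key = `make-fetch-happen:request-cache:${
      format(new URL('https://user:pass@example.com/pkg?v=1#frag'), formatOptions)}`
    console.log(key)
    // make-fetch-happen:request-cache:https://example.com/pkg?v=1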
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js
deleted file mode 100644
index ada3c8600dae92..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js
+++ /dev/null
@@ -1,161 +0,0 @@
-const CacheSemantics = require('http-cache-semantics')
-const Negotiator = require('negotiator')
-const ssri = require('ssri')
-
-// options passed to http-cache-semantics constructor
-const policyOptions = {
- shared: false,
- ignoreCargoCult: true,
-}
-
-// a fake empty response, used when only testing the
-// request for storability
-const emptyResponse = { status: 200, headers: {} }
-
-// returns a plain object representation of the Request
-const requestObject = (request) => {
- const _obj = {
- method: request.method,
- url: request.url,
- headers: {},
- compress: request.compress,
- }
-
- request.headers.forEach((value, key) => {
- _obj.headers[key] = value
- })
-
- return _obj
-}
-
-// returns a plain object representation of the Response
-const responseObject = (response) => {
- const _obj = {
- status: response.status,
- headers: {},
- }
-
- response.headers.forEach((value, key) => {
- _obj.headers[key] = value
- })
-
- return _obj
-}
-
-class CachePolicy {
- constructor ({ entry, request, response, options }) {
- this.entry = entry
- this.request = requestObject(request)
- this.response = responseObject(response)
- this.options = options
- this.policy = new CacheSemantics(this.request, this.response, policyOptions)
-
- if (this.entry) {
- // if we have an entry, copy the timestamp to the _responseTime
- // this is necessary because the CacheSemantics constructor forces
- // the value to Date.now() which means a policy created from a
- // cache entry is likely to always identify itself as stale
- this.policy._responseTime = this.entry.metadata.time
- }
- }
-
- // static method to quickly determine if a request alone is storable
- static storable (request, options) {
- // no cachePath means no caching
- if (!options.cachePath) {
- return false
- }
-
- // user explicitly asked not to cache
- if (options.cache === 'no-store') {
- return false
- }
-
- // we only cache GET and HEAD requests
- if (!['GET', 'HEAD'].includes(request.method)) {
- return false
- }
-
- // otherwise, let http-cache-semantics make the decision
- // based on the request's headers
- const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
- return policy.storable()
- }
-
- // returns true if the policy satisfies the request
- satisfies (request) {
- const _req = requestObject(request)
- if (this.request.headers.host !== _req.headers.host) {
- return false
- }
-
- if (this.request.compress !== _req.compress) {
- return false
- }
-
- const negotiatorA = new Negotiator(this.request)
- const negotiatorB = new Negotiator(_req)
-
- if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
- return false
- }
-
- if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
- return false
- }
-
- if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
- return false
- }
-
- if (this.options.integrity) {
- return ssri.parse(this.options.integrity).match(this.entry.integrity)
- }
-
- return true
- }
-
- // returns true if the request and response allow caching
- storable () {
- return this.policy.storable()
- }
-
- // NOTE: this is a hack to avoid parsing the cache-control
- // header ourselves, it returns true if the response's
- // cache-control contains must-revalidate
- get mustRevalidate () {
- return !!this.policy._rescc['must-revalidate']
- }
-
- // returns true if the cached response requires revalidation
- // for the given request
- needsRevalidation (request) {
- const _req = requestObject(request)
- // force method to GET because we only cache GETs
- // but can serve a HEAD from a cached GET
- _req.method = 'GET'
- return !this.policy.satisfiesWithoutRevalidation(_req)
- }
-
- responseHeaders () {
- return this.policy.responseHeaders()
- }
-
- // returns a new object containing the appropriate headers
- // to send a revalidation request
- revalidationHeaders (request) {
- const _req = requestObject(request)
- return this.policy.revalidationHeaders(_req)
- }
-
- // returns true if the request/response was revalidated
- // successfully. returns false if a new response was received
- revalidated (request, response) {
- const _req = requestObject(request)
- const _res = responseObject(response)
- const policy = this.policy.revalidatedPolicy(_req, _res)
- return !policy.modified
- }
-}
-
-module.exports = CachePolicy
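
The storability and freshness checks above delegate to http-cache-semantics. A minimal sketch of that underlying API using plain header objects:

    const CacheSemantics = require('http-cache-semantics')

    const request = { method: 'GET', url: '/pkg', headers: {} }
    const response = { status: 200, headers: { 'cache-control': 'max-age=300' } }
    const policy = new CacheSemantics(request, response, {
      shared: false,
      ignoreCargoCult: true,
    })
    console.log(policy.storable()) // true
    // true while the entry is fresh; false once revalidation is required
    console.log(policy.satisfiesWithoutRevalidation(request))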
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js
deleted file mode 100644
index 233ba67e165502..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js
+++ /dev/null
@@ -1,118 +0,0 @@
-'use strict'
-
-const { FetchError, Request, isRedirect } = require('minipass-fetch')
-const url = require('url')
-
-const CachePolicy = require('./cache/policy.js')
-const cache = require('./cache/index.js')
-const remote = require('./remote.js')
-
-// given a Request, a Response and user options
-// return true if the response is a redirect that
-// can be followed. we throw errors that will result
-// in the fetch being rejected if the redirect is
-// possible but invalid for some reason
-const canFollowRedirect = (request, response, options) => {
- if (!isRedirect(response.status)) {
- return false
- }
-
- if (options.redirect === 'manual') {
- return false
- }
-
- if (options.redirect === 'error') {
- throw new FetchError(`redirect mode is set to error: ${request.url}`,
- 'no-redirect', { code: 'ENOREDIRECT' })
- }
-
- if (!response.headers.has('location')) {
- throw new FetchError(`redirect location header missing for: ${request.url}`,
- 'no-location', { code: 'EINVALIDREDIRECT' })
- }
-
- if (request.counter >= request.follow) {
- throw new FetchError(`maximum redirect reached at: ${request.url}`,
- 'max-redirect', { code: 'EMAXREDIRECT' })
- }
-
- return true
-}
-
-// given a Request, a Response, and the user's options return an object
-// with a new Request and a new options object that will be used for
-// following the redirect
-const getRedirect = (request, response, options) => {
- const _opts = { ...options }
- const location = response.headers.get('location')
- const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
- // Comment below is used under the following license:
- /**
- * @license
- * Copyright (c) 2010-2012 Mikeal Rogers
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an "AS
- * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language
- * governing permissions and limitations under the License.
- */
-
- // Remove authorization if changing hostnames (but not if just
- // changing ports or protocols). This matches the behavior of request:
- // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
- if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
- request.headers.delete('authorization')
- request.headers.delete('cookie')
- }
-
- // for POST request with 301/302 response, or any request with 303 response,
- // use GET when following redirect
- if (
- response.status === 303 ||
- (request.method === 'POST' && [301, 302].includes(response.status))
- ) {
- _opts.method = 'GET'
- _opts.body = null
- request.headers.delete('content-length')
- }
-
- _opts.headers = {}
- request.headers.forEach((value, key) => {
- _opts.headers[key] = value
- })
-
- _opts.counter = ++request.counter
- const redirectReq = new Request(url.format(redirectUrl), _opts)
- return {
- request: redirectReq,
- options: _opts,
- }
-}
-
-const fetch = async (request, options) => {
- const response = CachePolicy.storable(request, options)
- ? await cache(request, options)
- : await remote(request, options)
-
- // if the request wasn't a GET or HEAD, and the response
- // status is between 200 and 399 inclusive, invalidate the
- // request url
- if (!['GET', 'HEAD'].includes(request.method) &&
- response.status >= 200 &&
- response.status <= 399) {
- await cache.invalidate(request, options)
- }
-
- if (!canFollowRedirect(request, response, options)) {
- return response
- }
-
- const redirect = getRedirect(request, response, options)
- return fetch(redirect.request, redirect.options)
-}
-
-module.exports = fetch
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js
deleted file mode 100644
index 2f12e8e1b61131..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const { FetchError, Headers, Request, Response } = require('minipass-fetch')
-
-const configureOptions = require('./options.js')
-const fetch = require('./fetch.js')
-
-const makeFetchHappen = (url, opts) => {
- const options = configureOptions(opts)
-
- const request = new Request(url, options)
- return fetch(request, options)
-}
-
-makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
- if (typeof defaultUrl === 'object') {
- defaultOptions = defaultUrl
- defaultUrl = null
- }
-
- const defaultedFetch = (url, options = {}) => {
- const finalUrl = url || defaultUrl
- const finalOptions = {
- ...defaultOptions,
- ...options,
- headers: {
- ...defaultOptions.headers,
- ...options.headers,
- },
- }
- return wrappedFetch(finalUrl, finalOptions)
- }
-
- defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
- makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
- return defaultedFetch
-}
-
-module.exports = makeFetchHappen
-module.exports.FetchError = FetchError
-module.exports.Headers = Headers
-module.exports.Request = Request
-module.exports.Response = Response
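
A usage sketch of the `defaults()` chaining implemented above (URL and headers are placeholders). Per-call options and headers are shallow-merged over the stored defaults:

    const makeFetchHappen = require('make-fetch-happen')

    const myFetch = makeFetchHappen.defaults({
      cachePath: '/tmp/example-cache', // hypothetical
      headers: { 'user-agent': 'example/1.0.0' },
    })
    // the per-call header map merges with, and can override, the defaults
    myFetch('https://registry.npmjs.org/-/ping', { headers: { pragma: 'no-cache' } })
      .then(res => console.log(res.status))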
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js
deleted file mode 100644
index f77511279f831d..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js
+++ /dev/null
@@ -1,54 +0,0 @@
-const dns = require('dns')
-
-const conditionalHeaders = [
- 'if-modified-since',
- 'if-none-match',
- 'if-unmodified-since',
- 'if-match',
- 'if-range',
-]
-
-const configureOptions = (opts) => {
- const { strictSSL, ...options } = { ...opts }
- options.method = options.method ? options.method.toUpperCase() : 'GET'
- options.rejectUnauthorized = strictSSL !== false
-
- if (!options.retry) {
- options.retry = { retries: 0 }
- } else if (typeof options.retry === 'string') {
- const retries = parseInt(options.retry, 10)
- if (isFinite(retries)) {
- options.retry = { retries }
- } else {
- options.retry = { retries: 0 }
- }
- } else if (typeof options.retry === 'number') {
- options.retry = { retries: options.retry }
- } else {
- options.retry = { retries: 0, ...options.retry }
- }
-
- options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
-
- options.cache = options.cache || 'default'
- if (options.cache === 'default') {
- const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
- return conditionalHeaders.includes(name.toLowerCase())
- })
- if (hasConditionalHeader) {
- options.cache = 'no-store'
- }
- }
-
- options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
-
- // cacheManager is deprecated, but if it's set and
- // cachePath is not we should copy it to the new field
- if (options.cacheManager && !options.cachePath) {
- options.cachePath = options.cacheManager
- }
-
- return options
-}
-
-module.exports = configureOptions
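
The `retry` option above accepts several spellings; a standalone sketch of the normalization rules, mirroring the branches in `configureOptions()`:

    function normalizeRetry (retry) {
      if (!retry) {
        return { retries: 0 }
      }
      if (typeof retry === 'string') {
        const retries = parseInt(retry, 10)
        return isFinite(retries) ? { retries } : { retries: 0 }
      }
      if (typeof retry === 'number') {
        return { retries: retry }
      }
      return { retries: 0, ...retry }
    }

    console.log(normalizeRetry('3')) // { retries: 3 }
    console.log(normalizeRetry(5)) // { retries: 5 }
    console.log(normalizeRetry({ minTimeout: 100 })) // { retries: 0, minTimeout: 100 }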
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js
deleted file mode 100644
index b1d221b2d0ce31..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js
+++ /dev/null
@@ -1,41 +0,0 @@
-'use strict'
-
-const MinipassPipeline = require('minipass-pipeline')
-
-class CachingMinipassPipeline extends MinipassPipeline {
- #events = []
- #data = new Map()
-
- constructor (opts, ...streams) {
- // CRITICAL: do NOT pass the streams to the call to super(), this will start
- // the flow of data and potentially cause the events we need to catch to emit
- // before we've finished our own setup. instead we call super() with no args,
- // finish our setup, and then push the streams into ourselves to start the
- // data flow
- super()
- this.#events = opts.events
-
- /* istanbul ignore next - coverage disabled because this is pointless to test here */
- if (streams.length) {
- this.push(...streams)
- }
- }
-
- on (event, handler) {
- if (this.#events.includes(event) && this.#data.has(event)) {
- return handler(...this.#data.get(event))
- }
-
- return super.on(event, handler)
- }
-
- emit (event, ...data) {
- if (this.#events.includes(event)) {
- this.#data.set(event, data)
- }
-
- return super.emit(event, ...data)
- }
-}
-
-module.exports = CachingMinipassPipeline
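
A sketch of the late-listener behavior the class above adds: events named in `opts.events` are recorded on emit and replayed to handlers attached afterwards. The require path assumes the lib/ layout being removed here:

    const CachingMinipassPipeline = require('make-fetch-happen/lib/pipeline.js')

    const pipeline = new CachingMinipassPipeline({ events: ['integrity', 'size'] })
    pipeline.emit('size', 1024)
    // attached after the emit, yet still receives the cached payload
    pipeline.on('size', s => console.log('size:', s))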
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js
deleted file mode 100644
index 8554564074de6e..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js
+++ /dev/null
@@ -1,131 +0,0 @@
-const { Minipass } = require('minipass')
-const fetch = require('minipass-fetch')
-const promiseRetry = require('promise-retry')
-const ssri = require('ssri')
-const { log } = require('proc-log')
-
-const CachingMinipassPipeline = require('./pipeline.js')
-const { getAgent } = require('@npmcli/agent')
-const pkg = require('../package.json')
-
-const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
-
-const RETRY_ERRORS = [
- 'ECONNRESET', // remote socket closed on us
- 'ECONNREFUSED', // remote host refused to open connection
- 'EADDRINUSE', // failed to bind to a local port (proxy?)
- 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
- // from @npmcli/agent
- 'ECONNECTIONTIMEOUT',
- 'EIDLETIMEOUT',
- 'ERESPONSETIMEOUT',
- 'ETRANSFERTIMEOUT',
- // Known codes we do NOT retry on:
- // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
- // EINVALIDPROXY // invalid protocol from @npmcli/agent
- // EINVALIDRESPONSE // invalid status code from @npmcli/agent
-]
-
-const RETRY_TYPES = [
- 'request-timeout',
-]
-
-// make a request directly to the remote source,
-// retrying certain classes of errors as well as
-// following redirects (through the cache if necessary)
-// and verifying response integrity
-const remoteFetch = (request, options) => {
- const agent = getAgent(request.url, options)
- if (!request.headers.has('connection')) {
- request.headers.set('connection', agent ? 'keep-alive' : 'close')
- }
-
- if (!request.headers.has('user-agent')) {
- request.headers.set('user-agent', USER_AGENT)
- }
-
- // keep our own options since we're overriding the agent
- // and the redirect mode
- const _opts = {
- ...options,
- agent,
- redirect: 'manual',
- }
-
- return promiseRetry(async (retryHandler, attemptNum) => {
- const req = new fetch.Request(request, _opts)
- try {
- let res = await fetch(req, _opts)
- if (_opts.integrity && res.status === 200) {
- // we got a 200 response and the user has specified an expected
- // integrity value, so wrap the response in an ssri stream to verify it
- const integrityStream = ssri.integrityStream({
- algorithms: _opts.algorithms,
- integrity: _opts.integrity,
- size: _opts.size,
- })
- const pipeline = new CachingMinipassPipeline({
- events: ['integrity', 'size'],
- }, res.body, integrityStream)
- // we also propagate the integrity and size events out to the pipeline so we can use
- // this new response body as an integrityEmitter for cacache
- integrityStream.on('integrity', i => pipeline.emit('integrity', i))
- integrityStream.on('size', s => pipeline.emit('size', s))
- res = new fetch.Response(pipeline, res)
- // set an explicit flag so we know if our response body will emit integrity and size
- res.body.hasIntegrityEmitter = true
- }
-
- res.headers.set('x-fetch-attempts', attemptNum)
-
- // do not retry POST requests, or requests with a streaming body
- // do retry requests with a 408, 420, 429 or 500+ status in the response
- const isStream = Minipass.isStream(req.body)
- const isRetriable = req.method !== 'POST' &&
- !isStream &&
- ([408, 420, 429].includes(res.status) || res.status >= 500)
-
- if (isRetriable) {
- if (typeof options.onRetry === 'function') {
- options.onRetry(res)
- }
-
- /* eslint-disable-next-line max-len */
- log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`)
- return retryHandler(res)
- }
-
- return res
- } catch (err) {
- const code = (err.code === 'EPROMISERETRY')
- ? err.retried.code
- : err.code
-
- // err.retried will be the thing that was thrown from above
- // if it's a response, we just got a bad status code and we
- // can re-throw to allow the retry
- const isRetryError = err.retried instanceof fetch.Response ||
- (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
-
- if (req.method === 'POST' || isRetryError) {
- throw err
- }
-
- if (typeof options.onRetry === 'function') {
- options.onRetry(err)
- }
-
- log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`)
- return retryHandler(err)
- }
- }, options.retry).catch((err) => {
- // don't reject for http errors, just return them
- if (err.status >= 400 && err.type !== 'system') {
- return err
- }
-
- throw err
- })
-}
-
-module.exports = remoteFetch
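
A usage sketch of the retry surface the fetcher above honors: `retry` takes promise-retry options, `onRetry` observes each failed attempt, and the final attempt count lands in the `x-fetch-attempts` header set above (placeholder URL):

    const fetch = require('make-fetch-happen')

    fetch('https://registry.npmjs.org/-/ping', {
      retry: { retries: 2, minTimeout: 100 },
      onRetry: errOrRes => console.log('retrying:', errOrRes.code || errOrRes.status),
    }).then(res => console.log('attempts:', res.headers.get('x-fetch-attempts')))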
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json
deleted file mode 100644
index 7adb4d1e7f9719..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json
+++ /dev/null
@@ -1,75 +0,0 @@
-{
- "name": "make-fetch-happen",
- "version": "13.0.1",
- "description": "Opinionated, caching, retrying fetch client",
- "main": "lib/index.js",
- "files": [
- "bin/",
- "lib/"
- ],
- "scripts": {
- "test": "tap",
- "posttest": "npm run lint",
- "eslint": "eslint",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "lintfix": "npm run lint -- --fix",
- "postlint": "template-oss-check",
- "snap": "tap",
- "template-oss-apply": "template-oss-apply --force"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/npm/make-fetch-happen.git"
- },
- "keywords": [
- "http",
- "request",
- "fetch",
- "mean girls",
- "caching",
- "cache",
- "subresource integrity"
- ],
- "author": "GitHub Inc.",
- "license": "ISC",
- "dependencies": {
- "@npmcli/agent": "^2.0.0",
- "cacache": "^18.0.0",
- "http-cache-semantics": "^4.1.1",
- "is-lambda": "^1.0.1",
- "minipass": "^7.0.2",
- "minipass-fetch": "^3.0.0",
- "minipass-flush": "^1.0.5",
- "minipass-pipeline": "^1.2.4",
- "negotiator": "^0.6.3",
- "proc-log": "^4.2.0",
- "promise-retry": "^2.0.1",
- "ssri": "^10.0.0"
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.21.4",
- "nock": "^13.2.4",
- "safe-buffer": "^5.2.1",
- "standard-version": "^9.3.2",
- "tap": "^16.0.0"
- },
- "engines": {
- "node": "^16.14.0 || >=18.0.0"
- },
- "tap": {
- "color": 1,
- "files": "test/*.js",
- "check-coverage": true,
- "timeout": 60,
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.21.4",
- "publish": "true"
- }
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE
deleted file mode 100644
index 3c3410cdc12ee3..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE
+++ /dev/null
@@ -1,28 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-Copyright (c) 2016 David Frank
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
----
-
-Note: This is a derivative work based on "node-fetch" by David Frank,
-modified and distributed under the terms of the MIT license above.
-https://github.com/bitinn/node-fetch
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js
deleted file mode 100644
index b18f643269e375..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js
+++ /dev/null
@@ -1,17 +0,0 @@
-'use strict'
-class AbortError extends Error {
- constructor (message) {
- super(message)
- this.code = 'FETCH_ABORTED'
- this.type = 'aborted'
- Error.captureStackTrace(this, this.constructor)
- }
-
- get name () {
- return 'AbortError'
- }
-
- // don't allow name to be overridden, but don't throw either
- set name (s) {}
-}
-module.exports = AbortError
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js
deleted file mode 100644
index 121b1730102e72..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js
+++ /dev/null
@@ -1,97 +0,0 @@
-'use strict'
-const { Minipass } = require('minipass')
-const TYPE = Symbol('type')
-const BUFFER = Symbol('buffer')
-
-class Blob {
- constructor (blobParts, options) {
- this[TYPE] = ''
-
- const buffers = []
- let size = 0
-
- if (blobParts) {
- const a = blobParts
- const length = Number(a.length)
- for (let i = 0; i < length; i++) {
- const element = a[i]
- const buffer = element instanceof Buffer ? element
- : ArrayBuffer.isView(element)
- ? Buffer.from(element.buffer, element.byteOffset, element.byteLength)
- : element instanceof ArrayBuffer ? Buffer.from(element)
- : element instanceof Blob ? element[BUFFER]
- : typeof element === 'string' ? Buffer.from(element)
- : Buffer.from(String(element))
- size += buffer.length
- buffers.push(buffer)
- }
- }
-
- this[BUFFER] = Buffer.concat(buffers, size)
-
- const type = options && options.type !== undefined
- && String(options.type).toLowerCase()
- if (type && !/[^\u0020-\u007E]/.test(type)) {
- this[TYPE] = type
- }
- }
-
- get size () {
- return this[BUFFER].length
- }
-
- get type () {
- return this[TYPE]
- }
-
- text () {
- return Promise.resolve(this[BUFFER].toString())
- }
-
- arrayBuffer () {
- const buf = this[BUFFER]
- const off = buf.byteOffset
- const len = buf.byteLength
- const ab = buf.buffer.slice(off, off + len)
- return Promise.resolve(ab)
- }
-
- stream () {
- return new Minipass().end(this[BUFFER])
- }
-
- slice (start, end, type) {
- const size = this.size
- const relativeStart = start === undefined ? 0
- : start < 0 ? Math.max(size + start, 0)
- : Math.min(start, size)
- const relativeEnd = end === undefined ? size
- : end < 0 ? Math.max(size + end, 0)
- : Math.min(end, size)
- const span = Math.max(relativeEnd - relativeStart, 0)
-
- const buffer = this[BUFFER]
- const slicedBuffer = buffer.slice(
- relativeStart,
- relativeStart + span
- )
- const blob = new Blob([], { type })
- blob[BUFFER] = slicedBuffer
- return blob
- }
-
- get [Symbol.toStringTag] () {
- return 'Blob'
- }
-
- static get BUFFER () {
- return BUFFER
- }
-}
-
-Object.defineProperties(Blob.prototype, {
- size: { enumerable: true },
- type: { enumerable: true },
-})
-
-module.exports = Blob
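
The `slice()` above implements standard Blob negative-index semantics. A sketch, assuming minipass-fetch's published lib/ layout for the subpath require:

    const Blob = require('minipass-fetch/lib/blob.js')

    const blob = new Blob(['hello world'], { type: 'text/plain' })
    blob.slice(0, 5).text().then(t => console.log(t)) // hello
    blob.slice(-5).text().then(t => console.log(t)) // world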
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js
deleted file mode 100644
index 62286bd1de0d91..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js
+++ /dev/null
@@ -1,350 +0,0 @@
-'use strict'
-const { Minipass } = require('minipass')
-const MinipassSized = require('minipass-sized')
-
-const Blob = require('./blob.js')
-const { BUFFER } = Blob
-const FetchError = require('./fetch-error.js')
-
-// optional dependency on 'encoding'
-let convert
-try {
- convert = require('encoding').convert
-} catch (e) {
- // defer error until textConverted is called
-}
-
-const INTERNALS = Symbol('Body internals')
-const CONSUME_BODY = Symbol('consumeBody')
-
-class Body {
- constructor (bodyArg, options = {}) {
- const { size = 0, timeout = 0 } = options
- const body = bodyArg === undefined || bodyArg === null ? null
- : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString())
- : isBlob(bodyArg) ? bodyArg
- : Buffer.isBuffer(bodyArg) ? bodyArg
- : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]'
- ? Buffer.from(bodyArg)
- : ArrayBuffer.isView(bodyArg)
- ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength)
- : Minipass.isStream(bodyArg) ? bodyArg
- : Buffer.from(String(bodyArg))
-
- this[INTERNALS] = {
- body,
- disturbed: false,
- error: null,
- }
-
- this.size = size
- this.timeout = timeout
-
- if (Minipass.isStream(body)) {
- body.on('error', er => {
- const error = er.name === 'AbortError' ? er
- : new FetchError(`Invalid response while trying to fetch ${
- this.url}: ${er.message}`, 'system', er)
- this[INTERNALS].error = error
- })
- }
- }
-
- get body () {
- return this[INTERNALS].body
- }
-
- get bodyUsed () {
- return this[INTERNALS].disturbed
- }
-
- arrayBuffer () {
- return this[CONSUME_BODY]().then(buf =>
- buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength))
- }
-
- blob () {
- const ct = this.headers && this.headers.get('content-type') || ''
- return this[CONSUME_BODY]().then(buf => Object.assign(
- new Blob([], { type: ct.toLowerCase() }),
- { [BUFFER]: buf }
- ))
- }
-
- async json () {
- const buf = await this[CONSUME_BODY]()
- try {
- return JSON.parse(buf.toString())
- } catch (er) {
- throw new FetchError(
- `invalid json response body at ${this.url} reason: ${er.message}`,
- 'invalid-json'
- )
- }
- }
-
- text () {
- return this[CONSUME_BODY]().then(buf => buf.toString())
- }
-
- buffer () {
- return this[CONSUME_BODY]()
- }
-
- textConverted () {
- return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers))
- }
-
- [CONSUME_BODY] () {
- if (this[INTERNALS].disturbed) {
- return Promise.reject(new TypeError(`body used already for: ${
- this.url}`))
- }
-
- this[INTERNALS].disturbed = true
-
- if (this[INTERNALS].error) {
- return Promise.reject(this[INTERNALS].error)
- }
-
- // body is null
- if (this.body === null) {
- return Promise.resolve(Buffer.alloc(0))
- }
-
- if (Buffer.isBuffer(this.body)) {
- return Promise.resolve(this.body)
- }
-
- const upstream = isBlob(this.body) ? this.body.stream() : this.body
-
- /* istanbul ignore if: should never happen */
- if (!Minipass.isStream(upstream)) {
- return Promise.resolve(Buffer.alloc(0))
- }
-
- const stream = this.size && upstream instanceof MinipassSized ? upstream
- : !this.size && upstream instanceof Minipass &&
- !(upstream instanceof MinipassSized) ? upstream
- : this.size ? new MinipassSized({ size: this.size })
- : new Minipass()
-
- // allow timeout on slow response body, but only if the stream is still writable. this
- // makes the timeout center on the socket stream from lib/index.js rather than the
- // intermediary minipass stream we create to receive the data
- const resTimeout = this.timeout && stream.writable ? setTimeout(() => {
- stream.emit('error', new FetchError(
- `Response timeout while trying to fetch ${
- this.url} (over ${this.timeout}ms)`, 'body-timeout'))
- }, this.timeout) : null
-
- // do not keep the process open just for this timeout, even
- // though we expect it'll get cleared eventually.
- if (resTimeout && resTimeout.unref) {
- resTimeout.unref()
- }
-
- // do the pipe in the promise, because the pipe() can send too much
- // data through right away and upset the MP Sized object
- return new Promise((resolve) => {
- // if the stream is some other kind of stream, then pipe through a MP
- // so we can collect it more easily.
- if (stream !== upstream) {
- upstream.on('error', er => stream.emit('error', er))
- upstream.pipe(stream)
- }
- resolve()
- }).then(() => stream.concat()).then(buf => {
- clearTimeout(resTimeout)
- return buf
- }).catch(er => {
- clearTimeout(resTimeout)
- // request was aborted, reject with this Error
- if (er.name === 'AbortError' || er.name === 'FetchError') {
- throw er
- } else if (er.name === 'RangeError') {
- throw new FetchError(`Could not create Buffer from response body for ${
- this.url}: ${er.message}`, 'system', er)
- } else {
- // other errors, such as incorrect content-encoding or content-length
- throw new FetchError(`Invalid response body while trying to fetch ${
- this.url}: ${er.message}`, 'system', er)
- }
- })
- }
-
- static clone (instance) {
- if (instance.bodyUsed) {
- throw new Error('cannot clone body after it is used')
- }
-
- const body = instance.body
-
- // check that body is a stream and not form-data object
- // NB: can't clone the form-data object without having it as a dependency
- if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') {
- // create a dedicated tee stream so that we don't lose data
- // potentially sitting in the body stream's buffer by writing it
- // immediately to p1 and not having it for p2.
- const tee = new Minipass()
- const p1 = new Minipass()
- const p2 = new Minipass()
- tee.on('error', er => {
- p1.emit('error', er)
- p2.emit('error', er)
- })
- body.on('error', er => tee.emit('error', er))
- tee.pipe(p1)
- tee.pipe(p2)
- body.pipe(tee)
- // set instance body to one fork, return the other
- instance[INTERNALS].body = p1
- return p2
- } else {
- return instance.body
- }
- }
-
- static extractContentType (body) {
- return body === null || body === undefined ? null
- : typeof body === 'string' ? 'text/plain;charset=UTF-8'
- : isURLSearchParams(body)
- ? 'application/x-www-form-urlencoded;charset=UTF-8'
- : isBlob(body) ? body.type || null
- : Buffer.isBuffer(body) ? null
- : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null
- : ArrayBuffer.isView(body) ? null
- : typeof body.getBoundary === 'function'
- ? `multipart/form-data;boundary=${body.getBoundary()}`
- : Minipass.isStream(body) ? null
- : 'text/plain;charset=UTF-8'
- }
-
- static getTotalBytes (instance) {
- const { body } = instance
- return (body === null || body === undefined) ? 0
- : isBlob(body) ? body.size
- : Buffer.isBuffer(body) ? body.length
- : body && typeof body.getLengthSync === 'function' && (
- // detect form data input from form-data module
- body._lengthRetrievers &&
- /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x
- body.hasKnownLength && body.hasKnownLength()) // 2.x
- ? body.getLengthSync()
- : null
- }
-
- static writeToStream (dest, instance) {
- const { body } = instance
-
- if (body === null || body === undefined) {
- dest.end()
- } else if (Buffer.isBuffer(body) || typeof body === 'string') {
- dest.end(body)
- } else {
- // body is stream or blob
- const stream = isBlob(body) ? body.stream() : body
- stream.on('error', er => dest.emit('error', er)).pipe(dest)
- }
-
- return dest
- }
-}
-
-Object.defineProperties(Body.prototype, {
- body: { enumerable: true },
- bodyUsed: { enumerable: true },
- arrayBuffer: { enumerable: true },
- blob: { enumerable: true },
- json: { enumerable: true },
- text: { enumerable: true },
-})
-
-const isURLSearchParams = obj =>
- // Duck-typing as a necessary condition.
- (typeof obj !== 'object' ||
- typeof obj.append !== 'function' ||
- typeof obj.delete !== 'function' ||
- typeof obj.get !== 'function' ||
- typeof obj.getAll !== 'function' ||
- typeof obj.has !== 'function' ||
- typeof obj.set !== 'function') ? false
- // Brand-checking and more duck-typing as optional condition.
- : obj.constructor.name === 'URLSearchParams' ||
- Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
- typeof obj.sort === 'function'
-
-const isBlob = obj =>
- typeof obj === 'object' &&
- typeof obj.arrayBuffer === 'function' &&
- typeof obj.type === 'string' &&
- typeof obj.stream === 'function' &&
- typeof obj.constructor === 'function' &&
- typeof obj.constructor.name === 'string' &&
- /^(Blob|File)$/.test(obj.constructor.name) &&
- /^(Blob|File)$/.test(obj[Symbol.toStringTag])
-
-const convertBody = (buffer, headers) => {
- /* istanbul ignore if */
- if (typeof convert !== 'function') {
- throw new Error('The package `encoding` must be installed to use the textConverted() function')
- }
-
- const ct = headers && headers.get('content-type')
- let charset = 'utf-8'
- let res
-
- // header
- if (ct) {
- res = /charset=([^;]*)/i.exec(ct)
- }
-
- // no charset in content type, peek at response body for at most 1024 bytes
- const str = buffer.slice(0, 1024).toString()
-
- // html5
- if (!res && str) {
- res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str)
- }
-
- // html4
- if (!res && str) {
- res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str)
-
- if (res) {
- res = /charset=(.*)/i.exec(res.pop())
- }
- }
-
- // xml
- if (!res && str) {
- res = /<\?xml.+?encoding=(['"])(.+?)\1/.exec(str)
- }
-
- // found charset
- if (res) {
- charset = res.pop()
-
- // prevent decode issues when sites use incorrect encoding
- if (charset === 'gb2312' || charset === 'gbk') {
- charset = 'gb18030'
- }
- }
-
- // turn raw buffers into a single utf-8 buffer
- return convert(
- buffer,
- 'UTF-8',
- charset
- ).toString()
-}
-
-module.exports = Body
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/fetch-error.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/fetch-error.js
deleted file mode 100644
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/fetch-error.js
+++ /dev/null
@@ -1,32 +0,0 @@
-'use strict'
-class FetchError extends Error {
- constructor (message, type, systemError) {
- super(message)
- this.code = 'FETCH_ERROR'
-
- // pick up code, expected, path, ...
- if (systemError) {
- Object.assign(this, systemError)
- }
-
- this.errno = this.code
-
- // override anything the system error might've clobbered
- this.type = this.code === 'EBADSIZE' && this.found > this.expect
- ? 'max-size' : type
- this.message = message
- Error.captureStackTrace(this, this.constructor)
- }
-
- get name () {
- return 'FetchError'
- }
-
- // don't allow name to be overwritten
- set name (n) {}
-
- get [Symbol.toStringTag] () {
- return 'FetchError'
- }
-}
-module.exports = FetchError
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js
deleted file mode 100644
index dd6e854d5ba399..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js
+++ /dev/null
@@ -1,267 +0,0 @@
-'use strict'
-const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/
-const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/
-
-const validateName = name => {
- name = `${name}`
- if (invalidTokenRegex.test(name) || name === '') {
- throw new TypeError(`${name} is not a legal HTTP header name`)
- }
-}
-
-const validateValue = value => {
- value = `${value}`
- if (invalidHeaderCharRegex.test(value)) {
- throw new TypeError(`${value} is not a legal HTTP header value`)
- }
-}
-
-const find = (map, name) => {
- name = name.toLowerCase()
- for (const key in map) {
- if (key.toLowerCase() === name) {
- return key
- }
- }
- return undefined
-}
-
-const MAP = Symbol('map')
-class Headers {
- constructor (init = undefined) {
- this[MAP] = Object.create(null)
- if (init instanceof Headers) {
- const rawHeaders = init.raw()
- const headerNames = Object.keys(rawHeaders)
- for (const headerName of headerNames) {
- for (const value of rawHeaders[headerName]) {
- this.append(headerName, value)
- }
- }
- return
- }
-
- // no-op
- if (init === undefined || init === null) {
- return
- }
-
- if (typeof init === 'object') {
- const method = init[Symbol.iterator]
- if (method !== null && method !== undefined) {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable')
- }
-
- // sequence<sequence<ByteString>>
- // Note: per spec we have to first exhaust the lists then process them
- const pairs = []
- for (const pair of init) {
- if (typeof pair !== 'object' ||
- typeof pair[Symbol.iterator] !== 'function') {
- throw new TypeError('Each header pair must be iterable')
- }
- const arrPair = Array.from(pair)
- if (arrPair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple')
- }
- pairs.push(arrPair)
- }
-
- for (const pair of pairs) {
- this.append(pair[0], pair[1])
- }
- } else {
- // record<ByteString, ByteString>
- for (const key of Object.keys(init)) {
- this.append(key, init[key])
- }
- }
- } else {
- throw new TypeError('Provided initializer must be an object')
- }
- }
-
- get (name) {
- name = `${name}`
- validateName(name)
- const key = find(this[MAP], name)
- if (key === undefined) {
- return null
- }
-
- return this[MAP][key].join(', ')
- }
-
- forEach (callback, thisArg = undefined) {
- let pairs = getHeaders(this)
- for (let i = 0; i < pairs.length; i++) {
- const [name, value] = pairs[i]
- callback.call(thisArg, value, name, this)
- // refresh in case the callback added more headers
- pairs = getHeaders(this)
- }
- }
-
- set (name, value) {
- name = `${name}`
- value = `${value}`
- validateName(name)
- validateValue(value)
- const key = find(this[MAP], name)
- this[MAP][key !== undefined ? key : name] = [value]
- }
-
- append (name, value) {
- name = `${name}`
- value = `${value}`
- validateName(name)
- validateValue(value)
- const key = find(this[MAP], name)
- if (key !== undefined) {
- this[MAP][key].push(value)
- } else {
- this[MAP][name] = [value]
- }
- }
-
- has (name) {
- name = `${name}`
- validateName(name)
- return find(this[MAP], name) !== undefined
- }
-
- delete (name) {
- name = `${name}`
- validateName(name)
- const key = find(this[MAP], name)
- if (key !== undefined) {
- delete this[MAP][key]
- }
- }
-
- raw () {
- return this[MAP]
- }
-
- keys () {
- return new HeadersIterator(this, 'key')
- }
-
- values () {
- return new HeadersIterator(this, 'value')
- }
-
- [Symbol.iterator] () {
- return new HeadersIterator(this, 'key+value')
- }
-
- entries () {
- return new HeadersIterator(this, 'key+value')
- }
-
- get [Symbol.toStringTag] () {
- return 'Headers'
- }
-
- static exportNodeCompatibleHeaders (headers) {
- const obj = Object.assign(Object.create(null), headers[MAP])
-
- // http.request() only supports string as Host header. This hack makes
- // specifying custom Host header possible.
- const hostHeaderKey = find(headers[MAP], 'Host')
- if (hostHeaderKey !== undefined) {
- obj[hostHeaderKey] = obj[hostHeaderKey][0]
- }
-
- return obj
- }
-
- static createHeadersLenient (obj) {
- const headers = new Headers()
- for (const name of Object.keys(obj)) {
- if (invalidTokenRegex.test(name)) {
- continue
- }
-
- if (Array.isArray(obj[name])) {
- for (const val of obj[name]) {
- if (invalidHeaderCharRegex.test(val)) {
- continue
- }
-
- if (headers[MAP][name] === undefined) {
- headers[MAP][name] = [val]
- } else {
- headers[MAP][name].push(val)
- }
- }
- } else if (!invalidHeaderCharRegex.test(obj[name])) {
- headers[MAP][name] = [obj[name]]
- }
- }
- return headers
- }
-}
-
-Object.defineProperties(Headers.prototype, {
- get: { enumerable: true },
- forEach: { enumerable: true },
- set: { enumerable: true },
- append: { enumerable: true },
- has: { enumerable: true },
- delete: { enumerable: true },
- keys: { enumerable: true },
- values: { enumerable: true },
- entries: { enumerable: true },
-})
-
-const getHeaders = (headers, kind = 'key+value') =>
- Object.keys(headers[MAP]).sort().map(
- kind === 'key' ? k => k.toLowerCase()
- : kind === 'value' ? k => headers[MAP][k].join(', ')
- : k => [k.toLowerCase(), headers[MAP][k].join(', ')]
- )
-
-const INTERNAL = Symbol('internal')
-
-class HeadersIterator {
- constructor (target, kind) {
- this[INTERNAL] = {
- target,
- kind,
- index: 0,
- }
- }
-
- get [Symbol.toStringTag] () {
- return 'HeadersIterator'
- }
-
- next () {
- /* istanbul ignore if: should be impossible */
- if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) {
- throw new TypeError('Value of `this` is not a HeadersIterator')
- }
-
- const { target, kind, index } = this[INTERNAL]
- const values = getHeaders(target, kind)
- const len = values.length
- if (index >= len) {
- return {
- value: undefined,
- done: true,
- }
- }
-
- this[INTERNAL].index++
-
- return { value: values[index], done: false }
- }
-}
-
-// manually extend because 'extends' requires a ctor
-Object.setPrototypeOf(HeadersIterator.prototype,
- Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())))
-
-module.exports = Headers
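
The Headers class above accepts three init shapes (another Headers, an iterable of name/value pairs, or a plain record) and matches names case-insensitively, joining repeated values with ', '. A short sketch of that behavior, using only the API shown in the deleted file (the require path assumes the layout from this diff):

    const Headers = require('minipass-fetch/lib/headers.js')

    const h = new Headers([['Accept', 'text/html']])  // iterable of pairs
    h.append('accept', 'application/json')            // matches 'Accept' case-insensitively
    console.log(h.get('ACCEPT'))  // 'text/html, application/json'

    const h2 = new Headers({ 'X-One': '1', 'X-Two': '2' })  // plain record
    for (const [name, value] of h2) {
      console.log(name, value)    // keys come back lowercased and sorted
    }
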
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js
deleted file mode 100644
index da402161670e65..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js
+++ /dev/null
@@ -1,377 +0,0 @@
-'use strict'
-const { URL } = require('url')
-const http = require('http')
-const https = require('https')
-const zlib = require('minizlib')
-const { Minipass } = require('minipass')
-
-const Body = require('./body.js')
-const { writeToStream, getTotalBytes } = Body
-const Response = require('./response.js')
-const Headers = require('./headers.js')
-const { createHeadersLenient } = Headers
-const Request = require('./request.js')
-const { getNodeRequestOptions } = Request
-const FetchError = require('./fetch-error.js')
-const AbortError = require('./abort-error.js')
-
-// XXX this should really be split up and unit-ized for easier testing
-// and better DRY implementation of data/http request aborting
-const fetch = async (url, opts) => {
- if (/^data:/.test(url)) {
- const request = new Request(url, opts)
- // delay 1 promise tick so that the consumer can abort right away
- return Promise.resolve().then(() => new Promise((resolve, reject) => {
- let type, data
- try {
- const { pathname, search } = new URL(url)
- const split = pathname.split(',')
- if (split.length < 2) {
- throw new Error('invalid data: URI')
- }
- const mime = split.shift()
- const base64 = /;base64$/.test(mime)
- type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime
- const rawData = decodeURIComponent(split.join(',') + search)
- data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData)
- } catch (er) {
- return reject(new FetchError(`[${request.method}] ${
- request.url} invalid URL, ${er.message}`, 'system', er))
- }
-
- const { signal } = request
- if (signal && signal.aborted) {
- return reject(new AbortError('The user aborted a request.'))
- }
-
- const headers = { 'Content-Length': data.length }
- if (type) {
- headers['Content-Type'] = type
- }
- return resolve(new Response(data, { headers }))
- }))
- }
-
- return new Promise((resolve, reject) => {
- // build request object
- const request = new Request(url, opts)
- let options
- try {
- options = getNodeRequestOptions(request)
- } catch (er) {
- return reject(er)
- }
-
- const send = (options.protocol === 'https:' ? https : http).request
- const { signal } = request
- let response = null
- const abort = () => {
- const error = new AbortError('The user aborted a request.')
- reject(error)
- if (Minipass.isStream(request.body) &&
- typeof request.body.destroy === 'function') {
- request.body.destroy(error)
- }
- if (response && response.body) {
- response.body.emit('error', error)
- }
- }
-
- if (signal && signal.aborted) {
- return abort()
- }
-
- const abortAndFinalize = () => {
- abort()
- finalize()
- }
-
- const finalize = () => {
- req.abort()
- if (signal) {
- signal.removeEventListener('abort', abortAndFinalize)
- }
- clearTimeout(reqTimeout)
- }
-
- // send request
- const req = send(options)
-
- if (signal) {
- signal.addEventListener('abort', abortAndFinalize)
- }
-
- let reqTimeout = null
- if (request.timeout) {
- req.once('socket', () => {
- reqTimeout = setTimeout(() => {
- reject(new FetchError(`network timeout at: ${
- request.url}`, 'request-timeout'))
- finalize()
- }, request.timeout)
- })
- }
-
- req.on('error', er => {
- // if a 'response' event is emitted before the 'error' event, then by the
- // time this handler is run it's too late to reject the Promise for the
- // response. instead, we forward the error event to the response stream
- // so that the error will surface to the user when they try to consume
- // the body. this is done as a side effect of aborting the request except
- // for in windows, where we must forward the event manually, otherwise
- // there is no longer a ref'd socket attached to the request and the
- // stream never ends so the event loop runs out of work and the process
- // exits without warning.
- // coverage skipped here due to the difficulty in testing
- // istanbul ignore next
- if (req.res) {
- req.res.emit('error', er)
- }
- reject(new FetchError(`request to ${request.url} failed, reason: ${
- er.message}`, 'system', er))
- finalize()
- })
-
- req.on('response', res => {
- clearTimeout(reqTimeout)
-
- const headers = createHeadersLenient(res.headers)
-
- // HTTP fetch step 5
- if (fetch.isRedirect(res.statusCode)) {
- // HTTP fetch step 5.2
- const location = headers.get('Location')
-
- // HTTP fetch step 5.3
- let locationURL = null
- try {
- locationURL = location === null ? null : new URL(location, request.url).toString()
- } catch {
- // error here can only be invalid URL in Location: header
- // do not throw when options.redirect == manual
- // let the user extract the erroneous redirect URL
- if (request.redirect !== 'manual') {
- /* eslint-disable-next-line max-len */
- reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'))
- finalize()
- return
- }
- }
-
- // HTTP fetch step 5.5
- if (request.redirect === 'error') {
- reject(new FetchError('uri requested responds with a redirect, ' +
- `redirect mode is set to error: ${request.url}`, 'no-redirect'))
- finalize()
- return
- } else if (request.redirect === 'manual') {
- // node-fetch-specific step: make manual redirect a bit easier to
- // use by setting the Location header value to the resolved URL.
- if (locationURL !== null) {
- // handle corrupted header
- try {
- headers.set('Location', locationURL)
- } catch (err) {
- /* istanbul ignore next: nodejs server prevent invalid
- response headers, we can't test this through normal
- request */
- reject(err)
- }
- }
- } else if (request.redirect === 'follow' && locationURL !== null) {
- // HTTP-redirect fetch step 5
- if (request.counter >= request.follow) {
- reject(new FetchError(`maximum redirect reached at: ${
- request.url}`, 'max-redirect'))
- finalize()
- return
- }
-
- // HTTP-redirect fetch step 9
- if (res.statusCode !== 303 &&
- request.body &&
- getTotalBytes(request) === null) {
- reject(new FetchError(
- 'Cannot follow redirect with body being a readable stream',
- 'unsupported-redirect'
- ))
- finalize()
- return
- }
-
- // Update host due to redirection
- request.headers.set('host', (new URL(locationURL)).host)
-
- // HTTP-redirect fetch step 6 (counter increment)
- // Create a new Request object.
- const requestOpts = {
- headers: new Headers(request.headers),
- follow: request.follow,
- counter: request.counter + 1,
- agent: request.agent,
- compress: request.compress,
- method: request.method,
- body: request.body,
- signal: request.signal,
- timeout: request.timeout,
- }
-
- // if the redirect is to a new hostname, strip the authorization and cookie headers
- const parsedOriginal = new URL(request.url)
- const parsedRedirect = new URL(locationURL)
- if (parsedOriginal.hostname !== parsedRedirect.hostname) {
- requestOpts.headers.delete('authorization')
- requestOpts.headers.delete('cookie')
- }
-
- // HTTP-redirect fetch step 11
- if (res.statusCode === 303 || (
- (res.statusCode === 301 || res.statusCode === 302) &&
- request.method === 'POST'
- )) {
- requestOpts.method = 'GET'
- requestOpts.body = undefined
- requestOpts.headers.delete('content-length')
- }
-
- // HTTP-redirect fetch step 15
- resolve(fetch(new Request(locationURL, requestOpts)))
- finalize()
- return
- }
- } // end if(isRedirect)
-
- // prepare response
- res.once('end', () =>
- signal && signal.removeEventListener('abort', abortAndFinalize))
-
- const body = new Minipass()
- // if an error occurs, either on the response stream itself, on one of the
- // decoder streams, or a response length timeout from the Body class, we
- // forward the error through to our internal body stream. If we see an
- // error event on that, we call finalize to abort the request and ensure
- // we don't leave a socket believing a request is in flight.
- // this is difficult to test, so lacks specific coverage.
- body.on('error', finalize)
- // exceedingly rare that the stream would have an error,
- // but just in case we proxy it to the stream in use.
- res.on('error', /* istanbul ignore next */ er => body.emit('error', er))
- res.on('data', (chunk) => body.write(chunk))
- res.on('end', () => body.end())
-
- const responseOptions = {
- url: request.url,
- status: res.statusCode,
- statusText: res.statusMessage,
- headers: headers,
- size: request.size,
- timeout: request.timeout,
- counter: request.counter,
- trailer: new Promise(resolveTrailer =>
- res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))),
- }
-
- // HTTP-network fetch step 12.1.1.3
- const codings = headers.get('Content-Encoding')
-
- // HTTP-network fetch step 12.1.1.4: handle content codings
-
- // in following scenarios we ignore compression support
- // 1. compression support is disabled
- // 2. HEAD request
- // 3. no Content-Encoding header
- // 4. no content response (204)
- // 5. content not modified response (304)
- if (!request.compress ||
- request.method === 'HEAD' ||
- codings === null ||
- res.statusCode === 204 ||
- res.statusCode === 304) {
- response = new Response(body, responseOptions)
- resolve(response)
- return
- }
-
- // Be less strict when decoding compressed responses, since sometimes
- // servers send slightly invalid responses that are still accepted
- // by common browsers.
- // Always using Z_SYNC_FLUSH is what cURL does.
- const zlibOptions = {
- flush: zlib.constants.Z_SYNC_FLUSH,
- finishFlush: zlib.constants.Z_SYNC_FLUSH,
- }
-
- // for gzip
- if (codings === 'gzip' || codings === 'x-gzip') {
- const unzip = new zlib.Gunzip(zlibOptions)
- response = new Response(
- // exceedingly rare that the stream would have an error,
- // but just in case we proxy it to the stream in use.
- body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip),
- responseOptions
- )
- resolve(response)
- return
- }
-
- // for deflate
- if (codings === 'deflate' || codings === 'x-deflate') {
- // handle the infamous raw deflate response from old servers
- // a hack for old IIS and Apache servers
- const raw = res.pipe(new Minipass())
- raw.once('data', chunk => {
- // see http://stackoverflow.com/questions/37519828
- const decoder = (chunk[0] & 0x0F) === 0x08
- ? new zlib.Inflate()
- : new zlib.InflateRaw()
- // exceedingly rare that the stream would have an error,
- // but just in case we proxy it to the stream in use.
- body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder)
- response = new Response(decoder, responseOptions)
- resolve(response)
- })
- return
- }
-
- // for br
- if (codings === 'br') {
- // ignoring coverage so tests don't have to fake support (or lack of) for brotli
- // istanbul ignore next
- try {
- var decoder = new zlib.BrotliDecompress()
- } catch (err) {
- reject(err)
- finalize()
- return
- }
- // exceedingly rare that the stream would have an error,
- // but just in case we proxy it to the stream in use.
- body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder)
- response = new Response(decoder, responseOptions)
- resolve(response)
- return
- }
-
- // otherwise, use response as-is
- response = new Response(body, responseOptions)
- resolve(response)
- })
-
- writeToStream(req, request)
- })
-}
-
-module.exports = fetch
-
-fetch.isRedirect = code =>
- code === 301 ||
- code === 302 ||
- code === 303 ||
- code === 307 ||
- code === 308
-
-fetch.Headers = Headers
-fetch.Request = Request
-fetch.Response = Response
-fetch.FetchError = FetchError
-fetch.AbortError = AbortError
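
The fetch() implementation above handles data: URIs inline and drives everything else through http/https.request, with redirect, compression, timeout, and abort handling. A minimal usage sketch, assuming the package is installed as `minipass-fetch` (the URL is only an example):

    const fetch = require('minipass-fetch')

    async function main () {
      const ac = new AbortController()
      const res = await fetch('https://registry.npmjs.org/-/ping', {
        redirect: 'follow', // default; 'manual' and 'error' are handled above
        compress: true,     // enables the gzip/deflate/br decoding paths above
        timeout: 30000,     // rejects with a 'request-timeout' FetchError
        signal: ac.signal,  // rejects with an AbortError if aborted
      })
      console.log(res.status, res.ok)
      console.log(await res.json())
    }

    main().catch(console.error)
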
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js
deleted file mode 100644
index 054439e6699107..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js
+++ /dev/null
@@ -1,282 +0,0 @@
-'use strict'
-const { URL } = require('url')
-const { Minipass } = require('minipass')
-const Headers = require('./headers.js')
-const { exportNodeCompatibleHeaders } = Headers
-const Body = require('./body.js')
-const { clone, extractContentType, getTotalBytes } = Body
-
-const version = require('../package.json').version
-const defaultUserAgent =
- `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)`
-
-const INTERNALS = Symbol('Request internals')
-
-const isRequest = input =>
- typeof input === 'object' && typeof input[INTERNALS] === 'object'
-
-const isAbortSignal = signal => {
- const proto = (
- signal
- && typeof signal === 'object'
- && Object.getPrototypeOf(signal)
- )
- return !!(proto && proto.constructor.name === 'AbortSignal')
-}
-
-class Request extends Body {
- constructor (input, init = {}) {
- const parsedURL = isRequest(input) ? new URL(input.url)
- : input && input.href ? new URL(input.href)
- : new URL(`${input}`)
-
- if (isRequest(input)) {
- init = { ...input[INTERNALS], ...init }
- } else if (!input || typeof input === 'string') {
- input = {}
- }
-
- const method = (init.method || input.method || 'GET').toUpperCase()
- const isGETHEAD = method === 'GET' || method === 'HEAD'
-
- if ((init.body !== null && init.body !== undefined ||
- isRequest(input) && input.body !== null) && isGETHEAD) {
- throw new TypeError('Request with GET/HEAD method cannot have body')
- }
-
- const inputBody = init.body !== null && init.body !== undefined ? init.body
- : isRequest(input) && input.body !== null ? clone(input)
- : null
-
- super(inputBody, {
- timeout: init.timeout || input.timeout || 0,
- size: init.size || input.size || 0,
- })
-
- const headers = new Headers(init.headers || input.headers || {})
-
- if (inputBody !== null && inputBody !== undefined &&
- !headers.has('Content-Type')) {
- const contentType = extractContentType(inputBody)
- if (contentType) {
- headers.append('Content-Type', contentType)
- }
- }
-
- const signal = 'signal' in init ? init.signal
- : null
-
- if (signal !== null && signal !== undefined && !isAbortSignal(signal)) {
- throw new TypeError('Expected signal must be an instanceof AbortSignal')
- }
-
- // TLS specific options that are handled by node
- const {
- ca,
- cert,
- ciphers,
- clientCertEngine,
- crl,
- dhparam,
- ecdhCurve,
- family,
- honorCipherOrder,
- key,
- passphrase,
- pfx,
- rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0',
- secureOptions,
- secureProtocol,
- servername,
- sessionIdContext,
- } = init
-
- this[INTERNALS] = {
- method,
- redirect: init.redirect || input.redirect || 'follow',
- headers,
- parsedURL,
- signal,
- ca,
- cert,
- ciphers,
- clientCertEngine,
- crl,
- dhparam,
- ecdhCurve,
- family,
- honorCipherOrder,
- key,
- passphrase,
- pfx,
- rejectUnauthorized,
- secureOptions,
- secureProtocol,
- servername,
- sessionIdContext,
- }
-
- // node-fetch-only options
- this.follow = init.follow !== undefined ? init.follow
- : input.follow !== undefined ? input.follow
- : 20
- this.compress = init.compress !== undefined ? init.compress
- : input.compress !== undefined ? input.compress
- : true
- this.counter = init.counter || input.counter || 0
- this.agent = init.agent || input.agent
- }
-
- get method () {
- return this[INTERNALS].method
- }
-
- get url () {
- return this[INTERNALS].parsedURL.toString()
- }
-
- get headers () {
- return this[INTERNALS].headers
- }
-
- get redirect () {
- return this[INTERNALS].redirect
- }
-
- get signal () {
- return this[INTERNALS].signal
- }
-
- clone () {
- return new Request(this)
- }
-
- get [Symbol.toStringTag] () {
- return 'Request'
- }
-
- static getNodeRequestOptions (request) {
- const parsedURL = request[INTERNALS].parsedURL
- const headers = new Headers(request[INTERNALS].headers)
-
- // fetch step 1.3
- if (!headers.has('Accept')) {
- headers.set('Accept', '*/*')
- }
-
- // Basic fetch
- if (!/^https?:$/.test(parsedURL.protocol)) {
- throw new TypeError('Only HTTP(S) protocols are supported')
- }
-
- if (request.signal &&
- Minipass.isStream(request.body) &&
- typeof request.body.destroy !== 'function') {
- throw new Error(
- 'Cancellation of streamed requests with AbortSignal is not supported')
- }
-
- // HTTP-network-or-cache fetch steps 2.4-2.7
- const contentLengthValue =
- (request.body === null || request.body === undefined) &&
- /^(POST|PUT)$/i.test(request.method) ? '0'
- : request.body !== null && request.body !== undefined
- ? getTotalBytes(request)
- : null
-
- if (contentLengthValue) {
- headers.set('Content-Length', contentLengthValue + '')
- }
-
- // HTTP-network-or-cache fetch step 2.11
- if (!headers.has('User-Agent')) {
- headers.set('User-Agent', defaultUserAgent)
- }
-
- // HTTP-network-or-cache fetch step 2.15
- if (request.compress && !headers.has('Accept-Encoding')) {
- headers.set('Accept-Encoding', 'gzip,deflate')
- }
-
- const agent = typeof request.agent === 'function'
- ? request.agent(parsedURL)
- : request.agent
-
- if (!headers.has('Connection') && !agent) {
- headers.set('Connection', 'close')
- }
-
- // TLS specific options that are handled by node
- const {
- ca,
- cert,
- ciphers,
- clientCertEngine,
- crl,
- dhparam,
- ecdhCurve,
- family,
- honorCipherOrder,
- key,
- passphrase,
- pfx,
- rejectUnauthorized,
- secureOptions,
- secureProtocol,
- servername,
- sessionIdContext,
- } = request[INTERNALS]
-
- // HTTP-network fetch step 4.2
- // chunked encoding is handled by Node.js
-
- // we cannot spread parsedURL directly, so we have to read each property one-by-one
- // and map them to the equivalent https?.request() method options
- const urlProps = {
- auth: parsedURL.username || parsedURL.password
- ? `${parsedURL.username}:${parsedURL.password}`
- : '',
- host: parsedURL.host,
- hostname: parsedURL.hostname,
- path: `${parsedURL.pathname}${parsedURL.search}`,
- port: parsedURL.port,
- protocol: parsedURL.protocol,
- }
-
- return {
- ...urlProps,
- method: request.method,
- headers: exportNodeCompatibleHeaders(headers),
- agent,
- ca,
- cert,
- ciphers,
- clientCertEngine,
- crl,
- dhparam,
- ecdhCurve,
- family,
- honorCipherOrder,
- key,
- passphrase,
- pfx,
- rejectUnauthorized,
- secureOptions,
- secureProtocol,
- servername,
- sessionIdContext,
- timeout: request.timeout,
- }
- }
-}
-
-module.exports = Request
-
-Object.defineProperties(Request.prototype, {
- method: { enumerable: true },
- url: { enumerable: true },
- headers: { enumerable: true },
- redirect: { enumerable: true },
- clone: { enumerable: true },
- signal: { enumerable: true },
-})
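
Request normalizes its inputs (method upper-casing, default headers) and getNodeRequestOptions() flattens the URL plus TLS options into the shape http.request()/https.request() expects. A sketch of that mapping, using only what the deleted file defines (the require path follows this diff's layout; the exact Content-Type and Content-Length values come from body.js helpers not shown here):

    const Request = require('minipass-fetch/lib/request.js')

    const req = new Request('https://example.test/pkg', { method: 'post', body: 'x' })
    console.log(req.method) // 'POST' (upper-cased by the constructor)

    const opts = Request.getNodeRequestOptions(req)
    console.log(opts.method, opts.path) // 'POST' '/pkg'
    // headers are multi-valued arrays, per exportNodeCompatibleHeaders above,
    // and include the defaults set in getNodeRequestOptions:
    // Accept: ['*/*'], 'User-Agent': ['minipass-fetch/...'],
    // 'Accept-Encoding': ['gzip,deflate'], Connection: ['close'], ...
    console.log(opts.headers)
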
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js
deleted file mode 100644
index 54cb52db3594a7..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js
+++ /dev/null
@@ -1,90 +0,0 @@
-'use strict'
-const http = require('http')
-const { STATUS_CODES } = http
-
-const Headers = require('./headers.js')
-const Body = require('./body.js')
-const { clone, extractContentType } = Body
-
-const INTERNALS = Symbol('Response internals')
-
-class Response extends Body {
- constructor (body = null, opts = {}) {
- super(body, opts)
-
- const status = opts.status || 200
- const headers = new Headers(opts.headers)
-
- if (body !== null && body !== undefined && !headers.has('Content-Type')) {
- const contentType = extractContentType(body)
- if (contentType) {
- headers.append('Content-Type', contentType)
- }
- }
-
- this[INTERNALS] = {
- url: opts.url,
- status,
- statusText: opts.statusText || STATUS_CODES[status],
- headers,
- counter: opts.counter,
- trailer: Promise.resolve(opts.trailer || new Headers()),
- }
- }
-
- get trailer () {
- return this[INTERNALS].trailer
- }
-
- get url () {
- return this[INTERNALS].url || ''
- }
-
- get status () {
- return this[INTERNALS].status
- }
-
- get ok () {
- return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300
- }
-
- get redirected () {
- return this[INTERNALS].counter > 0
- }
-
- get statusText () {
- return this[INTERNALS].statusText
- }
-
- get headers () {
- return this[INTERNALS].headers
- }
-
- clone () {
- return new Response(clone(this), {
- url: this.url,
- status: this.status,
- statusText: this.statusText,
- headers: this.headers,
- ok: this.ok,
- redirected: this.redirected,
- trailer: this.trailer,
- })
- }
-
- get [Symbol.toStringTag] () {
- return 'Response'
- }
-}
-
-module.exports = Response
-
-Object.defineProperties(Response.prototype, {
- url: { enumerable: true },
- status: { enumerable: true },
- ok: { enumerable: true },
- redirected: { enumerable: true },
- statusText: { enumerable: true },
- headers: { enumerable: true },
- clone: { enumerable: true },
-})
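
Response is a thin wrapper over Body: `ok` is derived from the status range, `statusText` falls back to Node's http.STATUS_CODES table, and `redirected` reflects the redirect counter threaded through by fetch(). A small sketch (require path per this diff; Content-Type inference for a string body lives in body.js and is assumed here):

    const Response = require('minipass-fetch/lib/response.js')

    async function main () {
      const res = new Response('{"ok":true}', { status: 200, url: 'https://x.test/' })
      console.log(res.ok)           // true (200 is within [200, 300))
      console.log(res.statusText)   // 'OK' (defaulted from http.STATUS_CODES)
      console.log(res.redirected)   // false (no counter option given)
      console.log(await res.json()) // { ok: true }
    }

    main().catch(console.error)
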
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json
deleted file mode 100644
index d491a7fba126d0..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json
+++ /dev/null
@@ -1,69 +0,0 @@
-{
- "name": "minipass-fetch",
- "version": "3.0.5",
- "description": "An implementation of window.fetch in Node.js using Minipass streams",
- "license": "MIT",
- "main": "lib/index.js",
- "scripts": {
- "test:tls-fixtures": "./test/fixtures/tls/setup.sh",
- "test": "tap",
- "snap": "tap",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "postlint": "template-oss-check",
- "lintfix": "npm run lint -- --fix",
- "posttest": "npm run lint",
- "template-oss-apply": "template-oss-apply --force"
- },
- "tap": {
- "coverage-map": "map.js",
- "check-coverage": true,
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.22.0",
- "@ungap/url-search-params": "^0.2.2",
- "abort-controller": "^3.0.0",
- "abortcontroller-polyfill": "~1.7.3",
- "encoding": "^0.1.13",
- "form-data": "^4.0.0",
- "nock": "^13.2.4",
- "parted": "^0.1.1",
- "string-to-arraybuffer": "^1.0.2",
- "tap": "^16.0.0"
- },
- "dependencies": {
- "minipass": "^7.0.3",
- "minipass-sized": "^1.0.3",
- "minizlib": "^2.1.2"
- },
- "optionalDependencies": {
- "encoding": "^0.1.13"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/minipass-fetch.git"
- },
- "keywords": [
- "fetch",
- "minipass",
- "node-fetch",
- "window.fetch"
- ],
- "files": [
- "bin/",
- "lib/"
- ],
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "author": "GitHub Inc.",
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.22.0",
- "publish": "true"
- }
-}
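
Note that `encoding` appears in both devDependencies and optionalDependencies: it is only needed for textConverted(), so its absence must be tolerated at runtime. A sketch of the guard pattern implied by the convertBody() check shown earlier:

    // `encoding` is optional, so the require may fail and that must be OK
    let convert
    try {
      convert = require('encoding').convert
    } catch {
      // optional dependency not installed; leave `convert` undefined
    }

    function textConverted (buffer, headers) {
      if (typeof convert !== 'function') {
        throw new Error('The package `encoding` must be installed to use the textConverted() function')
      }
      // ... charset sniffing and conversion as in convertBody() above ...
    }
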
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE
deleted file mode 100644
index 83837797202b70..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) GitHub, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js
deleted file mode 100644
index 86d90861078dab..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js
+++ /dev/null
@@ -1,153 +0,0 @@
-const META = Symbol('proc-log.meta')
-module.exports = {
- META: META,
- output: {
- LEVELS: [
- 'standard',
- 'error',
- 'buffer',
- 'flush',
- ],
- KEYS: {
- standard: 'standard',
- error: 'error',
- buffer: 'buffer',
- flush: 'flush',
- },
- standard: function (...args) {
- return process.emit('output', 'standard', ...args)
- },
- error: function (...args) {
- return process.emit('output', 'error', ...args)
- },
- buffer: function (...args) {
- return process.emit('output', 'buffer', ...args)
- },
- flush: function (...args) {
- return process.emit('output', 'flush', ...args)
- },
- },
- log: {
- LEVELS: [
- 'notice',
- 'error',
- 'warn',
- 'info',
- 'verbose',
- 'http',
- 'silly',
- 'timing',
- 'pause',
- 'resume',
- ],
- KEYS: {
- notice: 'notice',
- error: 'error',
- warn: 'warn',
- info: 'info',
- verbose: 'verbose',
- http: 'http',
- silly: 'silly',
- timing: 'timing',
- pause: 'pause',
- resume: 'resume',
- },
- error: function (...args) {
- return process.emit('log', 'error', ...args)
- },
- notice: function (...args) {
- return process.emit('log', 'notice', ...args)
- },
- warn: function (...args) {
- return process.emit('log', 'warn', ...args)
- },
- info: function (...args) {
- return process.emit('log', 'info', ...args)
- },
- verbose: function (...args) {
- return process.emit('log', 'verbose', ...args)
- },
- http: function (...args) {
- return process.emit('log', 'http', ...args)
- },
- silly: function (...args) {
- return process.emit('log', 'silly', ...args)
- },
- timing: function (...args) {
- return process.emit('log', 'timing', ...args)
- },
- pause: function () {
- return process.emit('log', 'pause')
- },
- resume: function () {
- return process.emit('log', 'resume')
- },
- },
- time: {
- LEVELS: [
- 'start',
- 'end',
- ],
- KEYS: {
- start: 'start',
- end: 'end',
- },
- start: function (name, fn) {
- process.emit('time', 'start', name)
- function end () {
- return process.emit('time', 'end', name)
- }
- if (typeof fn === 'function') {
- const res = fn()
- if (res && res.finally) {
- return res.finally(end)
- }
- end()
- return res
- }
- return end
- },
- end: function (name) {
- return process.emit('time', 'end', name)
- },
- },
- input: {
- LEVELS: [
- 'start',
- 'end',
- 'read',
- ],
- KEYS: {
- start: 'start',
- end: 'end',
- read: 'read',
- },
- start: function (fn) {
- process.emit('input', 'start')
- function end () {
- return process.emit('input', 'end')
- }
- if (typeof fn === 'function') {
- const res = fn()
- if (res && res.finally) {
- return res.finally(end)
- }
- end()
- return res
- }
- return end
- },
- end: function () {
- return process.emit('input', 'end')
- },
- read: function (...args) {
- let resolve, reject
- const promise = new Promise((_resolve, _reject) => {
- resolve = _resolve
- reject = _reject
- })
- process.emit('input', 'read', resolve, reject, ...args)
- return promise
- },
- },
-}
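
proc-log defines no logger of its own; every helper just emits a namespaced event on the global `process`, and whoever is listening decides what to do with it. A minimal producer/consumer sketch using the exported API above:

    const { log, time, output } = require('proc-log')

    // a consumer (e.g. a CLI frontend) subscribes once near startup
    process.on('log', (level, ...args) => {
      console.error(`[${level}]`, ...args)
    })

    log.info('loaded config')           // process.emit('log', 'info', ...)
    log.warn('deprecated flag used')

    const done = time.start('install')  // emits 'time' start, returns an end fn
    done()                              // emits the matching 'time' end

    output.standard('hello')            // process.emit('output', 'standard', ...)

Because the channel is just process events, libraries can log without taking a dependency on any particular logger implementation.
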
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/package.json
deleted file mode 100644
index 4ab89102ecc9b5..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/package.json
+++ /dev/null
@@ -1,45 +0,0 @@
-{
- "name": "proc-log",
- "version": "4.2.0",
- "files": [
- "bin/",
- "lib/"
- ],
- "main": "lib/index.js",
- "description": "just emit 'log' events on the process object",
- "repository": {
- "type": "git",
- "url": "https://github.com/npm/proc-log.git"
- },
- "author": "GitHub Inc.",
- "license": "ISC",
- "scripts": {
- "test": "tap",
- "snap": "tap",
- "posttest": "npm run lint",
- "postsnap": "eslint index.js test/*.js --fix",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "postlint": "template-oss-check",
- "lintfix": "npm run lint -- --fix",
- "template-oss-apply": "template-oss-apply --force"
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.21.3",
- "tap": "^16.0.1"
- },
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.21.3",
- "publish": true
- },
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- }
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md
deleted file mode 100644
index e335388869f50f..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright 2021 (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js
deleted file mode 100644
index 7d749ed480fb98..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js
+++ /dev/null
@@ -1,580 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const { Minipass } = require('minipass')
-
-const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256']
-const DEFAULT_ALGORITHMS = ['sha512']
-
-// TODO: this should really be a hardcoded list of algorithms we support,
-// rather than [a-z0-9].
-const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i
-const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/
-const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/
-const VCHAR_REGEX = /^[\x21-\x7E]+$/
-
-const getOptString = options => options?.length ? `?${options.join('?')}` : ''
-
-class IntegrityStream extends Minipass {
- #emittedIntegrity
- #emittedSize
- #emittedVerified
-
- constructor (opts) {
- super()
- this.size = 0
- this.opts = opts
-
- // may be overridden later, but set now for class consistency
- this.#getOptions()
-
- // options used for calculating stream. can't be changed.
- if (opts?.algorithms) {
- this.algorithms = [...opts.algorithms]
- } else {
- this.algorithms = [...DEFAULT_ALGORITHMS]
- }
- if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) {
- this.algorithms.push(this.algorithm)
- }
-
- this.hashes = this.algorithms.map(crypto.createHash)
- }
-
- #getOptions () {
- // For verification
- this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null
- this.expectedSize = this.opts?.size
-
- if (!this.sri) {
- this.algorithm = null
- } else if (this.sri.isHash) {
- this.goodSri = true
- this.algorithm = this.sri.algorithm
- } else {
- this.goodSri = !this.sri.isEmpty()
- this.algorithm = this.sri.pickAlgorithm(this.opts)
- }
-
- this.digests = this.goodSri ? this.sri[this.algorithm] : null
- this.optString = getOptString(this.opts?.options)
- }
-
- on (ev, handler) {
- if (ev === 'size' && this.#emittedSize) {
- return handler(this.#emittedSize)
- }
-
- if (ev === 'integrity' && this.#emittedIntegrity) {
- return handler(this.#emittedIntegrity)
- }
-
- if (ev === 'verified' && this.#emittedVerified) {
- return handler(this.#emittedVerified)
- }
-
- return super.on(ev, handler)
- }
-
- emit (ev, data) {
- if (ev === 'end') {
- this.#onEnd()
- }
- return super.emit(ev, data)
- }
-
- write (data) {
- this.size += data.length
- this.hashes.forEach(h => h.update(data))
- return super.write(data)
- }
-
- #onEnd () {
- if (!this.goodSri) {
- this.#getOptions()
- }
- const newSri = parse(this.hashes.map((h, i) => {
- return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}`
- }).join(' '), this.opts)
- // Integrity verification mode
- const match = this.goodSri && newSri.match(this.sri, this.opts)
- if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) {
- /* eslint-disable-next-line max-len */
- const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`)
- err.code = 'EBADSIZE'
- err.found = this.size
- err.expected = this.expectedSize
- err.sri = this.sri
- this.emit('error', err)
- } else if (this.sri && !match) {
- /* eslint-disable-next-line max-len */
- const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = this.digests
- err.algorithm = this.algorithm
- err.sri = this.sri
- this.emit('error', err)
- } else {
- this.#emittedSize = this.size
- this.emit('size', this.size)
- this.#emittedIntegrity = newSri
- this.emit('integrity', newSri)
- if (match) {
- this.#emittedVerified = match
- this.emit('verified', match)
- }
- }
- }
-}
-
-class Hash {
- get isHash () {
- return true
- }
-
- constructor (hash, opts) {
- const strict = opts?.strict
- this.source = hash.trim()
-
- // set default values so that we make V8 happy to
- // always see a familiar object template.
- this.digest = ''
- this.algorithm = ''
- this.options = []
-
- // 3.1. Integrity metadata (called "Hash" by ssri)
- // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description
- const match = this.source.match(
- strict
- ? STRICT_SRI_REGEX
- : SRI_REGEX
- )
- if (!match) {
- return
- }
- if (strict && !SPEC_ALGORITHMS.includes(match[1])) {
- return
- }
- this.algorithm = match[1]
- this.digest = match[2]
-
- const rawOpts = match[3]
- if (rawOpts) {
- this.options = rawOpts.slice(1).split('?')
- }
- }
-
- hexDigest () {
- return this.digest && Buffer.from(this.digest, 'base64').toString('hex')
- }
-
- toJSON () {
- return this.toString()
- }
-
- match (integrity, opts) {
- const other = parse(integrity, opts)
- if (!other) {
- return false
- }
- if (other.isIntegrity) {
- const algo = other.pickAlgorithm(opts, [this.algorithm])
-
- if (!algo) {
- return false
- }
-
- const foundHash = other[algo].find(hash => hash.digest === this.digest)
-
- if (foundHash) {
- return foundHash
- }
-
- return false
- }
- return other.digest === this.digest ? other : false
- }
-
- toString (opts) {
- if (opts?.strict) {
- // Strict mode enforces the standard as close to the foot of the
- // letter as it can.
- if (!(
- // The spec has very restricted productions for algorithms.
- // https://www.w3.org/TR/CSP2/#source-list-syntax
- SPEC_ALGORITHMS.includes(this.algorithm) &&
- // Usually, if someone insists on using a "different" base64, we
- // leave it as-is, since there's multiple standards, and the
- // specified is not a URL-safe variant.
- // https://www.w3.org/TR/CSP2/#base64_value
- this.digest.match(BASE64_REGEX) &&
- // Option syntax is strictly visual chars.
- // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression
- // https://tools.ietf.org/html/rfc5234#appendix-B.1
- this.options.every(opt => opt.match(VCHAR_REGEX))
- )) {
- return ''
- }
- }
- return `${this.algorithm}-${this.digest}${getOptString(this.options)}`
- }
-}
-
-function integrityHashToString (toString, sep, opts, hashes) {
- const toStringIsNotEmpty = toString !== ''
-
- let shouldAddFirstSep = false
- let complement = ''
-
- const lastIndex = hashes.length - 1
-
- for (let i = 0; i < lastIndex; i++) {
- const hashString = Hash.prototype.toString.call(hashes[i], opts)
-
- if (hashString) {
- shouldAddFirstSep = true
-
- complement += hashString
- complement += sep
- }
- }
-
- const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts)
-
- if (finalHashString) {
- shouldAddFirstSep = true
- complement += finalHashString
- }
-
- if (toStringIsNotEmpty && shouldAddFirstSep) {
- return toString + sep + complement
- }
-
- return toString + complement
-}
-
-class Integrity {
- get isIntegrity () {
- return true
- }
-
- toJSON () {
- return this.toString()
- }
-
- isEmpty () {
- return Object.keys(this).length === 0
- }
-
- toString (opts) {
- let sep = opts?.sep || ' '
- let toString = ''
-
- if (opts?.strict) {
- // Entries must be separated by whitespace, according to spec.
- sep = sep.replace(/\S+/g, ' ')
-
- for (const hash of SPEC_ALGORITHMS) {
- if (this[hash]) {
- toString = integrityHashToString(toString, sep, opts, this[hash])
- }
- }
- } else {
- for (const hash of Object.keys(this)) {
- toString = integrityHashToString(toString, sep, opts, this[hash])
- }
- }
-
- return toString
- }
-
- concat (integrity, opts) {
- const other = typeof integrity === 'string'
- ? integrity
- : stringify(integrity, opts)
- return parse(`${this.toString(opts)} ${other}`, opts)
- }
-
- hexDigest () {
- return parse(this, { single: true }).hexDigest()
- }
-
- // add additional hashes to an integrity value, but prevent
- // *changing* an existing integrity hash.
- merge (integrity, opts) {
- const other = parse(integrity, opts)
- for (const algo in other) {
- if (this[algo]) {
- if (!this[algo].find(hash =>
- other[algo].find(otherhash =>
- hash.digest === otherhash.digest))) {
- throw new Error('hashes do not match, cannot update integrity')
- }
- } else {
- this[algo] = other[algo]
- }
- }
- }
-
- match (integrity, opts) {
- const other = parse(integrity, opts)
- if (!other) {
- return false
- }
- const algo = other.pickAlgorithm(opts, Object.keys(this))
- return (
- !!algo &&
- this[algo] &&
- other[algo] &&
- this[algo].find(hash =>
- other[algo].find(otherhash =>
- hash.digest === otherhash.digest
- )
- )
- ) || false
- }
-
- // Pick the highest priority algorithm present, optionally also limited to a
- // set of hashes found in another integrity. When limiting it may return
- // nothing.
- pickAlgorithm (opts, hashes) {
- const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash
- const keys = Object.keys(this).filter(k => {
- if (hashes?.length) {
- return hashes.includes(k)
- }
- return true
- })
- if (keys.length) {
- return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc)
- }
- // no intersection between this and hashes,
- return null
- }
-}
-
-module.exports.parse = parse
-function parse (sri, opts) {
- if (!sri) {
- return null
- }
- if (typeof sri === 'string') {
- return _parse(sri, opts)
- } else if (sri.algorithm && sri.digest) {
- const fullSri = new Integrity()
- fullSri[sri.algorithm] = [sri]
- return _parse(stringify(fullSri, opts), opts)
- } else {
- return _parse(stringify(sri, opts), opts)
- }
-}
-
-function _parse (integrity, opts) {
- // 3.4.3. Parse metadata
- // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
- if (opts?.single) {
- return new Hash(integrity, opts)
- }
- const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => {
- const hash = new Hash(string, opts)
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) {
- acc[algo] = []
- }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
- return hashes.isEmpty() ? null : hashes
-}
-
-module.exports.stringify = stringify
-function stringify (obj, opts) {
- if (obj.algorithm && obj.digest) {
- return Hash.prototype.toString.call(obj, opts)
- } else if (typeof obj === 'string') {
- return stringify(parse(obj, opts), opts)
- } else {
- return Integrity.prototype.toString.call(obj, opts)
- }
-}
-
-module.exports.fromHex = fromHex
-function fromHex (hexDigest, algorithm, opts) {
- const optString = getOptString(opts?.options)
- return parse(
- `${algorithm}-${
- Buffer.from(hexDigest, 'hex').toString('base64')
- }${optString}`, opts
- )
-}
-
-module.exports.fromData = fromData
-function fromData (data, opts) {
- const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS]
- const optString = getOptString(opts?.options)
- return algorithms.reduce((acc, algo) => {
- const digest = crypto.createHash(algo).update(data).digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- /* istanbul ignore else - it would be VERY strange if the string we
- * just calculated with an algo did not have an algo or digest.
- */
- if (hash.algorithm && hash.digest) {
- const hashAlgo = hash.algorithm
- if (!acc[hashAlgo]) {
- acc[hashAlgo] = []
- }
- acc[hashAlgo].push(hash)
- }
- return acc
- }, new Integrity())
-}
-
-module.exports.fromStream = fromStream
-function fromStream (stream, opts) {
- const istream = integrityStream(opts)
- return new Promise((resolve, reject) => {
- stream.pipe(istream)
- stream.on('error', reject)
- istream.on('error', reject)
- let sri
- istream.on('integrity', s => {
- sri = s
- })
- istream.on('end', () => resolve(sri))
- istream.resume()
- })
-}
-
-module.exports.checkData = checkData
-function checkData (data, sri, opts) {
- sri = parse(sri, opts)
- if (!sri || !Object.keys(sri).length) {
- if (opts?.error) {
- throw Object.assign(
- new Error('No valid integrity hashes to check against'), {
- code: 'EINTEGRITY',
- }
- )
- } else {
- return false
- }
- }
- const algorithm = sri.pickAlgorithm(opts)
- const digest = crypto.createHash(algorithm).update(data).digest('base64')
- const newSri = parse({ algorithm, digest })
- const match = newSri.match(sri, opts)
- opts = opts || {}
- if (match || !(opts.error)) {
- return match
- } else if (typeof opts.size === 'number' && (data.length !== opts.size)) {
- /* eslint-disable-next-line max-len */
- const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`)
- err.code = 'EBADSIZE'
- err.found = data.length
- err.expected = opts.size
- err.sri = sri
- throw err
- } else {
- /* eslint-disable-next-line max-len */
- const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = sri
- err.algorithm = algorithm
- err.sri = sri
- throw err
- }
-}
-
-module.exports.checkStream = checkStream
-function checkStream (stream, sri, opts) {
- opts = opts || Object.create(null)
- opts.integrity = sri
- sri = parse(sri, opts)
- if (!sri || !Object.keys(sri).length) {
- return Promise.reject(Object.assign(
- new Error('No valid integrity hashes to check against'), {
- code: 'EINTEGRITY',
- }
- ))
- }
- const checker = integrityStream(opts)
- return new Promise((resolve, reject) => {
- stream.pipe(checker)
- stream.on('error', reject)
- checker.on('error', reject)
- let verified
- checker.on('verified', s => {
- verified = s
- })
- checker.on('end', () => resolve(verified))
- checker.resume()
- })
-}
-
-module.exports.integrityStream = integrityStream
-function integrityStream (opts = Object.create(null)) {
- return new IntegrityStream(opts)
-}
-
-module.exports.create = createIntegrity
-function createIntegrity (opts) {
- const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS]
- const optString = getOptString(opts?.options)
-
- const hashes = algorithms.map(crypto.createHash)
-
- return {
- update: function (chunk, enc) {
- hashes.forEach(h => h.update(chunk, enc))
- return this
- },
- digest: function () {
- const integrity = algorithms.reduce((acc, algo) => {
- const digest = hashes.shift().digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- /* istanbul ignore else - it would be VERY strange if the hash we
- * just calculated with an algo did not have an algo or digest.
- */
- if (hash.algorithm && hash.digest) {
- const hashAlgo = hash.algorithm
- if (!acc[hashAlgo]) {
- acc[hashAlgo] = []
- }
- acc[hashAlgo].push(hash)
- }
- return acc
- }, new Integrity())
-
- return integrity
- },
- }
-}
-
-const NODE_HASHES = crypto.getHashes()
-
-// This is a Best Effort™ at a reasonable priority for hash algos
-const DEFAULT_PRIORITY = [
- 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
- // TODO - it's unclear _which_ of these Node will actually use as its name
- // for the algorithm, so we guesswork it based on the OpenSSL names.
- 'sha3',
- 'sha3-256', 'sha3-384', 'sha3-512',
- 'sha3_256', 'sha3_384', 'sha3_512',
-].filter(algo => NODE_HASHES.includes(algo))
-
-function getPrioritizedHash (algo1, algo2) {
- /* eslint-disable-next-line max-len */
- return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase())
- ? algo1
- : algo2
-}
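
The ssri module above parses, generates, and verifies SRI strings. A short usage sketch covering the main exports (digest values shown are illustrative):

    const ssri = require('ssri')

    const data = Buffer.from('hello world')
    const integrity = ssri.fromData(data, { algorithms: ['sha512'] })
    console.log(integrity.toString()) // 'sha512-<base64 digest>'

    // checkData returns the matching Hash object, or false
    console.log(!!ssri.checkData(data, integrity))                  // true
    console.log(ssri.checkData(Buffer.from('tampered'), integrity)) // false

    // the streaming variant resolves with the computed Integrity, e.g.
    // ssri.fromStream(fs.createReadStream('some-file')).then(sri => { ... })
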
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/package.json
deleted file mode 100644
index 28395414e4643c..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/package.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "name": "ssri",
- "version": "10.0.6",
- "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
- "main": "lib/index.js",
- "files": [
- "bin/",
- "lib/"
- ],
- "scripts": {
- "prerelease": "npm t",
- "postrelease": "npm publish",
- "posttest": "npm run lint",
- "test": "tap",
- "coverage": "tap",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "postlint": "template-oss-check",
- "template-oss-apply": "template-oss-apply --force",
- "lintfix": "npm run lint -- --fix",
- "snap": "tap"
- },
- "tap": {
- "check-coverage": true,
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/ssri.git"
- },
- "keywords": [
- "w3c",
- "web",
- "security",
- "integrity",
- "checksum",
- "hashing",
- "subresource integrity",
- "sri",
- "sri hash",
- "sri string",
- "sri generator",
- "html"
- ],
- "author": "GitHub Inc.",
- "license": "ISC",
- "dependencies": {
- "minipass": "^7.0.3"
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.22.0",
- "tap": "^16.0.1"
- },
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.22.0",
- "publish": "true"
- }
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE
deleted file mode 100644
index 69619c125ea7ef..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE
+++ /dev/null
@@ -1,5 +0,0 @@
-Copyright npm, Inc
-
-Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js
deleted file mode 100644
index d067d2e709809a..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js
+++ /dev/null
@@ -1,7 +0,0 @@
-var path = require('path')
-
-var uniqueSlug = require('unique-slug')
-
-module.exports = function (filepath, prefix, uniq) {
- return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq))
-}
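
unique-filename is just path.join plus unique-slug: with a `uniq` seed the name is deterministic, without one it is random. A usage sketch (output paths are illustrative):

    const os = require('os')
    const uniqueFilename = require('unique-filename')

    // random slug: different on every call
    console.log(uniqueFilename(os.tmpdir(), 'npm-cache'))
    // e.g. /tmp/npm-cache-81f2f29c

    // deterministic: the same uniq seed always yields the same name
    const a = uniqueFilename(os.tmpdir(), 'npm-cache', '/my/project')
    const b = uniqueFilename(os.tmpdir(), 'npm-cache', '/my/project')
    console.log(a === b) // true
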
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/package.json
deleted file mode 100644
index b2fbf0666489a6..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/package.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "name": "unique-filename",
- "version": "3.0.0",
- "description": "Generate a unique filename for use in temporary directories or caches.",
- "main": "lib/index.js",
- "scripts": {
- "test": "tap",
- "lint": "eslint \"**/*.js\"",
- "postlint": "template-oss-check",
- "template-oss-apply": "template-oss-apply --force",
- "lintfix": "npm run lint -- --fix",
- "snap": "tap",
- "posttest": "npm run lint"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/npm/unique-filename.git"
- },
- "keywords": [],
- "author": "GitHub Inc.",
- "license": "ISC",
- "bugs": {
- "url": "https://github.com/iarna/unique-filename/issues"
- },
- "homepage": "https://github.com/iarna/unique-filename",
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.5.1",
- "tap": "^16.3.0"
- },
- "dependencies": {
- "unique-slug": "^4.0.0"
- },
- "files": [
- "bin/",
- "lib/"
- ],
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.5.1"
- },
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- }
-}
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE
deleted file mode 100644
index 7953647e7760b8..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright npm, Inc
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js
deleted file mode 100644
index 1bac84d95d7307..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js
+++ /dev/null
@@ -1,11 +0,0 @@
-'use strict'
-var MurmurHash3 = require('imurmurhash')
-
-module.exports = function (uniq) {
- if (uniq) {
- var hash = new MurmurHash3(uniq)
- return ('00000000' + hash.result().toString(16)).slice(-8)
- } else {
- return (Math.random().toString(16) + '0000000').slice(2, 10)
- }
-}
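
unique-slug returns an 8-character hex string: a MurmurHash3 of the input when a seed is given (stable across calls and processes), or random hex otherwise. For example:

    const uniqueSlug = require('unique-slug')

    // seeded: deterministic MurmurHash3 of the input
    console.log(uniqueSlug('/some/input'))                               // e.g. 'aa49a54e'
    console.log(uniqueSlug('/some/input') === uniqueSlug('/some/input')) // true

    // unseeded: 8 random hex characters
    console.log(uniqueSlug()) // e.g. '0cf51a9f'
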
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/package.json
deleted file mode 100644
index 33732cdbb42859..00000000000000
--- a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/package.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
- "name": "unique-slug",
- "version": "4.0.0",
- "description": "Generate a unique character string suitible for use in files and URLs.",
- "main": "lib/index.js",
- "scripts": {
- "test": "tap",
- "lint": "eslint \"**/*.js\"",
- "postlint": "template-oss-check",
- "template-oss-apply": "template-oss-apply --force",
- "lintfix": "npm run lint -- --fix",
- "snap": "tap",
- "posttest": "npm run lint"
- },
- "keywords": [],
- "author": "GitHub Inc.",
- "license": "ISC",
- "devDependencies": {
- "@npmcli/eslint-config": "^3.1.0",
- "@npmcli/template-oss": "4.5.1",
- "tap": "^16.3.0"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/npm/unique-slug.git"
- },
- "dependencies": {
- "imurmurhash": "^0.1.4"
- },
- "files": [
- "bin/",
- "lib/"
- ],
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.5.1"
- },
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- }
-}
diff --git a/deps/npm/node_modules/@sigstore/tuf/dist/appdata.js b/deps/npm/node_modules/@sigstore/tuf/dist/appdata.js
index c9a8ee92b531eb..06a8143e70da2f 100644
--- a/deps/npm/node_modules/@sigstore/tuf/dist/appdata.js
+++ b/deps/npm/node_modules/@sigstore/tuf/dist/appdata.js
@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.appDataPath = void 0;
+exports.appDataPath = appDataPath;
/*
Copyright 2023 The Sigstore Authors.
@@ -41,4 +41,3 @@ function appDataPath(name) {
}
}
}
-exports.appDataPath = appDataPath;
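Note on this hunk (and the matching hunks in the other @sigstore/tuf dist files below): the older CommonJS emit zeroes each named export with `void 0` and assigns it at the bottom of the file; the new emit assigns directly next to the `__esModule` marker. This looks like the output of a newer TypeScript compiler. A minimal sketch of why the early assignment is safe:

    'use strict'
    Object.defineProperty(exports, '__esModule', { value: true })
    // Safe before the declaration: function declarations hoist, so the
    // binding already exists when this assignment runs.
    exports.appDataPath = appDataPath

    function appDataPath (name) {
      return '/tmp/' + name // placeholder body for the sketch
    }

The direct-assignment form also lets static analyzers (e.g. cjs-module-lexer) detect the named exports.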
diff --git a/deps/npm/node_modules/@sigstore/tuf/dist/client.js b/deps/npm/node_modules/@sigstore/tuf/dist/client.js
index 2019c1fd30f886..328f49e40dbbd7 100644
--- a/deps/npm/node_modules/@sigstore/tuf/dist/client.js
+++ b/deps/npm/node_modules/@sigstore/tuf/dist/client.js
@@ -79,7 +79,6 @@ function seedCache({ cachePath, mirrorURL, tufRootPath, forceInit, }) {
fs_1.default.copyFileSync(tufRootPath, cachedRootPath);
}
else {
- /* eslint-disable @typescript-eslint/no-var-requires */
const seeds = require('../seeds.json');
const repoSeed = seeds[mirrorURL];
if (!repoSeed) {
diff --git a/deps/npm/node_modules/@sigstore/tuf/dist/index.js b/deps/npm/node_modules/@sigstore/tuf/dist/index.js
index 678c81d45d21ed..2af5de93ec5d2f 100644
--- a/deps/npm/node_modules/@sigstore/tuf/dist/index.js
+++ b/deps/npm/node_modules/@sigstore/tuf/dist/index.js
@@ -1,6 +1,8 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.TUFError = exports.initTUF = exports.getTrustedRoot = exports.DEFAULT_MIRROR_URL = void 0;
+exports.TUFError = exports.DEFAULT_MIRROR_URL = void 0;
+exports.getTrustedRoot = getTrustedRoot;
+exports.initTUF = initTUF;
/*
Copyright 2023 The Sigstore Authors.
@@ -31,14 +33,12 @@ options = {}) {
const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET);
return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot));
}
-exports.getTrustedRoot = getTrustedRoot;
async function initTUF(
/* istanbul ignore next */
options = {}) {
const client = createClient(options);
return client.refresh().then(() => client);
}
-exports.initTUF = initTUF;
// Create a TUF client with default options
function createClient(options) {
/* istanbul ignore next */
diff --git a/deps/npm/node_modules/@sigstore/tuf/dist/target.js b/deps/npm/node_modules/@sigstore/tuf/dist/target.js
index 29eaf99a7e721c..5c6675bdfbf5fe 100644
--- a/deps/npm/node_modules/@sigstore/tuf/dist/target.js
+++ b/deps/npm/node_modules/@sigstore/tuf/dist/target.js
@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.readTarget = void 0;
+exports.readTarget = readTarget;
/*
Copyright 2023 The Sigstore Authors.
@@ -39,7 +39,6 @@ async function readTarget(tuf, targetPath) {
});
});
}
-exports.readTarget = readTarget;
// Returns the local path to the specified target. If the target is not yet
// cached locally, the provided TUF Updater will be used to download and
// cache the target.
diff --git a/deps/npm/node_modules/@sigstore/tuf/package.json b/deps/npm/node_modules/@sigstore/tuf/package.json
index b7fd34ac9674eb..808689dfddf92f 100644
--- a/deps/npm/node_modules/@sigstore/tuf/package.json
+++ b/deps/npm/node_modules/@sigstore/tuf/package.json
@@ -1,6 +1,6 @@
{
"name": "@sigstore/tuf",
- "version": "2.3.4",
+ "version": "3.0.0",
"description": "Client for the Sigstore TUF repository",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -28,14 +28,14 @@
},
"devDependencies": {
"@sigstore/jest": "^0.0.0",
- "@tufjs/repo-mock": "^2.0.1",
+ "@tufjs/repo-mock": "^3.0.1",
"@types/make-fetch-happen": "^10.0.4"
},
"dependencies": {
"@sigstore/protobuf-specs": "^0.3.2",
- "tuf-js": "^2.2.1"
+ "tuf-js": "^3.0.1"
},
"engines": {
- "node": "^16.14.0 || >=18.0.0"
+ "node": "^18.17.0 || >=20.5.0"
}
}
diff --git a/deps/npm/node_modules/@sigstore/tuf/seeds.json b/deps/npm/node_modules/@sigstore/tuf/seeds.json
index e8d97d5fa7a672..d1d3c6b5c46040 100644
--- a/deps/npm/node_modules/@sigstore/tuf/seeds.json
+++ b/deps/npm/node_modules/@sigstore/tuf/seeds.json
@@ -1 +1 @@
-{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"{
	"signed": {
		"_type": "root",
		"spec_version": "1.0",
		"version": 9,
		"expires": "2024-09-12T06:53:10Z",
		"keys": {
			"1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"
				}
			},
			"230e212616274a4195cdc28e9fce782c20e6c720f1a811b40f98228376bdd3ac": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAELrWvNt94v4R085ELeeCMxHp7PldF\n0/T1GxukUh2ODuggLGJE0pc1e8CSBf6CS91Fwo9FUOuRsjBUld+VqSyCdQ==\n-----END PUBLIC KEY-----\n"
				}
			},
			"3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n"
				}
			},
			"923bb39e60dd6fa2c31e6ea55473aa93b64dd4e53e16fbe42f6a207d3f97de2d": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"
				}
			},
			"e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"
				}
			},
			"ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"
				}
			},
			"fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"
				}
			}
		},
		"roles": {
			"root": {
				"keyids": [
					"3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
					"ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
					"1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
					"e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
					"fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f"
				],
				"threshold": 3
			},
			"snapshot": {
				"keyids": [
					"230e212616274a4195cdc28e9fce782c20e6c720f1a811b40f98228376bdd3ac"
				],
				"threshold": 1
			},
			"targets": {
				"keyids": [
					"3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
					"ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
					"1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
					"e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
					"fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f"
				],
				"threshold": 3
			},
			"timestamp": {
				"keyids": [
					"923bb39e60dd6fa2c31e6ea55473aa93b64dd4e53e16fbe42f6a207d3f97de2d"
				],
				"threshold": 1
			}
		},
		"consistent_snapshot": true
	},
	"signatures": [
		{
			"keyid": "ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c",
			"sig": "30450221008b78f894c3cfed3bd486379c4e0e0dfb3e7dd8cbc4d5598d2818eea1ba3c7550022029d3d06e89d04d37849985dc46c0e10dc5b1fc68dc70af1ec9910303a1f3ee2f"
		},
		{
			"keyid": "25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99",
			"sig": "30450221009e6b90b935e09b837a90d4402eaa27d5ea26eb7891948ba0ed7090841248f436022003dc2251c4d4a7999b91e9ad0868765ae09ac7269279f2a7899bafef7a2d9260"
		},
		{
			"keyid": "f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f",
			"sig": "30440220099e907dcf90b7b6e109fd1d6e442006fccbb48894aaaff47ab824b03fb35d0d02202aa0a06c21a4233f37900a48bc8777d3b47f59e3a38616ce631a04df57f96736"
		},
		{
			"keyid": "3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
			"sig": "30450221008b78f894c3cfed3bd486379c4e0e0dfb3e7dd8cbc4d5598d2818eea1ba3c7550022029d3d06e89d04d37849985dc46c0e10dc5b1fc68dc70af1ec9910303a1f3ee2f"
		},
		{
			"keyid": "ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
			"sig": "30450221009e6b90b935e09b837a90d4402eaa27d5ea26eb7891948ba0ed7090841248f436022003dc2251c4d4a7999b91e9ad0868765ae09ac7269279f2a7899bafef7a2d9260"
		},
		{
			"keyid": "e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
			"sig": "304502200e5613b901e0f3e08eceabddc73f98b50ddf892e998d0b369c6e3d451ac48875022100940cf92d1f43ee2e5cdbb22572bb52925ed3863a688f7ffdd4bd2e2e56f028b3"
		},
		{
			"keyid": "2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de",
			"sig": "304502202cff44f2215d7a47b28b8f5f580c2cfbbd1bfcfcbbe78de323045b2c0badc5e9022100c743949eb3f4ea5a4b9ae27ac6eddea1f0ff9bfd004f8a9a9d18c6e4142b6e75"
		},
		{
			"keyid": "1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
			"sig": "30440220099e907dcf90b7b6e109fd1d6e442006fccbb48894aaaff47ab824b03fb35d0d02202aa0a06c21a4233f37900a48bc8777d3b47f59e3a38616ce631a04df57f96736"
		},
		{
			"keyid": "fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f",
			"sig": "304502202cff44f2215d7a47b28b8f5f580c2cfbbd1bfcfcbbe78de323045b2c0badc5e9022100c743949eb3f4ea5a4b9ae27ac6eddea1f0ff9bfd004f8a9a9d18c6e4142b6e75"
		},
		{
			"keyid": "7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b",
			"sig": "304502200e5613b901e0f3e08eceabddc73f98b50ddf892e998d0b369c6e3d451ac48875022100940cf92d1f43ee2e5cdbb22572bb52925ed3863a688f7ffdd4bd2e2e56f028b3"
		}
	]
}","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27.000Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29.000Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15.000Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00.000Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00.000Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "GitHub, Inc.",
        "commonName": "Internal Services Root"
      },
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB3DCCAWKgAwIBAgIUchkNsH36Xa04b1LqIc+qr9DVecMwCgYIKoZIzj0EAwMwMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMB4XDTIzMDQxNDAwMDAwMFoXDTI0MDQxMzAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgVGltZXN0YW1waW5nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEUD5ZNbSqYMd6r8qpOOEX9ibGnZT9GsuXOhr/f8U9FJugBGExKYp40OULS0erjZW7xV9xV52NnJf5OeDq4e5ZKqNWMFQwDgYDVR0PAQH/BAQDAgeAMBMGA1UdJQQMMAoGCCsGAQUFBwMIMAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUaW1RudOgVt0leqY0WKYbuPr47wAwCgYIKoZIzj0EAwMDaAAwZQIwbUH9HvD4ejCZJOWQnqAlkqURllvu9M8+VqLbiRK+zSfZCZwsiljRn8MQQRSkXEE5AjEAg+VxqtojfVfu8DhzzhCx9GKETbJHb19iV72mMKUbDAFmzZ6bQ8b54Zb8tidy5aWe"
          },
          {
            "rawBytes": "MIICEDCCAZWgAwIBAgIUX8ZO5QXP7vN4dMQ5e9sU3nub8OgwCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTI4MDQxMjAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEvMLY/dTVbvIJYANAuszEwJnQE1llftynyMKIMhh48HmqbVr5ygybzsLRLVKbBWOdZ21aeJz+gZiytZetqcyF9WlER5NEMf6JV7ZNojQpxHq4RHGoGSceQv/qvTiZxEDKo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUaW1RudOgVt0leqY0WKYbuPr47wAwHwYDVR0jBBgwFoAU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaQAwZgIxAK1B185ygCrIYFlIs3GjswjnwSMG6LY8woLVdakKDZxVa8f8cqMs1DhcxJ0+09w95QIxAO+tBzZk7vjUJ9iJgD4R6ZWTxQWKqNm74jO99o+o9sv4FI/SZTZTFyMn0IJEHdNmyA=="
          },
          {
            "rawBytes": "MIIB9DCCAXqgAwIBAgIUa/JAkdUjK4JUwsqtaiRJGWhqLSowCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTMzMDQxMTAwMDAwMFowODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEf9jFAXxz4kx68AHRMOkFBhflDcMTvzaXz4x/FCcXjJ/1qEKon/qPIGnaURskDtyNbNDOpeJTDDFqt48iMPrnzpx6IZwqemfUJN4xBEZfza+pYt/iyod+9tZr20RRWSv/o0UwQzAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBAjAdBgNVHQ4EFgQU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaAAwZQIxALZLZ8BgRXzKxLMMN9VIlO+e4hrBnNBgF7tz7Hnrowv2NetZErIACKFymBlvWDvtMAIwZO+ki6ssQ1bsZo98O8mEAf2NZ7iiCgDDU0Vwjeco6zyeh0zBTs9/7gV6AHNQ53xD"
          }
        ]
      },
      "validFor": {
        "start": "2023-04-14T00:00:00.000Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpqbDNid3N3dTgwUGpqb2tDZ2gwbzJ3NWMyVTRMaFFBRTU3Z2o5Y3oxa3pBIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDIyLTEyLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}}
+{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"{
 "signatures": [
  {
   "keyid": "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
   "sig": "30460221008ab1f6f17d4f9e6d7dcf1c88912b6b53cc10388644ae1f09bc37a082cd06003e022100e145ef4c7b782d4e8107b53437e669d0476892ce999903ae33d14448366996e7"
  },
  {
   "keyid": "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
   "sig": "3045022100c768b2f86da99569019c160a081da54ae36c34c0a3120d3cb69b53b7d113758e02204f671518f617b20d46537fae6c3b63bae8913f4f1962156105cc4f019ac35c6a"
  },
  {
   "keyid": "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
   "sig": "3045022100b4434e6995d368d23e74759acd0cb9013c83a5d3511f0f997ec54c456ae4350a022015b0e265d182d2b61dc74e155d98b3c3fbe564ba05286aa14c8df02c9b756516"
  },
  {
   "keyid": "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
   "sig": "304502210082c58411d989eb9f861410857d42381590ec9424dbdaa51e78ed13515431904e0220118185da6a6c2947131c17797e2bb7620ce26e5f301d1ceac5f2a7e58f9dcf2e"
  },
  {
   "keyid": "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70",
   "sig": "3046022100c78513854cae9c32eaa6b88e18912f48006c2757a258f917312caba75948eb9e022100d9e1b4ce0adfe9fd2e2148d7fa27a2f40ba1122bd69da7612d8d1776b013c91d"
  },
  {
   "keyid": "fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f",
   "sig": "3045022056483a2d5d9ea9cec6e11eadfb33c484b614298faca15acf1c431b11ed7f734c022100d0c1d726af92a87e4e66459ca5adf38a05b44e1f94318423f954bae8bca5bb2e"
  },
  {
   "keyid": "e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
   "sig": "3046022100d004de88024c32dc5653a9f4843cfc5215427048ad9600d2cf9c969e6edff3d2022100d9ebb798f5fc66af10899dece014a8628ccf3c5402cd4a4270207472f8f6e712"
  },
  {
   "keyid": "3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
   "sig": "3046022100b7b09996c45ca2d4b05603e56baefa29718a0b71147cf8c6e66349baa61477df022100c4da80c717b4fa7bba0fd5c72da8a0499358b01358b2309f41d1456ea1e7e1d9"
  },
  {
   "keyid": "ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
   "sig": "3046022100be9782c30744e411a82fa85b5138d601ce148bc19258aec64e7ec24478f38812022100caef63dcaf1a4b9a500d3bd0e3f164ec18f1b63d7a9460d9acab1066db0f016d"
  },
  {
   "keyid": "1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
   "sig": "30450220746ec3f8534ce55531d0d01ff64964ef440d1e7d2c4c142409b8e9769f1ada6f022100e3b929fcd93ea18feaa0825887a7210489879a66780c07a83f4bd46e2f09ab3b"
  }
 ],
 "signed": {
  "_type": "root",
  "consistent_snapshot": true,
  "expires": "2025-02-19T08:04:32Z",
  "keys": {
   "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@santiagotorres"
   },
   "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@bobcallaway"
   },
   "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@dlorenc"
   },
   "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-online-uri": "gcpkms://projects/sigstore-root-signing/locations/global/keyRings/root/cryptoKeys/timestamp"
   },
   "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@joshuagl"
   },
   "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@mnm678"
   }
  },
  "roles": {
   "root": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "snapshot": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 3650,
    "x-tuf-on-ci-signing-period": 365
   },
   "targets": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "timestamp": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 7,
    "x-tuf-on-ci-signing-period": 4
   }
  },
  "spec_version": "1.0",
  "version": 10,
  "x-tuf-on-ci-expiry-period": 182,
  "x-tuf-on-ci-signing-period": 31
 }
}","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27.000Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29.000Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15.000Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00.000Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00.000Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "GitHub, Inc.",
        "commonName": "Internal Services Root"
      },
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB3DCCAWKgAwIBAgIUchkNsH36Xa04b1LqIc+qr9DVecMwCgYIKoZIzj0EAwMwMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMB4XDTIzMDQxNDAwMDAwMFoXDTI0MDQxMzAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgVGltZXN0YW1waW5nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEUD5ZNbSqYMd6r8qpOOEX9ibGnZT9GsuXOhr/f8U9FJugBGExKYp40OULS0erjZW7xV9xV52NnJf5OeDq4e5ZKqNWMFQwDgYDVR0PAQH/BAQDAgeAMBMGA1UdJQQMMAoGCCsGAQUFBwMIMAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUaW1RudOgVt0leqY0WKYbuPr47wAwCgYIKoZIzj0EAwMDaAAwZQIwbUH9HvD4ejCZJOWQnqAlkqURllvu9M8+VqLbiRK+zSfZCZwsiljRn8MQQRSkXEE5AjEAg+VxqtojfVfu8DhzzhCx9GKETbJHb19iV72mMKUbDAFmzZ6bQ8b54Zb8tidy5aWe"
          },
          {
            "rawBytes": "MIICEDCCAZWgAwIBAgIUX8ZO5QXP7vN4dMQ5e9sU3nub8OgwCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTI4MDQxMjAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEvMLY/dTVbvIJYANAuszEwJnQE1llftynyMKIMhh48HmqbVr5ygybzsLRLVKbBWOdZ21aeJz+gZiytZetqcyF9WlER5NEMf6JV7ZNojQpxHq4RHGoGSceQv/qvTiZxEDKo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUaW1RudOgVt0leqY0WKYbuPr47wAwHwYDVR0jBBgwFoAU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaQAwZgIxAK1B185ygCrIYFlIs3GjswjnwSMG6LY8woLVdakKDZxVa8f8cqMs1DhcxJ0+09w95QIxAO+tBzZk7vjUJ9iJgD4R6ZWTxQWKqNm74jO99o+o9sv4FI/SZTZTFyMn0IJEHdNmyA=="
          },
          {
            "rawBytes": "MIIB9DCCAXqgAwIBAgIUa/JAkdUjK4JUwsqtaiRJGWhqLSowCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTMzMDQxMTAwMDAwMFowODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEf9jFAXxz4kx68AHRMOkFBhflDcMTvzaXz4x/FCcXjJ/1qEKon/qPIGnaURskDtyNbNDOpeJTDDFqt48iMPrnzpx6IZwqemfUJN4xBEZfza+pYt/iyod+9tZr20RRWSv/o0UwQzAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBAjAdBgNVHQ4EFgQU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaAAwZQIxALZLZ8BgRXzKxLMMN9VIlO+e4hrBnNBgF7tz7Hnrowv2NetZErIACKFymBlvWDvtMAIwZO+ki6ssQ1bsZo98O8mEAf2NZ7iiCgDDU0Vwjeco6zyeh0zBTs9/7gV6AHNQ53xD"
          }
        ]
      },
      "validFor": {
        "start": "2023-04-14T00:00:00.000Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpqbDNid3N3dTgwUGpqb2tDZ2gwbzJ3NWMyVTRMaFFBRTU3Z2o5Y3oxa3pBIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDIyLTEyLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}}
diff --git a/deps/npm/node_modules/ci-info/index.js b/deps/npm/node_modules/ci-info/index.js
index 47907264581eb1..9eba6940c4147e 100644
--- a/deps/npm/node_modules/ci-info/index.js
+++ b/deps/npm/node_modules/ci-info/index.js
@@ -13,6 +13,7 @@ Object.defineProperty(exports, '_vendors', {
exports.name = null
exports.isPR = null
+exports.id = null
vendors.forEach(function (vendor) {
const envs = Array.isArray(vendor.env) ? vendor.env : [vendor.env]
@@ -27,45 +28,23 @@ vendors.forEach(function (vendor) {
}
exports.name = vendor.name
-
- switch (typeof vendor.pr) {
- case 'string':
- // "pr": "CIRRUS_PR"
- exports.isPR = !!env[vendor.pr]
- break
- case 'object':
- if ('env' in vendor.pr) {
- // "pr": { "env": "BUILDKITE_PULL_REQUEST", "ne": "false" }
- exports.isPR = vendor.pr.env in env && env[vendor.pr.env] !== vendor.pr.ne
- } else if ('any' in vendor.pr) {
- // "pr": { "any": ["ghprbPullId", "CHANGE_ID"] }
- exports.isPR = vendor.pr.any.some(function (key) {
- return !!env[key]
- })
- } else {
- // "pr": { "DRONE_BUILD_EVENT": "pull_request" }
- exports.isPR = checkEnv(vendor.pr)
- }
- break
- default:
- // PR detection not supported for this vendor
- exports.isPR = null
- }
+ exports.isPR = checkPR(vendor)
+ exports.id = vendor.constant
})
exports.isCI = !!(
env.CI !== 'false' && // Bypass all checks if CI env is explicitly set to 'false'
(env.BUILD_ID || // Jenkins, Cloudbees
- env.BUILD_NUMBER || // Jenkins, TeamCity
- env.CI || // Travis CI, CircleCI, Cirrus CI, Gitlab CI, Appveyor, CodeShip, dsari
- env.CI_APP_ID || // Appflow
- env.CI_BUILD_ID || // Appflow
- env.CI_BUILD_NUMBER || // Appflow
- env.CI_NAME || // Codeship and others
- env.CONTINUOUS_INTEGRATION || // Travis CI, Cirrus CI
- env.RUN_ID || // TaskCluster, dsari
- exports.name ||
- false)
+ env.BUILD_NUMBER || // Jenkins, TeamCity
+ env.CI || // Travis CI, CircleCI, Cirrus CI, Gitlab CI, Appveyor, CodeShip, dsari
+ env.CI_APP_ID || // Appflow
+ env.CI_BUILD_ID || // Appflow
+ env.CI_BUILD_NUMBER || // Appflow
+ env.CI_NAME || // Codeship and others
+ env.CONTINUOUS_INTEGRATION || // Travis CI, Cirrus CI
+ env.RUN_ID || // TaskCluster, dsari
+ exports.name ||
+ false)
)
function checkEnv (obj) {
@@ -79,12 +58,45 @@ function checkEnv (obj) {
return env[obj.env] && env[obj.env].includes(obj.includes)
// }
}
+
if ('any' in obj) {
return obj.any.some(function (k) {
return !!env[k]
})
}
+
return Object.keys(obj).every(function (k) {
return env[k] === obj[k]
})
}
+
+function checkPR (vendor) {
+ switch (typeof vendor.pr) {
+ case 'string':
+ // "pr": "CIRRUS_PR"
+ return !!env[vendor.pr]
+ case 'object':
+ if ('env' in vendor.pr) {
+ if ('any' in vendor.pr) {
+ // "pr": { "env": "CODEBUILD_WEBHOOK_EVENT", "any": ["PULL_REQUEST_CREATED", "PULL_REQUEST_UPDATED"] }
+ return vendor.pr.any.some(function (key) {
+ return env[vendor.pr.env] === key
+ })
+ } else {
+ // "pr": { "env": "BUILDKITE_PULL_REQUEST", "ne": "false" }
+ return vendor.pr.env in env && env[vendor.pr.env] !== vendor.pr.ne
+ }
+ } else if ('any' in vendor.pr) {
+ // "pr": { "any": ["ghprbPullId", "CHANGE_ID"] }
+ return vendor.pr.any.some(function (key) {
+ return !!env[key]
+ })
+ } else {
+ // "pr": { "DRONE_BUILD_EVENT": "pull_request" }
+ return checkEnv(vendor.pr)
+ }
+ default:
+ // PR detection not supported for this vendor
+ return null
+ }
+}
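The refactor above moves PR detection into the dedicated checkPR helper and adds exports.id, exposing the vendor constant alongside the human-readable name. A small consumer-side sketch of the resulting API:

    // consumer sketch for ci-info >= 4.1.0
    const ci = require('ci-info')
    if (ci.isCI) {
      // e.g. name: 'GitHub Actions', id: 'GITHUB_ACTIONS'
      console.log(`running on ${ci.name} (${ci.id}), PR build: ${ci.isPR}`)
    }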
diff --git a/deps/npm/node_modules/ci-info/package.json b/deps/npm/node_modules/ci-info/package.json
index 3c6b9e4adac8e8..156329d2ce379c 100644
--- a/deps/npm/node_modules/ci-info/package.json
+++ b/deps/npm/node_modules/ci-info/package.json
@@ -1,6 +1,6 @@
{
"name": "ci-info",
- "version": "4.0.0",
+ "version": "4.1.0",
"description": "Get details about the current Continuous Integration environment",
"main": "index.js",
"typings": "index.d.ts",
@@ -9,6 +9,18 @@
"repository": "https://github.com/watson/ci-info.git",
"bugs": "https://github.com/watson/ci-info/issues",
"homepage": "https://github.com/watson/ci-info",
+ "contributors": [
+ {
+ "name": "Sibiraj",
+ "url": "https://github.com/sibiraj-s"
+ }
+ ],
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/sibiraj-s"
+ }
+ ],
"keywords": [
"ci",
"continuous",
@@ -22,22 +34,16 @@
"index.d.ts",
"CHANGELOG.md"
],
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/sibiraj-s"
- }
- ],
"scripts": {
"lint:fix": "standard --fix",
"test": "standard && node test.js",
- "prepare": "husky install"
+ "prepare": "husky install || true"
},
"devDependencies": {
"clear-module": "^4.1.2",
- "husky": "^8.0.3",
- "standard": "^17.1.0",
- "tape": "^5.7.0"
+ "husky": "^9.1.6",
+ "standard": "^17.1.2",
+ "tape": "^5.9.0"
},
"engines": {
"node": ">=8"
diff --git a/deps/npm/node_modules/ci-info/vendors.json b/deps/npm/node_modules/ci-info/vendors.json
index 6b65e3f9b541f8..64d5924d1a557e 100644
--- a/deps/npm/node_modules/ci-info/vendors.json
+++ b/deps/npm/node_modules/ci-info/vendors.json
@@ -8,7 +8,11 @@
{
"name": "Appcircle",
"constant": "APPCIRCLE",
- "env": "AC_APPCIRCLE"
+ "env": "AC_APPCIRCLE",
+ "pr": {
+ "env": "AC_GIT_PR",
+ "ne": "false"
+ }
},
{
"name": "AppVeyor",
@@ -19,7 +23,15 @@
{
"name": "AWS CodeBuild",
"constant": "CODEBUILD",
- "env": "CODEBUILD_BUILD_ARN"
+ "env": "CODEBUILD_BUILD_ARN",
+ "pr": {
+ "env": "CODEBUILD_WEBHOOK_EVENT",
+ "any": [
+ "PULL_REQUEST_CREATED",
+ "PULL_REQUEST_UPDATED",
+ "PULL_REQUEST_REOPENED"
+ ]
+ }
},
{
"name": "Azure Pipelines",
diff --git a/deps/npm/node_modules/cross-spawn/lib/enoent.js b/deps/npm/node_modules/cross-spawn/lib/enoent.js
index 14df9b623d0a20..da33471369c23f 100644
--- a/deps/npm/node_modules/cross-spawn/lib/enoent.js
+++ b/deps/npm/node_modules/cross-spawn/lib/enoent.js
@@ -24,7 +24,7 @@ function hookChildProcess(cp, parsed) {
// the command exists and emit an "error" instead
// See https://github.com/IndigoUnited/node-cross-spawn/issues/16
if (name === 'exit') {
- const err = verifyENOENT(arg1, parsed, 'spawn');
+ const err = verifyENOENT(arg1, parsed);
if (err) {
return originalEmit.call(cp, 'error', err);
diff --git a/deps/npm/node_modules/cross-spawn/lib/util/escape.js b/deps/npm/node_modules/cross-spawn/lib/util/escape.js
index b0bb84c3a14092..7bf2905cd035ad 100644
--- a/deps/npm/node_modules/cross-spawn/lib/util/escape.js
+++ b/deps/npm/node_modules/cross-spawn/lib/util/escape.js
@@ -15,15 +15,17 @@ function escapeArgument(arg, doubleEscapeMetaChars) {
arg = `${arg}`;
// Algorithm below is based on https://qntm.org/cmd
+ // It's slightly altered to disable JS backtracking to avoid hanging on specially crafted input
+ // Please see https://github.com/moxystudio/node-cross-spawn/pull/160 for more information
// Sequence of backslashes followed by a double quote:
// double up all the backslashes and escape the double quote
- arg = arg.replace(/(\\*)"/g, '$1$1\\"');
+ arg = arg.replace(/(?=(\\+?)?)\1"/g, '$1$1\\"');
// Sequence of backslashes followed by the end of the string
// (which will become a double quote later):
// double up all the backslashes
- arg = arg.replace(/(\\*)$/, '$1$1');
+ arg = arg.replace(/(?=(\\+?)?)\1$/, '$1$1');
// All other backslashes occur literally
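The two regex changes above are the fix for the cross-spawn ReDoS (CVE-2024-21538): the old `(\\*)"` and `(\\*)$` patterns could be driven into super-linear backtracking by long runs of backslashes, while the lookahead-plus-backreference form matches each backslash run exactly once. A quick check, assuming the internal module path is stable (it is not part of the public API):

    // sketch: the escaped output is unchanged, only the matching strategy differs
    const escape = require('cross-spawn/lib/util/escape')
    const out = escape.argument('say "hi"', false)
    // out is the cmd.exe-safe form: quotes are backslash-escaped and the
    // whole argument is wrapped and caret-escaped for the shell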
diff --git a/deps/npm/node_modules/cross-spawn/package.json b/deps/npm/node_modules/cross-spawn/package.json
index 232ff97e04b213..24b2eb4c9900cf 100644
--- a/deps/npm/node_modules/cross-spawn/package.json
+++ b/deps/npm/node_modules/cross-spawn/package.json
@@ -1,6 +1,6 @@
{
"name": "cross-spawn",
- "version": "7.0.3",
+ "version": "7.0.6",
"description": "Cross platform child_process#spawn and child_process#spawnSync",
"keywords": [
"spawn",
@@ -65,7 +65,7 @@
"lint-staged": "^9.2.5",
"mkdirp": "^0.5.1",
"rimraf": "^3.0.0",
- "standard-version": "^7.0.0"
+ "standard-version": "^9.5.0"
},
"engines": {
"node": ">= 8"
diff --git a/deps/npm/node_modules/debug/node_modules/ms/index.js b/deps/npm/node_modules/debug/node_modules/ms/index.js
deleted file mode 100644
index c4498bcc212589..00000000000000
--- a/deps/npm/node_modules/debug/node_modules/ms/index.js
+++ /dev/null
@@ -1,162 +0,0 @@
-/**
- * Helpers.
- */
-
-var s = 1000;
-var m = s * 60;
-var h = m * 60;
-var d = h * 24;
-var w = d * 7;
-var y = d * 365.25;
-
-/**
- * Parse or format the given `val`.
- *
- * Options:
- *
- * - `long` verbose formatting [false]
- *
- * @param {String|Number} val
- * @param {Object} [options]
- * @throws {Error} throw an error if val is not a non-empty string or a number
- * @return {String|Number}
- * @api public
- */
-
-module.exports = function(val, options) {
- options = options || {};
- var type = typeof val;
- if (type === 'string' && val.length > 0) {
- return parse(val);
- } else if (type === 'number' && isFinite(val)) {
- return options.long ? fmtLong(val) : fmtShort(val);
- }
- throw new Error(
- 'val is not a non-empty string or a valid number. val=' +
- JSON.stringify(val)
- );
-};
-
-/**
- * Parse the given `str` and return milliseconds.
- *
- * @param {String} str
- * @return {Number}
- * @api private
- */
-
-function parse(str) {
- str = String(str);
- if (str.length > 100) {
- return;
- }
- var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
- str
- );
- if (!match) {
- return;
- }
- var n = parseFloat(match[1]);
- var type = (match[2] || 'ms').toLowerCase();
- switch (type) {
- case 'years':
- case 'year':
- case 'yrs':
- case 'yr':
- case 'y':
- return n * y;
- case 'weeks':
- case 'week':
- case 'w':
- return n * w;
- case 'days':
- case 'day':
- case 'd':
- return n * d;
- case 'hours':
- case 'hour':
- case 'hrs':
- case 'hr':
- case 'h':
- return n * h;
- case 'minutes':
- case 'minute':
- case 'mins':
- case 'min':
- case 'm':
- return n * m;
- case 'seconds':
- case 'second':
- case 'secs':
- case 'sec':
- case 's':
- return n * s;
- case 'milliseconds':
- case 'millisecond':
- case 'msecs':
- case 'msec':
- case 'ms':
- return n;
- default:
- return undefined;
- }
-}
-
-/**
- * Short format for `ms`.
- *
- * @param {Number} ms
- * @return {String}
- * @api private
- */
-
-function fmtShort(ms) {
- var msAbs = Math.abs(ms);
- if (msAbs >= d) {
- return Math.round(ms / d) + 'd';
- }
- if (msAbs >= h) {
- return Math.round(ms / h) + 'h';
- }
- if (msAbs >= m) {
- return Math.round(ms / m) + 'm';
- }
- if (msAbs >= s) {
- return Math.round(ms / s) + 's';
- }
- return ms + 'ms';
-}
-
-/**
- * Long format for `ms`.
- *
- * @param {Number} ms
- * @return {String}
- * @api private
- */
-
-function fmtLong(ms) {
- var msAbs = Math.abs(ms);
- if (msAbs >= d) {
- return plural(ms, msAbs, d, 'day');
- }
- if (msAbs >= h) {
- return plural(ms, msAbs, h, 'hour');
- }
- if (msAbs >= m) {
- return plural(ms, msAbs, m, 'minute');
- }
- if (msAbs >= s) {
- return plural(ms, msAbs, s, 'second');
- }
- return ms + ' ms';
-}
-
-/**
- * Pluralization helper.
- */
-
-function plural(ms, msAbs, n, name) {
- var isPlural = msAbs >= n * 1.5;
- return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');
-}
diff --git a/deps/npm/node_modules/debug/node_modules/ms/license.md b/deps/npm/node_modules/debug/node_modules/ms/license.md
deleted file mode 100644
index 69b61253a38926..00000000000000
--- a/deps/npm/node_modules/debug/node_modules/ms/license.md
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2016 Zeit, Inc.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/deps/npm/node_modules/debug/node_modules/ms/package.json b/deps/npm/node_modules/debug/node_modules/ms/package.json
deleted file mode 100644
index eea666e1fb03d6..00000000000000
--- a/deps/npm/node_modules/debug/node_modules/ms/package.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
- "name": "ms",
- "version": "2.1.2",
- "description": "Tiny millisecond conversion utility",
- "repository": "zeit/ms",
- "main": "./index",
- "files": [
- "index.js"
- ],
- "scripts": {
- "precommit": "lint-staged",
- "lint": "eslint lib/* bin/*",
- "test": "mocha tests.js"
- },
- "eslintConfig": {
- "extends": "eslint:recommended",
- "env": {
- "node": true,
- "es6": true
- }
- },
- "lint-staged": {
- "*.js": [
- "npm run lint",
- "prettier --single-quote --write",
- "git add"
- ]
- },
- "license": "MIT",
- "devDependencies": {
- "eslint": "4.12.1",
- "expect.js": "0.3.1",
- "husky": "0.14.3",
- "lint-staged": "5.0.0",
- "mocha": "4.0.1"
- }
-}
diff --git a/deps/npm/node_modules/debug/package.json b/deps/npm/node_modules/debug/package.json
index 8eea05520554eb..2f782eb9aef450 100644
--- a/deps/npm/node_modules/debug/package.json
+++ b/deps/npm/node_modules/debug/package.json
@@ -1,6 +1,6 @@
{
"name": "debug",
- "version": "4.3.6",
+ "version": "4.3.7",
"repository": {
"type": "git",
"url": "git://github.com/debug-js/debug.git"
@@ -31,7 +31,7 @@
"test:coverage": "cat ./coverage/lcov.info | coveralls"
},
"dependencies": {
- "ms": "2.1.2"
+ "ms": "^2.1.3"
},
"devDependencies": {
"brfs": "^2.0.1",
diff --git a/deps/npm/node_modules/hosted-git-info/lib/hosts.js b/deps/npm/node_modules/hosted-git-info/lib/hosts.js
index 9a08efd1b2d7e9..2a88e95927772a 100644
--- a/deps/npm/node_modules/hosted-git-info/lib/hosts.js
+++ b/deps/npm/node_modules/hosted-git-info/lib/hosts.js
@@ -4,7 +4,11 @@
const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
-const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
+const formatHashFragment = (f) => f.toLowerCase()
+ .replace(/^\W+/g, '') // strip leading non-characters
+  .replace(/(?<!\W)\W+$/g, '') // strip trailing non-characters
+  .replace(/\//g, '') // strip slashes
+  .replace(/\W+/g, '-') // replace remaining non-characters with '-'
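Splitting the single alternation `/^\W+|\/|\W+$/g` into chained, single-purpose replaces is the same ReDoS-avoidance pattern as the cross-spawn change above: each pass is either anchored or a plain character class, so matching stays linear. formatHashFragment is internal, so this sketch inlines a copy of the helper as patched here, for illustration only:

    const formatHashFragment = (f) => f.toLowerCase()
      .replace(/^\W+/g, '')      // strip leading non-characters
      .replace(/(?<!\W)\W+$/g, '') // strip trailing non-characters
      .replace(/\//g, '')        // strip slashes
      .replace(/\W+/g, '-')      // replace remaining non-characters with '-'
    console.log(formatHashFragment('## Some Heading! ##')) // 'some-heading'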
diff --git a/deps/npm/node_modules/hosted-git-info/package.json b/deps/npm/node_modules/hosted-git-info/package.json
index 3bb8bcd1f49825..78356159af7723 100644
--- a/deps/npm/node_modules/hosted-git-info/package.json
+++ b/deps/npm/node_modules/hosted-git-info/package.json
@@ -1,6 +1,6 @@
{
"name": "hosted-git-info",
- "version": "8.0.0",
+ "version": "8.0.2",
"description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
"main": "./lib/index.js",
"repository": {
@@ -35,7 +35,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.23.4",
"tap": "^16.0.1"
},
"files": [
@@ -55,7 +55,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.23.4",
"publish": "true"
}
}
diff --git a/deps/npm/node_modules/libnpmpublish/lib/publish.js b/deps/npm/node_modules/libnpmpublish/lib/publish.js
index 93d546efb5f0e1..2bcab4f3ba304e 100644
--- a/deps/npm/node_modules/libnpmpublish/lib/publish.js
+++ b/deps/npm/node_modules/libnpmpublish/lib/publish.js
@@ -137,7 +137,7 @@ const buildMetadata = async (registry, manifest, tarballData, spec, opts) => {
if (provenance === true) {
await ensureProvenanceGeneration(registry, spec, opts)
- provenanceBundle = await generateProvenance([subject], opts)
+ provenanceBundle = await generateProvenance([subject], { legacyCompatibility: true, ...opts })
/* eslint-disable-next-line max-len */
log.notice('publish', `Signed provenance statement with source and build information from ${ciInfo.name}`)
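The spread order in the hunk above matters: `legacyCompatibility: true` is only a default, so any caller-supplied value in opts still wins. A minimal sketch of that precedence:

    // sketch: defaults written before the spread can be overridden by callers
    const withDefault = (opts) => ({ legacyCompatibility: true, ...opts })
    withDefault({}).legacyCompatibility                             // true
    withDefault({ legacyCompatibility: false }).legacyCompatibility // false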
diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json
index f63d50f4e7b9c1..594f5041480b4a 100644
--- a/deps/npm/node_modules/libnpmpublish/package.json
+++ b/deps/npm/node_modules/libnpmpublish/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmpublish",
- "version": "10.0.0",
+ "version": "10.0.1",
"description": "Programmatic API for the bits behind npm publish and unpublish",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -45,7 +45,7 @@
"npm-registry-fetch": "^18.0.1",
"proc-log": "^5.0.0",
"semver": "^7.3.7",
- "sigstore": "^2.2.0",
+ "sigstore": "^3.0.0",
"ssri": "^12.0.0"
},
"engines": {
diff --git a/deps/npm/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/make-fetch-happen/lib/options.js
index f77511279f831d..db51cc63248176 100644
--- a/deps/npm/node_modules/make-fetch-happen/lib/options.js
+++ b/deps/npm/node_modules/make-fetch-happen/lib/options.js
@@ -11,7 +11,12 @@ const conditionalHeaders = [
const configureOptions = (opts) => {
const { strictSSL, ...options } = { ...opts }
options.method = options.method ? options.method.toUpperCase() : 'GET'
- options.rejectUnauthorized = strictSSL !== false
+
+ if (strictSSL === undefined || strictSSL === null) {
+ options.rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0'
+ } else {
+ options.rejectUnauthorized = strictSSL !== false
+ }
if (!options.retry) {
options.retry = { retries: 0 }
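The new branch makes an unset strictSSL defer to Node's own NODE_TLS_REJECT_UNAUTHORIZED switch instead of unconditionally forcing certificate checks on. The resolution logic, restated as a standalone sketch:

    // sketch of the rejectUnauthorized resolution in the hunk above
    function resolveRejectUnauthorized (strictSSL, env = process.env) {
      if (strictSSL === undefined || strictSSL === null) {
        // honor NODE_TLS_REJECT_UNAUTHORIZED=0, as a plain https.request would
        return env.NODE_TLS_REJECT_UNAUTHORIZED !== '0'
      }
      return strictSSL !== false // any value other than false keeps TLS strict
    }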
diff --git a/deps/npm/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/make-fetch-happen/lib/remote.js
index 8554564074de6e..1d640e5380baaf 100644
--- a/deps/npm/node_modules/make-fetch-happen/lib/remote.js
+++ b/deps/npm/node_modules/make-fetch-happen/lib/remote.js
@@ -35,7 +35,8 @@ const RETRY_TYPES = [
// following redirects (through the cache if necessary)
// and verifying response integrity
const remoteFetch = (request, options) => {
- const agent = getAgent(request.url, options)
+ // options.signal is intended for the fetch itself, not the agent. Attaching it to the agent will re-use that signal across multiple requests, which prevents any connections beyond the first one.
+ const agent = getAgent(request.url, { ...options, signal: undefined })
if (!request.headers.has('connection')) {
request.headers.set('connection', agent ? 'keep-alive' : 'close')
}
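Agents are cached and shared across requests, so an AbortSignal passed through to the agent would tie every later request on that agent to the first caller's signal. Spreading with an explicit `undefined` is a cheap way to drop the key while leaving the rest of the options intact:

    // sketch: strip one key via spread, keep everything else
    const controller = new AbortController()
    const options = { signal: controller.signal, timeout: 30000 }
    const agentOpts = { ...options, signal: undefined }
    console.log(agentOpts.signal)  // undefined
    console.log(agentOpts.timeout) // 30000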
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md
new file mode 100644
index 00000000000000..63d537d3f68114
--- /dev/null
+++ b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md
@@ -0,0 +1,114 @@
+1.0.0 / 2024-08-31
+==================
+
+ * Drop support for node <18
+ * Added an option preferred encodings array #59
+
+0.6.3 / 2022-01-22
+==================
+
+ * Revert "Lazy-load modules from main entry point"
+
+0.6.2 / 2019-04-29
+==================
+
+ * Fix sorting charset, encoding, and language with extra parameters
+
+0.6.1 / 2016-05-02
+==================
+
+ * perf: improve `Accept` parsing speed
+ * perf: improve `Accept-Charset` parsing speed
+ * perf: improve `Accept-Encoding` parsing speed
+ * perf: improve `Accept-Language` parsing speed
+
+0.6.0 / 2015-09-29
+==================
+
+ * Fix including type extensions in parameters in `Accept` parsing
+ * Fix parsing `Accept` parameters with quoted equals
+ * Fix parsing `Accept` parameters with quoted semicolons
+ * Lazy-load modules from main entry point
+ * perf: delay type concatenation until needed
+ * perf: enable strict mode
+ * perf: hoist regular expressions
+ * perf: remove closures getting spec properties
+ * perf: remove a closure from media type parsing
+ * perf: remove property delete from media type parsing
+
+0.5.3 / 2015-05-10
+==================
+
+ * Fix media type parameter matching to be case-insensitive
+
+0.5.2 / 2015-05-06
+==================
+
+ * Fix comparing media types with quoted values
+ * Fix splitting media types with quoted commas
+
+0.5.1 / 2015-02-14
+==================
+
+ * Fix preference sorting to be stable for long acceptable lists
+
+0.5.0 / 2014-12-18
+==================
+
+ * Fix list return order when large accepted list
+ * Fix missing identity encoding when q=0 exists
+ * Remove dynamic building of Negotiator class
+
+0.4.9 / 2014-10-14
+==================
+
+ * Fix error when media type has invalid parameter
+
+0.4.8 / 2014-09-28
+==================
+
+ * Fix all negotiations to be case-insensitive
+ * Stable sort preferences of same quality according to client order
+ * Support Node.js 0.6
+
+0.4.7 / 2014-06-24
+==================
+
+ * Handle invalid provided languages
+ * Handle invalid provided media types
+
+0.4.6 / 2014-06-11
+==================
+
+ * Order by specificity when quality is the same
+
+0.4.5 / 2014-05-29
+==================
+
+ * Fix regression in empty header handling
+
+0.4.4 / 2014-05-29
+==================
+
+ * Fix behaviors when headers are not present
+
+0.4.3 / 2014-04-16
+==================
+
+ * Handle slashes on media params correctly
+
+0.4.2 / 2014-02-28
+==================
+
+ * Fix media type sorting
+ * Handle media types params strictly
+
+0.4.1 / 2014-01-16
+==================
+
+ * Use most specific matches
+
+0.4.0 / 2014-01-09
+==================
+
+ * Remove preferred prefix from methods
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/LICENSE b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/LICENSE
new file mode 100644
index 00000000000000..ea6b9e2e9ac251
--- /dev/null
+++ b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/LICENSE
@@ -0,0 +1,24 @@
+(The MIT License)
+
+Copyright (c) 2012-2014 Federico Romero
+Copyright (c) 2012-2014 Isaac Z. Schlueter
+Copyright (c) 2014-2015 Douglas Christopher Wilson
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/index.js b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/index.js
new file mode 100644
index 00000000000000..4f51315d6af4bd
--- /dev/null
+++ b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/index.js
@@ -0,0 +1,83 @@
+/*!
+ * negotiator
+ * Copyright(c) 2012 Federico Romero
+ * Copyright(c) 2012-2014 Isaac Z. Schlueter
+ * Copyright(c) 2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+var preferredCharsets = require('./lib/charset')
+var preferredEncodings = require('./lib/encoding')
+var preferredLanguages = require('./lib/language')
+var preferredMediaTypes = require('./lib/mediaType')
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = Negotiator;
+module.exports.Negotiator = Negotiator;
+
+/**
+ * Create a Negotiator instance from a request.
+ * @param {object} request
+ * @public
+ */
+
+function Negotiator(request) {
+ if (!(this instanceof Negotiator)) {
+ return new Negotiator(request);
+ }
+
+ this.request = request;
+}
+
+Negotiator.prototype.charset = function charset(available) {
+ var set = this.charsets(available);
+ return set && set[0];
+};
+
+Negotiator.prototype.charsets = function charsets(available) {
+ return preferredCharsets(this.request.headers['accept-charset'], available);
+};
+
+Negotiator.prototype.encoding = function encoding(available, opts) {
+ var set = this.encodings(available, opts);
+ return set && set[0];
+};
+
+Negotiator.prototype.encodings = function encodings(available, options) {
+ var opts = options || {};
+ return preferredEncodings(this.request.headers['accept-encoding'], available, opts.preferred);
+};
+
+Negotiator.prototype.language = function language(available) {
+ var set = this.languages(available);
+ return set && set[0];
+};
+
+Negotiator.prototype.languages = function languages(available) {
+ return preferredLanguages(this.request.headers['accept-language'], available);
+};
+
+Negotiator.prototype.mediaType = function mediaType(available) {
+ var set = this.mediaTypes(available);
+ return set && set[0];
+};
+
+Negotiator.prototype.mediaTypes = function mediaTypes(available) {
+ return preferredMediaTypes(this.request.headers.accept, available);
+};
+
+// Backwards compatibility
+Negotiator.prototype.preferredCharset = Negotiator.prototype.charset;
+Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets;
+Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding;
+Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings;
+Negotiator.prototype.preferredLanguage = Negotiator.prototype.language;
+Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages;
+Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType;
+Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes;
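
The vendored index.js above is negotiator's entire public surface. A short usage sketch (the request object is hypothetical; Negotiator only reads its headers):

    const Negotiator = require('negotiator')

    const negotiator = new Negotiator({
      headers: {
        accept: 'application/json;q=0.9, text/html',
        'accept-encoding': 'gzip, br;q=0.8',
      },
    })

    negotiator.mediaType(['application/json', 'text/html']) // 'text/html'
    negotiator.encodings() // ['gzip', 'br', 'identity']
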
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js
new file mode 100644
index 00000000000000..cdd014803474a4
--- /dev/null
+++ b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js
@@ -0,0 +1,169 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredCharsets;
+module.exports.preferredCharsets = preferredCharsets;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Charset header.
+ * @private
+ */
+
+function parseAcceptCharset(accept) {
+ var accepts = accept.split(',');
+
+ for (var i = 0, j = 0; i < accepts.length; i++) {
+ var charset = parseCharset(accepts[i].trim(), i);
+
+ if (charset) {
+ accepts[j++] = charset;
+ }
+ }
+
+ // trim accepts
+ accepts.length = j;
+
+ return accepts;
+}
+
+/**
+ * Parse a charset from the Accept-Charset header.
+ * @private
+ */
+
+function parseCharset(str, i) {
+ var match = simpleCharsetRegExp.exec(str);
+ if (!match) return null;
+
+ var charset = match[1];
+ var q = 1;
+ if (match[2]) {
+ var params = match[2].split(';')
+ for (var j = 0; j < params.length; j++) {
+ var p = params[j].trim().split('=');
+ if (p[0] === 'q') {
+ q = parseFloat(p[1]);
+ break;
+ }
+ }
+ }
+
+ return {
+ charset: charset,
+ q: q,
+ i: i
+ };
+}
+
+/**
+ * Get the priority of a charset.
+ * @private
+ */
+
+function getCharsetPriority(charset, accepted, index) {
+ var priority = {o: -1, q: 0, s: 0};
+
+ for (var i = 0; i < accepted.length; i++) {
+ var spec = specify(charset, accepted[i], index);
+
+ if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+ priority = spec;
+ }
+ }
+
+ return priority;
+}
+
+/**
+ * Get the specificity of the charset.
+ * @private
+ */
+
+function specify(charset, spec, index) {
+ var s = 0;
+ if(spec.charset.toLowerCase() === charset.toLowerCase()){
+ s |= 1;
+ } else if (spec.charset !== '*' ) {
+ return null
+ }
+
+ return {
+ i: index,
+ o: spec.i,
+ q: spec.q,
+ s: s
+ }
+}
+
+/**
+ * Get the preferred charsets from an Accept-Charset header.
+ * @public
+ */
+
+function preferredCharsets(accept, provided) {
+ // RFC 2616 sec 14.2: no header = *
+ var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || '');
+
+ if (!provided) {
+ // sorted list of all charsets
+ return accepts
+ .filter(isQuality)
+ .sort(compareSpecs)
+ .map(getFullCharset);
+ }
+
+ var priorities = provided.map(function getPriority(type, index) {
+ return getCharsetPriority(type, accepts, index);
+ });
+
+ // sorted list of accepted charsets
+ return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) {
+ return provided[priorities.indexOf(priority)];
+ });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+ return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full charset string.
+ * @private
+ */
+
+function getFullCharset(spec) {
+ return spec.charset;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+ return spec.q > 0;
+}
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js
new file mode 100644
index 00000000000000..9ebb633d677433
--- /dev/null
+++ b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js
@@ -0,0 +1,205 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredEncodings;
+module.exports.preferredEncodings = preferredEncodings;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Encoding header.
+ * @private
+ */
+
+function parseAcceptEncoding(accept) {
+ var accepts = accept.split(',');
+ var hasIdentity = false;
+ var minQuality = 1;
+
+ for (var i = 0, j = 0; i < accepts.length; i++) {
+ var encoding = parseEncoding(accepts[i].trim(), i);
+
+ if (encoding) {
+ accepts[j++] = encoding;
+ hasIdentity = hasIdentity || specify('identity', encoding);
+ minQuality = Math.min(minQuality, encoding.q || 1);
+ }
+ }
+
+ if (!hasIdentity) {
+ /*
+ * If identity doesn't explicitly appear in the accept-encoding header,
+ * it's added to the list of acceptable encodings with the lowest q
+ */
+ accepts[j++] = {
+ encoding: 'identity',
+ q: minQuality,
+ i: i
+ };
+ }
+
+ // trim accepts
+ accepts.length = j;
+
+ return accepts;
+}
+
+/**
+ * Parse an encoding from the Accept-Encoding header.
+ * @private
+ */
+
+function parseEncoding(str, i) {
+ var match = simpleEncodingRegExp.exec(str);
+ if (!match) return null;
+
+ var encoding = match[1];
+ var q = 1;
+ if (match[2]) {
+ var params = match[2].split(';');
+ for (var j = 0; j < params.length; j++) {
+ var p = params[j].trim().split('=');
+ if (p[0] === 'q') {
+ q = parseFloat(p[1]);
+ break;
+ }
+ }
+ }
+
+ return {
+ encoding: encoding,
+ q: q,
+ i: i
+ };
+}
+
+/**
+ * Get the priority of an encoding.
+ * @private
+ */
+
+function getEncodingPriority(encoding, accepted, index) {
+ var priority = {encoding: encoding, o: -1, q: 0, s: 0};
+
+ for (var i = 0; i < accepted.length; i++) {
+ var spec = specify(encoding, accepted[i], index);
+
+ if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+ priority = spec;
+ }
+ }
+
+ return priority;
+}
+
+/**
+ * Get the specificity of the encoding.
+ * @private
+ */
+
+function specify(encoding, spec, index) {
+ var s = 0;
+ if(spec.encoding.toLowerCase() === encoding.toLowerCase()){
+ s |= 1;
+ } else if (spec.encoding !== '*' ) {
+ return null
+ }
+
+ return {
+ encoding: encoding,
+ i: index,
+ o: spec.i,
+ q: spec.q,
+ s: s
+ }
+};
+
+/**
+ * Get the preferred encodings from an Accept-Encoding header.
+ * @public
+ */
+
+function preferredEncodings(accept, provided, preferred) {
+ var accepts = parseAcceptEncoding(accept || '');
+
+ var comparator = preferred ? function comparator (a, b) {
+ if (a.q !== b.q) {
+ return b.q - a.q // higher quality first
+ }
+
+ var aPreferred = preferred.indexOf(a.encoding)
+ var bPreferred = preferred.indexOf(b.encoding)
+
+ if (aPreferred === -1 && bPreferred === -1) {
+ // consider the original specificity/order
+ return (b.s - a.s) || (a.o - b.o) || (a.i - b.i)
+ }
+
+ if (aPreferred !== -1 && bPreferred !== -1) {
+ return aPreferred - bPreferred // consider the preferred order
+ }
+
+ return aPreferred === -1 ? 1 : -1 // preferred first
+ } : compareSpecs;
+
+ if (!provided) {
+ // sorted list of all encodings
+ return accepts
+ .filter(isQuality)
+ .sort(comparator)
+ .map(getFullEncoding);
+ }
+
+ var priorities = provided.map(function getPriority(type, index) {
+ return getEncodingPriority(type, accepts, index);
+ });
+
+ // sorted list of accepted encodings
+ return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) {
+ return provided[priorities.indexOf(priority)];
+ });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+ return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i);
+}
+
+/**
+ * Get full encoding string.
+ * @private
+ */
+
+function getFullEncoding(spec) {
+ return spec.encoding;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+ return spec.q > 0;
+}
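
The extra `preferred` argument above is the headline change in negotiator 1.0.0 (see HISTORY.md). Quality always wins first; the array only orders encodings of equal quality. A sketch calling the lib file directly (a deep require works here since the package ships lib/ and defines no exports map):

    const preferredEncodings = require('negotiator/lib/encoding')

    preferredEncodings('gzip, br', ['gzip', 'br'])
    // => ['gzip', 'br']   (tie on quality: client order wins)

    preferredEncodings('gzip, br', ['gzip', 'br'], ['br'])
    // => ['br', 'gzip']   (tie broken by the server's preference)

    preferredEncodings('gzip, br;q=0.5', ['gzip', 'br'], ['br'])
    // => ['gzip', 'br']   (quality still outranks preference)
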
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js
new file mode 100644
index 00000000000000..a23167252719be
--- /dev/null
+++ b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js
@@ -0,0 +1,179 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredLanguages;
+module.exports.preferredLanguages = preferredLanguages;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Language header.
+ * @private
+ */
+
+function parseAcceptLanguage(accept) {
+ var accepts = accept.split(',');
+
+ for (var i = 0, j = 0; i < accepts.length; i++) {
+ var language = parseLanguage(accepts[i].trim(), i);
+
+ if (language) {
+ accepts[j++] = language;
+ }
+ }
+
+ // trim accepts
+ accepts.length = j;
+
+ return accepts;
+}
+
+/**
+ * Parse a language from the Accept-Language header.
+ * @private
+ */
+
+function parseLanguage(str, i) {
+ var match = simpleLanguageRegExp.exec(str);
+ if (!match) return null;
+
+ var prefix = match[1]
+ var suffix = match[2]
+ var full = prefix
+
+ if (suffix) full += "-" + suffix;
+
+ var q = 1;
+ if (match[3]) {
+ var params = match[3].split(';')
+ for (var j = 0; j < params.length; j++) {
+ var p = params[j].split('=');
+ if (p[0] === 'q') q = parseFloat(p[1]);
+ }
+ }
+
+ return {
+ prefix: prefix,
+ suffix: suffix,
+ q: q,
+ i: i,
+ full: full
+ };
+}
+
+/**
+ * Get the priority of a language.
+ * @private
+ */
+
+function getLanguagePriority(language, accepted, index) {
+ var priority = {o: -1, q: 0, s: 0};
+
+ for (var i = 0; i < accepted.length; i++) {
+ var spec = specify(language, accepted[i], index);
+
+ if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+ priority = spec;
+ }
+ }
+
+ return priority;
+}
+
+/**
+ * Get the specificity of the language.
+ * @private
+ */
+
+function specify(language, spec, index) {
+ var p = parseLanguage(language)
+ if (!p) return null;
+ var s = 0;
+ if(spec.full.toLowerCase() === p.full.toLowerCase()){
+ s |= 4;
+ } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) {
+ s |= 2;
+ } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) {
+ s |= 1;
+ } else if (spec.full !== '*' ) {
+ return null
+ }
+
+ return {
+ i: index,
+ o: spec.i,
+ q: spec.q,
+ s: s
+ }
+};
+
+/**
+ * Get the preferred languages from an Accept-Language header.
+ * @public
+ */
+
+function preferredLanguages(accept, provided) {
+ // RFC 2616 sec 14.4: no header = *
+ var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || '');
+
+ if (!provided) {
+ // sorted list of all languages
+ return accepts
+ .filter(isQuality)
+ .sort(compareSpecs)
+ .map(getFullLanguage);
+ }
+
+ var priorities = provided.map(function getPriority(type, index) {
+ return getLanguagePriority(type, accepts, index);
+ });
+
+ // sorted list of accepted languages
+ return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) {
+ return provided[priorities.indexOf(priority)];
+ });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+ return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full language string.
+ * @private
+ */
+
+function getFullLanguage(spec) {
+ return spec.full;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+ return spec.q > 0;
+}
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js
new file mode 100644
index 00000000000000..8e402ea88394c0
--- /dev/null
+++ b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js
@@ -0,0 +1,294 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredMediaTypes;
+module.exports.preferredMediaTypes = preferredMediaTypes;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept header.
+ * @private
+ */
+
+function parseAccept(accept) {
+ var accepts = splitMediaTypes(accept);
+
+ for (var i = 0, j = 0; i < accepts.length; i++) {
+ var mediaType = parseMediaType(accepts[i].trim(), i);
+
+ if (mediaType) {
+ accepts[j++] = mediaType;
+ }
+ }
+
+ // trim accepts
+ accepts.length = j;
+
+ return accepts;
+}
+
+/**
+ * Parse a media type from the Accept header.
+ * @private
+ */
+
+function parseMediaType(str, i) {
+ var match = simpleMediaTypeRegExp.exec(str);
+ if (!match) return null;
+
+ var params = Object.create(null);
+ var q = 1;
+ var subtype = match[2];
+ var type = match[1];
+
+ if (match[3]) {
+ var kvps = splitParameters(match[3]).map(splitKeyValuePair);
+
+ for (var j = 0; j < kvps.length; j++) {
+ var pair = kvps[j];
+ var key = pair[0].toLowerCase();
+ var val = pair[1];
+
+ // get the value, unwrapping quotes
+ var value = val && val[0] === '"' && val[val.length - 1] === '"'
+ ? val.slice(1, -1)
+ : val;
+
+ if (key === 'q') {
+ q = parseFloat(value);
+ break;
+ }
+
+ // store parameter
+ params[key] = value;
+ }
+ }
+
+ return {
+ type: type,
+ subtype: subtype,
+ params: params,
+ q: q,
+ i: i
+ };
+}
+
+/**
+ * Get the priority of a media type.
+ * @private
+ */
+
+function getMediaTypePriority(type, accepted, index) {
+ var priority = {o: -1, q: 0, s: 0};
+
+ for (var i = 0; i < accepted.length; i++) {
+ var spec = specify(type, accepted[i], index);
+
+ if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+ priority = spec;
+ }
+ }
+
+ return priority;
+}
+
+/**
+ * Get the specificity of the media type.
+ * @private
+ */
+
+function specify(type, spec, index) {
+ var p = parseMediaType(type);
+ var s = 0;
+
+ if (!p) {
+ return null;
+ }
+
+ if(spec.type.toLowerCase() == p.type.toLowerCase()) {
+ s |= 4
+ } else if(spec.type != '*') {
+ return null;
+ }
+
+ if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) {
+ s |= 2
+ } else if(spec.subtype != '*') {
+ return null;
+ }
+
+ var keys = Object.keys(spec.params);
+ if (keys.length > 0) {
+ if (keys.every(function (k) {
+ return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase();
+ })) {
+ s |= 1
+ } else {
+ return null
+ }
+ }
+
+ return {
+ i: index,
+ o: spec.i,
+ q: spec.q,
+ s: s,
+ }
+}
+
+/**
+ * Get the preferred media types from an Accept header.
+ * @public
+ */
+
+function preferredMediaTypes(accept, provided) {
+ // RFC 2616 sec 14.1: no header = */*
+ var accepts = parseAccept(accept === undefined ? '*/*' : accept || '');
+
+ if (!provided) {
+ // sorted list of all types
+ return accepts
+ .filter(isQuality)
+ .sort(compareSpecs)
+ .map(getFullType);
+ }
+
+ var priorities = provided.map(function getPriority(type, index) {
+ return getMediaTypePriority(type, accepts, index);
+ });
+
+ // sorted list of accepted types
+ return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) {
+ return provided[priorities.indexOf(priority)];
+ });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+ return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full type string.
+ * @private
+ */
+
+function getFullType(spec) {
+ return spec.type + '/' + spec.subtype;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+ return spec.q > 0;
+}
+
+/**
+ * Count the number of quotes in a string.
+ * @private
+ */
+
+function quoteCount(string) {
+ var count = 0;
+ var index = 0;
+
+ while ((index = string.indexOf('"', index)) !== -1) {
+ count++;
+ index++;
+ }
+
+ return count;
+}
+
+/**
+ * Split a key value pair.
+ * @private
+ */
+
+function splitKeyValuePair(str) {
+ var index = str.indexOf('=');
+ var key;
+ var val;
+
+ if (index === -1) {
+ key = str;
+ } else {
+ key = str.slice(0, index);
+ val = str.slice(index + 1);
+ }
+
+ return [key, val];
+}
+
+/**
+ * Split an Accept header into media types.
+ * @private
+ */
+
+function splitMediaTypes(accept) {
+ var accepts = accept.split(',');
+
+ for (var i = 1, j = 0; i < accepts.length; i++) {
+ if (quoteCount(accepts[j]) % 2 == 0) {
+ accepts[++j] = accepts[i];
+ } else {
+ accepts[j] += ',' + accepts[i];
+ }
+ }
+
+ // trim accepts
+ accepts.length = j + 1;
+
+ return accepts;
+}
+
+/**
+ * Split a string of parameters.
+ * @private
+ */
+
+function splitParameters(str) {
+ var parameters = str.split(';');
+
+ for (var i = 1, j = 0; i < parameters.length; i++) {
+ if (quoteCount(parameters[j]) % 2 == 0) {
+ parameters[++j] = parameters[i];
+ } else {
+ parameters[j] += ';' + parameters[i];
+ }
+ }
+
+ // trim parameters
+ parameters.length = j + 1;
+
+ for (var i = 0; i < parameters.length; i++) {
+ parameters[i] = parameters[i].trim();
+ }
+
+ return parameters;
+}
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/package.json b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/package.json
new file mode 100644
index 00000000000000..e4bdc1ef4f7481
--- /dev/null
+++ b/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/package.json
@@ -0,0 +1,43 @@
+{
+ "name": "negotiator",
+ "description": "HTTP content negotiation",
+ "version": "1.0.0",
+ "contributors": [
+ "Douglas Christopher Wilson ",
+ "Federico Romero ",
+ "Isaac Z. Schlueter (http://blog.izs.me/)"
+ ],
+ "license": "MIT",
+ "keywords": [
+ "http",
+ "content negotiation",
+ "accept",
+ "accept-language",
+ "accept-encoding",
+ "accept-charset"
+ ],
+ "repository": "jshttp/negotiator",
+ "devDependencies": {
+ "eslint": "7.32.0",
+ "eslint-plugin-markdown": "2.2.1",
+ "mocha": "9.1.3",
+ "nyc": "15.1.0"
+ },
+ "files": [
+ "lib/",
+ "HISTORY.md",
+ "LICENSE",
+ "index.js",
+ "README.md"
+ ],
+ "engines": {
+ "node": ">= 0.6"
+ },
+ "scripts": {
+ "lint": "eslint .",
+ "test": "mocha --reporter spec --check-leaks --bail test/",
+ "test:debug": "mocha --reporter spec --check-leaks --inspect --inspect-brk test/",
+ "test-ci": "nyc --reporter=lcov --reporter=text npm test",
+ "test-cov": "nyc --reporter=html --reporter=text npm test"
+ }
+}
diff --git a/deps/npm/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/make-fetch-happen/package.json
index 0868ff6d7efa54..054fe841f13b73 100644
--- a/deps/npm/node_modules/make-fetch-happen/package.json
+++ b/deps/npm/node_modules/make-fetch-happen/package.json
@@ -1,6 +1,6 @@
{
"name": "make-fetch-happen",
- "version": "14.0.1",
+ "version": "14.0.3",
"description": "Opinionated, caching, retrying fetch client",
"main": "lib/index.js",
"files": [
@@ -40,14 +40,14 @@
"minipass-fetch": "^4.0.0",
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.4",
- "negotiator": "^0.6.3",
+ "negotiator": "^1.0.0",
"proc-log": "^5.0.0",
"promise-retry": "^2.0.1",
"ssri": "^12.0.0"
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.23.4",
"nock": "^13.2.4",
"safe-buffer": "^5.2.1",
"standard-version": "^9.3.2",
@@ -68,7 +68,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.23.4",
"publish": "true"
}
}
diff --git a/deps/npm/node_modules/negotiator/HISTORY.md b/deps/npm/node_modules/negotiator/HISTORY.md
index a9a544914c43bb..e1929aba8e22ce 100644
--- a/deps/npm/node_modules/negotiator/HISTORY.md
+++ b/deps/npm/node_modules/negotiator/HISTORY.md
@@ -1,3 +1,8 @@
+unreleased
+==================
+
+ * Added an option preferred encodings array #59
+
0.6.3 / 2022-01-22
==================
diff --git a/deps/npm/node_modules/negotiator/index.js b/deps/npm/node_modules/negotiator/index.js
index 4788264b16c9f2..7df0b0a5318156 100644
--- a/deps/npm/node_modules/negotiator/index.js
+++ b/deps/npm/node_modules/negotiator/index.js
@@ -44,13 +44,13 @@ Negotiator.prototype.charsets = function charsets(available) {
return preferredCharsets(this.request.headers['accept-charset'], available);
};
-Negotiator.prototype.encoding = function encoding(available) {
- var set = this.encodings(available);
+Negotiator.prototype.encoding = function encoding(available, preferred) {
+ var set = this.encodings(available, preferred);
return set && set[0];
};
-Negotiator.prototype.encodings = function encodings(available) {
- return preferredEncodings(this.request.headers['accept-encoding'], available);
+Negotiator.prototype.encodings = function encodings(available, preferred) {
+ return preferredEncodings(this.request.headers['accept-encoding'], available, preferred);
};
Negotiator.prototype.language = function language(available) {
diff --git a/deps/npm/node_modules/negotiator/lib/encoding.js b/deps/npm/node_modules/negotiator/lib/encoding.js
index 8432cd77b8a969..9ebb633d677433 100644
--- a/deps/npm/node_modules/negotiator/lib/encoding.js
+++ b/deps/npm/node_modules/negotiator/lib/encoding.js
@@ -96,7 +96,7 @@ function parseEncoding(str, i) {
*/
function getEncodingPriority(encoding, accepted, index) {
- var priority = {o: -1, q: 0, s: 0};
+ var priority = {encoding: encoding, o: -1, q: 0, s: 0};
for (var i = 0; i < accepted.length; i++) {
var spec = specify(encoding, accepted[i], index);
@@ -123,6 +123,7 @@ function specify(encoding, spec, index) {
}
return {
+ encoding: encoding,
i: index,
o: spec.i,
q: spec.q,
@@ -135,14 +136,34 @@ function specify(encoding, spec, index) {
* @public
*/
-function preferredEncodings(accept, provided) {
+function preferredEncodings(accept, provided, preferred) {
var accepts = parseAcceptEncoding(accept || '');
+ var comparator = preferred ? function comparator (a, b) {
+ if (a.q !== b.q) {
+ return b.q - a.q // higher quality first
+ }
+
+ var aPreferred = preferred.indexOf(a.encoding)
+ var bPreferred = preferred.indexOf(b.encoding)
+
+ if (aPreferred === -1 && bPreferred === -1) {
+ // consider the original specificity/order
+ return (b.s - a.s) || (a.o - b.o) || (a.i - b.i)
+ }
+
+ if (aPreferred !== -1 && bPreferred !== -1) {
+ return aPreferred - bPreferred // consider the preferred order
+ }
+
+ return aPreferred === -1 ? 1 : -1 // preferred first
+ } : compareSpecs;
+
if (!provided) {
// sorted list of all encodings
return accepts
.filter(isQuality)
- .sort(compareSpecs)
+ .sort(comparator)
.map(getFullEncoding);
}
@@ -151,7 +172,7 @@ function preferredEncodings(accept, provided) {
});
// sorted list of accepted encodings
- return priorities.filter(isQuality).sort(compareSpecs).map(function getEncoding(priority) {
+ return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) {
return provided[priorities.indexOf(priority)];
});
}
@@ -162,7 +183,7 @@ function preferredEncodings(accept, provided) {
*/
function compareSpecs(a, b) {
- return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+ return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i);
}
/**
diff --git a/deps/npm/node_modules/negotiator/lib/mediaType.js b/deps/npm/node_modules/negotiator/lib/mediaType.js
index 67309dd75f1b62..8e402ea88394c0 100644
--- a/deps/npm/node_modules/negotiator/lib/mediaType.js
+++ b/deps/npm/node_modules/negotiator/lib/mediaType.js
@@ -69,7 +69,7 @@ function parseMediaType(str, i) {
// get the value, unwrapping quotes
var value = val && val[0] === '"' && val[val.length - 1] === '"'
- ? val.substr(1, val.length - 2)
+ ? val.slice(1, -1)
: val;
if (key === 'q') {
@@ -238,8 +238,8 @@ function splitKeyValuePair(str) {
if (index === -1) {
key = str;
} else {
- key = str.substr(0, index);
- val = str.substr(index + 1);
+ key = str.slice(0, index);
+ val = str.slice(index + 1);
}
return [key, val];
diff --git a/deps/npm/node_modules/negotiator/package.json b/deps/npm/node_modules/negotiator/package.json
index 297635f6d34177..19b0a8a6ef6041 100644
--- a/deps/npm/node_modules/negotiator/package.json
+++ b/deps/npm/node_modules/negotiator/package.json
@@ -1,7 +1,7 @@
{
"name": "negotiator",
"description": "HTTP content negotiation",
- "version": "0.6.3",
+ "version": "0.6.4",
"contributors": [
"Douglas Christopher Wilson ",
"Federico Romero ",
diff --git a/deps/npm/node_modules/npm-install-checks/lib/current-env.js b/deps/npm/node_modules/npm-install-checks/lib/current-env.js
index 9babde1f277ff1..31f154aac59b32 100644
--- a/deps/npm/node_modules/npm-install-checks/lib/current-env.js
+++ b/deps/npm/node_modules/npm-install-checks/lib/current-env.js
@@ -1,5 +1,6 @@
const process = require('node:process')
const nodeOs = require('node:os')
+const fs = require('node:fs')
function isMusl (file) {
return file.includes('libc.musl-') || file.includes('ld-musl-')
@@ -13,12 +14,23 @@ function cpu () {
return process.arch
}
-function libc (osName) {
- // this is to make it faster on non linux machines
- if (osName !== 'linux') {
+const LDD_PATH = '/usr/bin/ldd'
+function getFamilyFromFilesystem () {
+ try {
+ const content = fs.readFileSync(LDD_PATH, 'utf-8')
+ if (content.includes('musl')) {
+ return 'musl'
+ }
+ if (content.includes('GNU C Library')) {
+ return 'glibc'
+ }
+ return null
+ } catch {
return undefined
}
- let family
+}
+
+function getFamilyFromReport () {
const originalExclude = process.report.excludeNetwork
process.report.excludeNetwork = true
const report = process.report.getReport()
@@ -27,6 +39,22 @@ function libc (osName) {
family = 'glibc'
} else if (Array.isArray(report.sharedObjects) && report.sharedObjects.some(isMusl)) {
family = 'musl'
+ } else {
+ family = null
+ }
+ return family
+}
+
+let family
+function libc (osName) {
+ if (osName !== 'linux') {
+ return undefined
+ }
+ if (family === undefined) {
+ family = getFamilyFromFilesystem()
+ if (family === undefined) {
+ family = getFamilyFromReport()
+ }
}
return family
}
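
The reworked libc() above spares most Linux systems the cost of generating a diagnostic report: it first sniffs /usr/bin/ldd and only falls back to process.report.getReport() when that file cannot be read. The cache relies on a sentinel distinction, condensed here (same logic, standalone):

    const fs = require('node:fs')

    function familyFromLdd () {
      try {
        const ldd = fs.readFileSync('/usr/bin/ldd', 'utf-8')
        if (ldd.includes('musl')) return 'musl'
        if (ldd.includes('GNU C Library')) return 'glibc'
        return null      // readable but unrecognized: cached, no fallback
      } catch {
        return undefined // unreadable: fall back to process.report
      }
    }
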
diff --git a/deps/npm/node_modules/npm-install-checks/package.json b/deps/npm/node_modules/npm-install-checks/package.json
index e9e69575a6dc6d..967f5f659b2fac 100644
--- a/deps/npm/node_modules/npm-install-checks/package.json
+++ b/deps/npm/node_modules/npm-install-checks/package.json
@@ -1,6 +1,6 @@
{
"name": "npm-install-checks",
- "version": "7.1.0",
+ "version": "7.1.1",
"description": "Check the engines and platform fields in package.json",
"main": "lib/index.js",
"dependencies": {
@@ -8,7 +8,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.23.4",
"tap": "^16.0.1"
},
"scripts": {
@@ -40,7 +40,7 @@
"author": "GitHub Inc.",
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.23.4",
"publish": "true"
},
"tap": {
diff --git a/deps/npm/node_modules/npm-registry-fetch/lib/check-response.js b/deps/npm/node_modules/npm-registry-fetch/lib/check-response.js
index 65eea2963b0b4c..2f183082ab2ce2 100644
--- a/deps/npm/node_modules/npm-registry-fetch/lib/check-response.js
+++ b/deps/npm/node_modules/npm-registry-fetch/lib/check-response.js
@@ -48,10 +48,18 @@ function logRequest (method, res, startTime) {
const cacheStr = cacheStatus ? ` (cache ${cacheStatus})` : ''
const urlStr = cleanUrl(res.url)
- log.http(
- 'fetch',
- `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
- )
+ // If make-fetch-happen reports a cache hit, then there was no fetch
+ if (cacheStatus === 'hit') {
+ log.http(
+ 'cache',
+ `${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
+ )
+ } else {
+ log.http(
+ 'fetch',
+ `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
+ )
+ }
}
function checkErrors (method, res, startTime, opts) {
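
With the change above, a response that make-fetch-happen served from its cache is logged on an 'http cache' line rather than an 'http fetch' line carrying a method and status that were never sent over the wire. A condensed, runnable form of the branch (proc-log stubbed; values illustrative):

    const log = { http: (channel, msg) => console.log('http', channel, msg) }

    function logRequest ({ method, status, url, ms, cacheStatus }) {
      const cacheStr = cacheStatus ? ` (cache ${cacheStatus})` : ''
      if (cacheStatus === 'hit') {
        log.http('cache', `${url} ${ms}ms${cacheStr}`)
      } else {
        log.http('fetch', `${method.toUpperCase()} ${status} ${url} ${ms}ms${cacheStr}`)
      }
    }

    logRequest({ method: 'get', status: 200, url: 'https://registry.npmjs.org/abbrev', ms: 120 })
    logRequest({ url: 'https://registry.npmjs.org/abbrev', ms: 3, cacheStatus: 'hit' })
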
diff --git a/deps/npm/node_modules/npm-registry-fetch/package.json b/deps/npm/node_modules/npm-registry-fetch/package.json
index 559473b964aaa5..bd7a79d35e26ac 100644
--- a/deps/npm/node_modules/npm-registry-fetch/package.json
+++ b/deps/npm/node_modules/npm-registry-fetch/package.json
@@ -1,6 +1,6 @@
{
"name": "npm-registry-fetch",
- "version": "18.0.1",
+ "version": "18.0.2",
"description": "Fetch-based http client for use with npm registry APIs",
"main": "lib",
"files": [
@@ -42,8 +42,8 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.3",
- "cacache": "^18.0.0",
+ "@npmcli/template-oss": "4.23.4",
+ "cacache": "^19.0.1",
"nock": "^13.2.4",
"require-inject": "^1.4.4",
"ssri": "^12.0.0",
@@ -62,7 +62,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.23.4",
"publish": "true"
}
}
diff --git a/deps/npm/node_modules/package-json-from-dist/dist/commonjs/index.js b/deps/npm/node_modules/package-json-from-dist/dist/commonjs/index.js
index 5cff210d855cb0..b966ac9fef535b 100644
--- a/deps/npm/node_modules/package-json-from-dist/dist/commonjs/index.js
+++ b/deps/npm/node_modules/package-json-from-dist/dist/commonjs/index.js
@@ -5,6 +5,8 @@ const node_fs_1 = require("node:fs");
const node_path_1 = require("node:path");
const node_url_1 = require("node:url");
const NM = `${node_path_1.sep}node_modules${node_path_1.sep}`;
+const STORE = `.store${node_path_1.sep}`;
+const PKG = `${node_path_1.sep}package${node_path_1.sep}`;
const DIST = `${node_path_1.sep}dist${node_path_1.sep}`;
/**
* Find the package.json file, either from a TypeScript file somewhere not
@@ -59,8 +61,16 @@ const findPackageJson = (from, pathFromSrc = '../package.json') => {
// inside of node_modules. find the dist directly under package name.
const nm = __dirname.substring(0, nms + NM.length);
const pkgDir = __dirname.substring(nms + NM.length);
+ // affordance for yarn berry, which puts package contents in
+ // '.../node_modules/.store/${id}-${hash}/package/...'
+ if (pkgDir.startsWith(STORE)) {
+ const pkg = pkgDir.indexOf(PKG, STORE.length);
+ if (pkg) {
+ return (0, node_path_1.resolve)(nm, pkgDir.substring(0, pkg + PKG.length), 'package.json');
+ }
+ }
const pkgName = pkgDir.startsWith('@') ?
- pkgDir.split(node_path_1.sep).slice(0, 2).join(node_path_1.sep)
+ pkgDir.split(node_path_1.sep, 2).join(node_path_1.sep)
: String(pkgDir.split(node_path_1.sep)[0]);
return (0, node_path_1.resolve)(nm, pkgName, 'package.json');
}
diff --git a/deps/npm/node_modules/package-json-from-dist/dist/esm/index.js b/deps/npm/node_modules/package-json-from-dist/dist/esm/index.js
index 0627645f9c35a4..426ad3c2d18597 100644
--- a/deps/npm/node_modules/package-json-from-dist/dist/esm/index.js
+++ b/deps/npm/node_modules/package-json-from-dist/dist/esm/index.js
@@ -2,6 +2,8 @@ import { readFileSync } from 'node:fs';
import { dirname, resolve, sep } from 'node:path';
import { fileURLToPath } from 'node:url';
const NM = `${sep}node_modules${sep}`;
+const STORE = `.store${sep}`;
+const PKG = `${sep}package${sep}`;
const DIST = `${sep}dist${sep}`;
/**
* Find the package.json file, either from a TypeScript file somewhere not
@@ -56,8 +58,16 @@ export const findPackageJson = (from, pathFromSrc = '../package.json') => {
// inside of node_modules. find the dist directly under package name.
const nm = __dirname.substring(0, nms + NM.length);
const pkgDir = __dirname.substring(nms + NM.length);
+ // affordance for yarn berry, which puts package contents in
+ // '.../node_modules/.store/${id}-${hash}/package/...'
+ if (pkgDir.startsWith(STORE)) {
+ const pkg = pkgDir.indexOf(PKG, STORE.length);
+ if (pkg) {
+ return resolve(nm, pkgDir.substring(0, pkg + PKG.length), 'package.json');
+ }
+ }
const pkgName = pkgDir.startsWith('@') ?
- pkgDir.split(sep).slice(0, 2).join(sep)
+ pkgDir.split(sep, 2).join(sep)
: String(pkgDir.split(sep)[0]);
return resolve(nm, pkgName, 'package.json');
}
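
Both the CommonJS and ESM builds gain the same branch: under the Yarn Berry layout quoted in the hunk's comment ('.../node_modules/.store/${id}-${hash}/package/...'), the package name is no longer the first path segment after node_modules, so package.json must be resolved under the store entry's package/ directory instead. A worked example of the slicing (hypothetical paths):

    const { sep } = require('node:path')

    const NM = `${sep}node_modules${sep}`
    const STORE = `.store${sep}`
    const PKG = `${sep}package${sep}`

    const dir = ['', 'repo', 'node_modules', '.store',
      'foo-npm-1.2.3-abcd1234', 'package', 'dist'].join(sep)
    const pkgDir = dir.substring(dir.indexOf(NM) + NM.length)
    // '.store/foo-npm-1.2.3-abcd1234/package/dist'
    const pkg = pkgDir.indexOf(PKG, STORE.length)
    console.log(pkgDir.substring(0, pkg + PKG.length))
    // '.store/foo-npm-1.2.3-abcd1234/package/', then resolved with 'package.json'
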
diff --git a/deps/npm/node_modules/package-json-from-dist/package.json b/deps/npm/node_modules/package-json-from-dist/package.json
index 2d5526e87b7fa0..a2d03c3269d72d 100644
--- a/deps/npm/node_modules/package-json-from-dist/package.json
+++ b/deps/npm/node_modules/package-json-from-dist/package.json
@@ -1,6 +1,6 @@
{
"name": "package-json-from-dist",
- "version": "1.0.0",
+ "version": "1.0.1",
"description": "Load the local package.json from either src or dist folder",
"main": "./dist/commonjs/index.js",
"exports": {
@@ -28,7 +28,7 @@
"presnap": "npm run prepare",
"test": "tap",
"snap": "tap",
- "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache",
+ "format": "prettier --write . --log-level warn",
"typedoc": "typedoc"
},
"author": "Isaac Z. Schlueter (https://izs.me)",
diff --git a/deps/npm/node_modules/pacote/lib/dir.js b/deps/npm/node_modules/pacote/lib/dir.js
index f3229b34e463ab..4ae97c216fe64f 100644
--- a/deps/npm/node_modules/pacote/lib/dir.js
+++ b/deps/npm/node_modules/pacote/lib/dir.js
@@ -39,6 +39,8 @@ class DirFetcher extends Fetcher {
const stdio = this.opts.foregroundScripts ? 'inherit' : 'pipe'
return runScript({
+ // this || undefined is because runScript will be unhappy with the default null value
+ scriptShell: this.opts.scriptShell || undefined,
pkg: mani,
event: 'prepare',
path: this.resolved,
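
The scriptShell line above exists because pacote's options default to null, and (per the hunk's own comment) runScript does not treat null as "use the default shell" the way it treats undefined. A standalone form of the same normalization:

    // null (pacote's default) would be passed through as an explicit value;
    // undefined lets @npmcli/run-script fall back to its default shell.
    const normalizeShell = (scriptShell) => scriptShell || undefined
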
diff --git a/deps/npm/node_modules/pacote/lib/fetcher.js b/deps/npm/node_modules/pacote/lib/fetcher.js
index cc2c2db70c697d..f2ac97619d3af1 100644
--- a/deps/npm/node_modules/pacote/lib/fetcher.js
+++ b/deps/npm/node_modules/pacote/lib/fetcher.js
@@ -188,7 +188,15 @@ class FetcherBase {
// private
// Note: cacache will raise a EINTEGRITY error if the integrity doesn't match
#tarballFromCache () {
- return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts)
+ const startTime = Date.now()
+ const stream = cacache.get.stream.byDigest(this.cache, this.integrity, this.opts)
+ const elapsedTime = Date.now() - startTime
+ // cache is good, so log it as a hit in particular since there was no fetch logged
+ log.http(
+ 'cache',
+ `${this.spec} ${elapsedTime}ms (cache hit)`
+ )
+ return stream
}
get [_.cacheFetches] () {
diff --git a/deps/npm/node_modules/pacote/package.json b/deps/npm/node_modules/pacote/package.json
index 0eb8261af96e0c..71c9aa1ce32572 100644
--- a/deps/npm/node_modules/pacote/package.json
+++ b/deps/npm/node_modules/pacote/package.json
@@ -1,6 +1,6 @@
{
"name": "pacote",
- "version": "19.0.0",
+ "version": "19.0.1",
"description": "JavaScript package downloader",
"author": "GitHub Inc.",
"bin": {
@@ -59,7 +59,7 @@
"npm-registry-fetch": "^18.0.0",
"proc-log": "^5.0.0",
"promise-retry": "^2.0.1",
- "sigstore": "^2.2.0",
+ "sigstore": "^3.0.0",
"ssri": "^12.0.0",
"tar": "^6.1.11"
},
diff --git a/deps/npm/node_modules/promise-call-limit/dist/commonjs/index.js b/deps/npm/node_modules/promise-call-limit/dist/commonjs/index.js
index 6ce5cfcef9559e..b32a85bb11aa39 100644
--- a/deps/npm/node_modules/promise-call-limit/dist/commonjs/index.js
+++ b/deps/npm/node_modules/promise-call-limit/dist/commonjs/index.js
@@ -29,8 +29,8 @@ const os = __importStar(require("node:os"));
// cpus() cpus() can return an empty list if /proc is not mounted, use 1 in
// this case
/* c8 ignore start */
-const defLimit = 'availableParallelism' in os
- ? Math.max(1, os.availableParallelism() - 1)
+const defLimit = 'availableParallelism' in os ?
+ Math.max(1, os.availableParallelism() - 1)
: Math.max(1, os.cpus().length - 1);
const callLimit = (queue, { limit = defLimit, rejectLate } = {}) => new Promise((res, rej) => {
let active = 0;
diff --git a/deps/npm/node_modules/promise-call-limit/dist/esm/index.js b/deps/npm/node_modules/promise-call-limit/dist/esm/index.js
index 030099929b3483..fe709db7fc04cc 100644
--- a/deps/npm/node_modules/promise-call-limit/dist/esm/index.js
+++ b/deps/npm/node_modules/promise-call-limit/dist/esm/index.js
@@ -3,8 +3,8 @@ import * as os from 'node:os';
// cpus() cpus() can return an empty list if /proc is not mounted, use 1 in
// this case
/* c8 ignore start */
-const defLimit = 'availableParallelism' in os
- ? Math.max(1, os.availableParallelism() - 1)
+const defLimit = 'availableParallelism' in os ?
+ Math.max(1, os.availableParallelism() - 1)
: Math.max(1, os.cpus().length - 1);
export const callLimit = (queue, { limit = defLimit, rejectLate } = {}) => new Promise((res, rej) => {
let active = 0;
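
For context, the API these two builds share: callLimit takes an array of promise-returning thunks and resolves with their results in queue order, running at most `limit` at a time (defaulting, per the hunks above, to availableParallelism() - 1 where available). A minimal usage sketch:

    import { callLimit } from 'promise-call-limit'

    const squares = await callLimit(
      [1, 2, 3, 4].map((n) => () => Promise.resolve(n * n)),
      { limit: 2 }, // run at most two thunks concurrently
    )
    console.log(squares) // [1, 4, 9, 16]
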
diff --git a/deps/npm/node_modules/promise-call-limit/package.json b/deps/npm/node_modules/promise-call-limit/package.json
index a3aa548d6538ac..ab14595366e223 100644
--- a/deps/npm/node_modules/promise-call-limit/package.json
+++ b/deps/npm/node_modules/promise-call-limit/package.json
@@ -1,6 +1,6 @@
{
"name": "promise-call-limit",
- "version": "3.0.1",
+ "version": "3.0.2",
"files": [
"dist"
],
@@ -18,16 +18,17 @@
"test": "tap",
"preversion": "npm test",
"postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags"
+ "prepublishOnly": "git push origin --follow-tags",
+ "format": "prettier --write . --log-level warn --cache"
},
"devDependencies": {
- "prettier": "^3.2.1",
- "tap": "^18.6.1",
- "tshy": "^1.8.2",
- "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache",
- "typedoc": "typedoc"
+ "prettier": "^3.3.3",
+ "tap": "^21.0.1",
+ "tshy": "^3.0.2",
+ "typedoc": "^0.26.6"
},
"prettier": {
+ "experimentalTernaries": true,
"semi": false,
"printWidth": 70,
"tabWidth": 2,
@@ -62,5 +63,6 @@
},
"main": "./dist/commonjs/index.js",
"types": "./dist/commonjs/index.d.ts",
- "type": "module"
+ "type": "module",
+ "module": "./dist/esm/index.js"
}
diff --git a/deps/npm/node_modules/sigstore/dist/config.js b/deps/npm/node_modules/sigstore/dist/config.js
index b4f0eea74fa4b4..e8b2392f97f236 100644
--- a/deps/npm/node_modules/sigstore/dist/config.js
+++ b/deps/npm/node_modules/sigstore/dist/config.js
@@ -1,6 +1,9 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.createVerificationPolicy = exports.createKeyFinder = exports.createBundleBuilder = exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = void 0;
+exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = void 0;
+exports.createBundleBuilder = createBundleBuilder;
+exports.createKeyFinder = createKeyFinder;
+exports.createVerificationPolicy = createVerificationPolicy;
/*
Copyright 2023 The Sigstore Authors.
@@ -30,10 +33,12 @@ function createBundleBuilder(bundleType, options) {
case 'messageSignature':
return new sign_1.MessageSignatureBundleBuilder(bundlerOptions);
case 'dsseEnvelope':
- return new sign_1.DSSEBundleBuilder(bundlerOptions);
+ return new sign_1.DSSEBundleBuilder({
+ ...bundlerOptions,
+ certificateChain: options.legacyCompatibility,
+ });
}
}
-exports.createBundleBuilder = createBundleBuilder;
// Translates the public KeySelector type into the KeyFinderFunc type needed by
// the verifier.
function createKeyFinder(keySelector) {
@@ -51,7 +56,6 @@ function createKeyFinder(keySelector) {
};
};
}
-exports.createKeyFinder = createKeyFinder;
function createVerificationPolicy(options) {
const policy = {};
const san = options.certificateIdentityEmail || options.certificateIdentityURI;
@@ -63,7 +67,6 @@ function createVerificationPolicy(options) {
}
return policy;
}
-exports.createVerificationPolicy = createVerificationPolicy;
// Instantiate the FulcioSigner based on the supplied options.
function initSigner(options) {
return new sign_1.FulcioSigner({
@@ -92,6 +95,7 @@ function initWitnesses(options) {
if (isRekorEnabled(options)) {
witnesses.push(new sign_1.RekorWitness({
rekorBaseURL: options.rekorURL,
+ entryType: options.legacyCompatibility ? 'intoto' : 'dsse',
fetchOnConflict: false,
retry: options.retry ?? exports.DEFAULT_RETRY,
timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
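
Taken together, the config.js changes mean sigstore 3.x produces the modern formats by default (a v0.3 bundle and a 'dsse' Rekor entry) and gates the old behavior ('intoto' entries and certificate-chain v0.2 bundles) behind the new legacyCompatibility option. A sketch of opting back in (payload type illustrative; attest's signature appears in sigstore.js below):

    const { attest } = require('sigstore')

    async function publishAttestation (payload) {
      // default: Rekor 'dsse' entry, v0.3 bundle media type
      const modern = await attest(payload, 'application/vnd.in-toto+json')
      // opt-out: Rekor 'intoto' entry, x509CertificateChain v0.2 bundle
      const legacy = await attest(payload, 'application/vnd.in-toto+json', {
        legacyCompatibility: true,
      })
      return { modern, legacy }
    }
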
diff --git a/deps/npm/node_modules/sigstore/dist/sigstore.js b/deps/npm/node_modules/sigstore/dist/sigstore.js
index 79d3440670cd50..c45524bbe21c22 100644
--- a/deps/npm/node_modules/sigstore/dist/sigstore.js
+++ b/deps/npm/node_modules/sigstore/dist/sigstore.js
@@ -23,7 +23,10 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.createVerifier = exports.verify = exports.attest = exports.sign = void 0;
+exports.sign = sign;
+exports.attest = attest;
+exports.verify = verify;
+exports.createVerifier = createVerifier;
/*
Copyright 2023 The Sigstore Authors.
@@ -50,7 +53,6 @@ options = {}) {
const bundle = await bundler.create({ data: payload });
return (0, bundle_1.bundleToJSON)(bundle);
}
-exports.sign = sign;
async function attest(payload, payloadType,
/* istanbul ignore next */
options = {}) {
@@ -58,7 +60,6 @@ options = {}) {
const bundle = await bundler.create({ data: payload, type: payloadType });
return (0, bundle_1.bundleToJSON)(bundle);
}
-exports.attest = attest;
async function verify(bundle, dataOrOptions, options) {
let data;
if (Buffer.isBuffer(dataOrOptions)) {
@@ -69,7 +70,6 @@ async function verify(bundle, dataOrOptions, options) {
}
return createVerifier(options).then((verifier) => verifier.verify(bundle, data));
}
-exports.verify = verify;
async function createVerifier(
/* istanbul ignore next */
options = {}) {
@@ -100,4 +100,3 @@ options = {}) {
},
};
}
-exports.createVerifier = createVerifier;
diff --git a/deps/npm/node_modules/@sigstore/bundle/LICENSE b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/LICENSE
similarity index 100%
rename from deps/npm/node_modules/@sigstore/bundle/LICENSE
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/LICENSE
diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/build.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/build.js
similarity index 87%
rename from deps/npm/node_modules/@sigstore/bundle/dist/build.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/build.js
index 65c71b100ad58f..ade736407554c6 100644
--- a/deps/npm/node_modules/@sigstore/bundle/dist/build.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/build.js
@@ -1,6 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0;
+exports.toMessageSignatureBundle = toMessageSignatureBundle;
+exports.toDSSEBundle = toDSSEBundle;
/*
Copyright 2023 The Sigstore Authors.
@@ -21,9 +22,9 @@ const bundle_1 = require("./bundle");
// Message signature bundle - $case: 'messageSignature'
function toMessageSignatureBundle(options) {
return {
- mediaType: options.singleCertificate
- ? bundle_1.BUNDLE_V03_MEDIA_TYPE
- : bundle_1.BUNDLE_V02_MEDIA_TYPE,
+ mediaType: options.certificateChain
+ ? bundle_1.BUNDLE_V02_MEDIA_TYPE
+ : bundle_1.BUNDLE_V03_MEDIA_TYPE,
content: {
$case: 'messageSignature',
messageSignature: {
@@ -37,13 +38,12 @@ function toMessageSignatureBundle(options) {
verificationMaterial: toVerificationMaterial(options),
};
}
-exports.toMessageSignatureBundle = toMessageSignatureBundle;
// DSSE envelope bundle - $case: 'dsseEnvelope'
function toDSSEBundle(options) {
return {
- mediaType: options.singleCertificate
- ? bundle_1.BUNDLE_V03_MEDIA_TYPE
- : bundle_1.BUNDLE_V02_MEDIA_TYPE,
+ mediaType: options.certificateChain
+ ? bundle_1.BUNDLE_V02_MEDIA_TYPE
+ : bundle_1.BUNDLE_V03_MEDIA_TYPE,
content: {
$case: 'dsseEnvelope',
dsseEnvelope: toEnvelope(options),
@@ -51,7 +51,6 @@ function toDSSEBundle(options) {
verificationMaterial: toVerificationMaterial(options),
};
}
-exports.toDSSEBundle = toDSSEBundle;
function toEnvelope(options) {
return {
payloadType: options.artifactType,
@@ -75,13 +74,7 @@ function toVerificationMaterial(options) {
}
function toKeyContent(options) {
if (options.certificate) {
- if (options.singleCertificate) {
- return {
- $case: 'certificate',
- certificate: { rawBytes: options.certificate },
- };
- }
- else {
+ if (options.certificateChain) {
return {
$case: 'x509CertificateChain',
x509CertificateChain: {
@@ -89,6 +82,12 @@ function toKeyContent(options) {
},
};
}
+ else {
+ return {
+ $case: 'certificate',
+ certificate: { rawBytes: options.certificate },
+ };
+ }
}
else {
return {
diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/bundle.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/bundle.js
similarity index 79%
rename from deps/npm/node_modules/@sigstore/bundle/dist/bundle.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/bundle.js
index dbd35df2ca2bb3..eb67a0ddc17bbb 100644
--- a/deps/npm/node_modules/@sigstore/bundle/dist/bundle.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/bundle.js
@@ -1,6 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.isBundleWithDsseEnvelope = exports.isBundleWithMessageSignature = exports.isBundleWithPublicKey = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0;
+exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0;
+exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
+exports.isBundleWithPublicKey = isBundleWithPublicKey;
+exports.isBundleWithMessageSignature = isBundleWithMessageSignature;
+exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope;
exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1';
exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2';
exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.3';
@@ -9,16 +13,12 @@ exports.BUNDLE_V03_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle.v0.3+json';
function isBundleWithCertificateChain(b) {
return b.verificationMaterial.content.$case === 'x509CertificateChain';
}
-exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
function isBundleWithPublicKey(b) {
return b.verificationMaterial.content.$case === 'publicKey';
}
-exports.isBundleWithPublicKey = isBundleWithPublicKey;
function isBundleWithMessageSignature(b) {
return b.content.$case === 'messageSignature';
}
-exports.isBundleWithMessageSignature = isBundleWithMessageSignature;
function isBundleWithDsseEnvelope(b) {
return b.content.$case === 'dsseEnvelope';
}
-exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope;
diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/error.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/error.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/bundle/dist/error.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/error.js
diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/bundle/dist/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/index.js
diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/serialized.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/serialized.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/bundle/dist/serialized.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/serialized.js
diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/utility.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/utility.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/bundle/dist/utility.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/utility.js
diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/validate.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/validate.js
similarity index 98%
rename from deps/npm/node_modules/@sigstore/bundle/dist/validate.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/validate.js
index 67079cd1f680a9..21b8b5ee293ba1 100644
--- a/deps/npm/node_modules/@sigstore/bundle/dist/validate.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/dist/validate.js
@@ -1,6 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.assertBundleLatest = exports.assertBundleV02 = exports.isBundleV01 = exports.assertBundleV01 = exports.assertBundle = void 0;
+exports.assertBundle = assertBundle;
+exports.assertBundleV01 = assertBundleV01;
+exports.isBundleV01 = isBundleV01;
+exports.assertBundleV02 = assertBundleV02;
+exports.assertBundleLatest = assertBundleLatest;
/*
Copyright 2023 The Sigstore Authors.
@@ -27,7 +31,6 @@ function assertBundle(b) {
throw new error_1.ValidationError('invalid bundle', invalidValues);
}
}
-exports.assertBundle = assertBundle;
// Asserts that the given bundle conforms to the v0.1 bundle format.
function assertBundleV01(b) {
const invalidValues = [];
@@ -37,7 +40,6 @@ function assertBundleV01(b) {
throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues);
}
}
-exports.assertBundleV01 = assertBundleV01;
// Type guard to determine if Bundle is a v0.1 bundle.
function isBundleV01(b) {
try {
@@ -48,7 +50,6 @@ function isBundleV01(b) {
return false;
}
}
-exports.isBundleV01 = isBundleV01;
// Asserts that the given bundle conforms to the v0.2 bundle format.
function assertBundleV02(b) {
const invalidValues = [];
@@ -58,7 +59,6 @@ function assertBundleV02(b) {
throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues);
}
}
-exports.assertBundleV02 = assertBundleV02;
// Asserts that the given bundle conforms to the newest (0.3) bundle format.
function assertBundleLatest(b) {
const invalidValues = [];
@@ -69,7 +69,6 @@ function assertBundleLatest(b) {
throw new error_1.ValidationError('invalid bundle', invalidValues);
}
}
-exports.assertBundleLatest = assertBundleLatest;
function validateBundleBase(b) {
const invalidValues = [];
// Media type validation
@@ -192,6 +191,7 @@ function validateInclusionProof(b) {
// Necessary for V03 and later bundles
function validateNoCertificateChain(b) {
const invalidValues = [];
+ /* istanbul ignore next */
if (b.verificationMaterial?.content?.$case === 'x509CertificateChain') {
invalidValues.push('verificationMaterial.content.$case');
}
diff --git a/deps/npm/node_modules/@sigstore/bundle/package.json b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/package.json
similarity index 92%
rename from deps/npm/node_modules/@sigstore/bundle/package.json
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/package.json
index dd853897226d2f..ee5d2b92b801a5 100644
--- a/deps/npm/node_modules/@sigstore/bundle/package.json
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/package.json
@@ -1,6 +1,6 @@
{
"name": "@sigstore/bundle",
- "version": "2.3.2",
+ "version": "3.0.0",
"description": "Sigstore bundle type",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -30,6 +30,6 @@
"@sigstore/protobuf-specs": "^0.3.2"
},
"engines": {
- "node": "^16.14.0 || >=18.0.0"
+ "node": "^18.17.0 || >=20.5.0"
}
}
diff --git a/deps/npm/node_modules/@sigstore/core/LICENSE b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/LICENSE
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/LICENSE
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/LICENSE
diff --git a/deps/npm/node_modules/@sigstore/core/dist/asn1/error.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/error.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/asn1/error.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/error.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/asn1/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/asn1/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/index.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/asn1/length.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/length.js
similarity index 97%
rename from deps/npm/node_modules/@sigstore/core/dist/asn1/length.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/length.js
index 36fdaf5b9777fd..cb7ebf09dbefa4 100644
--- a/deps/npm/node_modules/@sigstore/core/dist/asn1/length.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/length.js
@@ -15,7 +15,8 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
Object.defineProperty(exports, "__esModule", { value: true });
-exports.encodeLength = exports.decodeLength = void 0;
+exports.decodeLength = decodeLength;
+exports.encodeLength = encodeLength;
const error_1 = require("./error");
// Decodes the length of a DER-encoded ASN.1 element from the supplied stream.
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-length-and-value-bytes
@@ -44,7 +45,6 @@ function decodeLength(stream) {
}
return len;
}
-exports.decodeLength = decodeLength;
// Translates the supplied value to a DER-encoded length.
function encodeLength(len) {
if (len < 128) {
@@ -60,4 +60,3 @@ function encodeLength(len) {
}
return Buffer.from([0x80 | bytes.length, ...bytes]);
}
-exports.encodeLength = encodeLength;
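
For context on what this module implements: DER definite-length octets use a short form (a single byte) for lengths below 128 and a long form (`0x80 | n` followed by n big-endian length bytes) otherwise. A standalone sketch mirroring the encoder above:

```js
// DER definite-length encoding: short form for lengths < 128,
// long form (0x80 | byteCount, then big-endian bytes) otherwise.
function encodeLength(len) {
  if (len < 128) {
    return Buffer.from([len]);
  }
  const bytes = [];
  while (len > 0) {
    bytes.unshift(len & 0xff);
    len >>>= 8;
  }
  return Buffer.from([0x80 | bytes.length, ...bytes]);
}

console.log(encodeLength(52));   // <Buffer 34>       (short form)
console.log(encodeLength(1024)); // <Buffer 82 04 00> (long form)
```
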
diff --git a/deps/npm/node_modules/@sigstore/core/dist/asn1/obj.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/obj.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/asn1/obj.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/obj.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/asn1/parse.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/parse.js
similarity index 96%
rename from deps/npm/node_modules/@sigstore/core/dist/asn1/parse.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/parse.js
index 482c7239e83162..7fbb42632c60e8 100644
--- a/deps/npm/node_modules/@sigstore/core/dist/asn1/parse.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/parse.js
@@ -1,6 +1,11 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseBitString = exports.parseBoolean = exports.parseOID = exports.parseTime = exports.parseStringASCII = exports.parseInteger = void 0;
+exports.parseInteger = parseInteger;
+exports.parseStringASCII = parseStringASCII;
+exports.parseTime = parseTime;
+exports.parseOID = parseOID;
+exports.parseBoolean = parseBoolean;
+exports.parseBitString = parseBitString;
/*
Copyright 2023 The Sigstore Authors.
@@ -43,13 +48,11 @@ function parseInteger(buf) {
}
return n;
}
-exports.parseInteger = parseInteger;
// Parse an ASCII string from the DER-encoded buffer
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean
function parseStringASCII(buf) {
return buf.toString('ascii');
}
-exports.parseStringASCII = parseStringASCII;
// Parse a Date from the DER-encoded buffer
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1
function parseTime(buf, shortYear) {
@@ -70,7 +73,6 @@ function parseTime(buf, shortYear) {
// Translate to ISO8601 format and parse
return new Date(`${m[1]}-${m[2]}-${m[3]}T${m[4]}:${m[5]}:${m[6]}Z`);
}
-exports.parseTime = parseTime;
// Parse an OID from the DER-encoded buffer
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier
function parseOID(buf) {
@@ -95,13 +97,11 @@ function parseOID(buf) {
}
return oid;
}
-exports.parseOID = parseOID;
// Parse a boolean from the DER-encoded buffer
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean
function parseBoolean(buf) {
return buf[0] !== 0;
}
-exports.parseBoolean = parseBoolean;
// Parse a bit string from the DER-encoded buffer
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string
function parseBitString(buf) {
@@ -122,4 +122,3 @@ function parseBitString(buf) {
}
return bits;
}
-exports.parseBitString = parseBitString;
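
Of the parsers in this file, the OID decoder is the least obvious: the first content byte packs the first two components as 40 * c1 + c2, and each later component is base-128 encoded with the high bit set on every byte except the last. A self-contained sketch of that scheme (assuming c1 < 3, as in common certificate OIDs):

```js
// Sketch of DER OID decoding as in parseOID above.
function parseOID(buf) {
  let pos = 0;
  const first = buf[pos++];
  let oid = `${Math.floor(first / 40)}.${first % 40}`;
  while (pos < buf.length) {
    let value = 0;
    let byte;
    do {
      byte = buf[pos++];
      value = value * 128 + (byte & 0x7f); // accumulate 7 bits at a time
    } while (byte & 0x80); // high bit set => more bytes follow
    oid += `.${value}`;
  }
  return oid;
}

// 0x55 = 40 * 2 + 5, then 29, then 17 => subjectAltName
console.log(parseOID(Buffer.from([0x55, 0x1d, 0x11]))); // "2.5.29.17"
```
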
diff --git a/deps/npm/node_modules/@sigstore/core/dist/asn1/tag.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/tag.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/asn1/tag.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/asn1/tag.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/crypto.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/crypto.js
similarity index 83%
rename from deps/npm/node_modules/@sigstore/core/dist/crypto.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/crypto.js
index dbe65b165d3574..296b5ba43e86a0 100644
--- a/deps/npm/node_modules/@sigstore/core/dist/crypto.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/crypto.js
@@ -3,7 +3,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.bufferEqual = exports.verify = exports.hash = exports.digest = exports.createPublicKey = void 0;
+exports.createPublicKey = createPublicKey;
+exports.digest = digest;
+exports.verify = verify;
+exports.bufferEqual = bufferEqual;
/*
Copyright 2023 The Sigstore Authors.
@@ -20,7 +23,6 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
const crypto_1 = __importDefault(require("crypto"));
-const SHA256_ALGORITHM = 'sha256';
function createPublicKey(key, type = 'spki') {
if (typeof key === 'string') {
return crypto_1.default.createPublicKey(key);
@@ -29,7 +31,6 @@ function createPublicKey(key, type = 'spki') {
return crypto_1.default.createPublicKey({ key, format: 'der', type: type });
}
}
-exports.createPublicKey = createPublicKey;
function digest(algorithm, ...data) {
const hash = crypto_1.default.createHash(algorithm);
for (const d of data) {
@@ -37,16 +38,6 @@ function digest(algorithm, ...data) {
}
return hash.digest();
}
-exports.digest = digest;
-// TODO: deprecate this in favor of digest()
-function hash(...data) {
- const hash = crypto_1.default.createHash(SHA256_ALGORITHM);
- for (const d of data) {
- hash.update(d);
- }
- return hash.digest();
-}
-exports.hash = hash;
function verify(data, key, signature, algorithm) {
// The try/catch is to work around an issue in Node 14.x where verify throws
// an error in some scenarios if the signature is invalid.
@@ -58,7 +49,6 @@ function verify(data, key, signature, algorithm) {
return false;
}
}
-exports.verify = verify;
function bufferEqual(a, b) {
try {
return crypto_1.default.timingSafeEqual(a, b);
@@ -68,4 +58,3 @@ function bufferEqual(a, b) {
return false;
}
}
-exports.bufferEqual = bufferEqual;
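
The dropped `hash()` helper was a SHA-256-only wrapper; the call sites updated later in this patch now invoke `digest()` and pass `'sha256'` explicitly. A runnable equivalent of the surviving helper:

```js
// digest(algorithm, ...data): feed each chunk into one hash context.
// Callers that used hash(...data) now call digest('sha256', ...data).
const crypto = require("crypto");

function digest(algorithm, ...data) {
  const hash = crypto.createHash(algorithm);
  for (const d of data) {
    hash.update(d);
  }
  return hash.digest();
}

console.log(digest("sha256", Buffer.from("abc")).toString("hex"));
// ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad
```
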
diff --git a/deps/npm/node_modules/@sigstore/core/dist/dsse.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/dsse.js
similarity index 96%
rename from deps/npm/node_modules/@sigstore/core/dist/dsse.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/dsse.js
index a78783c919a256..ca7b63630e2ba9 100644
--- a/deps/npm/node_modules/@sigstore/core/dist/dsse.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/dsse.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.preAuthEncoding = void 0;
+exports.preAuthEncoding = preAuthEncoding;
/*
Copyright 2023 The Sigstore Authors.
@@ -28,4 +28,3 @@ function preAuthEncoding(payloadType, payload) {
].join(' ');
return Buffer.concat([Buffer.from(prefix, 'ascii'), payload]);
}
-exports.preAuthEncoding = preAuthEncoding;
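
`preAuthEncoding` builds the DSSE pre-authentication encoding, i.e. the byte string that is actually signed: the literal `DSSEv1`, then the length-prefixed payload type and payload, space-separated. A sketch of that layout (the trailing empty array element supplies the space before the payload):

```js
// DSSE PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body
function preAuthEncoding(payloadType, payload) {
  const prefix = [
    "DSSEv1",
    payloadType.length,
    payloadType,
    payload.length,
    "", // trailing element yields the space separating prefix and payload
  ].join(" ");
  return Buffer.concat([Buffer.from(prefix, "ascii"), payload]);
}

const pae = preAuthEncoding("application/vnd.in-toto+json", Buffer.from("{}"));
console.log(pae.toString()); // DSSEv1 28 application/vnd.in-toto+json 2 {}
```
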
diff --git a/deps/npm/node_modules/@sigstore/core/dist/encoding.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/encoding.js
similarity index 94%
rename from deps/npm/node_modules/@sigstore/core/dist/encoding.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/encoding.js
index b020ac4d6ecd42..7113af66db4c2d 100644
--- a/deps/npm/node_modules/@sigstore/core/dist/encoding.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/encoding.js
@@ -1,6 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.base64Decode = exports.base64Encode = void 0;
+exports.base64Encode = base64Encode;
+exports.base64Decode = base64Decode;
/*
Copyright 2023 The Sigstore Authors.
@@ -21,8 +22,6 @@ const UTF8_ENCODING = 'utf-8';
function base64Encode(str) {
return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING);
}
-exports.base64Encode = base64Encode;
function base64Decode(str) {
return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING);
}
-exports.base64Decode = base64Decode;
diff --git a/deps/npm/node_modules/@sigstore/core/dist/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/index.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/json.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/json.js
similarity index 98%
rename from deps/npm/node_modules/@sigstore/core/dist/json.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/json.js
index a50df7233c7c58..7808d033b98cc9 100644
--- a/deps/npm/node_modules/@sigstore/core/dist/json.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/json.js
@@ -15,7 +15,7 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
Object.defineProperty(exports, "__esModule", { value: true });
-exports.canonicalize = void 0;
+exports.canonicalize = canonicalize;
// JSON canonicalization per https://github.com/cyberphone/json-canonicalization
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function canonicalize(object) {
@@ -58,4 +58,3 @@ function canonicalize(object) {
}
return buffer;
}
-exports.canonicalize = canonicalize;
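
`canonicalize` serializes JSON deterministically — lexicographically sorted object keys, no whitespace — so that hashes over JSON are stable across producers; the tlog-entry code later in this patch hashes its output. A simplified sketch that skips the number-formatting rules of the full scheme:

```js
// Simplified JSON canonicalization: sorted keys, no whitespace, so
// structurally equal values always serialize (and hash) identically.
function canonicalize(value) {
  if (value === null || typeof value !== "object") {
    return JSON.stringify(value);
  }
  if (Array.isArray(value)) {
    return `[${value.map(canonicalize).join(",")}]`;
  }
  const members = Object.keys(value)
    .sort()
    .map((k) => `${JSON.stringify(k)}:${canonicalize(value[k])}`);
  return `{${members.join(",")}}`;
}

console.log(canonicalize({ b: 1, a: [true, null] })); // {"a":[true,null],"b":1}
```
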
diff --git a/deps/npm/node_modules/@sigstore/core/dist/oid.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/oid.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/oid.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/oid.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/pem.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/pem.js
similarity index 97%
rename from deps/npm/node_modules/@sigstore/core/dist/pem.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/pem.js
index f35bc3835bbd10..f1241d28d586ec 100644
--- a/deps/npm/node_modules/@sigstore/core/dist/pem.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/pem.js
@@ -1,6 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.fromDER = exports.toDER = void 0;
+exports.toDER = toDER;
+exports.fromDER = fromDER;
/*
Copyright 2023 The Sigstore Authors.
@@ -28,7 +29,6 @@ function toDER(certificate) {
});
return Buffer.from(der, 'base64');
}
-exports.toDER = toDER;
// Translates a DER-encoded buffer into a PEM-encoded string. Standard PEM
// encoding dictates that each certificate should have a trailing newline after
// the footer.
@@ -41,4 +41,3 @@ function fromDER(certificate, type = 'CERTIFICATE') {
.join('\n')
.concat('\n');
}
-exports.fromDER = fromDER;
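
`toDER`/`fromDER` convert between PEM armor and raw DER: PEM is just base64-encoded DER between BEGIN/END markers, wrapped at 64 columns, with the trailing newline the comment above insists on. A self-contained sketch of the round trip:

```js
// PEM -> DER: drop the marker lines, base64-decode the rest.
function toDER(pem) {
  const base64 = pem
    .split("\n")
    .filter((line) => line && !line.startsWith("-----"))
    .join("");
  return Buffer.from(base64, "base64");
}

// DER -> PEM: base64-encode, wrap at 64 chars, add a trailing newline.
function fromDER(der, type = "CERTIFICATE") {
  const lines = der.toString("base64").match(/.{1,64}/g) ?? [];
  return [`-----BEGIN ${type}-----`, ...lines, `-----END ${type}-----`]
    .join("\n")
    .concat("\n");
}

const der = Buffer.from("not a real certificate, just bytes");
console.log(toDER(fromDER(der)).equals(der)); // true
```
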
diff --git a/deps/npm/node_modules/@sigstore/core/dist/rfc3161/error.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/error.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/rfc3161/error.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/error.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/rfc3161/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/rfc3161/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/index.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/rfc3161/timestamp.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/stream.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/stream.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/stream.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/stream.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/x509/cert.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/cert.js
similarity index 96%
rename from deps/npm/node_modules/@sigstore/core/dist/x509/cert.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/cert.js
index 16c0c40d858d8a..72ea8e0738bc83 100644
--- a/deps/npm/node_modules/@sigstore/core/dist/x509/cert.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/cert.js
@@ -97,13 +97,15 @@ class X509Certificate {
}
get subjectAltName() {
const ext = this.extSubjectAltName;
- return ext?.uri || ext?.rfc822Name;
+ return ext?.uri || /* istanbul ignore next */ ext?.rfc822Name;
}
get extensions() {
// The extension list is the first (and only) element of the extensions
// context specific tag
+ /* istanbul ignore next */
const extSeq = this.extensionsObj?.subs[0];
- return extSeq?.subs || /* istanbul ignore next */ [];
+ /* istanbul ignore next */
+ return extSeq?.subs || [];
}
get extKeyUsage() {
const ext = this.findExtension(EXTENSION_OID_KEY_USAGE);
@@ -135,8 +137,10 @@ class X509Certificate {
const ca = this.extBasicConstraints?.isCA || false;
// If the KeyUsage extension is present, keyCertSign must be set
if (this.extKeyUsage) {
- ca && this.extKeyUsage.keyCertSign;
+ return ca && this.extKeyUsage.keyCertSign;
}
+ // TODO: test coverage for this case
+ /* istanbul ignore next */
return ca;
}
extension(oid) {
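
The second hunk above is a substantive bug fix, not just a coverage annotation: the old code evaluated `ca && this.extKeyUsage.keyCertSign` and discarded the result, so a certificate whose KeyUsage extension lacked keyCertSign was still reported as a CA. A reduced sketch of the corrected logic:

```js
// Reduced isCA check: when a KeyUsage extension is present, keyCertSign
// must also be set. Before the fix the `return` was missing, so this
// condition was computed and thrown away.
function isCA(basicConstraints, keyUsage) {
  const ca = basicConstraints?.isCA || false;
  if (keyUsage) {
    return ca && keyUsage.keyCertSign;
  }
  return ca;
}

console.log(isCA({ isCA: true }, { keyCertSign: false })); // false (was true)
console.log(isCA({ isCA: true }, undefined));              // true
```
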
diff --git a/deps/npm/node_modules/@sigstore/core/dist/x509/ext.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/ext.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/x509/ext.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/ext.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/x509/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/x509/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/index.js
diff --git a/deps/npm/node_modules/@sigstore/core/dist/x509/sct.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/sct.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/core/dist/x509/sct.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/dist/x509/sct.js
diff --git a/deps/npm/node_modules/@sigstore/core/package.json b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/package.json
similarity index 92%
rename from deps/npm/node_modules/@sigstore/core/package.json
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/core/package.json
index 621ff1715bcd1c..af5dd281ac90e4 100644
--- a/deps/npm/node_modules/@sigstore/core/package.json
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/core/package.json
@@ -1,6 +1,6 @@
{
"name": "@sigstore/core",
- "version": "1.1.0",
+ "version": "2.0.0",
"description": "Base library for Sigstore",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -26,6 +26,6 @@
"provenance": true
},
"engines": {
- "node": "^16.14.0 || >=18.0.0"
+ "node": "^18.17.0 || >=20.5.0"
}
}
diff --git a/deps/npm/node_modules/@sigstore/sign/LICENSE b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/LICENSE
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/LICENSE
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/LICENSE
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/base.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/base.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/bundler/base.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/base.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/bundle.js
similarity index 93%
rename from deps/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/bundle.js
index 7c2ca9164f0dfe..ed32286ad88efd 100644
--- a/deps/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/bundle.js
@@ -23,7 +23,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0;
+exports.toMessageSignatureBundle = toMessageSignatureBundle;
+exports.toDSSEBundle = toDSSEBundle;
/*
Copyright 2023 The Sigstore Authors.
@@ -44,7 +45,7 @@ const util_1 = require("../util");
// Helper functions for assembling the parts of a Sigstore bundle
// Message signature bundle - $case: 'messageSignature'
function toMessageSignatureBundle(artifact, signature) {
- const digest = util_1.crypto.hash(artifact.data);
+ const digest = util_1.crypto.digest('sha256', artifact.data);
return sigstore.toMessageSignatureBundle({
digest,
signature: signature.signature,
@@ -52,11 +53,11 @@ function toMessageSignatureBundle(artifact, signature) {
? util_1.pem.toDER(signature.key.certificate)
: undefined,
keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
+ certificateChain: true,
});
}
-exports.toMessageSignatureBundle = toMessageSignatureBundle;
// DSSE envelope bundle - $case: 'dsseEnvelope'
-function toDSSEBundle(artifact, signature, singleCertificate) {
+function toDSSEBundle(artifact, signature, certificateChain) {
return sigstore.toDSSEBundle({
artifact: artifact.data,
artifactType: artifact.type,
@@ -65,7 +66,6 @@ function toDSSEBundle(artifact, signature, singleCertificate) {
? util_1.pem.toDER(signature.key.certificate)
: undefined,
keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
- singleCertificate,
+ certificateChain,
});
}
-exports.toDSSEBundle = toDSSEBundle;
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/dsse.js
similarity index 93%
rename from deps/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/dsse.js
index 621700df93842a..86046ba8f3013b 100644
--- a/deps/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/dsse.js
@@ -23,7 +23,7 @@ const bundle_1 = require("./bundle");
class DSSEBundleBuilder extends base_1.BaseBundleBuilder {
constructor(options) {
super(options);
- this.singleCertificate = options.singleCertificate ?? false;
+ this.certificateChain = options.certificateChain ?? false;
}
// DSSE requires the artifact to be pre-encoded with the payload type
// before the signature is generated.
@@ -33,7 +33,7 @@ class DSSEBundleBuilder extends base_1.BaseBundleBuilder {
}
// Packages the artifact and signature into a DSSE bundle
async package(artifact, signature) {
- return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature, this.singleCertificate);
+ return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature, this.certificateChain);
}
}
exports.DSSEBundleBuilder = DSSEBundleBuilder;
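
Across these two bundler files the `singleCertificate` option becomes `certificateChain` with its sense inverted: `singleCertificate: true` in v2 corresponds to `certificateChain: false` in v3, so the unchanged default of `false` now selects the single-certificate bundle shape (and `toMessageSignatureBundle` above pins `certificateChain: true` explicitly). A reduced sketch of the defaulting:

```js
// Reduced sketch of the renamed builder flag; with the nullish-coalescing
// default, callers passing no option now get the single-certificate form.
class DSSEBundleBuilderV3 {
  constructor(options = {}) {
    // v2: this.singleCertificate = options.singleCertificate ?? false;
    this.certificateChain = options.certificateChain ?? false;
  }
}

console.log(new DSSEBundleBuilderV3().certificateChain); // false
console.log(new DSSEBundleBuilderV3({ certificateChain: true }).certificateChain); // true
```
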
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/bundler/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/index.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/message.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/message.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/bundler/message.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/message.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/error.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/error.js
similarity index 95%
rename from deps/npm/node_modules/@sigstore/sign/dist/error.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/error.js
index d57e4567fb89ee..d28f1913cc77e9 100644
--- a/deps/npm/node_modules/@sigstore/sign/dist/error.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/error.js
@@ -15,7 +15,8 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
Object.defineProperty(exports, "__esModule", { value: true });
-exports.internalError = exports.InternalError = void 0;
+exports.InternalError = void 0;
+exports.internalError = internalError;
const error_1 = require("./external/error");
class InternalError extends Error {
constructor({ code, message, cause, }) {
@@ -36,4 +37,3 @@ function internalError(err, code, message) {
cause: err,
});
}
-exports.internalError = internalError;
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/external/error.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/error.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/external/error.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/error.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/external/fetch.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/fetch.js
similarity index 95%
rename from deps/npm/node_modules/@sigstore/sign/dist/external/fetch.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/fetch.js
index b2d81bde7be16f..116090f3c641ef 100644
--- a/deps/npm/node_modules/@sigstore/sign/dist/external/fetch.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/fetch.js
@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.fetchWithRetry = void 0;
+exports.fetchWithRetry = fetchWithRetry;
/*
Copyright 2023 The Sigstore Authors.
@@ -58,14 +58,13 @@ async function fetchWithRetry(url, options) {
}
}, retryOpts(options.retry));
}
-exports.fetchWithRetry = fetchWithRetry;
// Translate a Response into an HTTPError instance. This will attempt to parse
// the response body for a message, but will default to the statusText if none
// is found.
const errorFromResponse = async (response) => {
let message = response.statusText;
- const location = response.headers?.get(HTTP2_HEADER_LOCATION) || undefined;
- const contentType = response.headers?.get(HTTP2_HEADER_CONTENT_TYPE);
+ const location = response.headers.get(HTTP2_HEADER_LOCATION) || undefined;
+ const contentType = response.headers.get(HTTP2_HEADER_CONTENT_TYPE);
// If response type is JSON, try to parse the body for a message
if (contentType?.includes('application/json')) {
try {
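
The change above drops the optional chaining on `response.headers`, which is always set on a fetch Response. For orientation, `fetchWithRetry` wraps the request in `promise-retry`; below is a hedged sketch of that shape only, using the global `fetch` (Node 18+) instead of the `make-fetch-happen` client the module actually uses, and with simplified retry conditions.

```js
// Sketch only: promise-retry re-invokes the callback with backoff each
// time retryFn(err) is called, up to the configured retry count.
const promiseRetry = require("promise-retry"); // npm i promise-retry

function fetchWithRetry(url, { retry = { retries: 2 }, ...options } = {}) {
  return promiseRetry(async (retryFn) => {
    let response;
    try {
      response = await fetch(url, options); // global fetch, Node >= 18
    } catch (err) {
      retryFn(err); // network failure: throws to trigger a retry
    }
    if (response.status >= 500) {
      retryFn(new Error(`HTTP ${response.status}`)); // server error: retry
    }
    return response; // other statuses are returned to the caller
  }, retry);
}

// await fetchWithRetry("https://example.com", { retry: { retries: 3 } });
```
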
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/external/fulcio.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/fulcio.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/external/fulcio.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/fulcio.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/external/rekor.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/rekor.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/external/rekor.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/rekor.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/external/tsa.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/tsa.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/external/tsa.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/external/tsa.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/identity/ci.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/identity/ci.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/identity/ci.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/identity/ci.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/identity/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/identity/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/identity/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/identity/index.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/identity/provider.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/identity/provider.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/identity/provider.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/identity/provider.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/index.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
similarity index 96%
rename from deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
index 81b421eabadb2e..f01703cfab5645 100644
--- a/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
@@ -35,7 +35,6 @@ class CAClient {
const cert = resp.signedCertificateEmbeddedSct
? resp.signedCertificateEmbeddedSct
: resp.signedCertificateDetachedSct;
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
return cert.chain.certificates;
}
catch (err) {
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/signer/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/index.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/signer.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/signer.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/signer/signer.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/signer/signer.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/types/fetch.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/types/fetch.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/types/fetch.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/types/fetch.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/util/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/index.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/oidc.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/oidc.js
similarity index 96%
rename from deps/npm/node_modules/@sigstore/sign/dist/util/oidc.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/oidc.js
index 2f5947d7b6b878..37c5b168ee12e6 100644
--- a/deps/npm/node_modules/@sigstore/sign/dist/util/oidc.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/oidc.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.extractJWTSubject = void 0;
+exports.extractJWTSubject = extractJWTSubject;
/*
Copyright 2023 The Sigstore Authors.
@@ -28,4 +28,3 @@ function extractJWTSubject(jwt) {
return payload.sub;
}
}
-exports.extractJWTSubject = extractJWTSubject;
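
`extractJWTSubject` reads the identity out of the (not yet verified) OIDC token: a JWT is three base64url segments and the claims sit in the middle one. A simplified sketch of the default path visible above — the real helper may branch on the issuer, and no signature verification happens at this layer.

```js
// Decode the middle (payload) segment of a JWT and read its `sub` claim.
// Node's base64 decoder also accepts the base64url alphabet.
function extractJWTSubject(jwt) {
  const [, payload] = jwt.split(".");
  return JSON.parse(Buffer.from(payload, "base64").toString("utf8")).sub;
}

// Toy token: header.payload.signature (signature irrelevant here)
const payload = Buffer.from(JSON.stringify({ sub: "user@example.com" }))
  .toString("base64url");
console.log(extractJWTSubject(`eyJhbGciOiJub25lIn0.${payload}.sig`));
// user@example.com
```
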
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/ua.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/ua.js
similarity index 95%
rename from deps/npm/node_modules/@sigstore/sign/dist/util/ua.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/ua.js
index c142330eb8338c..b15ff2070fb9fc 100644
--- a/deps/npm/node_modules/@sigstore/sign/dist/util/ua.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/ua.js
@@ -23,7 +23,6 @@ const os_1 = __importDefault(require("os"));
// Format User-Agent: / ()
// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
const getUserAgent = () => {
- // eslint-disable-next-line @typescript-eslint/no-var-requires
const packageVersion = require('../../package.json').version;
const nodeVersion = process.version;
const platformName = os_1.default.platform();
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/witness/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/index.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/client.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/client.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/client.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/entry.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
similarity index 87%
rename from deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
index f6c165380ba45d..69a3b477e54429 100644
--- a/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.toProposedEntry = void 0;
+exports.toProposedEntry = toProposedEntry;
/*
Copyright 2023 The Sigstore Authors.
@@ -18,21 +18,21 @@ limitations under the License.
*/
const bundle_1 = require("@sigstore/bundle");
const util_1 = require("../../util");
+const SHA256_ALGORITHM = 'sha256';
function toProposedEntry(content, publicKey,
// TODO: Remove this parameter once have completely switched to 'dsse' entries
-entryType = 'intoto') {
+entryType = 'dsse') {
switch (content.$case) {
case 'dsseEnvelope':
- // TODO: Remove this conditional once have completely switched to 'dsse' entries
- if (entryType === 'dsse') {
- return toProposedDSSEEntry(content.dsseEnvelope, publicKey);
+ // TODO: Remove this conditional once have completely ditched "intoto" entries
+ if (entryType === 'intoto') {
+ return toProposedIntotoEntry(content.dsseEnvelope, publicKey);
}
- return toProposedIntotoEntry(content.dsseEnvelope, publicKey);
+ return toProposedDSSEEntry(content.dsseEnvelope, publicKey);
case 'messageSignature':
return toProposedHashedRekordEntry(content.messageSignature, publicKey);
}
}
-exports.toProposedEntry = toProposedEntry;
// Returns a properly formatted Rekor "hashedrekord" entry for the given digest
// and signature
function toProposedHashedRekordEntry(messageSignature, publicKey) {
@@ -45,7 +45,7 @@ function toProposedHashedRekordEntry(messageSignature, publicKey) {
spec: {
data: {
hash: {
- algorithm: 'sha256',
+ algorithm: SHA256_ALGORITHM,
value: hexDigest,
},
},
@@ -78,7 +78,9 @@ function toProposedDSSEEntry(envelope, publicKey) {
// envelope and signature
function toProposedIntotoEntry(envelope, publicKey) {
// Calculate the value for the payloadHash field in the Rekor entry
- const payloadHash = util_1.crypto.hash(envelope.payload).toString('hex');
+ const payloadHash = util_1.crypto
+ .digest(SHA256_ALGORITHM, envelope.payload)
+ .toString('hex');
// Calculate the value for the hash field in the Rekor entry
const envelopeHash = calculateDSSEHash(envelope, publicKey);
// Collect values for re-creating the DSSE envelope.
@@ -107,8 +109,8 @@ function toProposedIntotoEntry(envelope, publicKey) {
spec: {
content: {
envelope: dsse,
- hash: { algorithm: 'sha256', value: envelopeHash },
- payloadHash: { algorithm: 'sha256', value: payloadHash },
+ hash: { algorithm: SHA256_ALGORITHM, value: envelopeHash },
+ payloadHash: { algorithm: SHA256_ALGORITHM, value: payloadHash },
},
},
};
@@ -132,5 +134,7 @@ function calculateDSSEHash(envelope, publicKey) {
if (envelope.signatures[0].keyid.length > 0) {
dsse.signatures[0].keyid = envelope.signatures[0].keyid;
}
- return util_1.crypto.hash(util_1.json.canonicalize(dsse)).toString('hex');
+ return util_1.crypto
+ .digest(SHA256_ALGORITHM, util_1.json.canonicalize(dsse))
+ .toString('hex');
}
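
The behavioral change in this file: DSSE envelopes are now proposed to Rekor as `dsse` entries by default, with the legacy `intoto` shape demoted to an opt-in (the conditional is inverted along with the default), and the repeated `'sha256'` literals are hoisted into a constant. A reduced, runnable sketch of the new dispatch, with the entry builders stubbed:

```js
// Stubs standing in for the real entry builders.
const toProposedDSSEEntry = () => ({ kind: "dsse" });
const toProposedIntotoEntry = () => ({ kind: "intoto" });
const toProposedHashedRekordEntry = () => ({ kind: "hashedrekord" });

// "dsse" is now the default; "intoto" must be requested explicitly.
function toProposedEntry(content, publicKey, entryType = "dsse") {
  switch (content.$case) {
    case "dsseEnvelope":
      return entryType === "intoto"
        ? toProposedIntotoEntry(content.dsseEnvelope, publicKey)
        : toProposedDSSEEntry(content.dsseEnvelope, publicKey);
    case "messageSignature":
      return toProposedHashedRekordEntry(content.messageSignature, publicKey);
  }
}

console.log(toProposedEntry({ $case: "dsseEnvelope" }).kind); // dsse
console.log(toProposedEntry({ $case: "dsseEnvelope" }, null, "intoto").kind); // intoto
```
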
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tlog/index.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tsa/client.js
similarity index 86%
rename from deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/client.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tsa/client.js
index a334deb00b7756..754de3748dbb36 100644
--- a/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/client.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tsa/client.js
@@ -19,6 +19,7 @@ limitations under the License.
const error_1 = require("../../error");
const tsa_1 = require("../../external/tsa");
const util_1 = require("../../util");
+const SHA256_ALGORITHM = 'sha256';
class TSAClient {
constructor(options) {
this.tsa = new tsa_1.TimestampAuthority({
@@ -29,8 +30,10 @@ class TSAClient {
}
async createTimestamp(signature) {
const request = {
- artifactHash: util_1.crypto.hash(signature).toString('base64'),
- hashAlgorithm: 'sha256',
+ artifactHash: util_1.crypto
+ .digest(SHA256_ALGORITHM, signature)
+ .toString('base64'),
+ hashAlgorithm: SHA256_ALGORITHM,
};
try {
return await this.tsa.createTimestamp(request);
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tsa/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/tsa/index.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/witness.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/witness.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/sign/dist/witness/witness.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/witness/witness.js
diff --git a/deps/npm/node_modules/@sigstore/sign/package.json b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/package.json
similarity index 78%
rename from deps/npm/node_modules/@sigstore/sign/package.json
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/package.json
index 4adb3d24c6fa68..fe05e8dc2d73ad 100644
--- a/deps/npm/node_modules/@sigstore/sign/package.json
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/package.json
@@ -1,6 +1,6 @@
{
"name": "@sigstore/sign",
- "version": "2.3.2",
+ "version": "3.0.0",
"description": "Sigstore signing library",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -27,20 +27,20 @@
},
"devDependencies": {
"@sigstore/jest": "^0.0.0",
- "@sigstore/mock": "^0.7.4",
- "@sigstore/rekor-types": "^2.0.0",
+ "@sigstore/mock": "^0.8.0",
+ "@sigstore/rekor-types": "^3.0.0",
"@types/make-fetch-happen": "^10.0.4",
"@types/promise-retry": "^1.1.6"
},
"dependencies": {
- "@sigstore/bundle": "^2.3.2",
- "@sigstore/core": "^1.0.0",
+ "@sigstore/bundle": "^3.0.0",
+ "@sigstore/core": "^2.0.0",
"@sigstore/protobuf-specs": "^0.3.2",
- "make-fetch-happen": "^13.0.1",
- "proc-log": "^4.2.0",
+ "make-fetch-happen": "^14.0.1",
+ "proc-log": "^5.0.0",
"promise-retry": "^2.0.1"
},
"engines": {
- "node": "^16.14.0 || >=18.0.0"
+ "node": "^18.17.0 || >=20.5.0"
}
}
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/bundle/dsse.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/dsse.js
similarity index 93%
rename from deps/npm/node_modules/@sigstore/verify/dist/bundle/dsse.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/dsse.js
index 193f875fd1014e..1033fc422aba09 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/bundle/dsse.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/dsse.js
@@ -22,7 +22,7 @@ class DSSESignatureContent {
this.env = env;
}
compareDigest(digest) {
- return core_1.crypto.bufferEqual(digest, core_1.crypto.hash(this.env.payload));
+ return core_1.crypto.bufferEqual(digest, core_1.crypto.digest('sha256', this.env.payload));
}
compareSignature(signature) {
return core_1.crypto.bufferEqual(signature, this.signature);
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/bundle/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/index.js
similarity index 97%
rename from deps/npm/node_modules/@sigstore/verify/dist/bundle/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/index.js
index 63f8d4c4998811..4287d8032b75f0 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/bundle/index.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/index.js
@@ -1,6 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.signatureContent = exports.toSignedEntity = void 0;
+exports.toSignedEntity = toSignedEntity;
+exports.signatureContent = signatureContent;
const core_1 = require("@sigstore/core");
const dsse_1 = require("./dsse");
const message_1 = require("./message");
@@ -26,7 +27,6 @@ function toSignedEntity(bundle, artifact) {
timestamps,
};
}
-exports.toSignedEntity = toSignedEntity;
function signatureContent(bundle, artifact) {
switch (bundle.content.$case) {
case 'dsseEnvelope':
@@ -35,7 +35,6 @@ function signatureContent(bundle, artifact) {
return new message_1.MessageSignatureContent(bundle.content.messageSignature, artifact);
}
}
-exports.signatureContent = signatureContent;
function key(bundle) {
switch (bundle.verificationMaterial.content.$case) {
case 'publicKey':
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/bundle/message.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/message.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/verify/dist/bundle/message.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/bundle/message.js
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/error.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/error.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/verify/dist/error.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/error.js
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/index.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/verify/dist/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/index.js
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/key/certificate.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/certificate.js
similarity index 99%
rename from deps/npm/node_modules/@sigstore/verify/dist/key/certificate.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/certificate.js
index c9140dd98d58a6..a916de0e51e712 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/key/certificate.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/certificate.js
@@ -1,6 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.CertificateChainVerifier = exports.verifyCertificateChain = void 0;
+exports.CertificateChainVerifier = void 0;
+exports.verifyCertificateChain = verifyCertificateChain;
const error_1 = require("../error");
const trust_1 = require("../trust");
function verifyCertificateChain(leaf, certificateAuthorities) {
@@ -32,7 +33,6 @@ function verifyCertificateChain(leaf, certificateAuthorities) {
cause: error,
});
}
-exports.verifyCertificateChain = verifyCertificateChain;
class CertificateChainVerifier {
constructor(opts) {
this.untrustedCert = opts.untrustedCert;
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/key/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/index.js
similarity index 97%
rename from deps/npm/node_modules/@sigstore/verify/dist/key/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/index.js
index 682a306803a991..cc894aab95a5d5 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/key/index.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/index.js
@@ -1,6 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyCertificate = exports.verifyPublicKey = void 0;
+exports.verifyPublicKey = verifyPublicKey;
+exports.verifyCertificate = verifyCertificate;
/*
Copyright 2023 The Sigstore Authors.
@@ -34,7 +35,6 @@ function verifyPublicKey(hint, timestamps, trustMaterial) {
});
return { key: key.publicKey };
}
-exports.verifyPublicKey = verifyPublicKey;
function verifyCertificate(leaf, timestamps, trustMaterial) {
// Check that leaf certificate chains to a trusted CA
const path = (0, certificate_1.verifyCertificateChain)(leaf, trustMaterial.certificateAuthorities);
@@ -51,10 +51,10 @@ function verifyCertificate(leaf, timestamps, trustMaterial) {
signer: getSigner(path[0]),
};
}
-exports.verifyCertificate = verifyCertificate;
function getSigner(cert) {
let issuer;
const issuerExtension = cert.extension(OID_FULCIO_ISSUER_V2);
+ /* istanbul ignore next */
if (issuerExtension) {
issuer = issuerExtension.valueObj.subs?.[0]?.value.toString('ascii');
}
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/key/sct.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/sct.js
similarity index 97%
rename from deps/npm/node_modules/@sigstore/verify/dist/key/sct.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/sct.js
index aea412840e1039..8eca48738096ee 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/key/sct.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/sct.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifySCTs = void 0;
+exports.verifySCTs = verifySCTs;
/*
Copyright 2023 The Sigstore Authors.
@@ -52,7 +52,7 @@ function verifySCTs(cert, issuer, ctlogs) {
// https://www.rfc-editor.org/rfc/rfc6962#section-3.2
const preCert = new core_1.ByteStream();
// Calculate hash of the issuer's public key
- const issuerId = core_1.crypto.hash(issuer.publicKey);
+ const issuerId = core_1.crypto.digest('sha256', issuer.publicKey);
preCert.appendView(issuerId);
// Re-encodes the certificate to DER after removing the SCT extension
const tbs = clone.tbsCertificate.toDER();
@@ -76,4 +76,3 @@ function verifySCTs(cert, issuer, ctlogs) {
return sct.logID;
});
}
-exports.verifySCTs = verifySCTs;
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/policy.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/policy.js
similarity index 93%
rename from deps/npm/node_modules/@sigstore/verify/dist/policy.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/policy.js
index 731e5c83328475..f5960cf047b84b 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/policy.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/policy.js
@@ -1,6 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyExtensions = exports.verifySubjectAlternativeName = void 0;
+exports.verifySubjectAlternativeName = verifySubjectAlternativeName;
+exports.verifyExtensions = verifyExtensions;
const error_1 = require("./error");
function verifySubjectAlternativeName(policyIdentity, signerIdentity) {
if (signerIdentity === undefined || !signerIdentity.match(policyIdentity)) {
@@ -10,7 +11,6 @@ function verifySubjectAlternativeName(policyIdentity, signerIdentity) {
});
}
}
-exports.verifySubjectAlternativeName = verifySubjectAlternativeName;
function verifyExtensions(policyExtensions, signerExtensions = {}) {
let key;
for (key in policyExtensions) {
@@ -22,4 +22,3 @@ function verifyExtensions(policyExtensions, signerExtensions = {}) {
}
}
}
-exports.verifyExtensions = verifyExtensions;
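
For orientation: the SAN policy here is a regular expression matched against the signer identity extracted from the certificate. A reduced sketch — the error construction is elided in the hunk, so a plain Error stands in, and the policy value below is hypothetical:

```js
// Reduced sketch: policyIdentity is a RegExp, signerIdentity the SAN
// pulled from the signing certificate. Error type simplified.
function verifySubjectAlternativeName(policyIdentity, signerIdentity) {
  if (signerIdentity === undefined || !signerIdentity.match(policyIdentity)) {
    throw new Error(`identity "${signerIdentity}" does not match policy`);
  }
}

verifySubjectAlternativeName(
  /^https:\/\/github\.com\/acme\/widget\//, // hypothetical policy
  "https://github.com/acme/widget/.github/workflows/release.yml@refs/tags/v1.0.0"
); // passes; a non-matching identity would throw
```
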
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/shared.types.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/shared.types.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/verify/dist/shared.types.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/shared.types.js
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
similarity index 99%
rename from deps/npm/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
index 04a87383f0fd17..46619b675f8863 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyCheckpoint = void 0;
+exports.verifyCheckpoint = verifyCheckpoint;
/*
Copyright 2023 The Sigstore Authors.
@@ -61,7 +61,6 @@ function verifyCheckpoint(entry, tlogs) {
});
}
}
-exports.verifyCheckpoint = verifyCheckpoint;
// Verifies the signatures in the SignedNote. For each signature, the
// corresponding transparency log is looked up by the key hint and the
// signature is verified against the public key in the transparency log.
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/timestamp/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/index.js
similarity index 96%
rename from deps/npm/node_modules/@sigstore/verify/dist/timestamp/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/index.js
index 0da554f648d25e..56e948de19338d 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/timestamp/index.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/index.js
@@ -1,6 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyTLogTimestamp = exports.verifyTSATimestamp = void 0;
+exports.verifyTSATimestamp = verifyTSATimestamp;
+exports.verifyTLogTimestamp = verifyTLogTimestamp;
const error_1 = require("../error");
const checkpoint_1 = require("./checkpoint");
const merkle_1 = require("./merkle");
@@ -14,7 +15,6 @@ function verifyTSATimestamp(timestamp, data, timestampAuthorities) {
timestamp: timestamp.signingTime,
};
}
-exports.verifyTSATimestamp = verifyTSATimestamp;
function verifyTLogTimestamp(entry, tlogAuthorities) {
let inclusionVerified = false;
if (isTLogEntryWithInclusionPromise(entry)) {
@@ -38,7 +38,6 @@ function verifyTLogTimestamp(entry, tlogAuthorities) {
timestamp: new Date(Number(entry.integratedTime) * 1000),
};
}
-exports.verifyTLogTimestamp = verifyTLogTimestamp;
function isTLogEntryWithInclusionPromise(entry) {
return entry.inclusionPromise !== undefined;
}
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/timestamp/merkle.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/merkle.js
similarity index 95%
rename from deps/npm/node_modules/@sigstore/verify/dist/timestamp/merkle.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/merkle.js
index 9895d01b7abc03..f57cae42002bd0 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/timestamp/merkle.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/merkle.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyMerkleInclusion = void 0;
+exports.verifyMerkleInclusion = verifyMerkleInclusion;
/*
Copyright 2023 The Sigstore Authors.
@@ -53,7 +53,6 @@ function verifyMerkleInclusion(entry) {
});
}
}
-exports.verifyMerkleInclusion = verifyMerkleInclusion;
// Breaks down inclusion proof for a leaf at the specified index in a tree of
// the specified size. The split point is where paths to the index leaf and
// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof
@@ -98,8 +97,8 @@ function bitLength(n) {
// Hashing logic according to RFC6962.
// https://datatracker.ietf.org/doc/html/rfc6962#section-2
function hashChildren(left, right) {
- return core_1.crypto.hash(RFC6962_NODE_HASH_PREFIX, left, right);
+ return core_1.crypto.digest('sha256', RFC6962_NODE_HASH_PREFIX, left, right);
}
function hashLeaf(leaf) {
- return core_1.crypto.hash(RFC6962_LEAF_HASH_PREFIX, leaf);
+ return core_1.crypto.digest('sha256', RFC6962_LEAF_HASH_PREFIX, leaf);
}
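
The hashing swapped to `digest('sha256', ...)` here implements RFC 6962 domain separation: leaves are hashed with a 0x00 prefix and interior nodes with 0x01, so a leaf can never be re-interpreted as an interior node. A self-contained sketch; the prefix constants correspond to the `RFC6962_LEAF_HASH_PREFIX`/`RFC6962_NODE_HASH_PREFIX` values referenced above.

```js
const crypto = require("crypto");

// RFC 6962: leaf hash = SHA-256(0x00 || leaf),
//           node hash = SHA-256(0x01 || left || right)
const LEAF_PREFIX = Buffer.from([0x00]);
const NODE_PREFIX = Buffer.from([0x01]);

const sha256 = (...parts) =>
  crypto.createHash("sha256").update(Buffer.concat(parts)).digest();

const hashLeaf = (leaf) => sha256(LEAF_PREFIX, leaf);
const hashChildren = (left, right) => sha256(NODE_PREFIX, left, right);

// Root of a two-leaf tree:
const root = hashChildren(hashLeaf(Buffer.from("a")), hashLeaf(Buffer.from("b")));
console.log(root.toString("hex"));
```
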
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/timestamp/set.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/set.js
similarity index 98%
rename from deps/npm/node_modules/@sigstore/verify/dist/timestamp/set.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/set.js
index a6357c06999cba..5d3f47bb88746a 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/timestamp/set.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/set.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyTLogSET = void 0;
+exports.verifyTLogSET = verifyTLogSET;
/*
Copyright 2023 The Sigstore Authors.
@@ -46,7 +46,6 @@ function verifyTLogSET(entry, tlogs) {
});
}
}
-exports.verifyTLogSET = verifyTLogSET;
// Returns a properly formatted "VerificationPayload" for one of the
// transaction log entires in the given bundle which can be used for SET
// verification.
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/timestamp/tsa.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/tsa.js
similarity index 98%
rename from deps/npm/node_modules/@sigstore/verify/dist/timestamp/tsa.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/tsa.js
index 7b095bc3a7f908..70388cd06c52d6 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/timestamp/tsa.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/tsa.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyRFC3161Timestamp = void 0;
+exports.verifyRFC3161Timestamp = verifyRFC3161Timestamp;
const core_1 = require("@sigstore/core");
const error_1 = require("../error");
const certificate_1 = require("../key/certificate");
@@ -35,7 +35,6 @@ function verifyRFC3161Timestamp(timestamp, data, timestampAuthorities) {
});
}
}
-exports.verifyRFC3161Timestamp = verifyRFC3161Timestamp;
function verifyTimestampForCA(timestamp, data, ca) {
const [leaf, ...cas] = ca.certChain;
const signingKey = core_1.crypto.createPublicKey(leaf.publicKey);
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/tlog/dsse.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/dsse.js
similarity index 98%
rename from deps/npm/node_modules/@sigstore/verify/dist/tlog/dsse.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/dsse.js
index bf430e61dde563..d71ed8c6e7ad9a 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/tlog/dsse.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/dsse.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyDSSETLogBody = void 0;
+exports.verifyDSSETLogBody = verifyDSSETLogBody;
/*
Copyright 2023 The Sigstore Authors.
@@ -29,7 +29,6 @@ function verifyDSSETLogBody(tlogEntry, content) {
});
}
}
-exports.verifyDSSETLogBody = verifyDSSETLogBody;
// Compare the given dsse v0.0.1 tlog entry to the given DSSE envelope.
function verifyDSSE001TLogBody(tlogEntry, content) {
// Ensure the bundle's DSSE only contains a single signature
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
similarity index 97%
rename from deps/npm/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
index d1758858f030d8..c4aa345b57ba7a 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyHashedRekordTLogBody = void 0;
+exports.verifyHashedRekordTLogBody = verifyHashedRekordTLogBody;
/*
Copyright 2023 The Sigstore Authors.
@@ -29,7 +29,6 @@ function verifyHashedRekordTLogBody(tlogEntry, content) {
});
}
}
-exports.verifyHashedRekordTLogBody = verifyHashedRekordTLogBody;
// Compare the given hashedrekord v0.0.1 tlog entry to the given message
// signature
function verifyHashedrekord001TLogBody(tlogEntry, content) {
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/tlog/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/index.js
similarity index 98%
rename from deps/npm/node_modules/@sigstore/verify/dist/tlog/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/index.js
index adfc70ed51ad05..da235360c594a8 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/tlog/index.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/index.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyTLogBody = void 0;
+exports.verifyTLogBody = verifyTLogBody;
/*
Copyright 2023 The Sigstore Authors.
@@ -45,4 +45,3 @@ function verifyTLogBody(entry, sigContent) {
});
}
}
-exports.verifyTLogBody = verifyTLogBody;
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/tlog/intoto.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/intoto.js
similarity index 98%
rename from deps/npm/node_modules/@sigstore/verify/dist/tlog/intoto.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/intoto.js
index 74c7f50d763e1d..9096ae9418cc30 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/tlog/intoto.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/tlog/intoto.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyIntotoTLogBody = void 0;
+exports.verifyIntotoTLogBody = verifyIntotoTLogBody;
/*
Copyright 2023 The Sigstore Authors.
@@ -29,7 +29,6 @@ function verifyIntotoTLogBody(tlogEntry, content) {
});
}
}
-exports.verifyIntotoTLogBody = verifyIntotoTLogBody;
// Compare the given intoto v0.0.2 tlog entry to the given DSSE envelope.
function verifyIntoto002TLogBody(tlogEntry, content) {
// Ensure the bundle's DSSE contains a single signature
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/trust/filter.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/filter.js
similarity index 93%
rename from deps/npm/node_modules/@sigstore/verify/dist/trust/filter.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/filter.js
index c09d055913c4c7..880a16cf1940ea 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/trust/filter.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/filter.js
@@ -1,12 +1,12 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0;
+exports.filterCertAuthorities = filterCertAuthorities;
+exports.filterTLogAuthorities = filterTLogAuthorities;
function filterCertAuthorities(certAuthorities, criteria) {
return certAuthorities.filter((ca) => {
return (ca.validFor.start <= criteria.start && ca.validFor.end >= criteria.end);
});
}
-exports.filterCertAuthorities = filterCertAuthorities;
// Filter the list of tlog instances to only those which match the given log
// ID and have public keys which are valid for the given integrated time.
function filterTLogAuthorities(tlogAuthorities, criteria) {
@@ -21,4 +21,3 @@ function filterTLogAuthorities(tlogAuthorities, criteria) {
criteria.targetDate <= tlog.validFor.end);
});
}
-exports.filterTLogAuthorities = filterTLogAuthorities;
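
The two filters keep only authorities that were valid when the artifact was signed: a certificate authority qualifies when its validity window fully contains the query interval, a tlog when it matches the log ID and covers the target date. A tiny illustration of the CA containment test, with data shapes assumed from the code above:

```js
// A CA passes only if its validFor window covers the whole query range.
const ca = {
  validFor: { start: new Date('2023-01-01'), end: new Date('2025-01-01') },
};
const criteria = { start: new Date('2024-03-01'), end: new Date('2024-03-02') };

const matches =
  ca.validFor.start <= criteria.start && ca.validFor.end >= criteria.end;
console.log(matches); // true: the window contains the interval
```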
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/trust/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/index.js
similarity index 95%
rename from deps/npm/node_modules/@sigstore/verify/dist/trust/index.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/index.js
index 954de558415902..bfab2eb4f9975a 100644
--- a/deps/npm/node_modules/@sigstore/verify/dist/trust/index.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/index.js
@@ -1,6 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.toTrustMaterial = exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0;
+exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0;
+exports.toTrustMaterial = toTrustMaterial;
/*
Copyright 2023 The Sigstore Authors.
@@ -34,7 +35,6 @@ function toTrustMaterial(root, keys) {
publicKey: keyFinder,
};
}
-exports.toTrustMaterial = toTrustMaterial;
function createTLogAuthority(tlogInstance) {
const keyDetails = tlogInstance.publicKey.keyDetails;
const keyType = keyDetails === protobuf_specs_1.PublicKeyDetails.PKCS1_RSA_PKCS1V5 ||
@@ -54,6 +54,7 @@ function createTLogAuthority(tlogInstance) {
};
}
function createCertAuthority(ca) {
+ /* istanbul ignore next */
return {
certChain: ca.certChain.certificates.map((cert) => {
return core_1.X509Certificate.parse(cert.rawBytes);
@@ -76,6 +77,7 @@ function keyLocator(keys) {
return {
publicKey: core_1.crypto.createPublicKey(key.rawBytes),
validFor: (date) => {
+ /* istanbul ignore next */
return ((key.validFor?.start || BEGINNING_OF_TIME) <= date &&
(key.validFor?.end || END_OF_TIME) >= date);
},
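
The `istanbul ignore` markers are coverage hints only, but the surrounding logic is worth noting: a key with no explicit `validFor` window is treated as valid for all time. A hedged sketch of that default, where the sentinel values are assumptions (the real constants live elsewhere in the module):

```js
const BEGINNING_OF_TIME = new Date(0);          // assumed sentinel
const END_OF_TIME = new Date(8640000000000000); // max JS Date, assumed

const keyValidFor = (key, date) =>
  (key.validFor?.start || BEGINNING_OF_TIME) <= date &&
  (key.validFor?.end || END_OF_TIME) >= date;

console.log(keyValidFor({}, new Date())); // true: no window means always valid
```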
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/trust/trust.types.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/trust.types.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/verify/dist/trust/trust.types.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/trust.types.js
diff --git a/deps/npm/node_modules/@sigstore/verify/dist/verifier.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/verifier.js
similarity index 100%
rename from deps/npm/node_modules/@sigstore/verify/dist/verifier.js
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/verifier.js
diff --git a/deps/npm/node_modules/@sigstore/verify/package.json b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/package.json
similarity index 86%
rename from deps/npm/node_modules/@sigstore/verify/package.json
rename to deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/package.json
index cd0c845a797e47..edf72b8bfd9680 100644
--- a/deps/npm/node_modules/@sigstore/verify/package.json
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/package.json
@@ -1,6 +1,6 @@
{
"name": "@sigstore/verify",
- "version": "1.2.1",
+ "version": "2.0.0",
"description": "Verification of Sigstore signatures",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -27,10 +27,10 @@
},
"dependencies": {
"@sigstore/protobuf-specs": "^0.3.2",
- "@sigstore/bundle": "^2.3.2",
- "@sigstore/core": "^1.1.0"
+ "@sigstore/bundle": "^3.0.0",
+ "@sigstore/core": "^2.0.0"
},
"engines": {
- "node": "^16.14.0 || >=18.0.0"
+ "node": "^18.17.0 || >=20.5.0"
}
}
diff --git a/deps/npm/node_modules/sigstore/package.json b/deps/npm/node_modules/sigstore/package.json
index fa8744bf304a3f..0f798a263657b4 100644
--- a/deps/npm/node_modules/sigstore/package.json
+++ b/deps/npm/node_modules/sigstore/package.json
@@ -1,6 +1,6 @@
{
"name": "sigstore",
- "version": "2.3.1",
+ "version": "3.0.0",
"description": "code-signing for npm packages",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -27,21 +27,21 @@
"provenance": true
},
"devDependencies": {
- "@sigstore/rekor-types": "^2.0.0",
+ "@sigstore/rekor-types": "^3.0.0",
"@sigstore/jest": "^0.0.0",
- "@sigstore/mock": "^0.7.4",
- "@tufjs/repo-mock": "^2.0.1",
+ "@sigstore/mock": "^0.8.0",
+ "@tufjs/repo-mock": "^3.0.1",
"@types/make-fetch-happen": "^10.0.4"
},
"dependencies": {
- "@sigstore/bundle": "^2.3.2",
- "@sigstore/core": "^1.0.0",
+ "@sigstore/bundle": "^3.0.0",
+ "@sigstore/core": "^2.0.0",
"@sigstore/protobuf-specs": "^0.3.2",
- "@sigstore/sign": "^2.3.2",
- "@sigstore/tuf": "^2.3.4",
- "@sigstore/verify": "^1.2.1"
+ "@sigstore/sign": "^3.0.0",
+ "@sigstore/tuf": "^3.0.0",
+ "@sigstore/verify": "^2.0.0"
},
"engines": {
- "node": "^16.14.0 || >=18.0.0"
+ "node": "^18.17.0 || >=20.5.0"
}
}
diff --git a/deps/npm/node_modules/spdx-license-ids/deprecated.json b/deps/npm/node_modules/spdx-license-ids/deprecated.json
index 278531e40c613d..4f70a14c7469da 100644
--- a/deps/npm/node_modules/spdx-license-ids/deprecated.json
+++ b/deps/npm/node_modules/spdx-license-ids/deprecated.json
@@ -19,6 +19,7 @@
"LGPL-2.0",
"LGPL-2.1",
"LGPL-3.0",
+ "Net-SNMP",
"Nunit",
"StandardML-NJ",
"bzip2-1.0.5",
diff --git a/deps/npm/node_modules/spdx-license-ids/index.json b/deps/npm/node_modules/spdx-license-ids/index.json
index c7686a710d61d1..f43d5016bd95ab 100644
--- a/deps/npm/node_modules/spdx-license-ids/index.json
+++ b/deps/npm/node_modules/spdx-license-ids/index.json
@@ -197,6 +197,8 @@
"DRL-1.0",
"DRL-1.1",
"DSDP",
+ "DocBook-Schema",
+ "DocBook-XML",
"Dotseqn",
"ECL-1.0",
"ECL-2.0",
@@ -260,6 +262,7 @@
"Glulxe",
"Graphics-Gems",
"Gutmann",
+ "HIDAPI",
"HP-1986",
"HP-1989",
"HPND",
@@ -270,6 +273,7 @@
"HPND-Kevlin-Henney",
"HPND-MIT-disclaimer",
"HPND-Markus-Kuhn",
+ "HPND-Netrek",
"HPND-Pbmplus",
"HPND-UC",
"HPND-UC-export-US",
@@ -403,7 +407,6 @@
"NTP",
"NTP-0",
"Naumen",
- "Net-SNMP",
"NetCDF",
"Newsletr",
"Nokia",
@@ -485,6 +488,7 @@
"RSCPL",
"Rdisc",
"Ruby",
+ "Ruby-pty",
"SAX-PD",
"SAX-PD-2.0",
"SCEA",
@@ -541,6 +545,7 @@
"UMich-Merit",
"UPL-1.0",
"URT-RLE",
+ "Ubuntu-font-1.0",
"Unicode-3.0",
"Unicode-DFS-2015",
"Unicode-DFS-2016",
@@ -559,6 +564,7 @@
"Wsuipa",
"X11",
"X11-distribute-modifications-variant",
+ "X11-swapped",
"XFree86-1.1",
"XSkat",
"Xdebug-1.03",
diff --git a/deps/npm/node_modules/spdx-license-ids/package.json b/deps/npm/node_modules/spdx-license-ids/package.json
index 5f5ed9554f2579..7ab34aab6b8b1d 100644
--- a/deps/npm/node_modules/spdx-license-ids/package.json
+++ b/deps/npm/node_modules/spdx-license-ids/package.json
@@ -1,6 +1,6 @@
{
"name": "spdx-license-ids",
- "version": "3.0.18",
+ "version": "3.0.20",
"description": "A list of SPDX license identifiers",
"repository": "jslicense/spdx-license-ids",
"author": "Shinnosuke Watanabe (https://github.com/shinnn)",
diff --git a/deps/npm/node_modules/tuf-js/dist/config.js b/deps/npm/node_modules/tuf-js/dist/config.js
index 6845679942fec5..c66d76af86b98c 100644
--- a/deps/npm/node_modules/tuf-js/dist/config.js
+++ b/deps/npm/node_modules/tuf-js/dist/config.js
@@ -2,7 +2,7 @@
Object.defineProperty(exports, "__esModule", { value: true });
exports.defaultConfig = void 0;
exports.defaultConfig = {
- maxRootRotations: 32,
+ maxRootRotations: 256,
maxDelegations: 32,
rootMaxLength: 512000, //bytes
timestampMaxLength: 16384, // bytes
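
Raising `maxRootRotations` from 32 to 256 lets a client that has been offline through many root rotations still walk forward to the current root in a single update; it pairs with the loop-bound fix in updater.js below. If a consumer wanted the old, tighter cap, an override along these lines should work, assuming the updater merges user config over `defaultConfig` (the deep require path is illustrative, not a documented entry point):

```js
const { defaultConfig } = require('tuf-js/dist/config');

// Hypothetical override: keep every default but restore the previous cap.
const config = { ...defaultConfig, maxRootRotations: 32 };
```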
diff --git a/deps/npm/node_modules/tuf-js/dist/updater.js b/deps/npm/node_modules/tuf-js/dist/updater.js
index 5317f7e14659ac..8d5eb4428f044a 100644
--- a/deps/npm/node_modules/tuf-js/dist/updater.js
+++ b/deps/npm/node_modules/tuf-js/dist/updater.js
@@ -144,7 +144,7 @@ class Updater {
const rootVersion = this.trustedSet.root.signed.version;
const lowerBound = rootVersion + 1;
const upperBound = lowerBound + this.config.maxRootRotations;
- for (let version = lowerBound; version <= upperBound; version++) {
+ for (let version = lowerBound; version < upperBound; version++) {
const rootUrl = url.join(this.metadataBaseUrl, `${version}.root.json`);
try {
// Client workflow 5.3.3: download new root metadata file
@@ -155,7 +155,13 @@ class Updater {
this.persistMetadata(models_1.MetadataKind.Root, bytesData);
}
catch (error) {
- break;
+ if (error instanceof error_1.DownloadHTTPError) {
+ // 404/403 means current root is newest available
+ if ([403, 404].includes(error.statusCode)) {
+ break;
+ }
+ }
+ throw error;
}
}
}
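
Two fixes land in this hunk. The loop bound tightens from `<=` to `<`, so exactly `maxRootRotations` candidate versions are tried rather than one extra; and only HTTP 403/404 (the server signalling that no newer root exists) ends the walk cleanly, while any other failure now propagates instead of being silently treated as "caught up". A self-contained paraphrase of the new control flow, where `fetchRoot` and the error class stand in for the updater's fetcher and `error_1.DownloadHTTPError`:

```js
class DownloadHTTPError extends Error {
  constructor(statusCode) {
    super(`HTTP ${statusCode}`);
    this.statusCode = statusCode;
  }
}

async function walkRootRotations(trustedVersion, maxRootRotations, fetchRoot) {
  const lowerBound = trustedVersion + 1;
  const upperBound = lowerBound + maxRootRotations; // exclusive bound
  for (let version = lowerBound; version < upperBound; version++) {
    try {
      await fetchRoot(version); // e.g. GET `${version}.root.json`
    } catch (error) {
      if (error instanceof DownloadHTTPError &&
          [403, 404].includes(error.statusCode)) {
        break; // the currently trusted root is the newest available
      }
      throw error; // network and server errors are no longer swallowed
    }
  }
}
```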
@@ -247,7 +253,8 @@ class Updater {
const version = this.trustedSet.root.signed.consistentSnapshot
? metaInfo.version
: undefined;
- const metadataUrl = url.join(this.metadataBaseUrl, version ? `${version}.${role}.json` : `${role}.json`);
+ const encodedRole = encodeURIComponent(role);
+ const metadataUrl = url.join(this.metadataBaseUrl, version ? `${version}.${encodedRole}.json` : `${encodedRole}.json`);
try {
// Client workflow 5.6.1: download targets metadata file
const bytesData = await this.fetcher.downloadBytes(metadataUrl, maxLength);
@@ -280,7 +287,6 @@ class Updater {
while (visitedRoleNames.size <= this.config.maxDelegations &&
delegationsToVisit.length > 0) {
// Pop the role name from the top of the stack.
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const { roleName, parentRoleName } = delegationsToVisit.pop();
// Skip any visited current role to prevent cycles.
// Client workflow 5.6.7.1: skip already-visited roles
@@ -330,13 +336,14 @@ class Updater {
return path.join(this.targetDir, filePath);
}
persistMetadata(metaDataName, bytesData) {
+ const encodedName = encodeURIComponent(metaDataName);
try {
- const filePath = path.join(this.dir, `${metaDataName}.json`);
+ const filePath = path.join(this.dir, `${encodedName}.json`);
log('WRITE %s', filePath);
fs.writeFileSync(filePath, bytesData.toString('utf8'));
}
catch (error) {
- throw new error_1.PersistError(`Failed to persist metadata ${metaDataName} error: ${error}`);
+ throw new error_1.PersistError(`Failed to persist metadata ${encodedName} error: ${error}`);
}
}
}
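
This hunk and the metadata-URL hunk above apply the same hardening: the role name is percent-encoded before it becomes part of a URL or a filesystem path. Delegated role names arrive in remote metadata, so an unencoded name containing `../` could otherwise escape the metadata directory. A quick demonstration (paths are illustrative):

```js
const path = require('path');

const role = '../../outside';              // attacker-chosen role name
const encoded = encodeURIComponent(role);  // '..%2F..%2Foutside'

console.log(path.join('/var/tuf/metadata', `${role}.json`));
// => '/var/outside.json' (escapes the metadata directory)

console.log(path.join('/var/tuf/metadata', `${encoded}.json`));
// => '/var/tuf/metadata/..%2F..%2Foutside.json' (stays inside)
```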
diff --git a/deps/npm/node_modules/tuf-js/dist/utils/url.js b/deps/npm/node_modules/tuf-js/dist/utils/url.js
index ce67fe2c230535..359d1f3ef385b7 100644
--- a/deps/npm/node_modules/tuf-js/dist/utils/url.js
+++ b/deps/npm/node_modules/tuf-js/dist/utils/url.js
@@ -1,11 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.join = void 0;
+exports.join = join;
const url_1 = require("url");
function join(base, path) {
return new url_1.URL(ensureTrailingSlash(base) + removeLeadingSlash(path)).toString();
}
-exports.join = join;
function ensureTrailingSlash(path) {
return path.endsWith('/') ? path : path + '/';
}
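
For reference, `join` simply guarantees exactly one slash between base and path before handing the result to the WHATWG `URL` parser. A usage sketch (again, the deep require path is illustrative):

```js
const url = require('tuf-js/dist/utils/url');

console.log(url.join('https://tuf.example.com/repo', '/2.root.json'));
// => 'https://tuf.example.com/repo/2.root.json'
```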
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js
deleted file mode 100644
index c541b93001517e..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js
+++ /dev/null
@@ -1,206 +0,0 @@
-'use strict'
-
-const net = require('net')
-const tls = require('tls')
-const { once } = require('events')
-const timers = require('timers/promises')
-const { normalizeOptions, cacheOptions } = require('./options')
-const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js')
-const Errors = require('./errors.js')
-const { Agent: AgentBase } = require('agent-base')
-
-module.exports = class Agent extends AgentBase {
- #options
- #timeouts
- #proxy
- #noProxy
- #ProxyAgent
-
- constructor (options = {}) {
- const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options)
-
- super(normalizedOptions)
-
- this.#options = normalizedOptions
- this.#timeouts = timeouts
-
- if (proxy) {
- this.#proxy = new URL(proxy)
- this.#noProxy = noProxy
- this.#ProxyAgent = getProxyAgent(proxy)
- }
- }
-
- get proxy () {
- return this.#proxy ? { url: this.#proxy } : {}
- }
-
- #getProxy (options) {
- if (!this.#proxy) {
- return
- }
-
- const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, {
- proxy: this.#proxy,
- noProxy: this.#noProxy,
- })
-
- if (!proxy) {
- return
- }
-
- const cacheKey = cacheOptions({
- ...options,
- ...this.#options,
- timeouts: this.#timeouts,
- proxy,
- })
-
- if (proxyCache.has(cacheKey)) {
- return proxyCache.get(cacheKey)
- }
-
- let ProxyAgent = this.#ProxyAgent
- if (Array.isArray(ProxyAgent)) {
- ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0]
- }
-
- const proxyAgent = new ProxyAgent(proxy, {
- ...this.#options,
- socketOptions: { family: this.#options.family },
- })
- proxyCache.set(cacheKey, proxyAgent)
-
- return proxyAgent
- }
-
- // takes an array of promises and races them against the connection timeout
- // which will throw the necessary error if it is hit. This will return the
- // result of the promise race.
- async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) {
- if (timeout) {
- const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal })
- .then(() => {
- throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`)
- }).catch((err) => {
- if (err.name === 'AbortError') {
- return
- }
- throw err
- })
- promises.push(connectionTimeout)
- }
-
- let result
- try {
- result = await Promise.race(promises)
- ac.abort()
- } catch (err) {
- ac.abort()
- throw err
- }
- return result
- }
-
- async connect (request, options) {
- // if the connection does not have its own lookup function
- // set, then use the one from our options
- options.lookup ??= this.#options.lookup
-
- let socket
- let timeout = this.#timeouts.connection
- const isSecureEndpoint = this.isSecureEndpoint(options)
-
- const proxy = this.#getProxy(options)
- if (proxy) {
- // some of the proxies will wait for the socket to fully connect before
- // returning so we have to await this while also racing it against the
- // connection timeout.
- const start = Date.now()
- socket = await this.#timeoutConnection({
- options,
- timeout,
- promises: [proxy.connect(request, options)],
- })
- // see how much time proxy.connect took and subtract it from
- // the timeout
- if (timeout) {
- timeout = timeout - (Date.now() - start)
- }
- } else {
- socket = (isSecureEndpoint ? tls : net).connect(options)
- }
-
- socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs)
- socket.setNoDelay(this.keepAlive)
-
- const abortController = new AbortController()
- const { signal } = abortController
-
- const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting']
- ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal })
- : Promise.resolve()
-
- await this.#timeoutConnection({
- options,
- timeout,
- promises: [
- connectPromise,
- once(socket, 'error', { signal }).then((err) => {
- throw err[0]
- }),
- ],
- }, abortController)
-
- if (this.#timeouts.idle) {
- socket.setTimeout(this.#timeouts.idle, () => {
- socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`))
- })
- }
-
- return socket
- }
-
- addRequest (request, options) {
- const proxy = this.#getProxy(options)
- // it would be better to call proxy.addRequest here but this causes the
- // http-proxy-agent to call its super.addRequest which causes the request
- // to be added to the agent twice. since we only support 3 agents
- // currently (see the required agents in proxy.js) we have manually
- // checked that the only public methods we need to call are called in the
- // next block. this could change in the future and presumably we would get
- // failing tests until we have properly called the necessary methods on
- // each of our proxy agents
- if (proxy?.setRequestProps) {
- proxy.setRequestProps(request, options)
- }
-
- request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close')
-
- if (this.#timeouts.response) {
- let responseTimeout
- request.once('finish', () => {
- setTimeout(() => {
- request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy))
- }, this.#timeouts.response)
- })
- request.once('response', () => {
- clearTimeout(responseTimeout)
- })
- }
-
- if (this.#timeouts.transfer) {
- let transferTimeout
- request.once('response', (res) => {
- setTimeout(() => {
- res.destroy(new Errors.TransferTimeoutError(request, this.#proxy))
- }, this.#timeouts.transfer)
- res.once('close', () => {
- clearTimeout(transferTimeout)
- })
- })
- }
-
- return super.addRequest(request, options)
- }
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js
deleted file mode 100644
index 3c6946c566d736..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js
+++ /dev/null
@@ -1,53 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-const dns = require('dns')
-
-// this is a factory so that each request can have its own opts (i.e. ttl)
-// while still sharing the cache across all requests
-const cache = new LRUCache({ max: 50 })
-
-const getOptions = ({
- family = 0,
- hints = dns.ADDRCONFIG,
- all = false,
- verbatim = undefined,
- ttl = 5 * 60 * 1000,
- lookup = dns.lookup,
-}) => ({
- // hints and lookup are returned since both are top level properties to (net|tls).connect
- hints,
- lookup: (hostname, ...args) => {
- const callback = args.pop() // callback is always last arg
- const lookupOptions = args[0] ?? {}
-
- const options = {
- family,
- hints,
- all,
- verbatim,
- ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions),
- }
-
- const key = JSON.stringify({ hostname, ...options })
-
- if (cache.has(key)) {
- const cached = cache.get(key)
- return process.nextTick(callback, null, ...cached)
- }
-
- lookup(hostname, options, (err, ...result) => {
- if (err) {
- return callback(err)
- }
-
- cache.set(key, result, { ttl })
- return callback(null, ...result)
- })
- },
-})
-
-module.exports = {
- cache,
- getOptions,
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js
deleted file mode 100644
index 70475aec8eb357..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js
+++ /dev/null
@@ -1,61 +0,0 @@
-'use strict'
-
-class InvalidProxyProtocolError extends Error {
- constructor (url) {
- super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
- this.code = 'EINVALIDPROXY'
- this.proxy = url
- }
-}
-
-class ConnectionTimeoutError extends Error {
- constructor (host) {
- super(`Timeout connecting to host \`${host}\``)
- this.code = 'ECONNECTIONTIMEOUT'
- this.host = host
- }
-}
-
-class IdleTimeoutError extends Error {
- constructor (host) {
- super(`Idle timeout reached for host \`${host}\``)
- this.code = 'EIDLETIMEOUT'
- this.host = host
- }
-}
-
-class ResponseTimeoutError extends Error {
- constructor (request, proxy) {
- let msg = 'Response timeout '
- if (proxy) {
- msg += `from proxy \`${proxy.host}\` `
- }
- msg += `connecting to host \`${request.host}\``
- super(msg)
- this.code = 'ERESPONSETIMEOUT'
- this.proxy = proxy
- this.request = request
- }
-}
-
-class TransferTimeoutError extends Error {
- constructor (request, proxy) {
- let msg = 'Transfer timeout '
- if (proxy) {
- msg += `from proxy \`${proxy.host}\` `
- }
- msg += `for \`${request.host}\``
- super(msg)
- this.code = 'ETRANSFERTIMEOUT'
- this.proxy = proxy
- this.request = request
- }
-}
-
-module.exports = {
- InvalidProxyProtocolError,
- ConnectionTimeoutError,
- IdleTimeoutError,
- ResponseTimeoutError,
- TransferTimeoutError,
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js
deleted file mode 100644
index b33d6eaef07a21..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js
+++ /dev/null
@@ -1,56 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-const { normalizeOptions, cacheOptions } = require('./options')
-const { getProxy, proxyCache } = require('./proxy.js')
-const dns = require('./dns.js')
-const Agent = require('./agents.js')
-
-const agentCache = new LRUCache({ max: 20 })
-
-const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => {
- // false has meaning so this can't be a simple truthiness check
- if (agent != null) {
- return agent
- }
-
- url = new URL(url)
-
- const proxyForUrl = getProxy(url, { proxy, noProxy })
- const normalizedOptions = {
- ...normalizeOptions(options),
- proxy: proxyForUrl,
- }
-
- const cacheKey = cacheOptions({
- ...normalizedOptions,
- secureEndpoint: url.protocol === 'https:',
- })
-
- if (agentCache.has(cacheKey)) {
- return agentCache.get(cacheKey)
- }
-
- const newAgent = new Agent(normalizedOptions)
- agentCache.set(cacheKey, newAgent)
-
- return newAgent
-}
-
-module.exports = {
- getAgent,
- Agent,
- // these are exported for backwards compatibility
- HttpAgent: Agent,
- HttpsAgent: Agent,
- cache: {
- proxy: proxyCache,
- agent: agentCache,
- dns: dns.cache,
- clear: () => {
- proxyCache.clear()
- agentCache.clear()
- dns.cache.clear()
- },
- },
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js
deleted file mode 100644
index 0bf53f725f0846..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js
+++ /dev/null
@@ -1,86 +0,0 @@
-'use strict'
-
-const dns = require('./dns')
-
-const normalizeOptions = (opts) => {
- const family = parseInt(opts.family ?? '0', 10)
- const keepAlive = opts.keepAlive ?? true
-
- const normalized = {
- // nodejs http agent options. these are all the defaults
- // but kept here to increase the likelihood of cache hits
- // https://nodejs.org/api/http.html#new-agentoptions
- keepAliveMsecs: keepAlive ? 1000 : undefined,
- maxSockets: opts.maxSockets ?? 15,
- maxTotalSockets: Infinity,
- maxFreeSockets: keepAlive ? 256 : undefined,
- scheduling: 'fifo',
- // then spread the rest of the options
- ...opts,
- // we already set these to their defaults that we want
- family,
- keepAlive,
- // our custom timeout options
- timeouts: {
- // the standard timeout option is mapped to our idle timeout
- // and then deleted below
- idle: opts.timeout ?? 0,
- connection: 0,
- response: 0,
- transfer: 0,
- ...opts.timeouts,
- },
- // get the dns options that go at the top level of socket connection
- ...dns.getOptions({ family, ...opts.dns }),
- }
-
- // remove timeout since we already used it to set our own idle timeout
- delete normalized.timeout
-
- return normalized
-}
-
-const createKey = (obj) => {
- let key = ''
- const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0])
- for (let [k, v] of sorted) {
- if (v == null) {
- v = 'null'
- } else if (v instanceof URL) {
- v = v.toString()
- } else if (typeof v === 'object') {
- v = createKey(v)
- }
- key += `${k}:${v}:`
- }
- return key
-}
-
-const cacheOptions = ({ secureEndpoint, ...options }) => createKey({
- secureEndpoint: !!secureEndpoint,
- // socket connect options
- family: options.family,
- hints: options.hints,
- localAddress: options.localAddress,
- // tls specific connect options
- strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false,
- ca: secureEndpoint ? options.ca : null,
- cert: secureEndpoint ? options.cert : null,
- key: secureEndpoint ? options.key : null,
- // http agent options
- keepAlive: options.keepAlive,
- keepAliveMsecs: options.keepAliveMsecs,
- maxSockets: options.maxSockets,
- maxTotalSockets: options.maxTotalSockets,
- maxFreeSockets: options.maxFreeSockets,
- scheduling: options.scheduling,
- // timeout options
- timeouts: options.timeouts,
- // proxy
- proxy: options.proxy,
-})
-
-module.exports = {
- normalizeOptions,
- cacheOptions,
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js
deleted file mode 100644
index 6272e929e57bcf..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js
+++ /dev/null
@@ -1,88 +0,0 @@
-'use strict'
-
-const { HttpProxyAgent } = require('http-proxy-agent')
-const { HttpsProxyAgent } = require('https-proxy-agent')
-const { SocksProxyAgent } = require('socks-proxy-agent')
-const { LRUCache } = require('lru-cache')
-const { InvalidProxyProtocolError } = require('./errors.js')
-
-const PROXY_CACHE = new LRUCache({ max: 20 })
-
-const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols)
-
-const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy'])
-
-const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => {
- key = key.toLowerCase()
- if (PROXY_ENV_KEYS.has(key)) {
- acc[key] = value
- }
- return acc
-}, {})
-
-const getProxyAgent = (url) => {
- url = new URL(url)
-
- const protocol = url.protocol.slice(0, -1)
- if (SOCKS_PROTOCOLS.has(protocol)) {
- return SocksProxyAgent
- }
- if (protocol === 'https' || protocol === 'http') {
- return [HttpProxyAgent, HttpsProxyAgent]
- }
-
- throw new InvalidProxyProtocolError(url)
-}
-
-const isNoProxy = (url, noProxy) => {
- if (typeof noProxy === 'string') {
- noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean)
- }
-
- if (!noProxy || !noProxy.length) {
- return false
- }
-
- const hostSegments = url.hostname.split('.').reverse()
-
- return noProxy.some((no) => {
- const noSegments = no.split('.').filter(Boolean).reverse()
- if (!noSegments.length) {
- return false
- }
-
- for (let i = 0; i < noSegments.length; i++) {
- if (hostSegments[i] !== noSegments[i]) {
- return false
- }
- }
-
- return true
- })
-}
-
-const getProxy = (url, { proxy, noProxy }) => {
- url = new URL(url)
-
- if (!proxy) {
- proxy = url.protocol === 'https:'
- ? PROXY_ENV.https_proxy
- : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy
- }
-
- if (!noProxy) {
- noProxy = PROXY_ENV.no_proxy
- }
-
- if (!proxy || isNoProxy(url, noProxy)) {
- return null
- }
-
- return new URL(proxy)
-}
-
-module.exports = {
- getProxyAgent,
- getProxy,
- proxyCache: PROXY_CACHE,
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/package.json
deleted file mode 100644
index ef5b4e3228cc46..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/package.json
+++ /dev/null
@@ -1,60 +0,0 @@
-{
- "name": "@npmcli/agent",
- "version": "2.2.2",
- "description": "the http/https agent used by the npm cli",
- "main": "lib/index.js",
- "scripts": {
- "gencerts": "bash scripts/create-cert.sh",
- "test": "tap",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "postlint": "template-oss-check",
- "template-oss-apply": "template-oss-apply --force",
- "lintfix": "npm run lint -- --fix",
- "snap": "tap",
- "posttest": "npm run lint"
- },
- "author": "GitHub Inc.",
- "license": "ISC",
- "bugs": {
- "url": "https://github.com/npm/agent/issues"
- },
- "homepage": "https://github.com/npm/agent#readme",
- "files": [
- "bin/",
- "lib/"
- ],
- "engines": {
- "node": "^16.14.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.21.3",
- "publish": "true"
- },
- "dependencies": {
- "agent-base": "^7.1.0",
- "http-proxy-agent": "^7.0.0",
- "https-proxy-agent": "^7.0.1",
- "lru-cache": "^10.0.1",
- "socks-proxy-agent": "^8.0.3"
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.21.3",
- "minipass-fetch": "^3.0.3",
- "nock": "^13.2.7",
- "semver": "^7.5.4",
- "simple-socks": "^3.1.0",
- "tap": "^16.3.0"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/npm/agent.git"
- },
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- }
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md
deleted file mode 100644
index 5fc208ff122e08..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-ISC License
-
-Copyright npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this
-software for any purpose with or without fee is hereby
-granted, provided that the above copyright notice and this
-permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
-WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
-EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js
deleted file mode 100644
index cb5982f79077ac..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// given an input that may or may not be an object, return an object that has
-// a copy of every defined property listed in 'copy'. if the input is not an
-// object, assign it to the property named by 'wrap'
-const getOptions = (input, { copy, wrap }) => {
- const result = {}
-
- if (input && typeof input === 'object') {
- for (const prop of copy) {
- if (input[prop] !== undefined) {
- result[prop] = input[prop]
- }
- }
- } else {
- result[wrap] = input
- }
-
- return result
-}
-
-module.exports = getOptions
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js
deleted file mode 100644
index 4d13bc037359d7..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js
+++ /dev/null
@@ -1,9 +0,0 @@
-const semver = require('semver')
-
-const satisfies = (range) => {
- return semver.satisfies(process.version, range, { includePrerelease: true })
-}
-
-module.exports = {
- satisfies,
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE
deleted file mode 100644
index 93546dfb7655bf..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-(The MIT License)
-
-Copyright (c) 2011-2017 JP Richardson
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
-(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
- merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
- ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js
deleted file mode 100644
index 1cd1e05d0c533d..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js
+++ /dev/null
@@ -1,129 +0,0 @@
-'use strict'
-const { inspect } = require('util')
-
-// adapted from node's internal/errors
-// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js
-
-// close copy of node's internal SystemError class.
-class SystemError {
- constructor (code, prefix, context) {
- // XXX context.code is undefined in all constructors used in cp/polyfill
- // that may be a bug copied from node, maybe the constructor should use
- // `code` not `errno`? nodejs/node#41104
- let message = `${prefix}: ${context.syscall} returned ` +
- `${context.code} (${context.message})`
-
- if (context.path !== undefined) {
- message += ` ${context.path}`
- }
- if (context.dest !== undefined) {
- message += ` => ${context.dest}`
- }
-
- this.code = code
- Object.defineProperties(this, {
- name: {
- value: 'SystemError',
- enumerable: false,
- writable: true,
- configurable: true,
- },
- message: {
- value: message,
- enumerable: false,
- writable: true,
- configurable: true,
- },
- info: {
- value: context,
- enumerable: true,
- configurable: true,
- writable: false,
- },
- errno: {
- get () {
- return context.errno
- },
- set (value) {
- context.errno = value
- },
- enumerable: true,
- configurable: true,
- },
- syscall: {
- get () {
- return context.syscall
- },
- set (value) {
- context.syscall = value
- },
- enumerable: true,
- configurable: true,
- },
- })
-
- if (context.path !== undefined) {
- Object.defineProperty(this, 'path', {
- get () {
- return context.path
- },
- set (value) {
- context.path = value
- },
- enumerable: true,
- configurable: true,
- })
- }
-
- if (context.dest !== undefined) {
- Object.defineProperty(this, 'dest', {
- get () {
- return context.dest
- },
- set (value) {
- context.dest = value
- },
- enumerable: true,
- configurable: true,
- })
- }
- }
-
- toString () {
- return `${this.name} [${this.code}]: ${this.message}`
- }
-
- [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) {
- return inspect(this, {
- ...ctx,
- getters: true,
- customInspect: false,
- })
- }
-}
-
-function E (code, message) {
- module.exports[code] = class NodeError extends SystemError {
- constructor (ctx) {
- super(code, message, ctx)
- }
- }
-}
-
-E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory')
-E('ERR_FS_CP_EEXIST', 'Target already exists')
-E('ERR_FS_CP_EINVAL', 'Invalid src or dest')
-E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe')
-E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory')
-E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file')
-E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self')
-E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type')
-E('ERR_FS_EISDIR', 'Path is a directory')
-
-module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error {
- constructor (name, expected, actual) {
- super()
- this.code = 'ERR_INVALID_ARG_TYPE'
- this.message = `The ${name} argument must be ${expected}. Received ${typeof actual}`
- }
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js
deleted file mode 100644
index 972ce7aa12abef..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js
+++ /dev/null
@@ -1,22 +0,0 @@
-const fs = require('fs/promises')
-const getOptions = require('../common/get-options.js')
-const node = require('../common/node.js')
-const polyfill = require('./polyfill.js')
-
-// node 16.7.0 added fs.cp
-const useNative = node.satisfies('>=16.7.0')
-
-const cp = async (src, dest, opts) => {
- const options = getOptions(opts, {
- copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'],
- })
-
- // the polyfill is tested separately from this module, no need to hack
- // process.version to try to trigger it just for coverage
- // istanbul ignore next
- return useNative
- ? fs.cp(src, dest, options)
- : polyfill(src, dest, options)
-}
-
-module.exports = cp
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js
deleted file mode 100644
index 80eb10de971918..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js
+++ /dev/null
@@ -1,428 +0,0 @@
-// this file is a modified version of the code in node 17.2.0
-// which is, in turn, a modified version of the fs-extra module on npm
-// node core changes:
-// - Use of the assert module has been replaced with core's error system.
-// - All code related to the glob dependency has been removed.
-// - Bring your own custom fs module is not currently supported.
-// - Some basic code cleanup.
-// changes here:
-// - remove all callback related code
-// - drop sync support
-// - change assertions back to non-internal methods (see options.js)
-// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows
-'use strict'
-
-const {
- ERR_FS_CP_DIR_TO_NON_DIR,
- ERR_FS_CP_EEXIST,
- ERR_FS_CP_EINVAL,
- ERR_FS_CP_FIFO_PIPE,
- ERR_FS_CP_NON_DIR_TO_DIR,
- ERR_FS_CP_SOCKET,
- ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY,
- ERR_FS_CP_UNKNOWN,
- ERR_FS_EISDIR,
- ERR_INVALID_ARG_TYPE,
-} = require('./errors.js')
-const {
- constants: {
- errno: {
- EEXIST,
- EISDIR,
- EINVAL,
- ENOTDIR,
- },
- },
-} = require('os')
-const {
- chmod,
- copyFile,
- lstat,
- mkdir,
- readdir,
- readlink,
- stat,
- symlink,
- unlink,
- utimes,
-} = require('fs/promises')
-const {
- dirname,
- isAbsolute,
- join,
- parse,
- resolve,
- sep,
- toNamespacedPath,
-} = require('path')
-const { fileURLToPath } = require('url')
-
-const defaultOptions = {
- dereference: false,
- errorOnExist: false,
- filter: undefined,
- force: true,
- preserveTimestamps: false,
- recursive: false,
-}
-
-async function cp (src, dest, opts) {
- if (opts != null && typeof opts !== 'object') {
- throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts)
- }
- return cpFn(
- toNamespacedPath(getValidatedPath(src)),
- toNamespacedPath(getValidatedPath(dest)),
- { ...defaultOptions, ...opts })
-}
-
-function getValidatedPath (fileURLOrPath) {
- const path = fileURLOrPath != null && fileURLOrPath.href
- && fileURLOrPath.origin
- ? fileURLToPath(fileURLOrPath)
- : fileURLOrPath
- return path
-}
-
-async function cpFn (src, dest, opts) {
- // Warn about using preserveTimestamps on 32-bit node
- // istanbul ignore next
- if (opts.preserveTimestamps && process.arch === 'ia32') {
- const warning = 'Using the preserveTimestamps option in 32-bit ' +
- 'node is not recommended'
- process.emitWarning(warning, 'TimestampPrecisionWarning')
- }
- const stats = await checkPaths(src, dest, opts)
- const { srcStat, destStat } = stats
- await checkParentPaths(src, srcStat, dest)
- if (opts.filter) {
- return handleFilter(checkParentDir, destStat, src, dest, opts)
- }
- return checkParentDir(destStat, src, dest, opts)
-}
-
-async function checkPaths (src, dest, opts) {
- const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts)
- if (destStat) {
- if (areIdentical(srcStat, destStat)) {
- throw new ERR_FS_CP_EINVAL({
- message: 'src and dest cannot be the same',
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- }
- if (srcStat.isDirectory() && !destStat.isDirectory()) {
- throw new ERR_FS_CP_DIR_TO_NON_DIR({
- message: `cannot overwrite directory ${src} ` +
- `with non-directory ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: EISDIR,
- })
- }
- if (!srcStat.isDirectory() && destStat.isDirectory()) {
- throw new ERR_FS_CP_NON_DIR_TO_DIR({
- message: `cannot overwrite non-directory ${src} ` +
- `with directory ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: ENOTDIR,
- })
- }
- }
-
- if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
- throw new ERR_FS_CP_EINVAL({
- message: `cannot copy ${src} to a subdirectory of self ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- }
- return { srcStat, destStat }
-}
-
-function areIdentical (srcStat, destStat) {
- return destStat.ino && destStat.dev && destStat.ino === srcStat.ino &&
- destStat.dev === srcStat.dev
-}
-
-function getStats (src, dest, opts) {
- const statFunc = opts.dereference ?
- (file) => stat(file, { bigint: true }) :
- (file) => lstat(file, { bigint: true })
- return Promise.all([
- statFunc(src),
- statFunc(dest).catch((err) => {
- // istanbul ignore next: unsure how to cover.
- if (err.code === 'ENOENT') {
- return null
- }
- // istanbul ignore next: unsure how to cover.
- throw err
- }),
- ])
-}
-
-async function checkParentDir (destStat, src, dest, opts) {
- const destParent = dirname(dest)
- const dirExists = await pathExists(destParent)
- if (dirExists) {
- return getStatsForCopy(destStat, src, dest, opts)
- }
- await mkdir(destParent, { recursive: true })
- return getStatsForCopy(destStat, src, dest, opts)
-}
-
-function pathExists (dest) {
- return stat(dest).then(
- () => true,
- // istanbul ignore next: not sure when this would occur
- (err) => (err.code === 'ENOENT' ? false : Promise.reject(err)))
-}
-
-// Recursively check if dest parent is a subdirectory of src.
-// It works for all file types including symlinks since it
-// checks the src and dest inodes. It starts from the deepest
-// parent and stops once it reaches the src parent or the root path.
-async function checkParentPaths (src, srcStat, dest) {
- const srcParent = resolve(dirname(src))
- const destParent = resolve(dirname(dest))
- if (destParent === srcParent || destParent === parse(destParent).root) {
- return
- }
- let destStat
- try {
- destStat = await stat(destParent, { bigint: true })
- } catch (err) {
- // istanbul ignore else: not sure when this would occur
- if (err.code === 'ENOENT') {
- return
- }
- // istanbul ignore next: not sure when this would occur
- throw err
- }
- if (areIdentical(srcStat, destStat)) {
- throw new ERR_FS_CP_EINVAL({
- message: `cannot copy ${src} to a subdirectory of self ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- }
- return checkParentPaths(src, srcStat, destParent)
-}
-
-const normalizePathToArray = (path) =>
- resolve(path).split(sep).filter(Boolean)
-
-// Return true if dest is a subdir of src, otherwise false.
-// It only checks the path strings.
-function isSrcSubdir (src, dest) {
- const srcArr = normalizePathToArray(src)
- const destArr = normalizePathToArray(dest)
- return srcArr.every((cur, i) => destArr[i] === cur)
-}
-
-async function handleFilter (onInclude, destStat, src, dest, opts, cb) {
- const include = await opts.filter(src, dest)
- if (include) {
- return onInclude(destStat, src, dest, opts, cb)
- }
-}
-
-function startCopy (destStat, src, dest, opts) {
- if (opts.filter) {
- return handleFilter(getStatsForCopy, destStat, src, dest, opts)
- }
- return getStatsForCopy(destStat, src, dest, opts)
-}
-
-async function getStatsForCopy (destStat, src, dest, opts) {
- const statFn = opts.dereference ? stat : lstat
- const srcStat = await statFn(src)
- // istanbul ignore else: can't portably test FIFO
- if (srcStat.isDirectory() && opts.recursive) {
- return onDir(srcStat, destStat, src, dest, opts)
- } else if (srcStat.isDirectory()) {
- throw new ERR_FS_EISDIR({
- message: `${src} is a directory (not copied)`,
- path: src,
- syscall: 'cp',
- errno: EINVAL,
- })
- } else if (srcStat.isFile() ||
- srcStat.isCharacterDevice() ||
- srcStat.isBlockDevice()) {
- return onFile(srcStat, destStat, src, dest, opts)
- } else if (srcStat.isSymbolicLink()) {
- return onLink(destStat, src, dest)
- } else if (srcStat.isSocket()) {
- throw new ERR_FS_CP_SOCKET({
- message: `cannot copy a socket file: ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- } else if (srcStat.isFIFO()) {
- throw new ERR_FS_CP_FIFO_PIPE({
- message: `cannot copy a FIFO pipe: ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- }
- // istanbul ignore next: should be unreachable
- throw new ERR_FS_CP_UNKNOWN({
- message: `cannot copy an unknown file type: ${dest}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
-}
-
-function onFile (srcStat, destStat, src, dest, opts) {
- if (!destStat) {
- return _copyFile(srcStat, src, dest, opts)
- }
- return mayCopyFile(srcStat, src, dest, opts)
-}
-
-async function mayCopyFile (srcStat, src, dest, opts) {
- if (opts.force) {
- await unlink(dest)
- return _copyFile(srcStat, src, dest, opts)
- } else if (opts.errorOnExist) {
- throw new ERR_FS_CP_EEXIST({
- message: `${dest} already exists`,
- path: dest,
- syscall: 'cp',
- errno: EEXIST,
- })
- }
-}
-
-async function _copyFile (srcStat, src, dest, opts) {
- await copyFile(src, dest)
- if (opts.preserveTimestamps) {
- return handleTimestampsAndMode(srcStat.mode, src, dest)
- }
- return setDestMode(dest, srcStat.mode)
-}
-
-async function handleTimestampsAndMode (srcMode, src, dest) {
- // Make sure the file is writable before setting the timestamp
- // otherwise open fails with EPERM when invoked with 'r+'
- // (through utimes call)
- if (fileIsNotWritable(srcMode)) {
- await makeFileWritable(dest, srcMode)
- return setDestTimestampsAndMode(srcMode, src, dest)
- }
- return setDestTimestampsAndMode(srcMode, src, dest)
-}
-
-function fileIsNotWritable (srcMode) {
- return (srcMode & 0o200) === 0
-}
-
-function makeFileWritable (dest, srcMode) {
- return setDestMode(dest, srcMode | 0o200)
-}
-
-async function setDestTimestampsAndMode (srcMode, src, dest) {
- await setDestTimestamps(src, dest)
- return setDestMode(dest, srcMode)
-}
-
-function setDestMode (dest, srcMode) {
- return chmod(dest, srcMode)
-}
-
-async function setDestTimestamps (src, dest) {
- // The initial srcStat.atime cannot be trusted
- // because it is modified by the read(2) system call
- // (See https://nodejs.org/api/fs.html#fs_stat_time_values)
- const updatedSrcStat = await stat(src)
- return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
-}
-
-function onDir (srcStat, destStat, src, dest, opts) {
- if (!destStat) {
- return mkDirAndCopy(srcStat.mode, src, dest, opts)
- }
- return copyDir(src, dest, opts)
-}
-
-async function mkDirAndCopy (srcMode, src, dest, opts) {
- await mkdir(dest)
- await copyDir(src, dest, opts)
- return setDestMode(dest, srcMode)
-}
-
-async function copyDir (src, dest, opts) {
- const dir = await readdir(src)
- for (let i = 0; i < dir.length; i++) {
- const item = dir[i]
- const srcItem = join(src, item)
- const destItem = join(dest, item)
- const { destStat } = await checkPaths(srcItem, destItem, opts)
- await startCopy(destStat, srcItem, destItem, opts)
- }
-}
-
-async function onLink (destStat, src, dest) {
- let resolvedSrc = await readlink(src)
- if (!isAbsolute(resolvedSrc)) {
- resolvedSrc = resolve(dirname(src), resolvedSrc)
- }
- if (!destStat) {
- return symlink(resolvedSrc, dest)
- }
- let resolvedDest
- try {
- resolvedDest = await readlink(dest)
- } catch (err) {
- // Dest exists and is a regular file or directory,
- // Windows may throw UNKNOWN error. If dest already exists,
- // fs throws error anyway, so no need to guard against it here.
- // istanbul ignore next: can only test on windows
- if (err.code === 'EINVAL' || err.code === 'UNKNOWN') {
- return symlink(resolvedSrc, dest)
- }
- // istanbul ignore next: should not be possible
- throw err
- }
- if (!isAbsolute(resolvedDest)) {
- resolvedDest = resolve(dirname(dest), resolvedDest)
- }
- if (isSrcSubdir(resolvedSrc, resolvedDest)) {
- throw new ERR_FS_CP_EINVAL({
- message: `cannot copy ${resolvedSrc} to a subdirectory of self ` +
- `${resolvedDest}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- }
- // Do not copy if src is a subdir of dest since unlinking
- // dest in this case would result in removing src contents
- // and therefore a broken symlink would be created.
- const srcStat = await stat(src)
- if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) {
- throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({
- message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`,
- path: dest,
- syscall: 'cp',
- errno: EINVAL,
- })
- }
- return copyLink(resolvedSrc, dest)
-}
-
-async function copyLink (resolvedSrc, dest) {
- await unlink(dest)
- return symlink(resolvedSrc, dest)
-}
-
-module.exports = cp
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js
deleted file mode 100644
index 81c746304cc428..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js
+++ /dev/null
@@ -1,13 +0,0 @@
-'use strict'
-
-const cp = require('./cp/index.js')
-const withTempDir = require('./with-temp-dir.js')
-const readdirScoped = require('./readdir-scoped.js')
-const moveFile = require('./move-file.js')
-
-module.exports = {
- cp,
- withTempDir,
- readdirScoped,
- moveFile,
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js
deleted file mode 100644
index d56e06d384659a..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const { dirname, join, resolve, relative, isAbsolute } = require('path')
-const fs = require('fs/promises')
-
-const pathExists = async path => {
- try {
- await fs.access(path)
- return true
- } catch (er) {
- return er.code !== 'ENOENT'
- }
-}
-
-const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => {
- if (!source || !destination) {
- throw new TypeError('`source` and `destination` file required')
- }
-
- options = {
- overwrite: true,
- ...options,
- }
-
- if (!options.overwrite && await pathExists(destination)) {
- throw new Error(`The destination file exists: ${destination}`)
- }
-
- await fs.mkdir(dirname(destination), { recursive: true })
-
- try {
- await fs.rename(source, destination)
- } catch (error) {
- if (error.code === 'EXDEV' || error.code === 'EPERM') {
- const sourceStat = await fs.lstat(source)
- if (sourceStat.isDirectory()) {
- const files = await fs.readdir(source)
- await Promise.all(files.map((file) =>
- moveFile(join(source, file), join(destination, file), options, false, symlinks)
- ))
- } else if (sourceStat.isSymbolicLink()) {
- symlinks.push({ source, destination })
- } else {
- await fs.copyFile(source, destination)
- }
- } else {
- throw error
- }
- }
-
- if (root) {
- await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => {
- let target = await fs.readlink(symSource)
- // junction symlinks in windows will be absolute paths, so we need to
- // make sure they point to the symlink destination
- if (isAbsolute(target)) {
- target = resolve(symDestination, relative(symSource, target))
- }
- // try to determine what the actual file is so we can create the correct
- // type of symlink in windows
- let targetStat = 'file'
- try {
- targetStat = await fs.stat(resolve(dirname(symSource), target))
- if (targetStat.isDirectory()) {
- targetStat = 'junction'
- }
- } catch {
- // targetStat remains 'file'
- }
- await fs.symlink(
- target,
- symDestination,
- targetStat
- )
- }))
- await fs.rm(source, { recursive: true, force: true })
- }
-}
-
-module.exports = moveFile
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js
deleted file mode 100644
index cd601dfbe7486b..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js
+++ /dev/null
@@ -1,20 +0,0 @@
-const { readdir } = require('fs/promises')
-const { join } = require('path')
-
-const readdirScoped = async (dir) => {
- const results = []
-
- for (const item of await readdir(dir)) {
- if (item.startsWith('@')) {
- for (const scopedItem of await readdir(join(dir, item))) {
- results.push(join(item, scopedItem))
- }
- } else {
- results.push(item)
- }
- }
-
- return results
-}
-
-module.exports = readdirScoped
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js
deleted file mode 100644
index 0738ac4f29e1be..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js
+++ /dev/null
@@ -1,39 +0,0 @@
-const { join, sep } = require('path')
-
-const getOptions = require('./common/get-options.js')
-const { mkdir, mkdtemp, rm } = require('fs/promises')
-
-// create a temp directory, ensure its permissions match its parent, then call
-// the supplied function passing it the path to the directory. clean up after
-// the function finishes, whether it throws or not
-const withTempDir = async (root, fn, opts) => {
- const options = getOptions(opts, {
- copy: ['tmpPrefix'],
- })
- // create the directory
- await mkdir(root, { recursive: true })
-
- const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || ''))
- let err
- let result
-
- try {
- result = await fn(target)
- } catch (_err) {
- err = _err
- }
-
- try {
- await rm(target, { force: true, recursive: true })
- } catch {
- // ignore errors
- }
-
- if (err) {
- throw err
- }
-
- return result
-}
-
-module.exports = withTempDir
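
A hypothetical usage of the deleted withTempDir helper, assuming @npmcli/fs is installed: the temp directory is created under the given root, handed to the callback, and removed once the callback settles, whether it resolved or threw.

const { withTempDir } = require('@npmcli/fs')

async function main () {
  const out = await withTempDir('/tmp/demo-root', async (dir) => {
    // work inside `dir`; its basename starts with the optional tmpPrefix
    return `worked in ${dir}`
  }, { tmpPrefix: 'demo-' })
  console.log(out) // the temp directory is already gone by this point
}

main()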
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/package.json b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/package.json
deleted file mode 100644
index 5261a11b78000e..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/package.json
+++ /dev/null
@@ -1,52 +0,0 @@
-{
- "name": "@npmcli/fs",
- "version": "3.1.1",
- "description": "filesystem utilities for the npm cli",
- "main": "lib/index.js",
- "files": [
- "bin/",
- "lib/"
- ],
- "scripts": {
- "snap": "tap",
- "test": "tap",
- "npmclilint": "npmcli-lint",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "lintfix": "npm run lint -- --fix",
- "posttest": "npm run lint",
- "postsnap": "npm run lintfix --",
- "postlint": "template-oss-check",
- "template-oss-apply": "template-oss-apply --force"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/fs.git"
- },
- "keywords": [
- "npm",
- "oss"
- ],
- "author": "GitHub Inc.",
- "license": "ISC",
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.22.0",
- "tap": "^16.0.1"
- },
- "dependencies": {
- "semver": "^7.3.5"
- },
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.22.0"
- },
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- }
-}
diff --git a/deps/npm/node_modules/@tufjs/models/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/LICENSE
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/LICENSE
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/LICENSE
diff --git a/deps/npm/node_modules/@tufjs/models/dist/base.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/base.js
similarity index 80%
rename from deps/npm/node_modules/@tufjs/models/dist/base.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/base.js
index 259f6799c13a0d..85e45d8fc1151e 100644
--- a/deps/npm/node_modules/@tufjs/models/dist/base.js
+++ b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/base.js
@@ -3,7 +3,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signed = exports.isMetadataKind = exports.MetadataKind = void 0;
+exports.Signed = exports.MetadataKind = void 0;
+exports.isMetadataKind = isMetadataKind;
const util_1 = __importDefault(require("util"));
const error_1 = require("./error");
const utils_1 = require("./utils");
@@ -19,7 +20,6 @@ function isMetadataKind(value) {
return (typeof value === 'string' &&
Object.values(MetadataKind).includes(value));
}
-exports.isMetadataKind = isMetadataKind;
/***
* A base class for the signed part of TUF metadata.
*
@@ -39,8 +39,8 @@ class Signed {
if (specList[0] != SPECIFICATION_VERSION[0]) {
throw new error_1.ValueError('Unsupported specVersion');
}
- this.expires = options.expires || new Date().toISOString();
- this.version = options.version || 1;
+ this.expires = options.expires;
+ this.version = options.version;
this.unrecognizedFields = options.unrecognizedFields || {};
}
equals(other) {
@@ -60,13 +60,22 @@ class Signed {
}
static commonFieldsFromJSON(data) {
const { spec_version, expires, version, ...rest } = data;
- if (utils_1.guard.isDefined(spec_version) && !(typeof spec_version === 'string')) {
+ if (!utils_1.guard.isDefined(spec_version)) {
+ throw new error_1.ValueError('spec_version is not defined');
+ }
+ else if (typeof spec_version !== 'string') {
throw new TypeError('spec_version must be a string');
}
- if (utils_1.guard.isDefined(expires) && !(typeof expires === 'string')) {
+ if (!utils_1.guard.isDefined(expires)) {
+ throw new error_1.ValueError('expires is not defined');
+ }
+ else if (!(typeof expires === 'string')) {
throw new TypeError('expires must be a string');
}
- if (utils_1.guard.isDefined(version) && !(typeof version === 'number')) {
+ if (!utils_1.guard.isDefined(version)) {
+ throw new error_1.ValueError('version is not defined');
+ }
+ else if (!(typeof version === 'number')) {
throw new TypeError('version must be a number');
}
return {
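
A condensed, standalone sketch of the stricter parsing introduced above: the 3.x models require spec_version, expires, and version, where the replaced code defaulted expires and version when absent. The function below folds the defined-check and type-check into one and is illustrative only.

function commonFieldsFromJSON (data) {
  const { spec_version: specVersion, expires, version } = data
  if (typeof specVersion !== 'string') {
    throw new Error('spec_version must be a defined string')
  }
  if (typeof expires !== 'string') {
    throw new Error('expires must be a defined string')
  }
  if (typeof version !== 'number') {
    throw new Error('version must be a defined number')
  }
  return { specVersion, expires, version }
}

try {
  commonFieldsFromJSON({ spec_version: '1.0.31' }) // no expires/version
} catch (err) {
  console.log(err.message) // 'expires must be a defined string'
}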
diff --git a/deps/npm/node_modules/@tufjs/models/dist/delegations.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/delegations.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/delegations.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/delegations.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/error.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/error.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/error.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/error.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/file.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/file.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/file.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/file.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/index.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/index.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/index.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/index.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/key.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/key.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/key.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/key.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/metadata.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/metadata.js
similarity index 91%
rename from deps/npm/node_modules/@tufjs/models/dist/metadata.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/metadata.js
index 9668b6f14fa701..389d2504e0b53d 100644
--- a/deps/npm/node_modules/@tufjs/models/dist/metadata.js
+++ b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/metadata.js
@@ -125,6 +125,9 @@ class Metadata {
if (type !== signed._type) {
throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`);
}
+ if (!utils_1.guard.isObjectArray(signatures)) {
+ throw new TypeError('signatures is not an array');
+ }
let signedObj;
switch (type) {
case base_1.MetadataKind.Root:
@@ -142,17 +145,16 @@ class Metadata {
default:
throw new TypeError('invalid metadata type');
}
- const sigMap = signaturesFromJSON(signatures);
+ const sigMap = {};
+ // Ensure that each signature is unique
+ signatures.forEach((sigData) => {
+ const sig = signature_1.Signature.fromJSON(sigData);
+ if (sigMap[sig.keyID]) {
+ throw new error_1.ValueError(`multiple signatures found for keyid: ${sig.keyID}`);
+ }
+ sigMap[sig.keyID] = sig;
+ });
return new Metadata(signedObj, sigMap, rest);
}
}
exports.Metadata = Metadata;
-function signaturesFromJSON(data) {
- if (!utils_1.guard.isObjectArray(data)) {
- throw new TypeError('signatures is not an array');
- }
- return data.reduce((acc, sigData) => {
- const signature = signature_1.Signature.fromJSON(sigData);
- return { ...acc, [signature.keyID]: signature };
- }, {});
-}
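
A standalone sketch of the duplicate-signature guard added above: a TUF metadata document must not carry two signatures for the same keyid, since the old reduce-based signaturesFromJSON let a later entry silently shadow an earlier one. Names below are illustrative.

function collectSignatures (signatures) {
  const sigMap = {}
  for (const sig of signatures) {
    if (sigMap[sig.keyid]) {
      throw new Error(`multiple signatures found for keyid: ${sig.keyid}`)
    }
    sigMap[sig.keyid] = sig
  }
  return sigMap
}

try {
  collectSignatures([
    { keyid: 'abc', sig: '00' },
    { keyid: 'abc', sig: '11' },
  ])
} catch (err) {
  console.log(err.message) // multiple signatures found for keyid: abc
}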
diff --git a/deps/npm/node_modules/@tufjs/models/dist/role.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/role.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/role.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/role.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/root.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/root.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/root.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/root.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/signature.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/signature.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/signature.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/signature.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/snapshot.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/snapshot.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/snapshot.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/snapshot.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/targets.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/targets.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/targets.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/targets.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/timestamp.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/timestamp.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/timestamp.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/timestamp.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/utils/guard.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/guard.js
similarity index 88%
rename from deps/npm/node_modules/@tufjs/models/dist/utils/guard.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/guard.js
index efe558852303ce..911e8475986bbc 100644
--- a/deps/npm/node_modules/@tufjs/models/dist/utils/guard.js
+++ b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/guard.js
@@ -1,33 +1,32 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.isObjectRecord = exports.isStringRecord = exports.isObjectArray = exports.isStringArray = exports.isObject = exports.isDefined = void 0;
+exports.isDefined = isDefined;
+exports.isObject = isObject;
+exports.isStringArray = isStringArray;
+exports.isObjectArray = isObjectArray;
+exports.isStringRecord = isStringRecord;
+exports.isObjectRecord = isObjectRecord;
function isDefined(val) {
return val !== undefined;
}
-exports.isDefined = isDefined;
function isObject(value) {
return typeof value === 'object' && value !== null;
}
-exports.isObject = isObject;
function isStringArray(value) {
return Array.isArray(value) && value.every((v) => typeof v === 'string');
}
-exports.isStringArray = isStringArray;
function isObjectArray(value) {
return Array.isArray(value) && value.every(isObject);
}
-exports.isObjectArray = isObjectArray;
function isStringRecord(value) {
return (typeof value === 'object' &&
value !== null &&
Object.keys(value).every((k) => typeof k === 'string') &&
Object.values(value).every((v) => typeof v === 'string'));
}
-exports.isStringRecord = isStringRecord;
function isObjectRecord(value) {
return (typeof value === 'object' &&
value !== null &&
Object.keys(value).every((k) => typeof k === 'string') &&
Object.values(value).every((v) => typeof v === 'object' && v !== null));
}
-exports.isObjectRecord = isObjectRecord;
diff --git a/deps/npm/node_modules/@tufjs/models/dist/utils/index.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/index.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/utils/index.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/index.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/utils/key.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/key.js
similarity index 99%
rename from deps/npm/node_modules/@tufjs/models/dist/utils/key.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/key.js
index 1f795ba1a2733f..3c3ec07f1425a7 100644
--- a/deps/npm/node_modules/@tufjs/models/dist/utils/key.js
+++ b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/key.js
@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.getPublicKey = void 0;
+exports.getPublicKey = getPublicKey;
const crypto_1 = __importDefault(require("crypto"));
const error_1 = require("../error");
const oid_1 = require("./oid");
@@ -28,7 +28,6 @@ function getPublicKey(keyInfo) {
throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`);
}
}
-exports.getPublicKey = getPublicKey;
function getRSAPublicKey(keyInfo) {
// Only support PEM-encoded RSA keys
if (!keyInfo.keyVal.startsWith(PEM_HEADER)) {
diff --git a/deps/npm/node_modules/@tufjs/models/dist/utils/oid.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/oid.js
similarity index 96%
rename from deps/npm/node_modules/@tufjs/models/dist/utils/oid.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/oid.js
index e1bb7af5e54fbf..00b29c3030d1ec 100644
--- a/deps/npm/node_modules/@tufjs/models/dist/utils/oid.js
+++ b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/oid.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.encodeOIDString = void 0;
+exports.encodeOIDString = encodeOIDString;
const ANS1_TAG_OID = 0x06;
function encodeOIDString(oid) {
const parts = oid.split('.');
@@ -14,7 +14,6 @@ function encodeOIDString(oid) {
const der = Buffer.from([first, ...rest]);
return Buffer.from([ANS1_TAG_OID, der.length, ...der]);
}
-exports.encodeOIDString = encodeOIDString;
function encodeVariableLengthInteger(value) {
const bytes = [];
let mask = 0x00;
diff --git a/deps/npm/node_modules/@tufjs/models/dist/utils/types.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/types.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/utils/types.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/types.js
diff --git a/deps/npm/node_modules/@tufjs/models/dist/utils/verify.js b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/verify.js
similarity index 100%
rename from deps/npm/node_modules/@tufjs/models/dist/utils/verify.js
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/verify.js
diff --git a/deps/npm/node_modules/@tufjs/models/package.json b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/package.json
similarity index 90%
rename from deps/npm/node_modules/@tufjs/models/package.json
rename to deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/package.json
index be581591a0f3a3..8e5132ddf1079c 100644
--- a/deps/npm/node_modules/@tufjs/models/package.json
+++ b/deps/npm/node_modules/tuf-js/node_modules/@tufjs/models/package.json
@@ -1,6 +1,6 @@
{
"name": "@tufjs/models",
- "version": "2.0.1",
+ "version": "3.0.1",
"description": "TUF metadata models",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -29,9 +29,9 @@
"homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme",
"dependencies": {
"@tufjs/canonical-json": "2.0.0",
- "minimatch": "^9.0.4"
+ "minimatch": "^9.0.5"
},
"engines": {
- "node": "^16.14.0 || >=18.0.0"
+ "node": "^18.17.0 || >=20.5.0"
}
}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/LICENSE.md b/deps/npm/node_modules/tuf-js/node_modules/cacache/LICENSE.md
deleted file mode 100644
index 8d28acf866d932..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/path.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/path.js
deleted file mode 100644
index ad5a76a4f73f26..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/path.js
+++ /dev/null
@@ -1,29 +0,0 @@
-'use strict'
-
-const contentVer = require('../../package.json')['cache-version'].content
-const hashToSegments = require('../util/hash-to-segments')
-const path = require('path')
-const ssri = require('ssri')
-
-// Current format of content file path:
-//
-// sha512-BaSE64Hex= ->
-// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
-//
-module.exports = contentPath
-
-function contentPath (cache, integrity) {
- const sri = ssri.parse(integrity, { single: true })
- // contentPath is the *strongest* algo given
- return path.join(
- contentDir(cache),
- sri.algorithm,
- ...hashToSegments(sri.hexDigest())
- )
-}
-
-module.exports.contentDir = contentDir
-
-function contentDir (cache) {
- return path.join(cache, `content-v${contentVer}`)
-}
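
A hypothetical walkthrough of the path scheme documented in the deleted content/path.js, assuming ssri is available: the hex digest of the strongest algorithm is split into 2+2+rest segments to keep directory fan-out manageable.

const ssri = require('ssri')

const sri = ssri.fromData(Buffer.from('hello'), { algorithms: ['sha512'] })
const hex = sri.hexDigest()
const segments = [hex.slice(0, 2), hex.slice(2, 4), hex.slice(4)]
// prints something like content-v2/sha512/9b/71/d224bd62f378...
console.log(['content-v2', 'sha512', ...segments].join('/'))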
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/read.js
deleted file mode 100644
index 5f6192c3cec566..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/read.js
+++ /dev/null
@@ -1,165 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const fsm = require('fs-minipass')
-const ssri = require('ssri')
-const contentPath = require('./path')
-const Pipeline = require('minipass-pipeline')
-
-module.exports = read
-
-const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
-async function read (cache, integrity, opts = {}) {
- const { size } = opts
- const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
- // get size
- const stat = size ? { size } : await fs.stat(cpath)
- return { stat, cpath, sri }
- })
-
- if (stat.size > MAX_SINGLE_READ_SIZE) {
- return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
- }
-
- const data = await fs.readFile(cpath, { encoding: null })
-
- if (stat.size !== data.length) {
- throw sizeError(stat.size, data.length)
- }
-
- if (!ssri.checkData(data, sri)) {
- throw integrityError(sri, cpath)
- }
-
- return data
-}
-
-const readPipeline = (cpath, size, sri, stream) => {
- stream.push(
- new fsm.ReadStream(cpath, {
- size,
- readSize: MAX_SINGLE_READ_SIZE,
- }),
- ssri.integrityStream({
- integrity: sri,
- size,
- })
- )
- return stream
-}
-
-module.exports.stream = readStream
-module.exports.readStream = readStream
-
-function readStream (cache, integrity, opts = {}) {
- const { size } = opts
- const stream = new Pipeline()
- // Set all this up to run on the stream and then just return the stream
- Promise.resolve().then(async () => {
- const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
- // get size
- const stat = size ? { size } : await fs.stat(cpath)
- return { stat, cpath, sri }
- })
-
- return readPipeline(cpath, stat.size, sri, stream)
- }).catch(err => stream.emit('error', err))
-
- return stream
-}
-
-module.exports.copy = copy
-
-function copy (cache, integrity, dest) {
- return withContentSri(cache, integrity, (cpath) => {
- return fs.copyFile(cpath, dest)
- })
-}
-
-module.exports.hasContent = hasContent
-
-async function hasContent (cache, integrity) {
- if (!integrity) {
- return false
- }
-
- try {
- return await withContentSri(cache, integrity, async (cpath, sri) => {
- const stat = await fs.stat(cpath)
- return { size: stat.size, sri, stat }
- })
- } catch (err) {
- if (err.code === 'ENOENT') {
- return false
- }
-
- if (err.code === 'EPERM') {
- /* istanbul ignore else */
- if (process.platform !== 'win32') {
- throw err
- } else {
- return false
- }
- }
- }
-}
-
-async function withContentSri (cache, integrity, fn) {
- const sri = ssri.parse(integrity)
- // If `integrity` has multiple entries, pick the first digest
- // with available local data.
- const algo = sri.pickAlgorithm()
- const digests = sri[algo]
-
- if (digests.length <= 1) {
- const cpath = contentPath(cache, digests[0])
- return fn(cpath, digests[0])
- } else {
- // Can't use race here because a generic error can happen before
- // a ENOENT error, and can happen before a valid result
- const results = await Promise.all(digests.map(async (meta) => {
- try {
- return await withContentSri(cache, meta, fn)
- } catch (err) {
- if (err.code === 'ENOENT') {
- return Object.assign(
- new Error('No matching content found for ' + sri.toString()),
- { code: 'ENOENT' }
- )
- }
- return err
- }
- }))
- // Return the first non error if it is found
- const result = results.find((r) => !(r instanceof Error))
- if (result) {
- return result
- }
-
- // Throw the No matching content found error
- const enoentError = results.find((r) => r.code === 'ENOENT')
- if (enoentError) {
- throw enoentError
- }
-
- // Throw generic error
- throw results.find((r) => r instanceof Error)
- }
-}
-
-function sizeError (expected, found) {
- /* eslint-disable-next-line max-len */
- const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
- err.expected = expected
- err.found = found
- err.code = 'EBADSIZE'
- return err
-}
-
-function integrityError (sri, path) {
- const err = new Error(`Integrity verification failed for ${sri} (${path})`)
- err.code = 'EINTEGRITY'
- err.sri = sri
- err.path = path
- return err
-}
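
A sketch of the multi-digest fallback in the deleted withContentSri: every candidate digest is tried in parallel and the first non-error result wins; Promise.race is deliberately avoided because an early generic failure must not beat a later valid result. The helper below is hypothetical.

async function firstUsable (candidates, attempt) {
  const results = await Promise.all(candidates.map(async (candidate) => {
    try {
      return await attempt(candidate)
    } catch (err) {
      return err // collect errors instead of short-circuiting
    }
  }))
  const ok = results.find((r) => !(r instanceof Error))
  if (ok !== undefined) {
    return ok
  }
  throw results.find((r) => r instanceof Error)
}

firstUsable(['missing', 'present'], async (name) => {
  if (name === 'missing') {
    throw new Error('ENOENT')
  }
  return `read ${name}`
}).then(console.log) // 'read present'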
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js
deleted file mode 100644
index ce58d679e4cb25..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js
+++ /dev/null
@@ -1,18 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const contentPath = require('./path')
-const { hasContent } = require('./read')
-
-module.exports = rm
-
-async function rm (cache, integrity) {
- const content = await hasContent(cache, integrity)
- // ~pretty~ sure we can't end up with a content lacking sri, but be safe
- if (content && content.sri) {
- await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
- return true
- } else {
- return false
- }
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/write.js
deleted file mode 100644
index e7187abca8788a..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/write.js
+++ /dev/null
@@ -1,206 +0,0 @@
-'use strict'
-
-const events = require('events')
-
-const contentPath = require('./path')
-const fs = require('fs/promises')
-const { moveFile } = require('@npmcli/fs')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-const Flush = require('minipass-flush')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-const fsm = require('fs-minipass')
-
-module.exports = write
-
-// Cache of move operations in process so we don't duplicate
-const moveOperations = new Map()
-
-async function write (cache, data, opts = {}) {
- const { algorithms, size, integrity } = opts
-
- if (typeof size === 'number' && data.length !== size) {
- throw sizeError(size, data.length)
- }
-
- const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
- if (integrity && !ssri.checkData(data, integrity, opts)) {
- throw checksumError(integrity, sri)
- }
-
- for (const algo in sri) {
- const tmp = await makeTmp(cache, opts)
- const hash = sri[algo].toString()
- try {
- await fs.writeFile(tmp.target, data, { flag: 'wx' })
- await moveToDestination(tmp, cache, hash, opts)
- } finally {
- if (!tmp.moved) {
- await fs.rm(tmp.target, { recursive: true, force: true })
- }
- }
- }
- return { integrity: sri, size: data.length }
-}
-
-module.exports.stream = writeStream
-
-// writes proxied to the 'inputStream' that is passed to the Promise
-// 'end' is deferred until content is handled.
-class CacacheWriteStream extends Flush {
- constructor (cache, opts) {
- super()
- this.opts = opts
- this.cache = cache
- this.inputStream = new Minipass()
- this.inputStream.on('error', er => this.emit('error', er))
- this.inputStream.on('drain', () => this.emit('drain'))
- this.handleContentP = null
- }
-
- write (chunk, encoding, cb) {
- if (!this.handleContentP) {
- this.handleContentP = handleContent(
- this.inputStream,
- this.cache,
- this.opts
- )
- this.handleContentP.catch(error => this.emit('error', error))
- }
- return this.inputStream.write(chunk, encoding, cb)
- }
-
- flush (cb) {
- this.inputStream.end(() => {
- if (!this.handleContentP) {
- const e = new Error('Cache input stream was empty')
- e.code = 'ENODATA'
- // empty streams are probably emitting end right away.
- // defer this one tick by rejecting a promise on it.
- return Promise.reject(e).catch(cb)
- }
- // eslint-disable-next-line promise/catch-or-return
- this.handleContentP.then(
- (res) => {
- res.integrity && this.emit('integrity', res.integrity)
- // eslint-disable-next-line promise/always-return
- res.size !== null && this.emit('size', res.size)
- cb()
- },
- (er) => cb(er)
- )
- })
- }
-}
-
-function writeStream (cache, opts = {}) {
- return new CacacheWriteStream(cache, opts)
-}
-
-async function handleContent (inputStream, cache, opts) {
- const tmp = await makeTmp(cache, opts)
- try {
- const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
- await moveToDestination(
- tmp,
- cache,
- res.integrity,
- opts
- )
- return res
- } finally {
- if (!tmp.moved) {
- await fs.rm(tmp.target, { recursive: true, force: true })
- }
- }
-}
-
-async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
- const outStream = new fsm.WriteStream(tmpTarget, {
- flags: 'wx',
- })
-
- if (opts.integrityEmitter) {
- // we need to create these all simultaneously since they can fire in any order
- const [integrity, size] = await Promise.all([
- events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
- events.once(opts.integrityEmitter, 'size').then(res => res[0]),
- new Pipeline(inputStream, outStream).promise(),
- ])
- return { integrity, size }
- }
-
- let integrity
- let size
- const hashStream = ssri.integrityStream({
- integrity: opts.integrity,
- algorithms: opts.algorithms,
- size: opts.size,
- })
- hashStream.on('integrity', i => {
- integrity = i
- })
- hashStream.on('size', s => {
- size = s
- })
-
- const pipeline = new Pipeline(inputStream, hashStream, outStream)
- await pipeline.promise()
- return { integrity, size }
-}
-
-async function makeTmp (cache, opts) {
- const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
- await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
- return {
- target: tmpTarget,
- moved: false,
- }
-}
-
-async function moveToDestination (tmp, cache, sri) {
- const destination = contentPath(cache, sri)
- const destDir = path.dirname(destination)
- if (moveOperations.has(destination)) {
- return moveOperations.get(destination)
- }
- moveOperations.set(
- destination,
- fs.mkdir(destDir, { recursive: true })
- .then(async () => {
- await moveFile(tmp.target, destination, { overwrite: false })
- tmp.moved = true
- return tmp.moved
- })
- .catch(err => {
- if (!err.message.startsWith('The destination file exists')) {
- throw Object.assign(err, { code: 'EEXIST' })
- }
- }).finally(() => {
- moveOperations.delete(destination)
- })
-
- )
- return moveOperations.get(destination)
-}
-
-function sizeError (expected, found) {
- /* eslint-disable-next-line max-len */
- const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
- err.expected = expected
- err.found = found
- err.code = 'EBADSIZE'
- return err
-}
-
-function checksumError (expected, found) {
- const err = new Error(`Integrity check failed:
- Wanted: ${expected}
- Found: ${found}`)
- err.code = 'EINTEGRITY'
- err.expected = expected
- err.found = found
- return err
-}
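
A minimal, hypothetical sketch of the moveOperations pattern in the deleted content/write.js: concurrent writers of identical content share a single in-flight promise keyed by destination path, so the move to the content-addressed location runs at most once at a time.

const inFlight = new Map()

function moveOnce (key, work) {
  if (inFlight.has(key)) {
    return inFlight.get(key) // join the pending operation
  }
  const p = Promise.resolve()
    .then(work)
    .finally(() => inFlight.delete(key)) // allow retries later
  inFlight.set(key, p)
  return p
}

moveOnce('content-v2/sha512/ab/cd/ef', () => 'moved').then(console.log)
moveOnce('content-v2/sha512/ab/cd/ef', () => 'skipped').then(console.log)
// both log 'moved': the second caller reuses the first promise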
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js
deleted file mode 100644
index 89c28f2f257d48..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js
+++ /dev/null
@@ -1,336 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const {
- appendFile,
- mkdir,
- readFile,
- readdir,
- rm,
- writeFile,
-} = require('fs/promises')
-const { Minipass } = require('minipass')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-
-const contentPath = require('./content/path')
-const hashToSegments = require('./util/hash-to-segments')
-const indexV = require('../package.json')['cache-version'].index
-const { moveFile } = require('@npmcli/fs')
-
-const pMap = require('p-map')
-const lsStreamConcurrency = 5
-
-module.exports.NotFoundError = class NotFoundError extends Error {
- constructor (cache, key) {
- super(`No cache entry for ${key} found in ${cache}`)
- this.code = 'ENOENT'
- this.cache = cache
- this.key = key
- }
-}
-
-module.exports.compact = compact
-
-async function compact (cache, key, matchFn, opts = {}) {
- const bucket = bucketPath(cache, key)
- const entries = await bucketEntries(bucket)
- const newEntries = []
- // we loop backwards because the bottom-most result is the newest
- // since we add new entries with appendFile
- for (let i = entries.length - 1; i >= 0; --i) {
- const entry = entries[i]
- // a null integrity could mean either a delete was appended
- // or the user has simply stored an index that does not map
- // to any content. we determine if the user wants to keep the
- // null integrity based on the validateEntry function passed in options.
- // if the integrity is null and no validateEntry is provided, we break
- // as we consider the null integrity to be a deletion of everything
- // that came before it.
- if (entry.integrity === null && !opts.validateEntry) {
- break
- }
-
- // if this entry is valid, and it is either the first entry or
- // the newEntries array doesn't already include an entry that
- // matches this one based on the provided matchFn, then we add
- // it to the beginning of our list
- if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
- (newEntries.length === 0 ||
- !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
- newEntries.unshift(entry)
- }
- }
-
- const newIndex = '\n' + newEntries.map((entry) => {
- const stringified = JSON.stringify(entry)
- const hash = hashEntry(stringified)
- return `${hash}\t${stringified}`
- }).join('\n')
-
- const setup = async () => {
- const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
- await mkdir(path.dirname(target), { recursive: true })
- return {
- target,
- moved: false,
- }
- }
-
- const teardown = async (tmp) => {
- if (!tmp.moved) {
- return rm(tmp.target, { recursive: true, force: true })
- }
- }
-
- const write = async (tmp) => {
- await writeFile(tmp.target, newIndex, { flag: 'wx' })
- await mkdir(path.dirname(bucket), { recursive: true })
- // we use @npmcli/move-file directly here because we
- // want to overwrite the existing file
- await moveFile(tmp.target, bucket)
- tmp.moved = true
- }
-
- // write the file atomically
- const tmp = await setup()
- try {
- await write(tmp)
- } finally {
- await teardown(tmp)
- }
-
- // we reverse the list we generated such that the newest
- // entries come first in order to make looping through them easier
- // the true passed to formatEntry tells it to keep null
- // integrity values, if they made it this far it's because
- // validateEntry returned true, and as such we should return it
- return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
-}
-
-module.exports.insert = insert
-
-async function insert (cache, key, integrity, opts = {}) {
- const { metadata, size, time } = opts
- const bucket = bucketPath(cache, key)
- const entry = {
- key,
- integrity: integrity && ssri.stringify(integrity),
- time: time || Date.now(),
- size,
- metadata,
- }
- try {
- await mkdir(path.dirname(bucket), { recursive: true })
- const stringified = JSON.stringify(entry)
- // NOTE - Cleverness ahoy!
- //
- // This works because it's tremendously unlikely for an entry to corrupt
- // another while still preserving the string length of the JSON in
- // question. So, we just slap the length in there and verify it on read.
- //
- // Thanks to @isaacs for the whiteboarding session that ended up with
- // this.
- await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
- } catch (err) {
- if (err.code === 'ENOENT') {
- return undefined
- }
-
- throw err
- }
- return formatEntry(cache, entry)
-}
-
-module.exports.find = find
-
-async function find (cache, key) {
- const bucket = bucketPath(cache, key)
- try {
- const entries = await bucketEntries(bucket)
- return entries.reduce((latest, next) => {
- if (next && next.key === key) {
- return formatEntry(cache, next)
- } else {
- return latest
- }
- }, null)
- } catch (err) {
- if (err.code === 'ENOENT') {
- return null
- } else {
- throw err
- }
- }
-}
-
-module.exports.delete = del
-
-function del (cache, key, opts = {}) {
- if (!opts.removeFully) {
- return insert(cache, key, null, opts)
- }
-
- const bucket = bucketPath(cache, key)
- return rm(bucket, { recursive: true, force: true })
-}
-
-module.exports.lsStream = lsStream
-
-function lsStream (cache) {
- const indexDir = bucketDir(cache)
- const stream = new Minipass({ objectMode: true })
-
- // Set all this up to run on the stream and then just return the stream
- Promise.resolve().then(async () => {
- const buckets = await readdirOrEmpty(indexDir)
- await pMap(buckets, async (bucket) => {
- const bucketPath = path.join(indexDir, bucket)
- const subbuckets = await readdirOrEmpty(bucketPath)
- await pMap(subbuckets, async (subbucket) => {
- const subbucketPath = path.join(bucketPath, subbucket)
-
- // "/cachename//./*"
- const subbucketEntries = await readdirOrEmpty(subbucketPath)
- await pMap(subbucketEntries, async (entry) => {
- const entryPath = path.join(subbucketPath, entry)
- try {
- const entries = await bucketEntries(entryPath)
- // using a Map here prevents duplicate keys from showing up
- // twice, I guess?
- const reduced = entries.reduce((acc, entry) => {
- acc.set(entry.key, entry)
- return acc
- }, new Map())
- // reduced is a map of key => entry
- for (const entry of reduced.values()) {
- const formatted = formatEntry(cache, entry)
- if (formatted) {
- stream.write(formatted)
- }
- }
- } catch (err) {
- if (err.code === 'ENOENT') {
- return undefined
- }
- throw err
- }
- },
- { concurrency: lsStreamConcurrency })
- },
- { concurrency: lsStreamConcurrency })
- },
- { concurrency: lsStreamConcurrency })
- stream.end()
- return stream
- }).catch(err => stream.emit('error', err))
-
- return stream
-}
-
-module.exports.ls = ls
-
-async function ls (cache) {
- const entries = await lsStream(cache).collect()
- return entries.reduce((acc, xs) => {
- acc[xs.key] = xs
- return acc
- }, {})
-}
-
-module.exports.bucketEntries = bucketEntries
-
-async function bucketEntries (bucket, filter) {
- const data = await readFile(bucket, 'utf8')
- return _bucketEntries(data, filter)
-}
-
-function _bucketEntries (data) {
- const entries = []
- data.split('\n').forEach((entry) => {
- if (!entry) {
- return
- }
-
- const pieces = entry.split('\t')
- if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
- // Hash is no good! Corruption or malice? Doesn't matter!
- // EJECT EJECT
- return
- }
- let obj
- try {
- obj = JSON.parse(pieces[1])
- } catch (_) {
- // eslint-ignore-next-line no-empty-block
- }
- // coverage disabled here, no need to test with an entry that parses to something falsey
- // istanbul ignore else
- if (obj) {
- entries.push(obj)
- }
- })
- return entries
-}
-
-module.exports.bucketDir = bucketDir
-
-function bucketDir (cache) {
- return path.join(cache, `index-v${indexV}`)
-}
-
-module.exports.bucketPath = bucketPath
-
-function bucketPath (cache, key) {
- const hashed = hashKey(key)
- return path.join.apply(
- path,
- [bucketDir(cache)].concat(hashToSegments(hashed))
- )
-}
-
-module.exports.hashKey = hashKey
-
-function hashKey (key) {
- return hash(key, 'sha256')
-}
-
-module.exports.hashEntry = hashEntry
-
-function hashEntry (str) {
- return hash(str, 'sha1')
-}
-
-function hash (str, digest) {
- return crypto
- .createHash(digest)
- .update(str)
- .digest('hex')
-}
-
-function formatEntry (cache, entry, keepAll) {
- // Treat null digests as deletions. They'll shadow any previous entries.
- if (!entry.integrity && !keepAll) {
- return null
- }
-
- return {
- key: entry.key,
- integrity: entry.integrity,
- path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
- size: entry.size,
- time: entry.time,
- metadata: entry.metadata,
- }
-}
-
-function readdirOrEmpty (dir) {
- return readdir(dir).catch((err) => {
- if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
- return []
- }
-
- throw err
- })
-}
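
A hypothetical round-trip of the index line format used by the deleted entry-index.js: every appended line is the sha1 of the JSON, a tab, then the JSON itself, so a torn or corrupted append fails the hash check on read and is skipped rather than trusted.

const crypto = require('crypto')

const entry = { key: 'pkg', integrity: 'sha512-abc', time: Date.now() }
const json = JSON.stringify(entry)
const hash = crypto.createHash('sha1').update(json).digest('hex')
const line = `${hash}\t${json}`

// read side: recompute the hash and compare before parsing the JSON
const [storedHash, storedJson] = line.split('\t')
const ok = crypto.createHash('sha1').update(storedJson).digest('hex') === storedHash
console.log(ok) // true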
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/get.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/get.js
deleted file mode 100644
index 80ec206c7ecaaa..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/get.js
+++ /dev/null
@@ -1,170 +0,0 @@
-'use strict'
-
-const Collect = require('minipass-collect')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const read = require('./content/read')
-
-async function getData (cache, key, opts = {}) {
- const { integrity, memoize, size } = opts
- const memoized = memo.get(cache, key, opts)
- if (memoized && memoize !== false) {
- return {
- metadata: memoized.entry.metadata,
- data: memoized.data,
- integrity: memoized.entry.integrity,
- size: memoized.entry.size,
- }
- }
-
- const entry = await index.find(cache, key, opts)
- if (!entry) {
- throw new index.NotFoundError(cache, key)
- }
- const data = await read(cache, entry.integrity, { integrity, size })
- if (memoize) {
- memo.put(cache, entry, data, opts)
- }
-
- return {
- data,
- metadata: entry.metadata,
- size: entry.size,
- integrity: entry.integrity,
- }
-}
-module.exports = getData
-
-async function getDataByDigest (cache, key, opts = {}) {
- const { integrity, memoize, size } = opts
- const memoized = memo.get.byDigest(cache, key, opts)
- if (memoized && memoize !== false) {
- return memoized
- }
-
- const res = await read(cache, key, { integrity, size })
- if (memoize) {
- memo.put.byDigest(cache, key, res, opts)
- }
- return res
-}
-module.exports.byDigest = getDataByDigest
-
-const getMemoizedStream = (memoized) => {
- const stream = new Minipass()
- stream.on('newListener', function (ev, cb) {
- ev === 'metadata' && cb(memoized.entry.metadata)
- ev === 'integrity' && cb(memoized.entry.integrity)
- ev === 'size' && cb(memoized.entry.size)
- })
- stream.end(memoized.data)
- return stream
-}
-
-function getStream (cache, key, opts = {}) {
- const { memoize, size } = opts
- const memoized = memo.get(cache, key, opts)
- if (memoized && memoize !== false) {
- return getMemoizedStream(memoized)
- }
-
- const stream = new Pipeline()
- // Set all this up to run on the stream and then just return the stream
- Promise.resolve().then(async () => {
- const entry = await index.find(cache, key)
- if (!entry) {
- throw new index.NotFoundError(cache, key)
- }
-
- stream.emit('metadata', entry.metadata)
- stream.emit('integrity', entry.integrity)
- stream.emit('size', entry.size)
- stream.on('newListener', function (ev, cb) {
- ev === 'metadata' && cb(entry.metadata)
- ev === 'integrity' && cb(entry.integrity)
- ev === 'size' && cb(entry.size)
- })
-
- const src = read.readStream(
- cache,
- entry.integrity,
- { ...opts, size: typeof size !== 'number' ? entry.size : size }
- )
-
- if (memoize) {
- const memoStream = new Collect.PassThrough()
- memoStream.on('collect', data => memo.put(cache, entry, data, opts))
- stream.unshift(memoStream)
- }
- stream.unshift(src)
- return stream
- }).catch((err) => stream.emit('error', err))
-
- return stream
-}
-
-module.exports.stream = getStream
-
-function getStreamDigest (cache, integrity, opts = {}) {
- const { memoize } = opts
- const memoized = memo.get.byDigest(cache, integrity, opts)
- if (memoized && memoize !== false) {
- const stream = new Minipass()
- stream.end(memoized)
- return stream
- } else {
- const stream = read.readStream(cache, integrity, opts)
- if (!memoize) {
- return stream
- }
-
- const memoStream = new Collect.PassThrough()
- memoStream.on('collect', data => memo.put.byDigest(
- cache,
- integrity,
- data,
- opts
- ))
- return new Pipeline(stream, memoStream)
- }
-}
-
-module.exports.stream.byDigest = getStreamDigest
-
-function info (cache, key, opts = {}) {
- const { memoize } = opts
- const memoized = memo.get(cache, key, opts)
- if (memoized && memoize !== false) {
- return Promise.resolve(memoized.entry)
- } else {
- return index.find(cache, key)
- }
-}
-module.exports.info = info
-
-async function copy (cache, key, dest, opts = {}) {
- const entry = await index.find(cache, key, opts)
- if (!entry) {
- throw new index.NotFoundError(cache, key)
- }
- await read.copy(cache, entry.integrity, dest, opts)
- return {
- metadata: entry.metadata,
- size: entry.size,
- integrity: entry.integrity,
- }
-}
-
-module.exports.copy = copy
-
-async function copyByDigest (cache, key, dest, opts = {}) {
- await read.copy(cache, key, dest, opts)
- return key
-}
-
-module.exports.copy.byDigest = copyByDigest
-
-module.exports.hasContent = read.hasContent
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/index.js
deleted file mode 100644
index c9b0da5f3a271b..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/index.js
+++ /dev/null
@@ -1,42 +0,0 @@
-'use strict'
-
-const get = require('./get.js')
-const put = require('./put.js')
-const rm = require('./rm.js')
-const verify = require('./verify.js')
-const { clearMemoized } = require('./memoization.js')
-const tmp = require('./util/tmp.js')
-const index = require('./entry-index.js')
-
-module.exports.index = {}
-module.exports.index.compact = index.compact
-module.exports.index.insert = index.insert
-
-module.exports.ls = index.ls
-module.exports.ls.stream = index.lsStream
-
-module.exports.get = get
-module.exports.get.byDigest = get.byDigest
-module.exports.get.stream = get.stream
-module.exports.get.stream.byDigest = get.stream.byDigest
-module.exports.get.copy = get.copy
-module.exports.get.copy.byDigest = get.copy.byDigest
-module.exports.get.info = get.info
-module.exports.get.hasContent = get.hasContent
-
-module.exports.put = put
-module.exports.put.stream = put.stream
-
-module.exports.rm = rm.entry
-module.exports.rm.all = rm.all
-module.exports.rm.entry = module.exports.rm
-module.exports.rm.content = rm.content
-
-module.exports.clearMemoized = clearMemoized
-
-module.exports.tmp = {}
-module.exports.tmp.mkdir = tmp.mkdir
-module.exports.tmp.withTmp = tmp.withTmp
-
-module.exports.verify = verify
-module.exports.verify.lastRun = verify.lastRun
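
Hypothetical end-to-end usage of the API wired up in the deleted index.js, assuming cacache is installed; the cache path below is made up.

const cacache = require('cacache')

async function demo () {
  const integrity = await cacache.put('/tmp/demo-cache', 'a-key', Buffer.from('payload'))
  const byKey = await cacache.get('/tmp/demo-cache', 'a-key')
  const byDigest = await cacache.get.byDigest('/tmp/demo-cache', integrity)
  console.log(byKey.data.equals(byDigest)) // true: same content either way
}

demo()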
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/memoization.js
deleted file mode 100644
index 2ecc60912e4563..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/memoization.js
+++ /dev/null
@@ -1,72 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-
-const MEMOIZED = new LRUCache({
- max: 500,
- maxSize: 50 * 1024 * 1024, // 50MB
- ttl: 3 * 60 * 1000, // 3 minutes
- sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
-})
-
-module.exports.clearMemoized = clearMemoized
-
-function clearMemoized () {
- const old = {}
- MEMOIZED.forEach((v, k) => {
- old[k] = v
- })
- MEMOIZED.clear()
- return old
-}
-
-module.exports.put = put
-
-function put (cache, entry, data, opts) {
- pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
- putDigest(cache, entry.integrity, data, opts)
-}
-
-module.exports.put.byDigest = putDigest
-
-function putDigest (cache, integrity, data, opts) {
- pickMem(opts).set(`digest:${cache}:${integrity}`, data)
-}
-
-module.exports.get = get
-
-function get (cache, key, opts) {
- return pickMem(opts).get(`key:${cache}:${key}`)
-}
-
-module.exports.get.byDigest = getDigest
-
-function getDigest (cache, integrity, opts) {
- return pickMem(opts).get(`digest:${cache}:${integrity}`)
-}
-
-class ObjProxy {
- constructor (obj) {
- this.obj = obj
- }
-
- get (key) {
- return this.obj[key]
- }
-
- set (key, val) {
- this.obj[key] = val
- }
-}
-
-function pickMem (opts) {
- if (!opts || !opts.memoize) {
- return MEMOIZED
- } else if (opts.memoize.get && opts.memoize.set) {
- return opts.memoize
- } else if (typeof opts.memoize === 'object') {
- return new ObjProxy(opts.memoize)
- } else {
- return MEMOIZED
- }
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/put.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/put.js
deleted file mode 100644
index 9fc932d5f6dec5..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/put.js
+++ /dev/null
@@ -1,80 +0,0 @@
-'use strict'
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const write = require('./content/write')
-const Flush = require('minipass-flush')
-const { PassThrough } = require('minipass-collect')
-const Pipeline = require('minipass-pipeline')
-
-const putOpts = (opts) => ({
- algorithms: ['sha512'],
- ...opts,
-})
-
-module.exports = putData
-
-async function putData (cache, key, data, opts = {}) {
- const { memoize } = opts
- opts = putOpts(opts)
- const res = await write(cache, data, opts)
- const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
- if (memoize) {
- memo.put(cache, entry, data, opts)
- }
-
- return res.integrity
-}
-
-module.exports.stream = putStream
-
-function putStream (cache, key, opts = {}) {
- const { memoize } = opts
- opts = putOpts(opts)
- let integrity
- let size
- let error
-
- let memoData
- const pipeline = new Pipeline()
- // first item in the pipeline is the memoizer, because we need
- // that to end first and get the collected data.
- if (memoize) {
- const memoizer = new PassThrough().on('collect', data => {
- memoData = data
- })
- pipeline.push(memoizer)
- }
-
- // contentStream is a write-only, not a passthrough
- // no data comes out of it.
- const contentStream = write.stream(cache, opts)
- .on('integrity', (int) => {
- integrity = int
- })
- .on('size', (s) => {
- size = s
- })
- .on('error', (err) => {
- error = err
- })
-
- pipeline.push(contentStream)
-
- // last but not least, we write the index and emit hash and size,
- // and memoize if we're doing that
- pipeline.push(new Flush({
- async flush () {
- if (!error) {
- const entry = await index.insert(cache, key, integrity, { ...opts, size })
- if (memoize && memoData) {
- memo.put(cache, entry, memoData, opts)
- }
- pipeline.emit('integrity', integrity)
- pipeline.emit('size', size)
- }
- },
- }))
-
- return pipeline
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/rm.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/rm.js
deleted file mode 100644
index a94760c7cf2430..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/rm.js
+++ /dev/null
@@ -1,31 +0,0 @@
-'use strict'
-
-const { rm } = require('fs/promises')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const memo = require('./memoization')
-const path = require('path')
-const rmContent = require('./content/rm')
-
-module.exports = entry
-module.exports.entry = entry
-
-function entry (cache, key, opts) {
- memo.clearMemoized()
- return index.delete(cache, key, opts)
-}
-
-module.exports.content = content
-
-function content (cache, integrity) {
- memo.clearMemoized()
- return rmContent(cache, integrity)
-}
-
-module.exports.all = all
-
-async function all (cache) {
- memo.clearMemoized()
- const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
- return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js
deleted file mode 100644
index 8500c1c16a429f..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-const { glob } = require('glob')
-const path = require('path')
-
-const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
-module.exports = (path, options) => glob(globify(path), options)
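
Why the deleted glob.js flips separators: glob patterns always use forward slashes, while path.join on Windows produces backslashes. An illustrative run:

const path = require('path')

const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
console.log(globify('C:\\cache\\content-v2\\**')) // 'C:/cache/content-v2/**'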
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js
deleted file mode 100644
index 445599b5038088..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-module.exports = hashToSegments
-
-function hashToSegments (hash) {
- return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js
deleted file mode 100644
index 0bf5302136ebeb..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const { withTempDir } = require('@npmcli/fs')
-const fs = require('fs/promises')
-const path = require('path')
-
-module.exports.mkdir = mktmpdir
-
-async function mktmpdir (cache, opts = {}) {
- const { tmpPrefix } = opts
- const tmpDir = path.join(cache, 'tmp')
- await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
- // do not use path.join(), it drops the trailing / if tmpPrefix is unset
- const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
- return fs.mkdtemp(target, { owner: 'inherit' })
-}
-
-module.exports.withTmp = withTmp
-
-function withTmp (cache, opts, cb) {
- if (!cb) {
- cb = opts
- opts = {}
- }
- return withTempDir(path.join(cache, 'tmp'), cb, opts)
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/verify.js
deleted file mode 100644
index d7423da1295b68..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/verify.js
+++ /dev/null
@@ -1,257 +0,0 @@
-'use strict'
-
-const {
- mkdir,
- readFile,
- rm,
- stat,
- truncate,
- writeFile,
-} = require('fs/promises')
-const pMap = require('p-map')
-const contentPath = require('./content/path')
-const fsm = require('fs-minipass')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const path = require('path')
-const ssri = require('ssri')
-
-const hasOwnProperty = (obj, key) =>
- Object.prototype.hasOwnProperty.call(obj, key)
-
-const verifyOpts = (opts) => ({
- concurrency: 20,
- log: { silly () {} },
- ...opts,
-})
-
-module.exports = verify
-
-async function verify (cache, opts) {
- opts = verifyOpts(opts)
- opts.log.silly('verify', 'verifying cache at', cache)
-
- const steps = [
- markStartTime,
- fixPerms,
- garbageCollect,
- rebuildIndex,
- cleanTmp,
- writeVerifile,
- markEndTime,
- ]
-
- const stats = {}
- for (const step of steps) {
- const label = step.name
- const start = new Date()
- const s = await step(cache, opts)
- if (s) {
- Object.keys(s).forEach((k) => {
- stats[k] = s[k]
- })
- }
- const end = new Date()
- if (!stats.runTime) {
- stats.runTime = {}
- }
- stats.runTime[label] = end - start
- }
- stats.runTime.total = stats.endTime - stats.startTime
- opts.log.silly(
- 'verify',
- 'verification finished for',
- cache,
- 'in',
- `${stats.runTime.total}ms`
- )
- return stats
-}
-
-async function markStartTime () {
- return { startTime: new Date() }
-}
-
-async function markEndTime () {
- return { endTime: new Date() }
-}
-
-async function fixPerms (cache, opts) {
- opts.log.silly('verify', 'fixing cache permissions')
- await mkdir(cache, { recursive: true })
- return null
-}
-
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rm it.
-//
-async function garbageCollect (cache, opts) {
- opts.log.silly('verify', 'garbage collecting content')
- const indexStream = index.lsStream(cache)
- const liveContent = new Set()
- indexStream.on('data', (entry) => {
- if (opts.filter && !opts.filter(entry)) {
- return
- }
-
- // integrity is stringified, re-parse it so we can get each hash
- const integrity = ssri.parse(entry.integrity)
- for (const algo in integrity) {
- liveContent.add(integrity[algo].toString())
- }
- })
- await new Promise((resolve, reject) => {
- indexStream.on('end', resolve).on('error', reject)
- })
- const contentDir = contentPath.contentDir(cache)
- const files = await glob(path.join(contentDir, '**'), {
- follow: false,
- nodir: true,
- nosort: true,
- })
- const stats = {
- verifiedContent: 0,
- reclaimedCount: 0,
- reclaimedSize: 0,
- badContentCount: 0,
- keptSize: 0,
- }
- await pMap(
- files,
- async (f) => {
- const split = f.split(/[/\\]/)
- const digest = split.slice(split.length - 3).join('')
- const algo = split[split.length - 4]
- const integrity = ssri.fromHex(digest, algo)
- if (liveContent.has(integrity.toString())) {
- const info = await verifyContent(f, integrity)
- if (!info.valid) {
- stats.reclaimedCount++
- stats.badContentCount++
- stats.reclaimedSize += info.size
- } else {
- stats.verifiedContent++
- stats.keptSize += info.size
- }
- } else {
- // No entries refer to this content. We can delete.
- stats.reclaimedCount++
- const s = await stat(f)
- await rm(f, { recursive: true, force: true })
- stats.reclaimedSize += s.size
- }
- return stats
- },
- { concurrency: opts.concurrency }
- )
- return stats
-}
-
-async function verifyContent (filepath, sri) {
- const contentInfo = {}
- try {
- const { size } = await stat(filepath)
- contentInfo.size = size
- contentInfo.valid = true
- await ssri.checkStream(new fsm.ReadStream(filepath), sri)
- } catch (err) {
- if (err.code === 'ENOENT') {
- return { size: 0, valid: false }
- }
- if (err.code !== 'EINTEGRITY') {
- throw err
- }
-
- await rm(filepath, { recursive: true, force: true })
- contentInfo.valid = false
- }
- return contentInfo
-}
-
-async function rebuildIndex (cache, opts) {
- opts.log.silly('verify', 'rebuilding index')
- const entries = await index.ls(cache)
- const stats = {
- missingContent: 0,
- rejectedEntries: 0,
- totalEntries: 0,
- }
- const buckets = {}
- for (const k in entries) {
- /* istanbul ignore else */
- if (hasOwnProperty(entries, k)) {
- const hashed = index.hashKey(k)
- const entry = entries[k]
- const excluded = opts.filter && !opts.filter(entry)
- excluded && stats.rejectedEntries++
- if (buckets[hashed] && !excluded) {
- buckets[hashed].push(entry)
- } else if (buckets[hashed] && excluded) {
- // skip
- } else if (excluded) {
- buckets[hashed] = []
- buckets[hashed]._path = index.bucketPath(cache, k)
- } else {
- buckets[hashed] = [entry]
- buckets[hashed]._path = index.bucketPath(cache, k)
- }
- }
- }
- await pMap(
- Object.keys(buckets),
- (key) => {
- return rebuildBucket(cache, buckets[key], stats, opts)
- },
- { concurrency: opts.concurrency }
- )
- return stats
-}
-
-async function rebuildBucket (cache, bucket, stats) {
- await truncate(bucket._path)
- // This needs to be serialized because cacache explicitly
- // lets very racy bucket conflicts clobber each other.
- for (const entry of bucket) {
- const content = contentPath(cache, entry.integrity)
- try {
- await stat(content)
- await index.insert(cache, entry.key, entry.integrity, {
- metadata: entry.metadata,
- size: entry.size,
- time: entry.time,
- })
- stats.totalEntries++
- } catch (err) {
- if (err.code === 'ENOENT') {
- stats.rejectedEntries++
- stats.missingContent++
- } else {
- throw err
- }
- }
- }
-}
-
-function cleanTmp (cache, opts) {
- opts.log.silly('verify', 'cleaning tmp directory')
- return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
-}
-
-async function writeVerifile (cache, opts) {
- const verifile = path.join(cache, '_lastverified')
- opts.log.silly('verify', 'writing verifile to ' + verifile)
- return writeFile(verifile, `${Date.now()}`)
-}
-
-module.exports.lastRun = lastRun
-
-async function lastRun (cache) {
- const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
- return new Date(+data)
-}
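
The verify pipeline deleted above is reachable through cacache's public API. The following is a minimal sketch of driving it end to end; the cache directory and key are hypothetical, while put(), verify(), and verify.lastRun() are the real cacache surface:

    const cacache = require('cacache')

    async function main () {
      const cachePath = '/tmp/cacache-demo' // hypothetical cache directory
      // seed one entry so the mark phase has something to keep live
      await cacache.put(cachePath, 'demo-key', Buffer.from('hello'))
      const stats = await cacache.verify(cachePath)
      // stats merges the step results above: fixPerms, garbageCollect,
      // rebuildIndex, cleanTmp, writeVerifile
      console.log(stats.verifiedContent, stats.reclaimedCount, stats.totalEntries)
      console.log('last verified:', await cacache.verify.lastRun(cachePath))
    }

    main().catch(console.error)
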
diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/package.json b/deps/npm/node_modules/tuf-js/node_modules/cacache/package.json
deleted file mode 100644
index 6e6219158ed759..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/cacache/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
- "name": "cacache",
- "version": "18.0.4",
- "cache-version": {
- "content": "2",
- "index": "5"
- },
- "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
- "main": "lib/index.js",
- "files": [
- "bin/",
- "lib/"
- ],
- "scripts": {
- "test": "tap",
- "snap": "tap",
- "coverage": "tap",
- "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "npmclilint": "npmcli-lint",
- "lintfix": "npm run lint -- --fix",
- "postsnap": "npm run lintfix --",
- "postlint": "template-oss-check",
- "posttest": "npm run lint",
- "template-oss-apply": "template-oss-apply --force"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/cacache.git"
- },
- "keywords": [
- "cache",
- "caching",
- "content-addressable",
- "sri",
- "sri hash",
- "subresource integrity",
- "cache",
- "storage",
- "store",
- "file store",
- "filesystem",
- "disk cache",
- "disk storage"
- ],
- "license": "ISC",
- "dependencies": {
- "@npmcli/fs": "^3.1.0",
- "fs-minipass": "^3.0.0",
- "glob": "^10.2.2",
- "lru-cache": "^10.0.1",
- "minipass": "^7.0.3",
- "minipass-collect": "^2.0.1",
- "minipass-flush": "^1.0.5",
- "minipass-pipeline": "^1.2.4",
- "p-map": "^4.0.0",
- "ssri": "^10.0.0",
- "tar": "^6.1.11",
- "unique-filename": "^3.0.0"
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.22.0",
- "tap": "^16.0.0"
- },
- "engines": {
- "node": "^16.14.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "windowsCI": false,
- "version": "4.22.0",
- "publish": "true"
- },
- "author": "GitHub Inc.",
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- }
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE
deleted file mode 100644
index 1808eb2844231c..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright 2017-2022 (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js
deleted file mode 100644
index bfcfacbcc95e18..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js
+++ /dev/null
@@ -1,471 +0,0 @@
-const { Request, Response } = require('minipass-fetch')
-const { Minipass } = require('minipass')
-const MinipassFlush = require('minipass-flush')
-const cacache = require('cacache')
-const url = require('url')
-
-const CachingMinipassPipeline = require('../pipeline.js')
-const CachePolicy = require('./policy.js')
-const cacheKey = require('./key.js')
-const remote = require('../remote.js')
-
-const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
-
-// allow list for request headers that will be written to the cache index
-// note: we will also store any request headers
-// that are named in a response's vary header
-const KEEP_REQUEST_HEADERS = [
- 'accept-charset',
- 'accept-encoding',
- 'accept-language',
- 'accept',
- 'cache-control',
-]
-
-// allow list for response headers that will be written to the cache index
-// note: we must not store the real response's age header, or when we load
-// a cache policy based on the metadata it will think the cached response
-// is always stale
-const KEEP_RESPONSE_HEADERS = [
- 'cache-control',
- 'content-encoding',
- 'content-language',
- 'content-type',
- 'date',
- 'etag',
- 'expires',
- 'last-modified',
- 'link',
- 'location',
- 'pragma',
- 'vary',
-]
-
-// return an object containing all metadata to be written to the index
-const getMetadata = (request, response, options) => {
- const metadata = {
- time: Date.now(),
- url: request.url,
- reqHeaders: {},
- resHeaders: {},
-
- // options on which we must match the request and vary the response
- options: {
- compress: options.compress != null ? options.compress : request.compress,
- },
- }
-
- // only save the status if it's not a 200 or 304
- if (response.status !== 200 && response.status !== 304) {
- metadata.status = response.status
- }
-
- for (const name of KEEP_REQUEST_HEADERS) {
- if (request.headers.has(name)) {
- metadata.reqHeaders[name] = request.headers.get(name)
- }
- }
-
- // if the request's host header differs from the host in the url
- // we need to keep it, otherwise it's just noise and we ignore it
- const host = request.headers.get('host')
- const parsedUrl = new url.URL(request.url)
- if (host && parsedUrl.host !== host) {
- metadata.reqHeaders.host = host
- }
-
- // if the response has a vary header, make sure
- // we store the relevant request headers too
- if (response.headers.has('vary')) {
- const vary = response.headers.get('vary')
- // a vary of "*" means every header causes a different response.
- // in that scenario, we do not include any additional headers
- // as the freshness check will always fail anyway and we don't
- // want to bloat the cache indexes
- if (vary !== '*') {
- // copy any other request headers that will vary the response
- const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
- for (const name of varyHeaders) {
- if (request.headers.has(name)) {
- metadata.reqHeaders[name] = request.headers.get(name)
- }
- }
- }
- }
-
- for (const name of KEEP_RESPONSE_HEADERS) {
- if (response.headers.has(name)) {
- metadata.resHeaders[name] = response.headers.get(name)
- }
- }
-
- for (const name of options.cacheAdditionalHeaders) {
- if (response.headers.has(name)) {
- metadata.resHeaders[name] = response.headers.get(name)
- }
- }
-
- return metadata
-}
-
-// symbols used to hide objects that may be lazily evaluated in a getter
-const _request = Symbol('request')
-const _response = Symbol('response')
-const _policy = Symbol('policy')
-
-class CacheEntry {
- constructor ({ entry, request, response, options }) {
- if (entry) {
- this.key = entry.key
- this.entry = entry
- // previous versions of this module didn't write an explicit timestamp in
- // the metadata, so fall back to the entry's timestamp. we can't use the
- // entry timestamp to determine staleness because cacache will update it
- // when it verifies its data
- this.entry.metadata.time = this.entry.metadata.time || this.entry.time
- } else {
- this.key = cacheKey(request)
- }
-
- this.options = options
-
- // these properties are behind getters that lazily evaluate
- this[_request] = request
- this[_response] = response
- this[_policy] = null
- }
-
- // returns a CacheEntry instance that satisfies the given request
- // or undefined if no existing entry satisfies
- static async find (request, options) {
- try {
- // compacts the index and returns an array of unique entries
- var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
- const entryA = new CacheEntry({ entry: A, options })
- const entryB = new CacheEntry({ entry: B, options })
- return entryA.policy.satisfies(entryB.request)
- }, {
- validateEntry: (entry) => {
- // clean out entries with a buggy content-encoding value
- if (entry.metadata &&
- entry.metadata.resHeaders &&
- entry.metadata.resHeaders['content-encoding'] === null) {
- return false
- }
-
- // if an integrity is null, it needs to have a status specified
- if (entry.integrity === null) {
- return !!(entry.metadata && entry.metadata.status)
- }
-
- return true
- },
- })
- } catch (err) {
- // if the compact request fails, ignore the error and return
- return
- }
-
- // a cache mode of 'reload' means to behave as though we have no cache
- // on the way to the network. return undefined to allow cacheFetch to
- // create a brand new request no matter what.
- if (options.cache === 'reload') {
- return
- }
-
- // find the specific entry that satisfies the request
- let match
- for (const entry of matches) {
- const _entry = new CacheEntry({
- entry,
- options,
- })
-
- if (_entry.policy.satisfies(request)) {
- match = _entry
- break
- }
- }
-
- return match
- }
-
- // if the user made a PUT/POST/PATCH then we invalidate our
- // cache for the same url by deleting the index entirely
- static async invalidate (request, options) {
- const key = cacheKey(request)
- try {
- await cacache.rm.entry(options.cachePath, key, { removeFully: true })
- } catch (err) {
- // ignore errors
- }
- }
-
- get request () {
- if (!this[_request]) {
- this[_request] = new Request(this.entry.metadata.url, {
- method: 'GET',
- headers: this.entry.metadata.reqHeaders,
- ...this.entry.metadata.options,
- })
- }
-
- return this[_request]
- }
-
- get response () {
- if (!this[_response]) {
- this[_response] = new Response(null, {
- url: this.entry.metadata.url,
- counter: this.options.counter,
- status: this.entry.metadata.status || 200,
- headers: {
- ...this.entry.metadata.resHeaders,
- 'content-length': this.entry.size,
- },
- })
- }
-
- return this[_response]
- }
-
- get policy () {
- if (!this[_policy]) {
- this[_policy] = new CachePolicy({
- entry: this.entry,
- request: this.request,
- response: this.response,
- options: this.options,
- })
- }
-
- return this[_policy]
- }
-
- // wraps the response in a pipeline that stores the data
- // in the cache while the user consumes it
- async store (status) {
- // if we got a status other than 200, 301, or 308,
- // or the CachePolicy forbid storage, append the
- // cache status header and return it untouched
- if (
- this.request.method !== 'GET' ||
- ![200, 301, 308].includes(this.response.status) ||
- !this.policy.storable()
- ) {
- this.response.headers.set('x-local-cache-status', 'skip')
- return this.response
- }
-
- const size = this.response.headers.get('content-length')
- const cacheOpts = {
- algorithms: this.options.algorithms,
- metadata: getMetadata(this.request, this.response, this.options),
- size,
- integrity: this.options.integrity,
- integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
- }
-
- let body = null
- // we only set a body if the status is a 200, redirects are
- // stored as metadata only
- if (this.response.status === 200) {
- let cacheWriteResolve, cacheWriteReject
- const cacheWritePromise = new Promise((resolve, reject) => {
- cacheWriteResolve = resolve
- cacheWriteReject = reject
- }).catch((err) => {
- body.emit('error', err)
- })
-
- body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
- flush () {
- return cacheWritePromise
- },
- }))
- // this is always true since if we aren't reusing the one from the remote fetch, we
- // are using the one from cacache
- body.hasIntegrityEmitter = true
-
- const onResume = () => {
- const tee = new Minipass()
- const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
- // re-emit the integrity and size events on our new response body so they can be reused
- cacheStream.on('integrity', i => body.emit('integrity', i))
- cacheStream.on('size', s => body.emit('size', s))
- // stick a flag on here so downstream users will know if they can expect integrity events
- tee.pipe(cacheStream)
- // TODO if the cache write fails, log a warning but return the response anyway
- // eslint-disable-next-line promise/catch-or-return
- cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
- body.unshift(tee)
- body.unshift(this.response.body)
- }
-
- body.once('resume', onResume)
- body.once('end', () => body.removeListener('resume', onResume))
- } else {
- await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
- }
-
- // note: we do not set the x-local-cache-hash header because we do not know
- // the hash value until after the write to the cache completes, which doesn't
- // happen until after the response has been sent and it's too late to write
- // the header anyway
- this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
- this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
- this.response.headers.set('x-local-cache-mode', 'stream')
- this.response.headers.set('x-local-cache-status', status)
- this.response.headers.set('x-local-cache-time', new Date().toISOString())
- const newResponse = new Response(body, {
- url: this.response.url,
- status: this.response.status,
- headers: this.response.headers,
- counter: this.options.counter,
- })
- return newResponse
- }
-
- // use the cached data to create a response and return it
- async respond (method, options, status) {
- let response
- if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
- // if the request is a HEAD, or the response is a redirect,
- // then the metadata in the entry already includes everything
- // we need to build a response
- response = this.response
- } else {
- // we're responding with a full cached response, so create a body
- // that reads from cacache and attach it to a new Response
- const body = new Minipass()
- const headers = { ...this.policy.responseHeaders() }
-
- const onResume = () => {
- const cacheStream = cacache.get.stream.byDigest(
- this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
- )
- cacheStream.on('error', async (err) => {
- cacheStream.pause()
- if (err.code === 'EINTEGRITY') {
- await cacache.rm.content(
- this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
- )
- }
- if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
- await CacheEntry.invalidate(this.request, this.options)
- }
- body.emit('error', err)
- cacheStream.resume()
- })
- // emit the integrity and size events based on our metadata so we're consistent
- body.emit('integrity', this.entry.integrity)
- body.emit('size', Number(headers['content-length']))
- cacheStream.pipe(body)
- }
-
- body.once('resume', onResume)
- body.once('end', () => body.removeListener('resume', onResume))
- response = new Response(body, {
- url: this.entry.metadata.url,
- counter: options.counter,
- status: 200,
- headers,
- })
- }
-
- response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
- response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
- response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
- response.headers.set('x-local-cache-mode', 'stream')
- response.headers.set('x-local-cache-status', status)
- response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
- return response
- }
-
- // use the provided request along with this cache entry to
- // revalidate the stored response. returns a response, either
- // from the cache or from the update
- async revalidate (request, options) {
- const revalidateRequest = new Request(request, {
- headers: this.policy.revalidationHeaders(request),
- })
-
- try {
- // NOTE: be sure to remove the headers property from the
- // user supplied options, since we have already defined
- // them on the new request object. if they're still in the
- // options then those will overwrite the ones from the policy
- var response = await remote(revalidateRequest, {
- ...options,
- headers: undefined,
- })
- } catch (err) {
- // if the network fetch fails, return the stale
- // cached response unless it has a cache-control
- // of 'must-revalidate'
- if (!this.policy.mustRevalidate) {
- return this.respond(request.method, options, 'stale')
- }
-
- throw err
- }
-
- if (this.policy.revalidated(revalidateRequest, response)) {
- // we got a 304, write a new index to the cache and respond from cache
- const metadata = getMetadata(request, response, options)
- // 304 responses do not include headers that are specific to the response data
- // since they do not include a body, so we copy values for headers that were
- // in the old cache entry to the new one, if the new metadata does not already
- // include that header
- for (const name of KEEP_RESPONSE_HEADERS) {
- if (
- !hasOwnProperty(metadata.resHeaders, name) &&
- hasOwnProperty(this.entry.metadata.resHeaders, name)
- ) {
- metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
- }
- }
-
- for (const name of options.cacheAdditionalHeaders) {
- const inMeta = hasOwnProperty(metadata.resHeaders, name)
- const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
- const inPolicy = hasOwnProperty(this.policy.response.headers, name)
-
- // if the header is in the existing entry, but it is not in the metadata
- // then we need to write it to the metadata as this will refresh the on-disk cache
- if (!inMeta && inEntry) {
- metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
- }
- // if the header is in the metadata, but not in the policy, then we need to set
- // it in the policy so that it's included in the immediate response. future
- // responses will load a new cache entry, so we don't need to change that
- if (!inPolicy && inMeta) {
- this.policy.response.headers[name] = metadata.resHeaders[name]
- }
- }
-
- try {
- await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
- size: this.entry.size,
- metadata,
- })
- } catch (err) {
- // if updating the cache index fails, we ignore it and
- // respond anyway
- }
- return this.respond(request.method, options, 'revalidated')
- }
-
- // if we got a modified response, create a new entry based on it
- const newEntry = new CacheEntry({
- request,
- response,
- options,
- })
-
- // respond with the new entry while writing it to the cache
- return newEntry.store('updated')
- }
-}
-
-module.exports = CacheEntry
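
One subtlety in the deleted getMetadata() is the Vary handling: request headers named by the response's Vary header are copied into the index entry so a later lookup can be matched against them. A standalone sketch of just that rule, with plain objects standing in for real Headers instances:

    // capture the request headers that a Vary header makes significant;
    // a Vary of '*' can never be satisfied, so nothing extra is stored
    const varyHeaders = (vary, requestHeaders) => {
      if (!vary || vary === '*') {
        return {}
      }
      const captured = {}
      for (const name of vary.trim().toLowerCase().split(/\s*,\s*/)) {
        if (name in requestHeaders) {
          captured[name] = requestHeaders[name]
        }
      }
      return captured
    }

    console.log(varyHeaders('Accept-Encoding, Accept-Language', {
      'accept-encoding': 'gzip',
      'accept-language': 'en',
      accept: 'application/json', // not named by Vary, so not captured
    }))
    // => { 'accept-encoding': 'gzip', 'accept-language': 'en' }
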
diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
deleted file mode 100644
index 67a66573bebe66..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
+++ /dev/null
@@ -1,11 +0,0 @@
-class NotCachedError extends Error {
- constructor (url) {
- /* eslint-disable-next-line max-len */
- super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
- this.code = 'ENOTCACHED'
- }
-}
-
-module.exports = {
- NotCachedError,
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
deleted file mode 100644
index 0de49d23fb9336..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const { NotCachedError } = require('./errors.js')
-const CacheEntry = require('./entry.js')
-const remote = require('../remote.js')
-
-// do whatever is necessary to get a Response and return it
-const cacheFetch = async (request, options) => {
- // try to find a cached entry that satisfies this request
- const entry = await CacheEntry.find(request, options)
- if (!entry) {
- // no cached result, if the cache mode is 'only-if-cached' that's a failure
- if (options.cache === 'only-if-cached') {
- throw new NotCachedError(request.url)
- }
-
- // otherwise, we make a request, store it and return it
- const response = await remote(request, options)
- const newEntry = new CacheEntry({ request, response, options })
- return newEntry.store('miss')
- }
-
- // we have a cached response that satisfies this request, however if the cache
- // mode is 'no-cache' then we send the revalidation request no matter what
- if (options.cache === 'no-cache') {
- return entry.revalidate(request, options)
- }
-
- // if the cached entry is not stale, or if the cache mode is 'force-cache' or
- // 'only-if-cached' we can respond with the cached entry. set the status
- // based on the result of needsRevalidation and respond
- const _needsRevalidation = entry.policy.needsRevalidation(request)
- if (options.cache === 'force-cache' ||
- options.cache === 'only-if-cached' ||
- !_needsRevalidation) {
- return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
- }
-
- // if we got here, the cache entry is stale so revalidate it
- return entry.revalidate(request, options)
-}
-
-cacheFetch.invalidate = async (request, options) => {
- if (!options.cachePath) {
- return
- }
-
- return CacheEntry.invalidate(request, options)
-}
-
-module.exports = cacheFetch
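
The cache modes dispatched in the deleted cacheFetch() correspond to the `cache` option on make-fetch-happen's public fetch. A small usage sketch, assuming a hypothetical cache directory; the registry ping URL is only an example endpoint:

    const fetch = require('make-fetch-happen')

    fetch('https://registry.npmjs.org/-/ping', {
      cachePath: '/tmp/mfh-cache', // hypothetical cache directory
      cache: 'only-if-cached',     // no network allowed
    }).catch(err => console.log(err.code)) // 'ENOTCACHED' on a cold cache
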
diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
deleted file mode 100644
index f7684d562b7fae..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
+++ /dev/null
@@ -1,17 +0,0 @@
-const { URL, format } = require('url')
-
-// options passed to url.format() when generating a key
-const formatOptions = {
- auth: false,
- fragment: false,
- search: true,
- unicode: false,
-}
-
-// returns a string to be used as the cache key for the Request
-const cacheKey = (request) => {
- const parsed = new URL(request.url)
- return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
-}
-
-module.exports = cacheKey
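
Because auth is dropped and the fragment ignored while the search string is kept, two requests share an index entry exactly when their normalized URL and query agree. Reproducing the deleted key logic inline makes the shape of the keys visible:

    const { URL, format } = require('url')

    const cacheKey = (u) => `make-fetch-happen:request-cache:${
      format(new URL(u), { auth: false, fragment: false, search: true, unicode: false })}`

    console.log(cacheKey('https://user:pass@example.com/pkg?v=1#frag'))
    // => make-fetch-happen:request-cache:https://example.com/pkg?v=1
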
diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
deleted file mode 100644
index ada3c8600dae92..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
+++ /dev/null
@@ -1,161 +0,0 @@
-const CacheSemantics = require('http-cache-semantics')
-const Negotiator = require('negotiator')
-const ssri = require('ssri')
-
-// options passed to http-cache-semantics constructor
-const policyOptions = {
- shared: false,
- ignoreCargoCult: true,
-}
-
-// a fake empty response, used when only testing the
-// request for storability
-const emptyResponse = { status: 200, headers: {} }
-
-// returns a plain object representation of the Request
-const requestObject = (request) => {
- const _obj = {
- method: request.method,
- url: request.url,
- headers: {},
- compress: request.compress,
- }
-
- request.headers.forEach((value, key) => {
- _obj.headers[key] = value
- })
-
- return _obj
-}
-
-// returns a plain object representation of the Response
-const responseObject = (response) => {
- const _obj = {
- status: response.status,
- headers: {},
- }
-
- response.headers.forEach((value, key) => {
- _obj.headers[key] = value
- })
-
- return _obj
-}
-
-class CachePolicy {
- constructor ({ entry, request, response, options }) {
- this.entry = entry
- this.request = requestObject(request)
- this.response = responseObject(response)
- this.options = options
- this.policy = new CacheSemantics(this.request, this.response, policyOptions)
-
- if (this.entry) {
- // if we have an entry, copy the timestamp to the _responseTime
- // this is necessary because the CacheSemantics constructor forces
- // the value to Date.now() which means a policy created from a
- // cache entry is likely to always identify itself as stale
- this.policy._responseTime = this.entry.metadata.time
- }
- }
-
- // static method to quickly determine if a request alone is storable
- static storable (request, options) {
- // no cachePath means no caching
- if (!options.cachePath) {
- return false
- }
-
- // user explicitly asked not to cache
- if (options.cache === 'no-store') {
- return false
- }
-
- // we only cache GET and HEAD requests
- if (!['GET', 'HEAD'].includes(request.method)) {
- return false
- }
-
- // otherwise, let http-cache-semantics make the decision
- // based on the request's headers
- const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
- return policy.storable()
- }
-
- // returns true if the policy satisfies the request
- satisfies (request) {
- const _req = requestObject(request)
- if (this.request.headers.host !== _req.headers.host) {
- return false
- }
-
- if (this.request.compress !== _req.compress) {
- return false
- }
-
- const negotiatorA = new Negotiator(this.request)
- const negotiatorB = new Negotiator(_req)
-
- if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
- return false
- }
-
- if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
- return false
- }
-
- if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
- return false
- }
-
- if (this.options.integrity) {
- return ssri.parse(this.options.integrity).match(this.entry.integrity)
- }
-
- return true
- }
-
- // returns true if the request and response allow caching
- storable () {
- return this.policy.storable()
- }
-
- // NOTE: this is a hack to avoid parsing the cache-control
- // header ourselves, it returns true if the response's
- // cache-control contains must-revalidate
- get mustRevalidate () {
- return !!this.policy._rescc['must-revalidate']
- }
-
- // returns true if the cached response requires revalidation
- // for the given request
- needsRevalidation (request) {
- const _req = requestObject(request)
- // force method to GET because we only cache GETs
- // but can serve a HEAD from a cached GET
- _req.method = 'GET'
- return !this.policy.satisfiesWithoutRevalidation(_req)
- }
-
- responseHeaders () {
- return this.policy.responseHeaders()
- }
-
- // returns a new object containing the appropriate headers
- // to send a revalidation request
- revalidationHeaders (request) {
- const _req = requestObject(request)
- return this.policy.revalidationHeaders(_req)
- }
-
- // returns true if the request/response was revalidated
- // successfully. returns false if a new response was received
- revalidated (request, response) {
- const _req = requestObject(request)
- const _res = responseObject(response)
- const policy = this.policy.revalidatedPolicy(_req, _res)
- return !policy.modified
- }
-}
-
-module.exports = CachePolicy
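
The storability and freshness decisions in the deleted CachePolicy wrapper are delegated to http-cache-semantics; only the host, compress, and content-negotiation comparisons are local. A minimal sketch of the underlying library call that the static storable() boils down to:

    const CacheSemantics = require('http-cache-semantics')

    const policy = new CacheSemantics(
      { method: 'GET', url: '/registry/pkg', headers: { 'cache-control': 'no-store' } },
      { status: 200, headers: {} }, // the same fake empty response used above
      { shared: false, ignoreCargoCult: true }
    )
    console.log(policy.storable()) // false: the request itself forbids storage
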
diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
deleted file mode 100644
index 233ba67e165502..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
+++ /dev/null
@@ -1,118 +0,0 @@
-'use strict'
-
-const { FetchError, Request, isRedirect } = require('minipass-fetch')
-const url = require('url')
-
-const CachePolicy = require('./cache/policy.js')
-const cache = require('./cache/index.js')
-const remote = require('./remote.js')
-
-// given a Request, a Response and user options
-// return true if the response is a redirect that
-// can be followed. we throw errors that will result
-// in the fetch being rejected if the redirect is
-// possible but invalid for some reason
-const canFollowRedirect = (request, response, options) => {
- if (!isRedirect(response.status)) {
- return false
- }
-
- if (options.redirect === 'manual') {
- return false
- }
-
- if (options.redirect === 'error') {
- throw new FetchError(`redirect mode is set to error: ${request.url}`,
- 'no-redirect', { code: 'ENOREDIRECT' })
- }
-
- if (!response.headers.has('location')) {
- throw new FetchError(`redirect location header missing for: ${request.url}`,
- 'no-location', { code: 'EINVALIDREDIRECT' })
- }
-
- if (request.counter >= request.follow) {
- throw new FetchError(`maximum redirect reached at: ${request.url}`,
- 'max-redirect', { code: 'EMAXREDIRECT' })
- }
-
- return true
-}
-
-// given a Request, a Response, and the user's options return an object
-// with a new Request and a new options object that will be used for
-// following the redirect
-const getRedirect = (request, response, options) => {
- const _opts = { ...options }
- const location = response.headers.get('location')
- const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
- // Comment below is used under the following license:
- /**
- * @license
- * Copyright (c) 2010-2012 Mikeal Rogers
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an "AS
- * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language
- * governing permissions and limitations under the License.
- */
-
- // Remove authorization if changing hostnames (but not if just
- // changing ports or protocols). This matches the behavior of request:
- // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
- if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
- request.headers.delete('authorization')
- request.headers.delete('cookie')
- }
-
- // for POST request with 301/302 response, or any request with 303 response,
- // use GET when following redirect
- if (
- response.status === 303 ||
- (request.method === 'POST' && [301, 302].includes(response.status))
- ) {
- _opts.method = 'GET'
- _opts.body = null
- request.headers.delete('content-length')
- }
-
- _opts.headers = {}
- request.headers.forEach((value, key) => {
- _opts.headers[key] = value
- })
-
- _opts.counter = ++request.counter
- const redirectReq = new Request(url.format(redirectUrl), _opts)
- return {
- request: redirectReq,
- options: _opts,
- }
-}
-
-const fetch = async (request, options) => {
- const response = CachePolicy.storable(request, options)
- ? await cache(request, options)
- : await remote(request, options)
-
- // if the request wasn't a GET or HEAD, and the response
- // status is between 200 and 399 inclusive, invalidate the
- // request url
- if (!['GET', 'HEAD'].includes(request.method) &&
- response.status >= 200 &&
- response.status <= 399) {
- await cache.invalidate(request, options)
- }
-
- if (!canFollowRedirect(request, response, options)) {
- return response
- }
-
- const redirect = getRedirect(request, response, options)
- return fetch(redirect.request, redirect.options)
-}
-
-module.exports = fetch
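
The credential stripping in getRedirect() is host-based, not origin-based: a change of port or protocol keeps the authorization and cookie headers, while a change of hostname drops them. A standalone sketch of that predicate:

    const { URL } = require('url')

    const stripsCredentials = (fromUrl, location) =>
      new URL(fromUrl).hostname !== new URL(location, fromUrl).hostname

    console.log(stripsCredentials('https://a.example/x', '/y'))                       // false
    console.log(stripsCredentials('https://a.example/x', 'https://a.example:8443/y')) // false
    console.log(stripsCredentials('https://a.example/x', 'https://b.example/y'))      // true
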
diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
deleted file mode 100644
index 2f12e8e1b61131..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const { FetchError, Headers, Request, Response } = require('minipass-fetch')
-
-const configureOptions = require('./options.js')
-const fetch = require('./fetch.js')
-
-const makeFetchHappen = (url, opts) => {
- const options = configureOptions(opts)
-
- const request = new Request(url, options)
- return fetch(request, options)
-}
-
-makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
- if (typeof defaultUrl === 'object') {
- defaultOptions = defaultUrl
- defaultUrl = null
- }
-
- const defaultedFetch = (url, options = {}) => {
- const finalUrl = url || defaultUrl
- const finalOptions = {
- ...defaultOptions,
- ...options,
- headers: {
- ...defaultOptions.headers,
- ...options.headers,
- },
- }
- return wrappedFetch(finalUrl, finalOptions)
- }
-
- defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
- makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
- return defaultedFetch
-}
-
-module.exports = makeFetchHappen
-module.exports.FetchError = FetchError
-module.exports.Headers = Headers
-module.exports.Request = Request
-module.exports.Response = Response
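
defaults() composes: each layer closes over the previous wrappedFetch, and headers are the only option merged deeply. A usage sketch with a hypothetical cache path and example header values:

    const makeFetchHappen = require('make-fetch-happen')

    const myFetch = makeFetchHappen.defaults({
      cachePath: '/tmp/mfh-cache',                 // hypothetical cache directory
      headers: { 'user-agent': 'example-client' }, // merged into every request
    })

    myFetch('https://registry.npmjs.org/-/ping', {
      headers: { accept: 'application/json' }, // merged with the default headers
    }).then(res => console.log(res.status)).catch(console.error)
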
diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
deleted file mode 100644
index f77511279f831d..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
+++ /dev/null
@@ -1,54 +0,0 @@
-const dns = require('dns')
-
-const conditionalHeaders = [
- 'if-modified-since',
- 'if-none-match',
- 'if-unmodified-since',
- 'if-match',
- 'if-range',
-]
-
-const configureOptions = (opts) => {
- const { strictSSL, ...options } = { ...opts }
- options.method = options.method ? options.method.toUpperCase() : 'GET'
- options.rejectUnauthorized = strictSSL !== false
-
- if (!options.retry) {
- options.retry = { retries: 0 }
- } else if (typeof options.retry === 'string') {
- const retries = parseInt(options.retry, 10)
- if (isFinite(retries)) {
- options.retry = { retries }
- } else {
- options.retry = { retries: 0 }
- }
- } else if (typeof options.retry === 'number') {
- options.retry = { retries: options.retry }
- } else {
- options.retry = { retries: 0, ...options.retry }
- }
-
- options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
-
- options.cache = options.cache || 'default'
- if (options.cache === 'default') {
- const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
- return conditionalHeaders.includes(name.toLowerCase())
- })
- if (hasConditionalHeader) {
- options.cache = 'no-store'
- }
- }
-
- options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
-
- // cacheManager is deprecated, but if it's set and
- // cachePath is not we should copy it to the new field
- if (options.cacheManager && !options.cachePath) {
- options.cachePath = options.cacheManager
- }
-
- return options
-}
-
-module.exports = configureOptions
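
The retry option above accepts a string, a number, an object, or nothing, and everything funnels into a `{ retries, ... }` object. A standalone replica of just that normalization:

    const normalizeRetry = (retry) => {
      if (!retry) {
        return { retries: 0 }
      }
      if (typeof retry === 'string') {
        const retries = parseInt(retry, 10)
        return isFinite(retries) ? { retries } : { retries: 0 }
      }
      if (typeof retry === 'number') {
        return { retries: retry }
      }
      return { retries: 0, ...retry }
    }

    console.log(normalizeRetry('3'))           // { retries: 3 }
    console.log(normalizeRetry(2))             // { retries: 2 }
    console.log(normalizeRetry({ factor: 2 })) // { retries: 0, factor: 2 }
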
diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
deleted file mode 100644
index b1d221b2d0ce31..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
+++ /dev/null
@@ -1,41 +0,0 @@
-'use strict'
-
-const MinipassPipeline = require('minipass-pipeline')
-
-class CachingMinipassPipeline extends MinipassPipeline {
- #events = []
- #data = new Map()
-
- constructor (opts, ...streams) {
- // CRITICAL: do NOT pass the streams to the call to super(), this will start
- // the flow of data and potentially cause the events we need to catch to emit
- // before we've finished our own setup. instead we call super() with no args,
- // finish our setup, and then push the streams into ourselves to start the
- // data flow
- super()
- this.#events = opts.events
-
- /* istanbul ignore next - coverage disabled because this is pointless to test here */
- if (streams.length) {
- this.push(...streams)
- }
- }
-
- on (event, handler) {
- if (this.#events.includes(event) && this.#data.has(event)) {
- return handler(...this.#data.get(event))
- }
-
- return super.on(event, handler)
- }
-
- emit (event, ...data) {
- if (this.#events.includes(event)) {
- this.#data.set(event, data)
- }
-
- return super.emit(event, ...data)
- }
-}
-
-module.exports = CachingMinipassPipeline
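
The point of the caching pipeline is that 'integrity' and 'size' fire once, possibly before a consumer subscribes; the class records their payloads and replays them to late listeners. A minimal sketch of the same trick on a bare EventEmitter:

    const { EventEmitter } = require('events')

    class CachingEmitter extends EventEmitter {
      #cached = new Map()
      #events
      constructor (events) {
        super()
        this.#events = events
      }

      emit (event, ...data) {
        if (this.#events.includes(event)) {
          this.#cached.set(event, data) // remember the payload for late listeners
        }
        return super.emit(event, ...data)
      }

      on (event, handler) {
        if (this.#events.includes(event) && this.#cached.has(event)) {
          handler(...this.#cached.get(event)) // replay instead of waiting
          return this
        }
        return super.on(event, handler)
      }
    }

    const src = new CachingEmitter(['integrity'])
    src.emit('integrity', 'sha512-...') // nobody is listening yet
    src.on('integrity', (i) => console.log('late listener still sees', i))
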
diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
deleted file mode 100644
index 8554564074de6e..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
+++ /dev/null
@@ -1,131 +0,0 @@
-const { Minipass } = require('minipass')
-const fetch = require('minipass-fetch')
-const promiseRetry = require('promise-retry')
-const ssri = require('ssri')
-const { log } = require('proc-log')
-
-const CachingMinipassPipeline = require('./pipeline.js')
-const { getAgent } = require('@npmcli/agent')
-const pkg = require('../package.json')
-
-const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
-
-const RETRY_ERRORS = [
- 'ECONNRESET', // remote socket closed on us
- 'ECONNREFUSED', // remote host refused to open connection
- 'EADDRINUSE', // failed to bind to a local port (proxy?)
- 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
- // from @npmcli/agent
- 'ECONNECTIONTIMEOUT',
- 'EIDLETIMEOUT',
- 'ERESPONSETIMEOUT',
- 'ETRANSFERTIMEOUT',
- // Known codes we do NOT retry on:
- // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
- // EINVALIDPROXY // invalid protocol from @npmcli/agent
- // EINVALIDRESPONSE // invalid status code from @npmcli/agent
-]
-
-const RETRY_TYPES = [
- 'request-timeout',
-]
-
-// make a request directly to the remote source,
-// retrying certain classes of errors as well as
-// following redirects (through the cache if necessary)
-// and verifying response integrity
-const remoteFetch = (request, options) => {
- const agent = getAgent(request.url, options)
- if (!request.headers.has('connection')) {
- request.headers.set('connection', agent ? 'keep-alive' : 'close')
- }
-
- if (!request.headers.has('user-agent')) {
- request.headers.set('user-agent', USER_AGENT)
- }
-
- // keep our own options since we're overriding the agent
- // and the redirect mode
- const _opts = {
- ...options,
- agent,
- redirect: 'manual',
- }
-
- return promiseRetry(async (retryHandler, attemptNum) => {
- const req = new fetch.Request(request, _opts)
- try {
- let res = await fetch(req, _opts)
- if (_opts.integrity && res.status === 200) {
- // we got a 200 response and the user has specified an expected
- // integrity value, so wrap the response in an ssri stream to verify it
- const integrityStream = ssri.integrityStream({
- algorithms: _opts.algorithms,
- integrity: _opts.integrity,
- size: _opts.size,
- })
- const pipeline = new CachingMinipassPipeline({
- events: ['integrity', 'size'],
- }, res.body, integrityStream)
- // we also propagate the integrity and size events out to the pipeline so we can use
- // this new response body as an integrityEmitter for cacache
- integrityStream.on('integrity', i => pipeline.emit('integrity', i))
- integrityStream.on('size', s => pipeline.emit('size', s))
- res = new fetch.Response(pipeline, res)
- // set an explicit flag so we know if our response body will emit integrity and size
- res.body.hasIntegrityEmitter = true
- }
-
- res.headers.set('x-fetch-attempts', attemptNum)
-
- // do not retry POST requests, or requests with a streaming body
- // do retry requests with a 408, 420, 429 or 500+ status in the response
- const isStream = Minipass.isStream(req.body)
- const isRetriable = req.method !== 'POST' &&
- !isStream &&
- ([408, 420, 429].includes(res.status) || res.status >= 500)
-
- if (isRetriable) {
- if (typeof options.onRetry === 'function') {
- options.onRetry(res)
- }
-
- /* eslint-disable-next-line max-len */
- log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`)
- return retryHandler(res)
- }
-
- return res
- } catch (err) {
- const code = (err.code === 'EPROMISERETRY')
- ? err.retried.code
- : err.code
-
- // err.retried will be the thing that was thrown from above
- // if it's a response, we just got a bad status code and we
- // can re-throw to allow the retry
- const isRetryError = err.retried instanceof fetch.Response ||
- (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
-
- if (req.method === 'POST' || isRetryError) {
- throw err
- }
-
- if (typeof options.onRetry === 'function') {
- options.onRetry(err)
- }
-
- log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`)
- return retryHandler(err)
- }
- }, options.retry).catch((err) => {
- // don't reject for http errors, just return them
- if (err.status >= 400 && err.type !== 'system') {
- return err
- }
-
- throw err
- })
-}
-
-module.exports = remoteFetch
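
Two retry triggers are in play above: thrown network errors whose codes appear in RETRY_ERRORS, and responses whose status makes them worth retrying. The status-side predicate is compact enough to lift out on its own:

    // only idempotent, non-streaming requests are retried, and only on
    // 408/420/429 or any 5xx response
    const isRetriable = (method, bodyIsStream, status) =>
      method !== 'POST' &&
      !bodyIsStream &&
      ([408, 420, 429].includes(status) || status >= 500)

    console.log(isRetriable('GET', false, 503))  // true
    console.log(isRetriable('POST', false, 503)) // false: never retry POST
    console.log(isRetriable('GET', true, 429))   // false: body already streamed
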
diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/package.json
deleted file mode 100644
index 7adb4d1e7f9719..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/package.json
+++ /dev/null
@@ -1,75 +0,0 @@
-{
- "name": "make-fetch-happen",
- "version": "13.0.1",
- "description": "Opinionated, caching, retrying fetch client",
- "main": "lib/index.js",
- "files": [
- "bin/",
- "lib/"
- ],
- "scripts": {
- "test": "tap",
- "posttest": "npm run lint",
- "eslint": "eslint",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "lintfix": "npm run lint -- --fix",
- "postlint": "template-oss-check",
- "snap": "tap",
- "template-oss-apply": "template-oss-apply --force"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/npm/make-fetch-happen.git"
- },
- "keywords": [
- "http",
- "request",
- "fetch",
- "mean girls",
- "caching",
- "cache",
- "subresource integrity"
- ],
- "author": "GitHub Inc.",
- "license": "ISC",
- "dependencies": {
- "@npmcli/agent": "^2.0.0",
- "cacache": "^18.0.0",
- "http-cache-semantics": "^4.1.1",
- "is-lambda": "^1.0.1",
- "minipass": "^7.0.2",
- "minipass-fetch": "^3.0.0",
- "minipass-flush": "^1.0.5",
- "minipass-pipeline": "^1.2.4",
- "negotiator": "^0.6.3",
- "proc-log": "^4.2.0",
- "promise-retry": "^2.0.1",
- "ssri": "^10.0.0"
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.21.4",
- "nock": "^13.2.4",
- "safe-buffer": "^5.2.1",
- "standard-version": "^9.3.2",
- "tap": "^16.0.0"
- },
- "engines": {
- "node": "^16.14.0 || >=18.0.0"
- },
- "tap": {
- "color": 1,
- "files": "test/*.js",
- "check-coverage": true,
- "timeout": 60,
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.21.4",
- "publish": "true"
- }
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE
deleted file mode 100644
index 3c3410cdc12ee3..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE
+++ /dev/null
@@ -1,28 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-Copyright (c) 2016 David Frank
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
----
-
-Note: This is a derivative work based on "node-fetch" by David Frank,
-modified and distributed under the terms of the MIT license above.
-https://github.com/bitinn/node-fetch
diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js
deleted file mode 100644
index b18f643269e375..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js
+++ /dev/null
@@ -1,17 +0,0 @@
-'use strict'
-class AbortError extends Error {
- constructor (message) {
- super(message)
- this.code = 'FETCH_ABORTED'
- this.type = 'aborted'
- Error.captureStackTrace(this, this.constructor)
- }
-
- get name () {
- return 'AbortError'
- }
-
- // don't allow name to be overridden, but don't throw either
- set name (s) {}
-}
-module.exports = AbortError
diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js
deleted file mode 100644
index 121b1730102e72..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js
+++ /dev/null
@@ -1,97 +0,0 @@
-'use strict'
-const { Minipass } = require('minipass')
-const TYPE = Symbol('type')
-const BUFFER = Symbol('buffer')
-
-class Blob {
- constructor (blobParts, options) {
- this[TYPE] = ''
-
- const buffers = []
- let size = 0
-
- if (blobParts) {
- const a = blobParts
- const length = Number(a.length)
- for (let i = 0; i < length; i++) {
- const element = a[i]
- const buffer = element instanceof Buffer ? element
- : ArrayBuffer.isView(element)
- ? Buffer.from(element.buffer, element.byteOffset, element.byteLength)
- : element instanceof ArrayBuffer ? Buffer.from(element)
- : element instanceof Blob ? element[BUFFER]
- : typeof element === 'string' ? Buffer.from(element)
- : Buffer.from(String(element))
- size += buffer.length
- buffers.push(buffer)
- }
- }
-
- this[BUFFER] = Buffer.concat(buffers, size)
-
- const type = options && options.type !== undefined
- && String(options.type).toLowerCase()
- if (type && !/[^\u0020-\u007E]/.test(type)) {
- this[TYPE] = type
- }
- }
-
- get size () {
- return this[BUFFER].length
- }
-
- get type () {
- return this[TYPE]
- }
-
- text () {
- return Promise.resolve(this[BUFFER].toString())
- }
-
- arrayBuffer () {
- const buf = this[BUFFER]
- const off = buf.byteOffset
- const len = buf.byteLength
- const ab = buf.buffer.slice(off, off + len)
- return Promise.resolve(ab)
- }
-
- stream () {
- return new Minipass().end(this[BUFFER])
- }
-
- slice (start, end, type) {
- const size = this.size
- const relativeStart = start === undefined ? 0
- : start < 0 ? Math.max(size + start, 0)
- : Math.min(start, size)
- const relativeEnd = end === undefined ? size
- : end < 0 ? Math.max(size + end, 0)
- : Math.min(end, size)
- const span = Math.max(relativeEnd - relativeStart, 0)
-
- const buffer = this[BUFFER]
- const slicedBuffer = buffer.slice(
- relativeStart,
- relativeStart + span
- )
- const blob = new Blob([], { type })
- blob[BUFFER] = slicedBuffer
- return blob
- }
-
- get [Symbol.toStringTag] () {
- return 'Blob'
- }
-
- static get BUFFER () {
- return BUFFER
- }
-}
-
-Object.defineProperties(Blob.prototype, {
- size: { enumerable: true },
- type: { enumerable: true },
-})
-
-module.exports = Blob
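
slice() follows Array.prototype.slice semantics: negative offsets count from the end and everything is clamped to [0, size]. Extracting just the index arithmetic shows the behavior:

    const clampSlice = (size, start, end) => {
      const relativeStart = start === undefined ? 0
        : start < 0 ? Math.max(size + start, 0)
        : Math.min(start, size)
      const relativeEnd = end === undefined ? size
        : end < 0 ? Math.max(size + end, 0)
        : Math.min(end, size)
      const span = Math.max(relativeEnd - relativeStart, 0)
      return [relativeStart, relativeStart + span]
    }

    console.log(clampSlice(10, -4, -1)) // [6, 9]
    console.log(clampSlice(10, 8, 100)) // [8, 10]: end clamped to size
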
diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js
deleted file mode 100644
index 62286bd1de0d91..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js
+++ /dev/null
@@ -1,350 +0,0 @@
-'use strict'
-const { Minipass } = require('minipass')
-const MinipassSized = require('minipass-sized')
-
-const Blob = require('./blob.js')
-const { BUFFER } = Blob
-const FetchError = require('./fetch-error.js')
-
-// optional dependency on 'encoding'
-let convert
-try {
- convert = require('encoding').convert
-} catch (e) {
- // defer error until textConverted is called
-}
-
-const INTERNALS = Symbol('Body internals')
-const CONSUME_BODY = Symbol('consumeBody')
-
-class Body {
- constructor (bodyArg, options = {}) {
- const { size = 0, timeout = 0 } = options
- const body = bodyArg === undefined || bodyArg === null ? null
- : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString())
- : isBlob(bodyArg) ? bodyArg
- : Buffer.isBuffer(bodyArg) ? bodyArg
- : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]'
- ? Buffer.from(bodyArg)
- : ArrayBuffer.isView(bodyArg)
- ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength)
- : Minipass.isStream(bodyArg) ? bodyArg
- : Buffer.from(String(bodyArg))
-
- this[INTERNALS] = {
- body,
- disturbed: false,
- error: null,
- }
-
- this.size = size
- this.timeout = timeout
-
- if (Minipass.isStream(body)) {
- body.on('error', er => {
- const error = er.name === 'AbortError' ? er
- : new FetchError(`Invalid response while trying to fetch ${
- this.url}: ${er.message}`, 'system', er)
- this[INTERNALS].error = error
- })
- }
- }
-
- get body () {
- return this[INTERNALS].body
- }
-
- get bodyUsed () {
- return this[INTERNALS].disturbed
- }
-
- arrayBuffer () {
- return this[CONSUME_BODY]().then(buf =>
- buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength))
- }
-
- blob () {
- const ct = this.headers && this.headers.get('content-type') || ''
- return this[CONSUME_BODY]().then(buf => Object.assign(
- new Blob([], { type: ct.toLowerCase() }),
- { [BUFFER]: buf }
- ))
- }
-
- async json () {
- const buf = await this[CONSUME_BODY]()
- try {
- return JSON.parse(buf.toString())
- } catch (er) {
- throw new FetchError(
- `invalid json response body at ${this.url} reason: ${er.message}`,
- 'invalid-json'
- )
- }
- }
-
- text () {
- return this[CONSUME_BODY]().then(buf => buf.toString())
- }
-
- buffer () {
- return this[CONSUME_BODY]()
- }
-
- textConverted () {
- return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers))
- }
-
- [CONSUME_BODY] () {
- if (this[INTERNALS].disturbed) {
- return Promise.reject(new TypeError(`body used already for: ${
- this.url}`))
- }
-
- this[INTERNALS].disturbed = true
-
- if (this[INTERNALS].error) {
- return Promise.reject(this[INTERNALS].error)
- }
-
- // body is null
- if (this.body === null) {
- return Promise.resolve(Buffer.alloc(0))
- }
-
- if (Buffer.isBuffer(this.body)) {
- return Promise.resolve(this.body)
- }
-
- const upstream = isBlob(this.body) ? this.body.stream() : this.body
-
- /* istanbul ignore if: should never happen */
- if (!Minipass.isStream(upstream)) {
- return Promise.resolve(Buffer.alloc(0))
- }
-
- const stream = this.size && upstream instanceof MinipassSized ? upstream
- : !this.size && upstream instanceof Minipass &&
- !(upstream instanceof MinipassSized) ? upstream
- : this.size ? new MinipassSized({ size: this.size })
- : new Minipass()
-
- // allow timeout on slow response body, but only if the stream is still writable. this
- // makes the timeout center on the socket stream from lib/index.js rather than the
- // intermediary minipass stream we create to receive the data
- const resTimeout = this.timeout && stream.writable ? setTimeout(() => {
- stream.emit('error', new FetchError(
- `Response timeout while trying to fetch ${
- this.url} (over ${this.timeout}ms)`, 'body-timeout'))
- }, this.timeout) : null
-
- // do not keep the process open just for this timeout, even
- // though we expect it'll get cleared eventually.
- if (resTimeout && resTimeout.unref) {
- resTimeout.unref()
- }
-
- // do the pipe in the promise, because the pipe() can send too much
- // data through right away and upset the MP Sized object
- return new Promise((resolve) => {
- // if the stream is some other kind of stream, then pipe through a MP
- // so we can collect it more easily.
- if (stream !== upstream) {
- upstream.on('error', er => stream.emit('error', er))
- upstream.pipe(stream)
- }
- resolve()
- }).then(() => stream.concat()).then(buf => {
- clearTimeout(resTimeout)
- return buf
- }).catch(er => {
- clearTimeout(resTimeout)
- // request was aborted, reject with this Error
- if (er.name === 'AbortError' || er.name === 'FetchError') {
- throw er
- } else if (er.name === 'RangeError') {
- throw new FetchError(`Could not create Buffer from response body for ${
- this.url}: ${er.message}`, 'system', er)
- } else {
- // other errors, such as incorrect content-encoding or content-length
- throw new FetchError(`Invalid response body while trying to fetch ${
- this.url}: ${er.message}`, 'system', er)
- }
- })
- }
-
- static clone (instance) {
- if (instance.bodyUsed) {
- throw new Error('cannot clone body after it is used')
- }
-
- const body = instance.body
-
- // check that body is a stream and not form-data object
- // NB: can't clone the form-data object without having it as a dependency
- if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') {
- // create a dedicated tee stream so that we don't lose data
- // potentially sitting in the body stream's buffer by writing it
- // immediately to p1 and not having it for p2.
- const tee = new Minipass()
- const p1 = new Minipass()
- const p2 = new Minipass()
- tee.on('error', er => {
- p1.emit('error', er)
- p2.emit('error', er)
- })
- body.on('error', er => tee.emit('error', er))
- tee.pipe(p1)
- tee.pipe(p2)
- body.pipe(tee)
- // set instance body to one fork, return the other
- instance[INTERNALS].body = p1
- return p2
- } else {
- return instance.body
- }
- }
-
- static extractContentType (body) {
- return body === null || body === undefined ? null
- : typeof body === 'string' ? 'text/plain;charset=UTF-8'
- : isURLSearchParams(body)
- ? 'application/x-www-form-urlencoded;charset=UTF-8'
- : isBlob(body) ? body.type || null
- : Buffer.isBuffer(body) ? null
- : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null
- : ArrayBuffer.isView(body) ? null
- : typeof body.getBoundary === 'function'
- ? `multipart/form-data;boundary=${body.getBoundary()}`
- : Minipass.isStream(body) ? null
- : 'text/plain;charset=UTF-8'
- }
-
- static getTotalBytes (instance) {
- const { body } = instance
- return (body === null || body === undefined) ? 0
- : isBlob(body) ? body.size
- : Buffer.isBuffer(body) ? body.length
- : body && typeof body.getLengthSync === 'function' && (
- // detect form data input from form-data module
- body._lengthRetrievers &&
- /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x
- body.hasKnownLength && body.hasKnownLength()) // 2.x
- ? body.getLengthSync()
- : null
- }
-
- static writeToStream (dest, instance) {
- const { body } = instance
-
- if (body === null || body === undefined) {
- dest.end()
- } else if (Buffer.isBuffer(body) || typeof body === 'string') {
- dest.end(body)
- } else {
- // body is stream or blob
- const stream = isBlob(body) ? body.stream() : body
- stream.on('error', er => dest.emit('error', er)).pipe(dest)
- }
-
- return dest
- }
-}
-
-Object.defineProperties(Body.prototype, {
- body: { enumerable: true },
- bodyUsed: { enumerable: true },
- arrayBuffer: { enumerable: true },
- blob: { enumerable: true },
- json: { enumerable: true },
- text: { enumerable: true },
-})
-
-const isURLSearchParams = obj =>
- // Duck-typing as a necessary condition.
- (typeof obj !== 'object' ||
- typeof obj.append !== 'function' ||
- typeof obj.delete !== 'function' ||
- typeof obj.get !== 'function' ||
- typeof obj.getAll !== 'function' ||
- typeof obj.has !== 'function' ||
- typeof obj.set !== 'function') ? false
- // Brand-checking and more duck-typing as optional condition.
- : obj.constructor.name === 'URLSearchParams' ||
- Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
- typeof obj.sort === 'function'
-
-const isBlob = obj =>
- typeof obj === 'object' &&
- typeof obj.arrayBuffer === 'function' &&
- typeof obj.type === 'string' &&
- typeof obj.stream === 'function' &&
- typeof obj.constructor === 'function' &&
- typeof obj.constructor.name === 'string' &&
- /^(Blob|File)$/.test(obj.constructor.name) &&
- /^(Blob|File)$/.test(obj[Symbol.toStringTag])
-
-const convertBody = (buffer, headers) => {
- /* istanbul ignore if */
- if (typeof convert !== 'function') {
- throw new Error('The package `encoding` must be installed to use the textConverted() function')
- }
-
- const ct = headers && headers.get('content-type')
- let charset = 'utf-8'
- let res
-
- // header
- if (ct) {
- res = /charset=([^;]*)/i.exec(ct)
- }
-
- // no charset in content type, peek at response body for at most 1024 bytes
- const str = buffer.slice(0, 1024).toString()
-
- // html5
-  if (!res && str) {
-    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str)
-  }
-
-  // html4
-  if (!res && str) {
-    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str)
-
-    if (res) {
-      res = /charset=(.*)/i.exec(res.pop())
-    }
-  }
-
-  // xml
-  if (!res && str) {
-    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str)
-  }
-
-  // found charset
-  if (res) {
-    charset = res.pop()
-
-    // prevent decode issues when sites use incorrect encoding
-    // ref: https://hsivonen.fi/encoding-menu/
-    if (charset === 'gb2312' || charset === 'gbk') {
-      charset = 'gb18030'
-    }
-  }
-
-  // turn raw buffers into a single utf-8 buffer
-  return convert(
-    buffer,
-    'UTF-8',
-    charset
-  ).toString()
-}
-
-module.exports = Body
diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/fetch-error.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/fetch-error.js
deleted file mode 100644
--- a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/fetch-error.js
+++ /dev/null
@@ -1,32 +0,0 @@
-'use strict'
-class FetchError extends Error {
-  constructor (message, type, systemError) {
-    super(message)
-    this.code = 'FETCH_ERROR'
-
-    // pick up code, expected, path, ...
-    if (systemError) {
-      Object.assign(this, systemError)
-    }
-
-    this.errno = this.code
-
-    // override anything the system error might've clobbered
-    this.type = this.code === 'EBADSIZE' && this.found > this.expect
-      ? 'max-size' : type
-    this.message = message
-    Error.captureStackTrace(this, this.constructor)
-  }
-
- get name () {
- return 'FetchError'
- }
-
- // don't allow name to be overwritten
- set name (n) {}
-
- get [Symbol.toStringTag] () {
- return 'FetchError'
- }
-}
-module.exports = FetchError
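
The `Body.clone()` implementation deleted above relies on a tee pattern: the source is piped into a dedicated fork point so that data buffered before the split still reaches both consumers. A minimal standalone sketch of that pattern, assuming only the `minipass` v7 package (`{ Minipass }` named export):

```js
const { Minipass } = require('minipass')

const source = new Minipass()
const tee = new Minipass()
const p1 = new Minipass()
const p2 = new Minipass()

// errors on the tee must be forwarded manually to both forks
tee.on('error', er => {
  p1.emit('error', er)
  p2.emit('error', er)
})
tee.pipe(p1)
tee.pipe(p2)
source.pipe(tee)

source.end('hello')
// both forks observe the full payload, including data buffered
// before either consumer attached
p1.concat().then(buf => console.log('p1:', buf.toString())) // p1: hello
p2.concat().then(buf => console.log('p2:', buf.toString())) // p2: hello
```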
diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js
deleted file mode 100644
index dd6e854d5ba399..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js
+++ /dev/null
@@ -1,267 +0,0 @@
-'use strict'
-const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/
-const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/
-
-const validateName = name => {
- name = `${name}`
- if (invalidTokenRegex.test(name) || name === '') {
- throw new TypeError(`${name} is not a legal HTTP header name`)
- }
-}
-
-const validateValue = value => {
- value = `${value}`
- if (invalidHeaderCharRegex.test(value)) {
- throw new TypeError(`${value} is not a legal HTTP header value`)
- }
-}
-
-const find = (map, name) => {
- name = name.toLowerCase()
- for (const key in map) {
- if (key.toLowerCase() === name) {
- return key
- }
- }
- return undefined
-}
-
-const MAP = Symbol('map')
-class Headers {
- constructor (init = undefined) {
- this[MAP] = Object.create(null)
- if (init instanceof Headers) {
- const rawHeaders = init.raw()
- const headerNames = Object.keys(rawHeaders)
- for (const headerName of headerNames) {
- for (const value of rawHeaders[headerName]) {
- this.append(headerName, value)
- }
- }
- return
- }
-
- // no-op
- if (init === undefined || init === null) {
- return
- }
-
- if (typeof init === 'object') {
- const method = init[Symbol.iterator]
- if (method !== null && method !== undefined) {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable')
- }
-
-        // sequence<sequence<ByteString>>
- // Note: per spec we have to first exhaust the lists then process them
- const pairs = []
- for (const pair of init) {
- if (typeof pair !== 'object' ||
- typeof pair[Symbol.iterator] !== 'function') {
- throw new TypeError('Each header pair must be iterable')
- }
- const arrPair = Array.from(pair)
- if (arrPair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple')
- }
- pairs.push(arrPair)
- }
-
- for (const pair of pairs) {
- this.append(pair[0], pair[1])
- }
- } else {
-        // record<ByteString, ByteString>
- for (const key of Object.keys(init)) {
- this.append(key, init[key])
- }
- }
- } else {
- throw new TypeError('Provided initializer must be an object')
- }
- }
-
- get (name) {
- name = `${name}`
- validateName(name)
- const key = find(this[MAP], name)
- if (key === undefined) {
- return null
- }
-
- return this[MAP][key].join(', ')
- }
-
- forEach (callback, thisArg = undefined) {
- let pairs = getHeaders(this)
- for (let i = 0; i < pairs.length; i++) {
- const [name, value] = pairs[i]
- callback.call(thisArg, value, name, this)
- // refresh in case the callback added more headers
- pairs = getHeaders(this)
- }
- }
-
- set (name, value) {
- name = `${name}`
- value = `${value}`
- validateName(name)
- validateValue(value)
- const key = find(this[MAP], name)
- this[MAP][key !== undefined ? key : name] = [value]
- }
-
- append (name, value) {
- name = `${name}`
- value = `${value}`
- validateName(name)
- validateValue(value)
- const key = find(this[MAP], name)
- if (key !== undefined) {
- this[MAP][key].push(value)
- } else {
- this[MAP][name] = [value]
- }
- }
-
- has (name) {
- name = `${name}`
- validateName(name)
- return find(this[MAP], name) !== undefined
- }
-
- delete (name) {
- name = `${name}`
- validateName(name)
- const key = find(this[MAP], name)
- if (key !== undefined) {
- delete this[MAP][key]
- }
- }
-
- raw () {
- return this[MAP]
- }
-
- keys () {
- return new HeadersIterator(this, 'key')
- }
-
- values () {
- return new HeadersIterator(this, 'value')
- }
-
- [Symbol.iterator] () {
- return new HeadersIterator(this, 'key+value')
- }
-
- entries () {
- return new HeadersIterator(this, 'key+value')
- }
-
- get [Symbol.toStringTag] () {
- return 'Headers'
- }
-
- static exportNodeCompatibleHeaders (headers) {
- const obj = Object.assign(Object.create(null), headers[MAP])
-
- // http.request() only supports string as Host header. This hack makes
- // specifying custom Host header possible.
- const hostHeaderKey = find(headers[MAP], 'Host')
- if (hostHeaderKey !== undefined) {
- obj[hostHeaderKey] = obj[hostHeaderKey][0]
- }
-
- return obj
- }
-
- static createHeadersLenient (obj) {
- const headers = new Headers()
- for (const name of Object.keys(obj)) {
- if (invalidTokenRegex.test(name)) {
- continue
- }
-
- if (Array.isArray(obj[name])) {
- for (const val of obj[name]) {
- if (invalidHeaderCharRegex.test(val)) {
- continue
- }
-
- if (headers[MAP][name] === undefined) {
- headers[MAP][name] = [val]
- } else {
- headers[MAP][name].push(val)
- }
- }
- } else if (!invalidHeaderCharRegex.test(obj[name])) {
- headers[MAP][name] = [obj[name]]
- }
- }
- return headers
- }
-}
-
-Object.defineProperties(Headers.prototype, {
- get: { enumerable: true },
- forEach: { enumerable: true },
- set: { enumerable: true },
- append: { enumerable: true },
- has: { enumerable: true },
- delete: { enumerable: true },
- keys: { enumerable: true },
- values: { enumerable: true },
- entries: { enumerable: true },
-})
-
-const getHeaders = (headers, kind = 'key+value') =>
- Object.keys(headers[MAP]).sort().map(
- kind === 'key' ? k => k.toLowerCase()
- : kind === 'value' ? k => headers[MAP][k].join(', ')
- : k => [k.toLowerCase(), headers[MAP][k].join(', ')]
- )
-
-const INTERNAL = Symbol('internal')
-
-class HeadersIterator {
- constructor (target, kind) {
- this[INTERNAL] = {
- target,
- kind,
- index: 0,
- }
- }
-
- get [Symbol.toStringTag] () {
- return 'HeadersIterator'
- }
-
- next () {
- /* istanbul ignore if: should be impossible */
- if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) {
- throw new TypeError('Value of `this` is not a HeadersIterator')
- }
-
- const { target, kind, index } = this[INTERNAL]
- const values = getHeaders(target, kind)
- const len = values.length
- if (index >= len) {
- return {
- value: undefined,
- done: true,
- }
- }
-
- this[INTERNAL].index++
-
- return { value: values[index], done: false }
- }
-}
-
-// manually extend because 'extends' requires a ctor
-Object.setPrototypeOf(HeadersIterator.prototype,
- Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())))
-
-module.exports = Headers
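
Usage of the deleted `Headers` class follows the WHATWG shape: names match case-insensitively, values are coerced to strings and validated, and repeated values are joined with `', '` on read. A short sketch:

```js
const { Headers } = require('minipass-fetch')

const headers = new Headers([['Accept', 'application/json']])
headers.append('accept', 'text/plain') // matches the existing key case-insensitively
console.log(headers.get('ACCEPT'))     // 'application/json, text/plain'

headers.set('X-Count', 42)             // coerced to the string '42'
for (const [name, value] of headers) {
  console.log(name, value)             // names iterate lowercased, in sorted order
}
```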
diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js
deleted file mode 100644
index da402161670e65..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js
+++ /dev/null
@@ -1,377 +0,0 @@
-'use strict'
-const { URL } = require('url')
-const http = require('http')
-const https = require('https')
-const zlib = require('minizlib')
-const { Minipass } = require('minipass')
-
-const Body = require('./body.js')
-const { writeToStream, getTotalBytes } = Body
-const Response = require('./response.js')
-const Headers = require('./headers.js')
-const { createHeadersLenient } = Headers
-const Request = require('./request.js')
-const { getNodeRequestOptions } = Request
-const FetchError = require('./fetch-error.js')
-const AbortError = require('./abort-error.js')
-
-// XXX this should really be split up and unit-ized for easier testing
-// and better DRY implementation of data/http request aborting
-const fetch = async (url, opts) => {
- if (/^data:/.test(url)) {
- const request = new Request(url, opts)
- // delay 1 promise tick so that the consumer can abort right away
- return Promise.resolve().then(() => new Promise((resolve, reject) => {
- let type, data
- try {
- const { pathname, search } = new URL(url)
- const split = pathname.split(',')
- if (split.length < 2) {
- throw new Error('invalid data: URI')
- }
- const mime = split.shift()
- const base64 = /;base64$/.test(mime)
- type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime
- const rawData = decodeURIComponent(split.join(',') + search)
- data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData)
- } catch (er) {
- return reject(new FetchError(`[${request.method}] ${
- request.url} invalid URL, ${er.message}`, 'system', er))
- }
-
- const { signal } = request
- if (signal && signal.aborted) {
- return reject(new AbortError('The user aborted a request.'))
- }
-
- const headers = { 'Content-Length': data.length }
- if (type) {
- headers['Content-Type'] = type
- }
- return resolve(new Response(data, { headers }))
- }))
- }
-
- return new Promise((resolve, reject) => {
- // build request object
- const request = new Request(url, opts)
- let options
- try {
- options = getNodeRequestOptions(request)
- } catch (er) {
- return reject(er)
- }
-
- const send = (options.protocol === 'https:' ? https : http).request
- const { signal } = request
- let response = null
- const abort = () => {
- const error = new AbortError('The user aborted a request.')
- reject(error)
- if (Minipass.isStream(request.body) &&
- typeof request.body.destroy === 'function') {
- request.body.destroy(error)
- }
- if (response && response.body) {
- response.body.emit('error', error)
- }
- }
-
- if (signal && signal.aborted) {
- return abort()
- }
-
- const abortAndFinalize = () => {
- abort()
- finalize()
- }
-
- const finalize = () => {
- req.abort()
- if (signal) {
- signal.removeEventListener('abort', abortAndFinalize)
- }
- clearTimeout(reqTimeout)
- }
-
- // send request
- const req = send(options)
-
- if (signal) {
- signal.addEventListener('abort', abortAndFinalize)
- }
-
- let reqTimeout = null
- if (request.timeout) {
- req.once('socket', () => {
- reqTimeout = setTimeout(() => {
- reject(new FetchError(`network timeout at: ${
- request.url}`, 'request-timeout'))
- finalize()
- }, request.timeout)
- })
- }
-
- req.on('error', er => {
- // if a 'response' event is emitted before the 'error' event, then by the
- // time this handler is run it's too late to reject the Promise for the
- // response. instead, we forward the error event to the response stream
- // so that the error will surface to the user when they try to consume
- // the body. this is done as a side effect of aborting the request except
- // for in windows, where we must forward the event manually, otherwise
- // there is no longer a ref'd socket attached to the request and the
- // stream never ends so the event loop runs out of work and the process
- // exits without warning.
- // coverage skipped here due to the difficulty in testing
- // istanbul ignore next
- if (req.res) {
- req.res.emit('error', er)
- }
- reject(new FetchError(`request to ${request.url} failed, reason: ${
- er.message}`, 'system', er))
- finalize()
- })
-
- req.on('response', res => {
- clearTimeout(reqTimeout)
-
- const headers = createHeadersLenient(res.headers)
-
- // HTTP fetch step 5
- if (fetch.isRedirect(res.statusCode)) {
- // HTTP fetch step 5.2
- const location = headers.get('Location')
-
- // HTTP fetch step 5.3
- let locationURL = null
- try {
- locationURL = location === null ? null : new URL(location, request.url).toString()
- } catch {
- // error here can only be invalid URL in Location: header
- // do not throw when options.redirect == manual
-        // let the user extract the erroneous redirect URL
- if (request.redirect !== 'manual') {
- /* eslint-disable-next-line max-len */
- reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'))
- finalize()
- return
- }
- }
-
- // HTTP fetch step 5.5
- if (request.redirect === 'error') {
- reject(new FetchError('uri requested responds with a redirect, ' +
- `redirect mode is set to error: ${request.url}`, 'no-redirect'))
- finalize()
- return
- } else if (request.redirect === 'manual') {
- // node-fetch-specific step: make manual redirect a bit easier to
- // use by setting the Location header value to the resolved URL.
- if (locationURL !== null) {
- // handle corrupted header
- try {
- headers.set('Location', locationURL)
- } catch (err) {
- /* istanbul ignore next: nodejs server prevent invalid
- response headers, we can't test this through normal
- request */
- reject(err)
- }
- }
- } else if (request.redirect === 'follow' && locationURL !== null) {
- // HTTP-redirect fetch step 5
- if (request.counter >= request.follow) {
- reject(new FetchError(`maximum redirect reached at: ${
- request.url}`, 'max-redirect'))
- finalize()
- return
- }
-
- // HTTP-redirect fetch step 9
- if (res.statusCode !== 303 &&
- request.body &&
- getTotalBytes(request) === null) {
- reject(new FetchError(
- 'Cannot follow redirect with body being a readable stream',
- 'unsupported-redirect'
- ))
- finalize()
- return
- }
-
- // Update host due to redirection
- request.headers.set('host', (new URL(locationURL)).host)
-
- // HTTP-redirect fetch step 6 (counter increment)
- // Create a new Request object.
- const requestOpts = {
- headers: new Headers(request.headers),
- follow: request.follow,
- counter: request.counter + 1,
- agent: request.agent,
- compress: request.compress,
- method: request.method,
- body: request.body,
- signal: request.signal,
- timeout: request.timeout,
- }
-
- // if the redirect is to a new hostname, strip the authorization and cookie headers
- const parsedOriginal = new URL(request.url)
- const parsedRedirect = new URL(locationURL)
- if (parsedOriginal.hostname !== parsedRedirect.hostname) {
- requestOpts.headers.delete('authorization')
- requestOpts.headers.delete('cookie')
- }
-
- // HTTP-redirect fetch step 11
- if (res.statusCode === 303 || (
- (res.statusCode === 301 || res.statusCode === 302) &&
- request.method === 'POST'
- )) {
- requestOpts.method = 'GET'
- requestOpts.body = undefined
- requestOpts.headers.delete('content-length')
- }
-
- // HTTP-redirect fetch step 15
- resolve(fetch(new Request(locationURL, requestOpts)))
- finalize()
- return
- }
- } // end if(isRedirect)
-
- // prepare response
- res.once('end', () =>
- signal && signal.removeEventListener('abort', abortAndFinalize))
-
- const body = new Minipass()
- // if an error occurs, either on the response stream itself, on one of the
- // decoder streams, or a response length timeout from the Body class, we
- // forward the error through to our internal body stream. If we see an
- // error event on that, we call finalize to abort the request and ensure
- // we don't leave a socket believing a request is in flight.
- // this is difficult to test, so lacks specific coverage.
- body.on('error', finalize)
- // exceedingly rare that the stream would have an error,
- // but just in case we proxy it to the stream in use.
- res.on('error', /* istanbul ignore next */ er => body.emit('error', er))
- res.on('data', (chunk) => body.write(chunk))
- res.on('end', () => body.end())
-
- const responseOptions = {
- url: request.url,
- status: res.statusCode,
- statusText: res.statusMessage,
- headers: headers,
- size: request.size,
- timeout: request.timeout,
- counter: request.counter,
- trailer: new Promise(resolveTrailer =>
- res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))),
- }
-
- // HTTP-network fetch step 12.1.1.3
- const codings = headers.get('Content-Encoding')
-
- // HTTP-network fetch step 12.1.1.4: handle content codings
-
- // in following scenarios we ignore compression support
- // 1. compression support is disabled
- // 2. HEAD request
- // 3. no Content-Encoding header
- // 4. no content response (204)
- // 5. content not modified response (304)
- if (!request.compress ||
- request.method === 'HEAD' ||
- codings === null ||
- res.statusCode === 204 ||
- res.statusCode === 304) {
- response = new Response(body, responseOptions)
- resolve(response)
- return
- }
-
- // Be less strict when decoding compressed responses, since sometimes
- // servers send slightly invalid responses that are still accepted
- // by common browsers.
- // Always using Z_SYNC_FLUSH is what cURL does.
- const zlibOptions = {
- flush: zlib.constants.Z_SYNC_FLUSH,
- finishFlush: zlib.constants.Z_SYNC_FLUSH,
- }
-
- // for gzip
- if (codings === 'gzip' || codings === 'x-gzip') {
- const unzip = new zlib.Gunzip(zlibOptions)
- response = new Response(
- // exceedingly rare that the stream would have an error,
- // but just in case we proxy it to the stream in use.
- body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip),
- responseOptions
- )
- resolve(response)
- return
- }
-
- // for deflate
- if (codings === 'deflate' || codings === 'x-deflate') {
- // handle the infamous raw deflate response from old servers
- // a hack for old IIS and Apache servers
- const raw = res.pipe(new Minipass())
- raw.once('data', chunk => {
- // see http://stackoverflow.com/questions/37519828
- const decoder = (chunk[0] & 0x0F) === 0x08
- ? new zlib.Inflate()
- : new zlib.InflateRaw()
- // exceedingly rare that the stream would have an error,
- // but just in case we proxy it to the stream in use.
- body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder)
- response = new Response(decoder, responseOptions)
- resolve(response)
- })
- return
- }
-
- // for br
- if (codings === 'br') {
- // ignoring coverage so tests don't have to fake support (or lack of) for brotli
- // istanbul ignore next
- try {
- var decoder = new zlib.BrotliDecompress()
- } catch (err) {
- reject(err)
- finalize()
- return
- }
- // exceedingly rare that the stream would have an error,
- // but just in case we proxy it to the stream in use.
- body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder)
- response = new Response(decoder, responseOptions)
- resolve(response)
- return
- }
-
- // otherwise, use response as-is
- response = new Response(body, responseOptions)
- resolve(response)
- })
-
- writeToStream(req, request)
- })
-}
-
-module.exports = fetch
-
-fetch.isRedirect = code =>
- code === 301 ||
- code === 302 ||
- code === 303 ||
- code === 307 ||
- code === 308
-
-fetch.Headers = Headers
-fetch.Request = Request
-fetch.Response = Response
-fetch.FetchError = FetchError
-fetch.AbortError = AbortError
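
The deleted `fetch()` entry point behaves like node-fetch, with `data:` URIs decoded inline and the redirect, compression, and timeout handling shown above. A usage sketch (the registry URL is illustrative only):

```js
const fetch = require('minipass-fetch')

// data: URIs are decoded in-process, never touching the network
fetch('data:text/plain;base64,aGVsbG8=')
  .then(res => res.text())
  .then(text => console.log(text)) // 'hello'

// http(s) requests take node-fetch style options
fetch('https://registry.npmjs.org/minipass-fetch', {
  redirect: 'follow', // or 'manual' / 'error'
  compress: true,     // transparently decode gzip, deflate, and br bodies
  timeout: 30000,     // rejects with a 'request-timeout' FetchError
})
  .then(res => res.json())
  .then(doc => console.log(doc['dist-tags']))
```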
diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js
deleted file mode 100644
index 054439e6699107..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js
+++ /dev/null
@@ -1,282 +0,0 @@
-'use strict'
-const { URL } = require('url')
-const { Minipass } = require('minipass')
-const Headers = require('./headers.js')
-const { exportNodeCompatibleHeaders } = Headers
-const Body = require('./body.js')
-const { clone, extractContentType, getTotalBytes } = Body
-
-const version = require('../package.json').version
-const defaultUserAgent =
- `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)`
-
-const INTERNALS = Symbol('Request internals')
-
-const isRequest = input =>
- typeof input === 'object' && typeof input[INTERNALS] === 'object'
-
-const isAbortSignal = signal => {
- const proto = (
- signal
- && typeof signal === 'object'
- && Object.getPrototypeOf(signal)
- )
- return !!(proto && proto.constructor.name === 'AbortSignal')
-}
-
-class Request extends Body {
- constructor (input, init = {}) {
- const parsedURL = isRequest(input) ? new URL(input.url)
- : input && input.href ? new URL(input.href)
- : new URL(`${input}`)
-
- if (isRequest(input)) {
- init = { ...input[INTERNALS], ...init }
- } else if (!input || typeof input === 'string') {
- input = {}
- }
-
- const method = (init.method || input.method || 'GET').toUpperCase()
- const isGETHEAD = method === 'GET' || method === 'HEAD'
-
- if ((init.body !== null && init.body !== undefined ||
- isRequest(input) && input.body !== null) && isGETHEAD) {
- throw new TypeError('Request with GET/HEAD method cannot have body')
- }
-
- const inputBody = init.body !== null && init.body !== undefined ? init.body
- : isRequest(input) && input.body !== null ? clone(input)
- : null
-
- super(inputBody, {
- timeout: init.timeout || input.timeout || 0,
- size: init.size || input.size || 0,
- })
-
- const headers = new Headers(init.headers || input.headers || {})
-
- if (inputBody !== null && inputBody !== undefined &&
- !headers.has('Content-Type')) {
- const contentType = extractContentType(inputBody)
- if (contentType) {
- headers.append('Content-Type', contentType)
- }
- }
-
- const signal = 'signal' in init ? init.signal
- : null
-
- if (signal !== null && signal !== undefined && !isAbortSignal(signal)) {
- throw new TypeError('Expected signal must be an instanceof AbortSignal')
- }
-
- // TLS specific options that are handled by node
- const {
- ca,
- cert,
- ciphers,
- clientCertEngine,
- crl,
- dhparam,
- ecdhCurve,
- family,
- honorCipherOrder,
- key,
- passphrase,
- pfx,
- rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0',
- secureOptions,
- secureProtocol,
- servername,
- sessionIdContext,
- } = init
-
- this[INTERNALS] = {
- method,
- redirect: init.redirect || input.redirect || 'follow',
- headers,
- parsedURL,
- signal,
- ca,
- cert,
- ciphers,
- clientCertEngine,
- crl,
- dhparam,
- ecdhCurve,
- family,
- honorCipherOrder,
- key,
- passphrase,
- pfx,
- rejectUnauthorized,
- secureOptions,
- secureProtocol,
- servername,
- sessionIdContext,
- }
-
- // node-fetch-only options
- this.follow = init.follow !== undefined ? init.follow
- : input.follow !== undefined ? input.follow
- : 20
- this.compress = init.compress !== undefined ? init.compress
- : input.compress !== undefined ? input.compress
- : true
- this.counter = init.counter || input.counter || 0
- this.agent = init.agent || input.agent
- }
-
- get method () {
- return this[INTERNALS].method
- }
-
- get url () {
- return this[INTERNALS].parsedURL.toString()
- }
-
- get headers () {
- return this[INTERNALS].headers
- }
-
- get redirect () {
- return this[INTERNALS].redirect
- }
-
- get signal () {
- return this[INTERNALS].signal
- }
-
- clone () {
- return new Request(this)
- }
-
- get [Symbol.toStringTag] () {
- return 'Request'
- }
-
- static getNodeRequestOptions (request) {
- const parsedURL = request[INTERNALS].parsedURL
- const headers = new Headers(request[INTERNALS].headers)
-
- // fetch step 1.3
- if (!headers.has('Accept')) {
- headers.set('Accept', '*/*')
- }
-
- // Basic fetch
- if (!/^https?:$/.test(parsedURL.protocol)) {
- throw new TypeError('Only HTTP(S) protocols are supported')
- }
-
- if (request.signal &&
- Minipass.isStream(request.body) &&
- typeof request.body.destroy !== 'function') {
- throw new Error(
- 'Cancellation of streamed requests with AbortSignal is not supported')
- }
-
- // HTTP-network-or-cache fetch steps 2.4-2.7
- const contentLengthValue =
- (request.body === null || request.body === undefined) &&
- /^(POST|PUT)$/i.test(request.method) ? '0'
- : request.body !== null && request.body !== undefined
- ? getTotalBytes(request)
- : null
-
- if (contentLengthValue) {
- headers.set('Content-Length', contentLengthValue + '')
- }
-
- // HTTP-network-or-cache fetch step 2.11
- if (!headers.has('User-Agent')) {
- headers.set('User-Agent', defaultUserAgent)
- }
-
- // HTTP-network-or-cache fetch step 2.15
- if (request.compress && !headers.has('Accept-Encoding')) {
- headers.set('Accept-Encoding', 'gzip,deflate')
- }
-
- const agent = typeof request.agent === 'function'
- ? request.agent(parsedURL)
- : request.agent
-
- if (!headers.has('Connection') && !agent) {
- headers.set('Connection', 'close')
- }
-
- // TLS specific options that are handled by node
- const {
- ca,
- cert,
- ciphers,
- clientCertEngine,
- crl,
- dhparam,
- ecdhCurve,
- family,
- honorCipherOrder,
- key,
- passphrase,
- pfx,
- rejectUnauthorized,
- secureOptions,
- secureProtocol,
- servername,
- sessionIdContext,
- } = request[INTERNALS]
-
- // HTTP-network fetch step 4.2
- // chunked encoding is handled by Node.js
-
- // we cannot spread parsedURL directly, so we have to read each property one-by-one
- // and map them to the equivalent https?.request() method options
- const urlProps = {
- auth: parsedURL.username || parsedURL.password
- ? `${parsedURL.username}:${parsedURL.password}`
- : '',
- host: parsedURL.host,
- hostname: parsedURL.hostname,
- path: `${parsedURL.pathname}${parsedURL.search}`,
- port: parsedURL.port,
- protocol: parsedURL.protocol,
- }
-
- return {
- ...urlProps,
- method: request.method,
- headers: exportNodeCompatibleHeaders(headers),
- agent,
- ca,
- cert,
- ciphers,
- clientCertEngine,
- crl,
- dhparam,
- ecdhCurve,
- family,
- honorCipherOrder,
- key,
- passphrase,
- pfx,
- rejectUnauthorized,
- secureOptions,
- secureProtocol,
- servername,
- sessionIdContext,
- timeout: request.timeout,
- }
- }
-}
-
-module.exports = Request
-
-Object.defineProperties(Request.prototype, {
- method: { enumerable: true },
- url: { enumerable: true },
- headers: { enumerable: true },
- redirect: { enumerable: true },
- clone: { enumerable: true },
- signal: { enumerable: true },
-})
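
A sketch of how the deleted `Request` class feeds `http(s).request()`, with fetch defaults (Accept, User-Agent, Accept-Encoding, Connection) filled in by `getNodeRequestOptions` (URL illustrative):

```js
const { Request } = require('minipass-fetch')

const req = new Request('https://example.com/pkg?v=1', {
  method: 'POST',
  body: '{"ok":true}',
  headers: { 'content-type': 'application/json' },
})

const opts = Request.getNodeRequestOptions(req)
console.log(opts.method)              // 'POST'
console.log(opts.hostname, opts.path) // 'example.com' '/pkg?v=1'
console.log(opts.headers.Accept)      // ['*/*'] (defaulted)
```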
diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js
deleted file mode 100644
index 54cb52db3594a7..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js
+++ /dev/null
@@ -1,90 +0,0 @@
-'use strict'
-const http = require('http')
-const { STATUS_CODES } = http
-
-const Headers = require('./headers.js')
-const Body = require('./body.js')
-const { clone, extractContentType } = Body
-
-const INTERNALS = Symbol('Response internals')
-
-class Response extends Body {
- constructor (body = null, opts = {}) {
- super(body, opts)
-
- const status = opts.status || 200
- const headers = new Headers(opts.headers)
-
- if (body !== null && body !== undefined && !headers.has('Content-Type')) {
- const contentType = extractContentType(body)
- if (contentType) {
- headers.append('Content-Type', contentType)
- }
- }
-
- this[INTERNALS] = {
- url: opts.url,
- status,
- statusText: opts.statusText || STATUS_CODES[status],
- headers,
- counter: opts.counter,
- trailer: Promise.resolve(opts.trailer || new Headers()),
- }
- }
-
- get trailer () {
- return this[INTERNALS].trailer
- }
-
- get url () {
- return this[INTERNALS].url || ''
- }
-
- get status () {
- return this[INTERNALS].status
- }
-
- get ok () {
- return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300
- }
-
- get redirected () {
- return this[INTERNALS].counter > 0
- }
-
- get statusText () {
- return this[INTERNALS].statusText
- }
-
- get headers () {
- return this[INTERNALS].headers
- }
-
- clone () {
- return new Response(clone(this), {
- url: this.url,
- status: this.status,
- statusText: this.statusText,
- headers: this.headers,
- ok: this.ok,
- redirected: this.redirected,
- trailer: this.trailer,
- })
- }
-
- get [Symbol.toStringTag] () {
- return 'Response'
- }
-}
-
-module.exports = Response
-
-Object.defineProperties(Response.prototype, {
- url: { enumerable: true },
- status: { enumerable: true },
- ok: { enumerable: true },
- redirected: { enumerable: true },
- statusText: { enumerable: true },
- headers: { enumerable: true },
- clone: { enumerable: true },
-})
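
And the deleted `Response` pairs a body with WHATWG-style metadata:

```js
const { Response } = require('minipass-fetch')

const res = new Response('{"hello":"world"}', {
  status: 200,
  headers: { 'content-type': 'application/json' },
})

console.log(res.ok)         // true (status is in [200, 300))
console.log(res.statusText) // 'OK' (defaulted from http.STATUS_CODES)
res.json().then(obj => console.log(obj.hello)) // 'world'
```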
diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/package.json
deleted file mode 100644
index d491a7fba126d0..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/package.json
+++ /dev/null
@@ -1,69 +0,0 @@
-{
- "name": "minipass-fetch",
- "version": "3.0.5",
- "description": "An implementation of window.fetch in Node.js using Minipass streams",
- "license": "MIT",
- "main": "lib/index.js",
- "scripts": {
- "test:tls-fixtures": "./test/fixtures/tls/setup.sh",
- "test": "tap",
- "snap": "tap",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "postlint": "template-oss-check",
- "lintfix": "npm run lint -- --fix",
- "posttest": "npm run lint",
- "template-oss-apply": "template-oss-apply --force"
- },
- "tap": {
- "coverage-map": "map.js",
- "check-coverage": true,
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.22.0",
- "@ungap/url-search-params": "^0.2.2",
- "abort-controller": "^3.0.0",
- "abortcontroller-polyfill": "~1.7.3",
- "encoding": "^0.1.13",
- "form-data": "^4.0.0",
- "nock": "^13.2.4",
- "parted": "^0.1.1",
- "string-to-arraybuffer": "^1.0.2",
- "tap": "^16.0.0"
- },
- "dependencies": {
- "minipass": "^7.0.3",
- "minipass-sized": "^1.0.3",
- "minizlib": "^2.1.2"
- },
- "optionalDependencies": {
- "encoding": "^0.1.13"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/minipass-fetch.git"
- },
- "keywords": [
- "fetch",
- "minipass",
- "node-fetch",
- "window.fetch"
- ],
- "files": [
- "bin/",
- "lib/"
- ],
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "author": "GitHub Inc.",
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.22.0",
- "publish": "true"
- }
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/proc-log/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/proc-log/lib/index.js
deleted file mode 100644
index 86d90861078dab..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/proc-log/lib/index.js
+++ /dev/null
@@ -1,153 +0,0 @@
-const META = Symbol('proc-log.meta')
-module.exports = {
- META: META,
- output: {
- LEVELS: [
- 'standard',
- 'error',
- 'buffer',
- 'flush',
- ],
- KEYS: {
- standard: 'standard',
- error: 'error',
- buffer: 'buffer',
- flush: 'flush',
- },
- standard: function (...args) {
- return process.emit('output', 'standard', ...args)
- },
- error: function (...args) {
- return process.emit('output', 'error', ...args)
- },
- buffer: function (...args) {
- return process.emit('output', 'buffer', ...args)
- },
- flush: function (...args) {
- return process.emit('output', 'flush', ...args)
- },
- },
- log: {
- LEVELS: [
- 'notice',
- 'error',
- 'warn',
- 'info',
- 'verbose',
- 'http',
- 'silly',
- 'timing',
- 'pause',
- 'resume',
- ],
- KEYS: {
- notice: 'notice',
- error: 'error',
- warn: 'warn',
- info: 'info',
- verbose: 'verbose',
- http: 'http',
- silly: 'silly',
- timing: 'timing',
- pause: 'pause',
- resume: 'resume',
- },
- error: function (...args) {
- return process.emit('log', 'error', ...args)
- },
- notice: function (...args) {
- return process.emit('log', 'notice', ...args)
- },
- warn: function (...args) {
- return process.emit('log', 'warn', ...args)
- },
- info: function (...args) {
- return process.emit('log', 'info', ...args)
- },
- verbose: function (...args) {
- return process.emit('log', 'verbose', ...args)
- },
- http: function (...args) {
- return process.emit('log', 'http', ...args)
- },
- silly: function (...args) {
- return process.emit('log', 'silly', ...args)
- },
- timing: function (...args) {
- return process.emit('log', 'timing', ...args)
- },
- pause: function () {
- return process.emit('log', 'pause')
- },
- resume: function () {
- return process.emit('log', 'resume')
- },
- },
- time: {
- LEVELS: [
- 'start',
- 'end',
- ],
- KEYS: {
- start: 'start',
- end: 'end',
- },
- start: function (name, fn) {
- process.emit('time', 'start', name)
- function end () {
- return process.emit('time', 'end', name)
- }
- if (typeof fn === 'function') {
- const res = fn()
- if (res && res.finally) {
- return res.finally(end)
- }
- end()
- return res
- }
- return end
- },
- end: function (name) {
- return process.emit('time', 'end', name)
- },
- },
- input: {
- LEVELS: [
- 'start',
- 'end',
- 'read',
- ],
- KEYS: {
- start: 'start',
- end: 'end',
- read: 'read',
- },
- start: function (fn) {
- process.emit('input', 'start')
- function end () {
- return process.emit('input', 'end')
- }
- if (typeof fn === 'function') {
- const res = fn()
- if (res && res.finally) {
- return res.finally(end)
- }
- end()
- return res
- }
- return end
- },
- end: function () {
- return process.emit('input', 'end')
- },
- read: function (...args) {
- let resolve, reject
- const promise = new Promise((_resolve, _reject) => {
- resolve = _resolve
- reject = _reject
- })
- process.emit('input', 'read', resolve, reject, ...args)
- return promise
- },
- },
-}
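
proc-log itself never writes anything; each helper only emits an event on `process`, and whoever owns the terminal subscribes. For example:

```js
const { log, time } = require('proc-log')

// a consumer (e.g. a CLI display layer) attaches the actual logger
process.on('log', (level, ...args) => {
  console.error(`[${level}]`, ...args)
})

log.warn('cache', 'falling back to stale data')   // emits ('log', 'warn', ...)
log.http('fetch', 'GET https://registry.example') // emits ('log', 'http', ...)

// time.start() returns an end function; passing a fn instead times it,
// following promises via .finally()
const done = time.start('refresh-metadata')
// ... do work ...
done() // emits ('time', 'end', 'refresh-metadata')
```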
diff --git a/deps/npm/node_modules/tuf-js/node_modules/proc-log/package.json b/deps/npm/node_modules/tuf-js/node_modules/proc-log/package.json
deleted file mode 100644
index 4ab89102ecc9b5..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/proc-log/package.json
+++ /dev/null
@@ -1,45 +0,0 @@
-{
- "name": "proc-log",
- "version": "4.2.0",
- "files": [
- "bin/",
- "lib/"
- ],
- "main": "lib/index.js",
- "description": "just emit 'log' events on the process object",
- "repository": {
- "type": "git",
- "url": "https://github.com/npm/proc-log.git"
- },
- "author": "GitHub Inc.",
- "license": "ISC",
- "scripts": {
- "test": "tap",
- "snap": "tap",
- "posttest": "npm run lint",
- "postsnap": "eslint index.js test/*.js --fix",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "postlint": "template-oss-check",
- "lintfix": "npm run lint -- --fix",
- "template-oss-apply": "template-oss-apply --force"
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.21.3",
- "tap": "^16.0.1"
- },
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.21.3",
- "publish": true
- },
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- }
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/ssri/LICENSE.md b/deps/npm/node_modules/tuf-js/node_modules/ssri/LICENSE.md
deleted file mode 100644
index e335388869f50f..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/ssri/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright 2021 (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/tuf-js/node_modules/ssri/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/ssri/lib/index.js
deleted file mode 100644
index 7d749ed480fb98..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/ssri/lib/index.js
+++ /dev/null
@@ -1,580 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const { Minipass } = require('minipass')
-
-const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256']
-const DEFAULT_ALGORITHMS = ['sha512']
-
-// TODO: this should really be a hardcoded list of algorithms we support,
-// rather than [a-z0-9].
-const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i
-const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/
-const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/
-const VCHAR_REGEX = /^[\x21-\x7E]+$/
-
-const getOptString = options => options?.length ? `?${options.join('?')}` : ''
-
-class IntegrityStream extends Minipass {
- #emittedIntegrity
- #emittedSize
- #emittedVerified
-
- constructor (opts) {
- super()
- this.size = 0
- this.opts = opts
-
- // may be overridden later, but set now for class consistency
- this.#getOptions()
-
- // options used for calculating stream. can't be changed.
- if (opts?.algorithms) {
- this.algorithms = [...opts.algorithms]
- } else {
- this.algorithms = [...DEFAULT_ALGORITHMS]
- }
- if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) {
- this.algorithms.push(this.algorithm)
- }
-
- this.hashes = this.algorithms.map(crypto.createHash)
- }
-
- #getOptions () {
- // For verification
- this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null
- this.expectedSize = this.opts?.size
-
- if (!this.sri) {
- this.algorithm = null
- } else if (this.sri.isHash) {
- this.goodSri = true
- this.algorithm = this.sri.algorithm
- } else {
- this.goodSri = !this.sri.isEmpty()
- this.algorithm = this.sri.pickAlgorithm(this.opts)
- }
-
- this.digests = this.goodSri ? this.sri[this.algorithm] : null
- this.optString = getOptString(this.opts?.options)
- }
-
- on (ev, handler) {
- if (ev === 'size' && this.#emittedSize) {
- return handler(this.#emittedSize)
- }
-
- if (ev === 'integrity' && this.#emittedIntegrity) {
- return handler(this.#emittedIntegrity)
- }
-
- if (ev === 'verified' && this.#emittedVerified) {
- return handler(this.#emittedVerified)
- }
-
- return super.on(ev, handler)
- }
-
- emit (ev, data) {
- if (ev === 'end') {
- this.#onEnd()
- }
- return super.emit(ev, data)
- }
-
- write (data) {
- this.size += data.length
- this.hashes.forEach(h => h.update(data))
- return super.write(data)
- }
-
- #onEnd () {
- if (!this.goodSri) {
- this.#getOptions()
- }
- const newSri = parse(this.hashes.map((h, i) => {
- return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}`
- }).join(' '), this.opts)
- // Integrity verification mode
- const match = this.goodSri && newSri.match(this.sri, this.opts)
- if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) {
- /* eslint-disable-next-line max-len */
- const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`)
- err.code = 'EBADSIZE'
- err.found = this.size
- err.expected = this.expectedSize
- err.sri = this.sri
- this.emit('error', err)
- } else if (this.sri && !match) {
- /* eslint-disable-next-line max-len */
- const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = this.digests
- err.algorithm = this.algorithm
- err.sri = this.sri
- this.emit('error', err)
- } else {
- this.#emittedSize = this.size
- this.emit('size', this.size)
- this.#emittedIntegrity = newSri
- this.emit('integrity', newSri)
- if (match) {
- this.#emittedVerified = match
- this.emit('verified', match)
- }
- }
- }
-}
-
-class Hash {
- get isHash () {
- return true
- }
-
- constructor (hash, opts) {
- const strict = opts?.strict
- this.source = hash.trim()
-
- // set default values so that we make V8 happy to
- // always see a familiar object template.
- this.digest = ''
- this.algorithm = ''
- this.options = []
-
- // 3.1. Integrity metadata (called "Hash" by ssri)
- // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description
- const match = this.source.match(
- strict
- ? STRICT_SRI_REGEX
- : SRI_REGEX
- )
- if (!match) {
- return
- }
- if (strict && !SPEC_ALGORITHMS.includes(match[1])) {
- return
- }
- this.algorithm = match[1]
- this.digest = match[2]
-
- const rawOpts = match[3]
- if (rawOpts) {
- this.options = rawOpts.slice(1).split('?')
- }
- }
-
- hexDigest () {
- return this.digest && Buffer.from(this.digest, 'base64').toString('hex')
- }
-
- toJSON () {
- return this.toString()
- }
-
- match (integrity, opts) {
- const other = parse(integrity, opts)
- if (!other) {
- return false
- }
- if (other.isIntegrity) {
- const algo = other.pickAlgorithm(opts, [this.algorithm])
-
- if (!algo) {
- return false
- }
-
- const foundHash = other[algo].find(hash => hash.digest === this.digest)
-
- if (foundHash) {
- return foundHash
- }
-
- return false
- }
- return other.digest === this.digest ? other : false
- }
-
- toString (opts) {
- if (opts?.strict) {
- // Strict mode enforces the standard as close to the foot of the
- // letter as it can.
- if (!(
- // The spec has very restricted productions for algorithms.
- // https://www.w3.org/TR/CSP2/#source-list-syntax
- SPEC_ALGORITHMS.includes(this.algorithm) &&
- // Usually, if someone insists on using a "different" base64, we
- // leave it as-is, since there's multiple standards, and the
- // specified is not a URL-safe variant.
- // https://www.w3.org/TR/CSP2/#base64_value
- this.digest.match(BASE64_REGEX) &&
- // Option syntax is strictly visual chars.
- // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression
- // https://tools.ietf.org/html/rfc5234#appendix-B.1
- this.options.every(opt => opt.match(VCHAR_REGEX))
- )) {
- return ''
- }
- }
- return `${this.algorithm}-${this.digest}${getOptString(this.options)}`
- }
-}
-
-function integrityHashToString (toString, sep, opts, hashes) {
- const toStringIsNotEmpty = toString !== ''
-
- let shouldAddFirstSep = false
- let complement = ''
-
- const lastIndex = hashes.length - 1
-
- for (let i = 0; i < lastIndex; i++) {
- const hashString = Hash.prototype.toString.call(hashes[i], opts)
-
- if (hashString) {
- shouldAddFirstSep = true
-
- complement += hashString
- complement += sep
- }
- }
-
- const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts)
-
- if (finalHashString) {
- shouldAddFirstSep = true
- complement += finalHashString
- }
-
- if (toStringIsNotEmpty && shouldAddFirstSep) {
- return toString + sep + complement
- }
-
- return toString + complement
-}
-
-class Integrity {
- get isIntegrity () {
- return true
- }
-
- toJSON () {
- return this.toString()
- }
-
- isEmpty () {
- return Object.keys(this).length === 0
- }
-
- toString (opts) {
- let sep = opts?.sep || ' '
- let toString = ''
-
- if (opts?.strict) {
- // Entries must be separated by whitespace, according to spec.
- sep = sep.replace(/\S+/g, ' ')
-
- for (const hash of SPEC_ALGORITHMS) {
- if (this[hash]) {
- toString = integrityHashToString(toString, sep, opts, this[hash])
- }
- }
- } else {
- for (const hash of Object.keys(this)) {
- toString = integrityHashToString(toString, sep, opts, this[hash])
- }
- }
-
- return toString
- }
-
- concat (integrity, opts) {
- const other = typeof integrity === 'string'
- ? integrity
- : stringify(integrity, opts)
- return parse(`${this.toString(opts)} ${other}`, opts)
- }
-
- hexDigest () {
- return parse(this, { single: true }).hexDigest()
- }
-
- // add additional hashes to an integrity value, but prevent
- // *changing* an existing integrity hash.
- merge (integrity, opts) {
- const other = parse(integrity, opts)
- for (const algo in other) {
- if (this[algo]) {
- if (!this[algo].find(hash =>
- other[algo].find(otherhash =>
- hash.digest === otherhash.digest))) {
- throw new Error('hashes do not match, cannot update integrity')
- }
- } else {
- this[algo] = other[algo]
- }
- }
- }
-
- match (integrity, opts) {
- const other = parse(integrity, opts)
- if (!other) {
- return false
- }
- const algo = other.pickAlgorithm(opts, Object.keys(this))
- return (
- !!algo &&
- this[algo] &&
- other[algo] &&
- this[algo].find(hash =>
- other[algo].find(otherhash =>
- hash.digest === otherhash.digest
- )
- )
- ) || false
- }
-
- // Pick the highest priority algorithm present, optionally also limited to a
- // set of hashes found in another integrity. When limiting it may return
- // nothing.
- pickAlgorithm (opts, hashes) {
- const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash
- const keys = Object.keys(this).filter(k => {
- if (hashes?.length) {
- return hashes.includes(k)
- }
- return true
- })
- if (keys.length) {
- return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc)
- }
- // no intersection between this and hashes,
- return null
- }
-}
-
-module.exports.parse = parse
-function parse (sri, opts) {
- if (!sri) {
- return null
- }
- if (typeof sri === 'string') {
- return _parse(sri, opts)
- } else if (sri.algorithm && sri.digest) {
- const fullSri = new Integrity()
- fullSri[sri.algorithm] = [sri]
- return _parse(stringify(fullSri, opts), opts)
- } else {
- return _parse(stringify(sri, opts), opts)
- }
-}
-
-function _parse (integrity, opts) {
- // 3.4.3. Parse metadata
- // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
- if (opts?.single) {
- return new Hash(integrity, opts)
- }
- const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => {
- const hash = new Hash(string, opts)
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) {
- acc[algo] = []
- }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
- return hashes.isEmpty() ? null : hashes
-}
-
-module.exports.stringify = stringify
-function stringify (obj, opts) {
- if (obj.algorithm && obj.digest) {
- return Hash.prototype.toString.call(obj, opts)
- } else if (typeof obj === 'string') {
- return stringify(parse(obj, opts), opts)
- } else {
- return Integrity.prototype.toString.call(obj, opts)
- }
-}
-
-module.exports.fromHex = fromHex
-function fromHex (hexDigest, algorithm, opts) {
- const optString = getOptString(opts?.options)
- return parse(
- `${algorithm}-${
- Buffer.from(hexDigest, 'hex').toString('base64')
- }${optString}`, opts
- )
-}
-
-module.exports.fromData = fromData
-function fromData (data, opts) {
- const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS]
- const optString = getOptString(opts?.options)
- return algorithms.reduce((acc, algo) => {
- const digest = crypto.createHash(algo).update(data).digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- /* istanbul ignore else - it would be VERY strange if the string we
- * just calculated with an algo did not have an algo or digest.
- */
- if (hash.algorithm && hash.digest) {
- const hashAlgo = hash.algorithm
- if (!acc[hashAlgo]) {
- acc[hashAlgo] = []
- }
- acc[hashAlgo].push(hash)
- }
- return acc
- }, new Integrity())
-}
-
-module.exports.fromStream = fromStream
-function fromStream (stream, opts) {
- const istream = integrityStream(opts)
- return new Promise((resolve, reject) => {
- stream.pipe(istream)
- stream.on('error', reject)
- istream.on('error', reject)
- let sri
- istream.on('integrity', s => {
- sri = s
- })
- istream.on('end', () => resolve(sri))
- istream.resume()
- })
-}
-
-module.exports.checkData = checkData
-function checkData (data, sri, opts) {
- sri = parse(sri, opts)
- if (!sri || !Object.keys(sri).length) {
- if (opts?.error) {
- throw Object.assign(
- new Error('No valid integrity hashes to check against'), {
- code: 'EINTEGRITY',
- }
- )
- } else {
- return false
- }
- }
- const algorithm = sri.pickAlgorithm(opts)
- const digest = crypto.createHash(algorithm).update(data).digest('base64')
- const newSri = parse({ algorithm, digest })
- const match = newSri.match(sri, opts)
- opts = opts || {}
- if (match || !(opts.error)) {
- return match
- } else if (typeof opts.size === 'number' && (data.length !== opts.size)) {
- /* eslint-disable-next-line max-len */
- const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`)
- err.code = 'EBADSIZE'
- err.found = data.length
- err.expected = opts.size
- err.sri = sri
- throw err
- } else {
- /* eslint-disable-next-line max-len */
- const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = sri
- err.algorithm = algorithm
- err.sri = sri
- throw err
- }
-}
-
-module.exports.checkStream = checkStream
-function checkStream (stream, sri, opts) {
- opts = opts || Object.create(null)
- opts.integrity = sri
- sri = parse(sri, opts)
- if (!sri || !Object.keys(sri).length) {
- return Promise.reject(Object.assign(
- new Error('No valid integrity hashes to check against'), {
- code: 'EINTEGRITY',
- }
- ))
- }
- const checker = integrityStream(opts)
- return new Promise((resolve, reject) => {
- stream.pipe(checker)
- stream.on('error', reject)
- checker.on('error', reject)
- let verified
- checker.on('verified', s => {
- verified = s
- })
- checker.on('end', () => resolve(verified))
- checker.resume()
- })
-}
-
-module.exports.integrityStream = integrityStream
-function integrityStream (opts = Object.create(null)) {
- return new IntegrityStream(opts)
-}
-
-module.exports.create = createIntegrity
-function createIntegrity (opts) {
- const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS]
- const optString = getOptString(opts?.options)
-
- const hashes = algorithms.map(crypto.createHash)
-
- return {
- update: function (chunk, enc) {
- hashes.forEach(h => h.update(chunk, enc))
- return this
- },
- digest: function () {
- const integrity = algorithms.reduce((acc, algo) => {
- const digest = hashes.shift().digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- /* istanbul ignore else - it would be VERY strange if the hash we
- * just calculated with an algo did not have an algo or digest.
- */
- if (hash.algorithm && hash.digest) {
- const hashAlgo = hash.algorithm
- if (!acc[hashAlgo]) {
- acc[hashAlgo] = []
- }
- acc[hashAlgo].push(hash)
- }
- return acc
- }, new Integrity())
-
- return integrity
- },
- }
-}
-
-const NODE_HASHES = crypto.getHashes()
-
-// This is a Best Effort™ at a reasonable priority for hash algos
-const DEFAULT_PRIORITY = [
- 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
- // TODO - it's unclear _which_ of these Node will actually use as its name
- // for the algorithm, so we guesswork it based on the OpenSSL names.
- 'sha3',
- 'sha3-256', 'sha3-384', 'sha3-512',
- 'sha3_256', 'sha3_384', 'sha3_512',
-].filter(algo => NODE_HASHES.includes(algo))
-
-function getPrioritizedHash (algo1, algo2) {
- /* eslint-disable-next-line max-len */
- return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase())
- ? algo1
- : algo2
-}
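
Typical use of the deleted ssri API is to generate integrity metadata once and verify it later:

```js
const ssri = require('ssri')

// generate integrity metadata (sha512 unless algorithms are given)
const integrity = ssri.fromData('some tarball bytes', { algorithms: ['sha256'] })
console.log(integrity.toString()) // 'sha256-<base64 digest>'

// checkData returns the matching Hash entry, or false on mismatch
const match = ssri.checkData('some tarball bytes', integrity)
console.log(Boolean(match)) // true

// streaming verification rejects with err.code 'EINTEGRITY' on mismatch:
// await ssri.checkStream(fs.createReadStream(file), integrity)
```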
diff --git a/deps/npm/node_modules/tuf-js/node_modules/ssri/package.json b/deps/npm/node_modules/tuf-js/node_modules/ssri/package.json
deleted file mode 100644
index 28395414e4643c..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/ssri/package.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "name": "ssri",
- "version": "10.0.6",
- "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
- "main": "lib/index.js",
- "files": [
- "bin/",
- "lib/"
- ],
- "scripts": {
- "prerelease": "npm t",
- "postrelease": "npm publish",
- "posttest": "npm run lint",
- "test": "tap",
- "coverage": "tap",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
- "postlint": "template-oss-check",
- "template-oss-apply": "template-oss-apply --force",
- "lintfix": "npm run lint -- --fix",
- "snap": "tap"
- },
- "tap": {
- "check-coverage": true,
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/ssri.git"
- },
- "keywords": [
- "w3c",
- "web",
- "security",
- "integrity",
- "checksum",
- "hashing",
- "subresource integrity",
- "sri",
- "sri hash",
- "sri string",
- "sri generator",
- "html"
- ],
- "author": "GitHub Inc.",
- "license": "ISC",
- "dependencies": {
- "minipass": "^7.0.3"
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.22.0",
- "tap": "^16.0.1"
- },
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.22.0",
- "publish": "true"
- }
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-filename/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/LICENSE
deleted file mode 100644
index 69619c125ea7ef..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/unique-filename/LICENSE
+++ /dev/null
@@ -1,5 +0,0 @@
-Copyright npm, Inc
-
-Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-filename/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/lib/index.js
deleted file mode 100644
index d067d2e709809a..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/unique-filename/lib/index.js
+++ /dev/null
@@ -1,7 +0,0 @@
-var path = require('path')
-
-var uniqueSlug = require('unique-slug')
-
-module.exports = function (filepath, prefix, uniq) {
- return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq))
-}
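
Usage of the deleted helper: the slug is random unless `uniq` pins it (output path illustrative):

```js
const os = require('os')
const uniqueFilename = require('unique-filename')

// random: different on every call
console.log(uniqueFilename(os.tmpdir(), 'npm-cache'))
// e.g. /tmp/npm-cache-f9d62c18

// deterministic: the same `uniq` input always yields the same name
const a = uniqueFilename(os.tmpdir(), 'npm-cache', '/some/project')
const b = uniqueFilename(os.tmpdir(), 'npm-cache', '/some/project')
console.log(a === b) // true
```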
diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-filename/package.json b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/package.json
deleted file mode 100644
index b2fbf0666489a6..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/unique-filename/package.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "name": "unique-filename",
- "version": "3.0.0",
- "description": "Generate a unique filename for use in temporary directories or caches.",
- "main": "lib/index.js",
- "scripts": {
- "test": "tap",
- "lint": "eslint \"**/*.js\"",
- "postlint": "template-oss-check",
- "template-oss-apply": "template-oss-apply --force",
- "lintfix": "npm run lint -- --fix",
- "snap": "tap",
- "posttest": "npm run lint"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/npm/unique-filename.git"
- },
- "keywords": [],
- "author": "GitHub Inc.",
- "license": "ISC",
- "bugs": {
- "url": "https://github.com/iarna/unique-filename/issues"
- },
- "homepage": "https://github.com/iarna/unique-filename",
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.5.1",
- "tap": "^16.3.0"
- },
- "dependencies": {
- "unique-slug": "^4.0.0"
- },
- "files": [
- "bin/",
- "lib/"
- ],
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.5.1"
- },
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- }
-}
diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-slug/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/LICENSE
deleted file mode 100644
index 7953647e7760b8..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/unique-slug/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright npm, Inc
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-slug/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/lib/index.js
deleted file mode 100644
index 1bac84d95d7307..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/unique-slug/lib/index.js
+++ /dev/null
@@ -1,11 +0,0 @@
-'use strict'
-var MurmurHash3 = require('imurmurhash')
-
-module.exports = function (uniq) {
- if (uniq) {
- var hash = new MurmurHash3(uniq)
- return ('00000000' + hash.result().toString(16)).slice(-8)
- } else {
- return (Math.random().toString(16) + '0000000').slice(2, 10)
- }
-}
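
The behavior of the helper deleted above, in brief: given a seed it returns the first eight hex digits of a MurmurHash3 of that seed, so the result is deterministic; with no seed it returns eight random hex digits. A short sketch (outputs not computed here, only their properties):

// Illustrative sketch mirroring the deleted lib/index.js above.
const uniqueSlug = require('unique-slug')

// Deterministic: the same seed always produces the same 8 hex characters.
const a = uniqueSlug('some-seed')
const b = uniqueSlug('some-seed')
console.log(a === b) // true

// Random: each unseeded call yields a fresh 8-hex-character string.
console.log(uniqueSlug())
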
diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-slug/package.json b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/package.json
deleted file mode 100644
index 33732cdbb42859..00000000000000
--- a/deps/npm/node_modules/tuf-js/node_modules/unique-slug/package.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
- "name": "unique-slug",
- "version": "4.0.0",
- "description": "Generate a unique character string suitible for use in files and URLs.",
- "main": "lib/index.js",
- "scripts": {
- "test": "tap",
- "lint": "eslint \"**/*.js\"",
- "postlint": "template-oss-check",
- "template-oss-apply": "template-oss-apply --force",
- "lintfix": "npm run lint -- --fix",
- "snap": "tap",
- "posttest": "npm run lint"
- },
- "keywords": [],
- "author": "GitHub Inc.",
- "license": "ISC",
- "devDependencies": {
- "@npmcli/eslint-config": "^3.1.0",
- "@npmcli/template-oss": "4.5.1",
- "tap": "^16.3.0"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/npm/unique-slug.git"
- },
- "dependencies": {
- "imurmurhash": "^0.1.4"
- },
- "files": [
- "bin/",
- "lib/"
- ],
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.5.1"
- },
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- }
-}
diff --git a/deps/npm/node_modules/tuf-js/package.json b/deps/npm/node_modules/tuf-js/package.json
index 9280719230d9ab..e79a3d45f3f06a 100644
--- a/deps/npm/node_modules/tuf-js/package.json
+++ b/deps/npm/node_modules/tuf-js/package.json
@@ -1,12 +1,12 @@
{
"name": "tuf-js",
- "version": "2.2.1",
+ "version": "3.0.1",
"description": "JavaScript implementation of The Update Framework (TUF)",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
"build": "tsc --build",
- "clean": "rm -rf dist",
+ "clean": "rm -rf dist && rm tsconfig.tsbuildinfo",
"test": "jest"
},
"repository": {
@@ -28,16 +28,16 @@
},
"homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme",
"devDependencies": {
- "@tufjs/repo-mock": "2.0.1",
+ "@tufjs/repo-mock": "3.0.1",
"@types/debug": "^4.1.12",
"@types/make-fetch-happen": "^10.0.4"
},
"dependencies": {
- "@tufjs/models": "2.0.1",
- "debug": "^4.3.4",
- "make-fetch-happen": "^13.0.1"
+ "@tufjs/models": "3.0.1",
+ "debug": "^4.3.6",
+ "make-fetch-happen": "^14.0.1"
},
"engines": {
- "node": "^16.14.0 || >=18.0.0"
+ "node": "^18.17.0 || >=20.5.0"
}
}
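
This bump also explains the unique-filename/unique-slug deletions above: tuf-js@2.x required make-fetch-happen@^13, which dragged its own copies of those helpers into tuf-js/node_modules, while tuf-js@3.0.1 moves to make-fetch-happen@^14, the same major that npm itself depends on, so the nested copies deduplicate against the top-level tree. A sketch of the range arithmetic (using the node-semver package; the version ranges are quoted from this patch):

// Why the nested copies dedupe now; `semver` is assumed to be installed.
const semver = require('semver')

// npm's own range for make-fetch-happen (bumped to ^14.0.3 later in this patch):
const topLevel = '^14.0.3'

// tuf-js 2.x asked for ^13.0.1, which cannot overlap a ^14 range,
// so the installer had to keep a private nested copy:
console.log(semver.intersects('^13.0.1', topLevel)) // false

// tuf-js 3.0.1 asks for ^14.0.1, which overlaps npm's range,
// so one hoisted copy satisfies both:
console.log(semver.intersects('^14.0.1', topLevel)) // true
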
diff --git a/deps/npm/node_modules/wrap-ansi/node_modules/ansi-regex/index.js b/deps/npm/node_modules/wrap-ansi/node_modules/ansi-regex/index.js
index 130a0929b8ce8c..ddfdba39a783a4 100644
--- a/deps/npm/node_modules/wrap-ansi/node_modules/ansi-regex/index.js
+++ b/deps/npm/node_modules/wrap-ansi/node_modules/ansi-regex/index.js
@@ -1,7 +1,9 @@
export default function ansiRegex({onlyFirst = false} = {}) {
+ // Valid string terminator sequences are BEL, ESC\, and 0x9c
+ const ST = '(?:\\u0007|\\u001B\\u005C|\\u009C)';
const pattern = [
- '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
- '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
+ `[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?${ST})`,
+ '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))',
].join('|');
return new RegExp(pattern, onlyFirst ? undefined : 'g');
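
The substance of this fix: the old pattern recognized only BEL (\u0007) as the terminator of an escape sequence, so sequences ending in the standard ST forms, ESC \ or the single byte 0x9c, were left partially unmatched; it also widens the CSI final-byte run to q-u (adding s and u). A sketch of an input that the updated pattern strips cleanly (the hyperlink string is an illustrative example, not from this patch):

// Assumes the updated ansi-regex above; ESM, per the package's "type": "module".
import ansiRegex from 'ansi-regex';

// An OSC 8 terminal hyperlink terminated by ST (ESC \) rather than BEL:
const link = '\u001B]8;;https://example.com\u001B\\click me\u001B]8;;\u001B\\';

// With the ST-aware pattern only the visible text survives:
console.log(link.replace(ansiRegex(), '')); // 'click me'
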
diff --git a/deps/npm/node_modules/wrap-ansi/node_modules/ansi-regex/package.json b/deps/npm/node_modules/wrap-ansi/node_modules/ansi-regex/package.json
index 7bbb563bf2a70a..49f3f61021512b 100644
--- a/deps/npm/node_modules/wrap-ansi/node_modules/ansi-regex/package.json
+++ b/deps/npm/node_modules/wrap-ansi/node_modules/ansi-regex/package.json
@@ -1,6 +1,6 @@
{
"name": "ansi-regex",
- "version": "6.0.1",
+ "version": "6.1.0",
"description": "Regular expression for matching ANSI escape codes",
"license": "MIT",
"repository": "chalk/ansi-regex",
@@ -12,6 +12,8 @@
},
"type": "module",
"exports": "./index.js",
+ "types": "./index.d.ts",
+ "sideEffects": false,
"engines": {
"node": ">=12"
},
@@ -51,8 +53,9 @@
"pattern"
],
"devDependencies": {
+ "ansi-escapes": "^5.0.0",
"ava": "^3.15.0",
- "tsd": "^0.14.0",
- "xo": "^0.38.2"
+ "tsd": "^0.21.0",
+ "xo": "^0.54.2"
}
}
diff --git a/deps/npm/package.json b/deps/npm/package.json
index c92578506b30ac..2f5676d81bde93 100644
--- a/deps/npm/package.json
+++ b/deps/npm/package.json
@@ -1,5 +1,5 @@
{
- "version": "10.9.0",
+ "version": "10.9.1",
"name": "npm",
"description": "a package manager for JavaScript",
"workspaces": [
@@ -57,21 +57,21 @@
"@npmcli/fs": "^4.0.0",
"@npmcli/map-workspaces": "^4.0.1",
"@npmcli/package-json": "^6.0.1",
- "@npmcli/promise-spawn": "^8.0.1",
+ "@npmcli/promise-spawn": "^8.0.2",
"@npmcli/redact": "^3.0.0",
"@npmcli/run-script": "^9.0.1",
- "@sigstore/tuf": "^2.3.4",
+ "@sigstore/tuf": "^3.0.0",
"abbrev": "^3.0.0",
"archy": "~1.0.0",
"cacache": "^19.0.1",
"chalk": "^5.3.0",
- "ci-info": "^4.0.0",
+ "ci-info": "^4.1.0",
"cli-columns": "^4.0.0",
"fastest-levenshtein": "^1.0.16",
"fs-minipass": "^3.0.3",
"glob": "^10.4.5",
"graceful-fs": "^4.2.11",
- "hosted-git-info": "^8.0.0",
+ "hosted-git-info": "^8.0.2",
"ini": "^5.0.0",
"init-package-json": "^7.0.1",
"is-cidr": "^5.1.0",
@@ -83,11 +83,11 @@
"libnpmhook": "^11.0.0",
"libnpmorg": "^7.0.0",
"libnpmpack": "^8.0.0",
- "libnpmpublish": "^10.0.0",
+ "libnpmpublish": "^10.0.1",
"libnpmsearch": "^8.0.0",
"libnpmteam": "^7.0.0",
"libnpmversion": "^7.0.0",
- "make-fetch-happen": "^14.0.1",
+ "make-fetch-happen": "^14.0.3",
"minimatch": "^9.0.5",
"minipass": "^7.1.1",
"minipass-pipeline": "^1.2.4",
@@ -96,14 +96,14 @@
"nopt": "^8.0.0",
"normalize-package-data": "^7.0.0",
"npm-audit-report": "^6.0.0",
- "npm-install-checks": "^7.1.0",
+ "npm-install-checks": "^7.1.1",
"npm-package-arg": "^12.0.0",
"npm-pick-manifest": "^10.0.0",
"npm-profile": "^11.0.1",
- "npm-registry-fetch": "^18.0.1",
+ "npm-registry-fetch": "^18.0.2",
"npm-user-validate": "^3.0.0",
"p-map": "^4.0.0",
- "pacote": "^19.0.0",
+ "pacote": "^19.0.1",
"parse-conflict-json": "^4.0.0",
"proc-log": "^5.0.0",
"qrcode-terminal": "^0.12.0",