diff --git a/.changeset/bright-keys-march.md b/.changeset/bright-keys-march.md new file mode 100644 index 000000000000..e1ab85956cd3 --- /dev/null +++ b/.changeset/bright-keys-march.md @@ -0,0 +1,5 @@ +--- +"wrangler": patch +--- + +chore: removes --experimental-versions flag, as versions is now GA. diff --git a/.changeset/c3-frameworks-update-7617.md b/.changeset/c3-frameworks-update-7617.md deleted file mode 100644 index 4d886587610f..000000000000 --- a/.changeset/c3-frameworks-update-7617.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions have been updated: - -| Dependency | From | To | -| ----------- | ----- | ----- | -| create-vite | 6.0.1 | 6.1.0 | diff --git a/.changeset/c3-frameworks-update-7618.md b/.changeset/c3-frameworks-update-7618.md deleted file mode 100644 index d41821a15419..000000000000 --- a/.changeset/c3-frameworks-update-7618.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions have been updated: - -| Dependency | From | To | -| ---------- | ----- | ----- | -| sv | 0.6.7 | 0.6.9 | diff --git a/.changeset/c3-frameworks-update-7619.md b/.changeset/c3-frameworks-update-7619.md deleted file mode 100644 index e102a9563173..000000000000 --- a/.changeset/c3-frameworks-update-7619.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions have been updated: - -| Dependency | From | To | -| ---------- | ------ | ------ | -| gatsby | 5.14.0 | 5.14.1 | diff --git a/.changeset/c3-frameworks-update-7620.md b/.changeset/c3-frameworks-update-7620.md deleted file mode 100644 index e7b737e50a83..000000000000 --- a/.changeset/c3-frameworks-update-7620.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions have been updated: - -| Dependency | From | To | -| ---------- | ------ | ------ | -| nuxi | 3.16.0 | 3.17.1 | diff --git a/.changeset/c3-frameworks-update-7621.md b/.changeset/c3-frameworks-update-7621.md deleted file mode 100644 index 267fd1e4fa94..000000000000 --- a/.changeset/c3-frameworks-update-7621.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions have been updated: - -| Dependency | From | To | -| --------------- | ------ | ------ | -| @angular/create | 19.0.5 | 19.0.6 | diff --git a/.changeset/c3-frameworks-update-7622.md b/.changeset/c3-frameworks-update-7622.md deleted file mode 100644 index 5e98547324cd..000000000000 --- a/.changeset/c3-frameworks-update-7622.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions have been updated: - -| Dependency | From | To | -| ------------ | ------ | ------ | -| create-remix | 2.15.1 | 2.15.2 | diff --git a/.changeset/c3-frameworks-update-7623.md b/.changeset/c3-frameworks-update-7623.md deleted file mode 100644 index 76c430cd10d2..000000000000 --- a/.changeset/c3-frameworks-update-7623.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions 
have been updated: - -| Dependency | From | To | -| ----------- | ------ | ------ | -| create-qwik | 1.11.0 | 1.12.0 | diff --git a/.changeset/c3-frameworks-update-7624.md b/.changeset/c3-frameworks-update-7624.md deleted file mode 100644 index e3bfa3a8cac4..000000000000 --- a/.changeset/c3-frameworks-update-7624.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions have been updated: - -| Dependency | From | To | -| --------------- | ------ | ------ | -| create-next-app | 15.1.0 | 15.1.2 | diff --git a/.changeset/c3-frameworks-update-7643.md b/.changeset/c3-frameworks-update-7643.md deleted file mode 100644 index d00ddb867f1f..000000000000 --- a/.changeset/c3-frameworks-update-7643.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions have been updated: - -| Dependency | From | To | -| ---------- | ------ | ------ | -| create-vue | 3.12.2 | 3.13.0 | diff --git a/.changeset/c3-frameworks-update-7644.md b/.changeset/c3-frameworks-update-7644.md deleted file mode 100644 index f572b8bf7de5..000000000000 --- a/.changeset/c3-frameworks-update-7644.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions have been updated: - -| Dependency | From | To | -| ----------- | ----- | ----- | -| create-vite | 6.1.0 | 6.1.1 | diff --git a/.changeset/c3-frameworks-update-7645.md b/.changeset/c3-frameworks-update-7645.md deleted file mode 100644 index 44ca5e677226..000000000000 --- a/.changeset/c3-frameworks-update-7645.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions have been updated: - -| Dependency | From | To | -| --------------- | ------ | ------ | -| create-next-app | 15.1.2 | 15.1.3 | diff --git a/.changeset/c3-frameworks-update-7646.md b/.changeset/c3-frameworks-update-7646.md deleted file mode 100644 index 13cd1a002de1..000000000000 --- a/.changeset/c3-frameworks-update-7646.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions have been updated: - -| Dependency | From | To | -| ---------- | ------ | ------ | -| nuxi | 3.17.1 | 3.17.2 | diff --git a/.changeset/c3-frameworks-update-7647.md b/.changeset/c3-frameworks-update-7647.md deleted file mode 100644 index 3d076e79018c..000000000000 --- a/.changeset/c3-frameworks-update-7647.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"create-cloudflare": patch ---- - -chore: update dependencies of "create-cloudflare" package - -The following dependency versions have been updated: - -| Dependency | From | To | -| ---------- | ----- | ------ | -| sv | 0.6.9 | 0.6.10 | diff --git a/.changeset/chilled-mugs-fail.md b/.changeset/chilled-mugs-fail.md deleted file mode 100644 index 570d40f6d857..000000000000 --- a/.changeset/chilled-mugs-fail.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -"wrangler": patch ---- - -chore(wrangler): update unenv dependency version - -unenv now uses the workerd implementation on node:dns -See the [unjs/unenv#376](https://github.com/unjs/unenv/pull/376) diff --git a/.changeset/empty-cars-provide.md b/.changeset/empty-cars-provide.md deleted file mode 100644 index 
2b6f0d28a121..000000000000 --- a/.changeset/empty-cars-provide.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@cloudflare/workers-shared": patch ---- - -fix: resolves an issue where a malformed path such as `https://example.com/%A0` would cause an unhandled error diff --git a/.changeset/fuzzy-nails-invent.md b/.changeset/fuzzy-nails-invent.md new file mode 100644 index 000000000000..e21d23648c10 --- /dev/null +++ b/.changeset/fuzzy-nails-invent.md @@ -0,0 +1,5 @@ +--- +"@cloudflare/pages-shared": minor +--- + +feat: Return a 304 Not Modified response when matching an asset preservation cache request if appropriate diff --git a/.changeset/gorgeous-kids-protect.md b/.changeset/gorgeous-kids-protect.md deleted file mode 100644 index 9386ce52c15d..000000000000 --- a/.changeset/gorgeous-kids-protect.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"create-cloudflare": patch ---- - -fix and update experimental Next.js template to work on `@opennextjs/cloudflare@0.3.x` diff --git a/.changeset/green-dodos-push.md b/.changeset/green-dodos-push.md new file mode 100644 index 000000000000..00f14ab3b2a2 --- /dev/null +++ b/.changeset/green-dodos-push.md @@ -0,0 +1,5 @@ +--- +"miniflare": patch +--- + +Fix D1 exports to properly pad HEX strings for binary values. diff --git a/.changeset/green-socks-trade.md b/.changeset/green-socks-trade.md new file mode 100644 index 000000000000..56a625bfceb4 --- /dev/null +++ b/.changeset/green-socks-trade.md @@ -0,0 +1,5 @@ +--- +"@cloudflare/pages-shared": patch +--- + +chore: Remove now-unused asset preservation cache (v1) diff --git a/.changeset/healthy-bags-cry.md b/.changeset/healthy-bags-cry.md new file mode 100644 index 000000000000..7a4dc6d710d0 --- /dev/null +++ b/.changeset/healthy-bags-cry.md @@ -0,0 +1,5 @@ +--- +"create-cloudflare": minor +--- + +chore: remove nodejs_compat flag from basic C3 templates diff --git a/.changeset/hungry-llamas-repair.md b/.changeset/hungry-llamas-repair.md new file mode 100644 index 000000000000..d726a47e5e57 --- /dev/null +++ b/.changeset/hungry-llamas-repair.md @@ -0,0 +1,5 @@ +--- +"wrangler": patch +--- + +include the top level Worker name in the parsed config structure diff --git a/.changeset/long-houses-mate.md b/.changeset/long-houses-mate.md deleted file mode 100644 index e3a4f88391c4..000000000000 --- a/.changeset/long-houses-mate.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"wrangler": minor ---- - -feat: Capture Workers with static assets in the telemetry data - -We want to measure accurately what this number of Workers + Assets projects running in remote mode is, as this number will be a very helpful data point down the road, when more decisions around remote mode will have to be taken. - -These changes add this kind of insight to our telemetry data, by capturing whether the command running is in the context of a Workers + Assets project. - -N.B. With these changes in place we will be capturing the Workers + Assets context for all commands, not just wrangler dev --remote. diff --git a/.changeset/nervous-scissors-suffer.md b/.changeset/nervous-scissors-suffer.md new file mode 100644 index 000000000000..6da7fc0e0910 --- /dev/null +++ b/.changeset/nervous-scissors-suffer.md @@ -0,0 +1,7 @@ +--- +"@cloudflare/unenv-preset": minor +--- + +chore(unenv-preset): drop unused .cjs files + +Only .mjs files are used. 
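For context on the `green-dodos-push` changeset above ("Fix D1 exports to properly pad HEX strings for binary values"), here is a minimal sketch of why per-byte zero padding matters when binary values are serialized as hex literals. It is an illustration only, not the actual miniflare export code; the helper name `toHexLiteral` and the SQLite-style `X'...'` literal form are assumptions made for the example.

```ts
// Illustration only: serializing a binary value as a SQLite hex (blob) literal.
// Without padStart(2, "0"), a byte like 0x0a would render as "a" instead of "0a",
// shifting every following byte and corrupting the exported value on re-import.
function toHexLiteral(bytes: Uint8Array): string {
	const hex = Array.from(bytes, (b) => b.toString(16).padStart(2, "0")).join("");
	return `X'${hex}'`;
}

console.log(toHexLiteral(new Uint8Array([0, 10, 255]))); // X'000aff'
```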
diff --git a/.changeset/nice-pandas-wait.md b/.changeset/nice-pandas-wait.md deleted file mode 100644 index b267aeb133e1..000000000000 --- a/.changeset/nice-pandas-wait.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -"wrangler": patch ---- - -chore(wrangler): update unenv dependency version - -The updated unenv contains a fix for the module resolution, -see . -That bug prevented us from using unenv module resolution, -see . diff --git a/.changeset/polite-goats-behave.md b/.changeset/polite-goats-behave.md new file mode 100644 index 000000000000..330b226c1edf --- /dev/null +++ b/.changeset/polite-goats-behave.md @@ -0,0 +1,5 @@ +--- +"@cloudflare/vitest-pool-workers": minor +--- + +chore: add nodejs_compat by default in Vitest Pool Workers diff --git a/.changeset/proud-rules-try.md b/.changeset/proud-rules-try.md new file mode 100644 index 000000000000..a3bdede4a6bb --- /dev/null +++ b/.changeset/proud-rules-try.md @@ -0,0 +1,5 @@ +--- +"@cloudflare/pages-shared": patch +--- + +fix: Store an empty result when Early Hints parsing returns nothing or errors. Previously, we weren't storing anything, which resulted in Early Hints being parsed on every request. diff --git a/.changeset/red-lamps-obey.md b/.changeset/red-lamps-obey.md new file mode 100644 index 000000000000..93c477bcab5c --- /dev/null +++ b/.changeset/red-lamps-obey.md @@ -0,0 +1,5 @@ +--- +"miniflare": minor +--- + +Support the `CF-Connecting-IP` header, which will be available in your Worker to determine the IP address of the client that initiated a request. diff --git a/.changeset/red-pillows-provide.md b/.changeset/red-pillows-provide.md new file mode 100644 index 000000000000..78589c191409 --- /dev/null +++ b/.changeset/red-pillows-provide.md @@ -0,0 +1,5 @@ +--- +"@cloudflare/chrome-devtools-patches": patch +--- + +chore: rebases patches on latest devtools head diff --git a/.changeset/thin-pots-camp.md b/.changeset/thin-pots-camp.md new file mode 100644 index 000000000000..e578c1d1ca85 --- /dev/null +++ b/.changeset/thin-pots-camp.md @@ -0,0 +1,84 @@ +--- +"wrangler": minor +--- + +feat: add support for redirecting Wrangler to a generated config when running deploy-related commands + +This new feature is designed for build tools and frameworks to provide a deploy-specific configuration, +which Wrangler can use instead of user configuration when running deploy-related commands. +It is not expected that developers of Workers will need to use this feature directly. + +### Affected commands + +The commands that use this feature are: + +- `wrangler deploy` +- `wrangler dev` +- `wrangler versions upload` +- `wrangler versions deploy` +- `wrangler pages deploy` +- `wrangler pages build` +- `wrangler pages build-env` + +### Config redirect file + +When running these commands, Wrangler will look up the directory tree from the current working directory for a file at the path `.wrangler/deploy/config.json`. This file must contain only a single JSON object of the form: + +```json +{ "configPath": "../../path/to/wrangler.json" } +``` + +When this file exists, Wrangler will follow the `configPath` (relative to the `.wrangler/deploy/config.json` file) to find an alternative Wrangler configuration file to load and use as part of this command. + +When this happens, Wrangler will display a warning to the user to indicate that the configuration has been redirected to a different file than the user's configuration file. + +### Custom build tool example + +A common approach that a build tool might choose to implement is outlined in the steps below.
+ +- The user writes code that uses Cloudflare Workers resources, configured via a user `wrangler.toml` file. + + ```toml + name = "my-worker" + main = "src/index.ts" + [[kv_namespaces]] + binding = "" + id = "" + ``` + + Note that this configuration points `main` at the user code entry-point. + +- The user runs a custom build, which might read the `wrangler.toml` to find the entry-point: + + ```bash + > my-tool build + ``` + +- This tool generates a `dist` directory that contains both compiled code and a new deployment configuration file, but also a `.wrangler/deploy/config.json` file that redirects Wrangler to this new deployment configuration file: + + ```plain + - dist + - index.js + - wrangler.json + - .wrangler + - deploy + - config.json + ``` + + The `dist/wrangler.json` will contain: + + ```json + { + "name": "my-worker", + "main": "./index.js", + "kv_namespaces": [{ "binding": "", "id": "" }] + } + ``` + + And the `.wrangler/deploy/config.json` will contain: + + ```json + { + "configPath": "../../dist/wrangler.json" + } + ``` diff --git a/.changeset/three-chefs-bathe.md b/.changeset/three-chefs-bathe.md new file mode 100644 index 000000000000..2c816c7df720 --- /dev/null +++ b/.changeset/three-chefs-bathe.md @@ -0,0 +1,5 @@ +--- +"wrangler": patch +--- + +Remove defaults for `batch-max-*` pipeline parameters and define value ranges diff --git a/.changeset/twelve-fireants-hunt.md b/.changeset/twelve-fireants-hunt.md new file mode 100644 index 000000000000..164aa3504679 --- /dev/null +++ b/.changeset/twelve-fireants-hunt.md @@ -0,0 +1,11 @@ +--- +"wrangler": patch +--- + +allow overriding the unenv preset. + +By default, wrangler uses the bundled unenv preset. + +Setting `WRANGLER_UNENV_RESOLVE_PATHS` allows using another version of the preset. +Those paths are used when resolving the unenv module identifiers to absolute paths. +This can be used to test a development version. diff --git a/.changeset/young-icons-rescue.md b/.changeset/young-icons-rescue.md new file mode 100644 index 000000000000..a1e3851c1bdf --- /dev/null +++ b/.changeset/young-icons-rescue.md @@ -0,0 +1,5 @@ +--- +"wrangler": minor +--- + +Default wrangler d1 export to --local rather than failing diff --git a/.github/version-script.js b/.github/version-script.js index ec49ba2523b2..fb1380702229 100644 --- a/.github/version-script.js +++ b/.github/version-script.js @@ -16,7 +16,7 @@ const { execSync } = require("child_process"); try { const packageName = getArgs()[0] ??
"wrangler"; const packageJsonPath = `./packages/${packageName}/package.json`; - const pkg = JSON.parse(readFileSync(packageJsonPath)); + const pkg = JSON.parse(readFileSync(packageJsonPath, "utf-8")); const stdout = execSync("git rev-parse --short HEAD", { encoding: "utf8" }); pkg.version = "0.0.0-" + stdout.trim(); writeFileSync(packageJsonPath, JSON.stringify(pkg, null, "\t") + "\n"); diff --git a/.github/workflows/prereleases.yml b/.github/workflows/prereleases.yml index 2a211a5d68a8..9af0452867f4 100644 --- a/.github/workflows/prereleases.yml +++ b/.github/workflows/prereleases.yml @@ -34,6 +34,7 @@ jobs: node .github/version-script.js miniflare node .github/version-script.js create-cloudflare node .github/version-script.js workers-shared + node .github/version-script.js unenv-preset - name: Build run: pnpm run build @@ -73,6 +74,11 @@ jobs: env: NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} + - name: Publish unenv-preset@beta to NPM + run: pnpm --filter unenv-preset publish --tag beta + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} + - name: Get Package Version run: echo "WRANGLER_VERSION=$(npm view wrangler@beta version)" >> $GITHUB_ENV working-directory: packages/wrangler diff --git a/.prettierignore b/.prettierignore index d55c0a6c1427..aee96a33703f 100644 --- a/.prettierignore +++ b/.prettierignore @@ -40,3 +40,7 @@ templates/*/dist # This file intentionally has a syntax error fixtures/interactive-dev-tests/src/startup-error.ts + +# These are generated by the build step +fixtures/pages-redirected-config/build/* +fixtures/redirected-config-worker/build/* \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6b44f7811ab5..389d1ddbaf54 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -23,6 +23,8 @@ Wrangler is built and run on the Node.js JavaScript runtime. ### Fork and clone this repository +#### For External Contributors + Any contributions you make will be via [Pull Requests](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests) on [GitHub](https://github.com/) developed in a local git repository and pushed to your own fork of the repository. - Ensure you have [created an account](https://docs.github.com/en/get-started/onboarding/getting-started-with-your-github-account) on GitHub. @@ -57,6 +59,26 @@ Any contributions you make will be via [Pull Requests](https://docs.github.com/e Already up to date. ``` +#### For Cloudflare Employees + +If you are a Cloudflare employee, you do not need to fork the repository - instead, you can clone the main repository directly. This allows you to push branches directly to the upstream repository. + +If you find that you don't have write access, please reach out to your manager or the Wrangler team internally. 
+ +Clone the main repository: + +```sh +git clone https://github.com/cloudflare/workers-sdk.git +cd workers-sdk +``` + +Create new branches directly in the cloned repository and push them to the main repository: + +```sh +git checkout -b +git push origin +``` + ### Install dependencies **Warning** diff --git a/fixtures/additional-modules/package.json b/fixtures/additional-modules/package.json index 0fdc48a07bcf..50c543085cea 100644 --- a/fixtures/additional-modules/package.json +++ b/fixtures/additional-modules/package.json @@ -12,7 +12,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "typescript": "catalog:default", "undici": "catalog:default", "vitest": "catalog:default", diff --git a/fixtures/ai-app/tests/index.test.ts b/fixtures/ai-app/tests/index.test.ts index 7202daf5fabb..3eb4ff54ca80 100644 --- a/fixtures/ai-app/tests/index.test.ts +++ b/fixtures/ai-app/tests/index.test.ts @@ -33,7 +33,9 @@ describe("'wrangler dev' correctly renders pages", () => { expect((content as Record).binding).toEqual({ aiGatewayLogId: null, fetcher: {}, + lastRequestHttpStatusCode: null, lastRequestId: null, + lastRequestInternalStatusCode: null, logs: [], options: {}, }); diff --git a/fixtures/asset-config/package.json b/fixtures/asset-config/package.json index 735bcb5856ed..cbeebc40885e 100644 --- a/fixtures/asset-config/package.json +++ b/fixtures/asset-config/package.json @@ -12,7 +12,7 @@ "devDependencies": { "@cloudflare/vitest-pool-workers": "workspace:*", "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "run-script-os": "^1.1.6", "typescript": "catalog:default", "undici": "catalog:default", diff --git a/fixtures/get-platform-proxy/package.json b/fixtures/get-platform-proxy/package.json index a547e6e2c514..91544bfac66b 100644 --- a/fixtures/get-platform-proxy/package.json +++ b/fixtures/get-platform-proxy/package.json @@ -9,7 +9,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "typescript": "catalog:default", "undici": "catalog:default", "vitest": "catalog:default", diff --git a/fixtures/import-npm/package-lock.json b/fixtures/import-npm/package-lock.json deleted file mode 100644 index 11234218d1a9..000000000000 --- a/fixtures/import-npm/package-lock.json +++ /dev/null @@ -1,185 +0,0 @@ -{ - "name": "import-npm", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "import-npm", - "workspaces": [ - "packages/*" - ] - }, - "../../packages/workers-tsconfig": { - "name": "@cloudflare/workers-tsconfig", - "version": "0.0.0", - "dev": true - }, - "../../packages/wrangler": { - "version": "3.95.0", - "dev": true, - "license": "MIT OR Apache-2.0", - "dependencies": { - "@cloudflare/kv-asset-handler": "workspace:*", - "@cloudflare/workers-shared": "workspace:*", - "@esbuild-plugins/node-globals-polyfill": "^0.2.3", - "@esbuild-plugins/node-modules-polyfill": "^0.2.2", - "blake3-wasm": "^2.1.5", - "chokidar": "^4.0.1", - "date-fns": "^4.1.0", - "esbuild": "0.17.19", - "itty-time": "^1.0.6", - "miniflare": "workspace:*", - "nanoid": "^3.3.3", - "path-to-regexp": "^6.3.0", - "resolve": "^1.22.8", - "selfsigned": "^2.0.1", - "source-map": "^0.6.1", - "unenv": "npm:unenv-nightly@2.0.0-20241204-140205-a5d5190", - "workerd": "1.20241205.0", - "xxhash-wasm": 
"^1.0.1" - }, - "bin": { - "wrangler": "bin/wrangler.js", - "wrangler2": "bin/wrangler.js" - }, - "devDependencies": { - "@cloudflare/cli": "workspace:*", - "@cloudflare/eslint-config-worker": "workspace:*", - "@cloudflare/pages-shared": "workspace:^", - "@cloudflare/types": "6.18.4", - "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241205.0", - "@cspotcode/source-map-support": "0.8.1", - "@iarna/toml": "^3.0.0", - "@microsoft/api-extractor": "^7.47.0", - "@sentry/node": "^7.86.0", - "@sentry/types": "^7.86.0", - "@sentry/utils": "^7.86.0", - "@types/body-parser": "^1.19.2", - "@types/command-exists": "^1.2.0", - "@types/express": "^4.17.13", - "@types/glob-to-regexp": "^0.4.1", - "@types/is-ci": "^3.0.0", - "@types/javascript-time-ago": "^2.0.3", - "@types/mime": "^3.0.4", - "@types/minimatch": "^5.1.2", - "@types/prompts": "^2.0.14", - "@types/resolve": "^1.20.6", - "@types/shell-quote": "^1.7.2", - "@types/signal-exit": "^3.0.1", - "@types/supports-color": "^8.1.1", - "@types/ws": "^8.5.7", - "@types/yargs": "^17.0.22", - "@vitest/ui": "catalog:default", - "@webcontainer/env": "^1.1.0", - "body-parser": "^1.20.0", - "chalk": "^5.2.0", - "cli-table3": "^0.6.3", - "cmd-shim": "^4.1.0", - "command-exists": "^1.2.9", - "concurrently": "^8.2.2", - "devtools-protocol": "^0.0.1182435", - "dotenv": "^16.0.0", - "execa": "^6.1.0", - "express": "^4.18.1", - "find-up": "^6.3.0", - "get-port": "^7.0.0", - "glob-to-regexp": "^0.4.1", - "http-terminator": "^3.2.0", - "https-proxy-agent": "7.0.2", - "ignore": "^5.2.0", - "is-ci": "^3.0.1", - "javascript-time-ago": "^2.5.4", - "md5-file": "5.0.0", - "mime": "^3.0.0", - "minimatch": "^5.1.0", - "mock-socket": "^9.3.1", - "msw": "2.4.3", - "open": "^8.4.0", - "p-queue": "^7.2.0", - "patch-console": "^1.0.0", - "pretty-bytes": "^6.0.0", - "prompts": "^2.4.2", - "semiver": "^1.1.0", - "shell-quote": "^1.8.1", - "signal-exit": "^3.0.7", - "strip-ansi": "^7.1.0", - "supports-color": "^9.2.2", - "timeago.js": "^4.0.2", - "ts-dedent": "^2.2.0", - "ts-json-schema-generator": "^1.5.0", - "undici": "catalog:default", - "update-check": "^1.5.4", - "vitest": "catalog:default", - "vitest-websocket-mock": "^0.4.0", - "ws": "^8.18.0", - "xdg-app-paths": "^8.3.0", - "yargs": "^17.7.2" - }, - "engines": { - "node": ">=16.17.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - }, - "peerDependencies": { - "@cloudflare/workers-types": "^4.20241205.0" - }, - "peerDependenciesMeta": { - "@cloudflare/workers-types": { - "optional": true - } - } - }, - "../import-wasm-static": {}, - "node_modules/@cloudflare/workers-tsconfig": { - "resolved": "../../packages/workers-tsconfig", - "link": true - }, - "node_modules/@fastify/busboy": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", - "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14" - } - }, - "node_modules/import-example": { - "resolved": "packages/import-example", - "link": true - }, - "node_modules/import-wasm-static": { - "resolved": "../import-wasm-static", - "link": true - }, - "node_modules/undici": { - "version": "5.28.4", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", - "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@fastify/busboy": "^2.0.0" - }, - 
"engines": { - "node": ">=14.0" - } - }, - "node_modules/wrangler": { - "resolved": "../../packages/wrangler", - "link": true - }, - "packages/import-example": { - "dependencies": { - "import-wasm-static": "../../../../fixtures/import-wasm-static" - }, - "devDependencies": { - "@cloudflare/workers-tsconfig": "../../../../packages/workers-tsconfig", - "undici": "^5.28.4", - "wrangler": "../../../../packages/wrangler" - } - } - } -} diff --git a/fixtures/import-npm/package.json b/fixtures/import-npm/package.json index eb8252f6b488..c72de6df0261 100644 --- a/fixtures/import-npm/package.json +++ b/fixtures/import-npm/package.json @@ -7,9 +7,10 @@ "packages/*" ], "scripts": { - "check:type": "rm -rf node_modules && npm install && npm run check:type --workspaces", - "test:ci": "npm install && npm run test:ci --workspaces", - "test:watch": "npm install && npm run test:watch --workspaces", - "type:tests": "rm -rf node_modules && npm install && npm run type:tests --workspaces" + "_clean_install": "rm -rf node_modules && npm install --no-package-lock --workspaces", + "check:type": "npm run check:type --workspaces", + "test:ci": "npm run test:ci --workspaces", + "test:watch": "npm run test:watch --workspaces", + "type:tests": "npm run type:tests --workspaces" } } diff --git a/fixtures/import-npm/turbo.json b/fixtures/import-npm/turbo.json index ac10a3dc52e8..8b7b7af1c14d 100644 --- a/fixtures/import-npm/turbo.json +++ b/fixtures/import-npm/turbo.json @@ -1,11 +1,20 @@ { "extends": ["//"], "tasks": { - "test": { - "dependsOn": ["wrangler#build"] + "_clean_install": { + "outputs": ["node_modules"] + }, + "check:type": { + "dependsOn": ["_clean_install"] + }, + "test:watch": { + "dependsOn": ["_clean_install"] + }, + "type:tests": { + "dependsOn": ["_clean_install"] }, "test:ci": { - "dependsOn": ["wrangler#build"] + "dependsOn": ["_clean_install", "wrangler#build"] } } } diff --git a/fixtures/local-mode-tests/package.json b/fixtures/local-mode-tests/package.json index 0fa361e80c85..6f2e6efeda6b 100644 --- a/fixtures/local-mode-tests/package.json +++ b/fixtures/local-mode-tests/package.json @@ -14,7 +14,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@types/node": "catalog:default", "buffer": "^6.0.3", "typescript": "catalog:default", diff --git a/fixtures/node-app-pages/package.json b/fixtures/node-app-pages/package.json index d9b1beba44ee..aeccf8b2147a 100644 --- a/fixtures/node-app-pages/package.json +++ b/fixtures/node-app-pages/package.json @@ -15,7 +15,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "typescript": "catalog:default", "undici": "catalog:default", "vitest": "catalog:default", diff --git a/fixtures/nodejs-als-app/package.json b/fixtures/nodejs-als-app/package.json index 3cbd73bd9beb..e516fc81519a 100644 --- a/fixtures/nodejs-als-app/package.json +++ b/fixtures/nodejs-als-app/package.json @@ -9,7 +9,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "undici": "catalog:default", "vitest": "catalog:default", "wrangler": "workspace:*" diff --git a/fixtures/nodejs-hybrid-app/package.json b/fixtures/nodejs-hybrid-app/package.json index 12de46cbb6fa..1927e8159350 100644 --- a/fixtures/nodejs-hybrid-app/package.json +++ 
b/fixtures/nodejs-hybrid-app/package.json @@ -9,7 +9,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@types/pg": "^8.11.2", "pg": "8.11.3", "pg-cloudflare": "^1.1.1", diff --git a/fixtures/nodejs-hybrid-app/src/index.ts b/fixtures/nodejs-hybrid-app/src/index.ts index a7acfc1005eb..de35f8f3afe7 100644 --- a/fixtures/nodejs-hybrid-app/src/index.ts +++ b/fixtures/nodejs-hybrid-app/src/index.ts @@ -6,7 +6,6 @@ import { Stream } from "node:stream"; import { Context } from "vm"; import { Client } from "pg"; import { s } from "./dep.cjs"; -import { testUnenvPreset } from "./unenv-preset"; testBasicNodejsProperties(); @@ -29,8 +28,6 @@ export default { return testX509Certificate(); case "/test-require-alias": return testRequireUenvAliasedPackages(); - case "/test-unenv-preset": - return await testUnenvPreset(); } return new Response( @@ -39,7 +36,6 @@ export default { Test getRandomValues() Test X509Certificate Test require unenv aliased packages -Test unenv preset `, { headers: { "Content-Type": "text/html; charset=utf-8" } } ); diff --git a/fixtures/nodejs-hybrid-app/tests/index.test.ts b/fixtures/nodejs-hybrid-app/tests/index.test.ts index d9fbda7f1b67..d5510c5a5bc3 100644 --- a/fixtures/nodejs-hybrid-app/tests/index.test.ts +++ b/fixtures/nodejs-hybrid-app/tests/index.test.ts @@ -1,92 +1,61 @@ import { resolve } from "node:path"; import { fetch } from "undici"; -import { describe, it, test } from "vitest"; +import { afterAll, beforeAll, describe, it, test } from "vitest"; import { runWranglerDev } from "../../shared/src/run-wrangler-long-lived"; describe("nodejs compat", () => { + let wrangler: Awaited>; + + beforeAll(async () => { + wrangler = await runWranglerDev(resolve(__dirname, "../src"), [ + "--port=0", + "--inspector-port=0", + ]); + }); + + afterAll(async () => { + await wrangler.stop(); + }); it("should work when running code requiring polyfills", async ({ expect, }) => { - const { ip, port, stop } = await runWranglerDev( - resolve(__dirname, "../src"), - ["--port=0", "--inspector-port=0"] - ); - try { - const response = await fetch(`http://${ip}:${port}/test-process`); - const body = await response.text(); - expect(body).toMatchInlineSnapshot(`"OK!"`); + const { ip, port } = wrangler; + const response = await fetch(`http://${ip}:${port}/test-process`); + const body = await response.text(); + expect(body).toMatchInlineSnapshot(`"OK!"`); - // Disabling actually querying the database since we are getting this error: - // > too many connections for role 'reader' - // const response = await fetch(`http://${ip}:${port}/query`); - // const body = await response.text(); - // console.log(body); - // const result = JSON.parse(body) as { id: string }; - // expect(result.id).toEqual("1"); - } finally { - await stop(); - } + // Disabling actually querying the database since we are getting this error: + // > too many connections for role 'reader' + // const response = await fetch(`http://${ip}:${port}/query`); + // const body = await response.text(); + // console.log(body); + // const result = JSON.parse(body) as { id: string }; + // expect(result.id).toEqual("1"); }); it("should be able to call `getRandomValues()` bound to any object", async ({ expect, }) => { - const { ip, port, stop } = await runWranglerDev( - resolve(__dirname, "../src"), - ["--port=0", "--inspector-port=0"] - ); - try { - const response = await fetch(`http://${ip}:${port}/test-random`); - const body = 
await response.json(); - expect(body).toEqual([ - expect.any(String), - expect.any(String), - expect.any(String), - expect.any(String), - ]); - } finally { - await stop(); - } + const { ip, port } = wrangler; + const response = await fetch(`http://${ip}:${port}/test-random`); + const body = await response.json(); + expect(body).toEqual([ + expect.any(String), + expect.any(String), + expect.any(String), + expect.any(String), + ]); }); test("crypto.X509Certificate is implemented", async ({ expect }) => { - const { ip, port, stop } = await runWranglerDev( - resolve(__dirname, "../src"), - ["--port=0", "--inspector-port=0"] - ); - try { - const response = await fetch( - `http://${ip}:${port}/test-x509-certificate` - ); - await expect(response.text()).resolves.toBe(`"OK!"`); - } finally { - await stop(); - } + const { ip, port } = wrangler; + const response = await fetch(`http://${ip}:${port}/test-x509-certificate`); + await expect(response.text()).resolves.toBe(`"OK!"`); }); test("import unenv aliased packages", async ({ expect }) => { - const { ip, port, stop } = await runWranglerDev( - resolve(__dirname, "../src"), - ["--port=0", "--inspector-port=0"] - ); - try { - const response = await fetch(`http://${ip}:${port}/test-require-alias`); - await expect(response.text()).resolves.toBe(`"OK!"`); - } finally { - await stop(); - } - }); - - test("unenv preset", async ({ expect }) => { - const { ip, port, stop } = await runWranglerDev( - resolve(__dirname, "../src"), - ["--port=0", "--inspector-port=0"] - ); - try { - const response = await fetch(`http://${ip}:${port}/test-unenv-preset`); - await expect(response.text()).resolves.toBe("OK!"); - } finally { - await stop(); - } + const { ip, port } = wrangler; + const response = await fetch(`http://${ip}:${port}/test-require-alias`); + await expect(response.text()).resolves.toBe(`"OK!"`); }); }); diff --git a/fixtures/pages-dev-proxy-with-script/package.json b/fixtures/pages-dev-proxy-with-script/package.json index 378ba9982c9c..1c7f00d2a074 100644 --- a/fixtures/pages-dev-proxy-with-script/package.json +++ b/fixtures/pages-dev-proxy-with-script/package.json @@ -10,7 +10,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "typescript": "catalog:default", "undici": "catalog:default", "vitest": "catalog:default", diff --git a/fixtures/pages-functions-app/package.json b/fixtures/pages-functions-app/package.json index fcf83b9bf7ce..cf17f4fe3be8 100644 --- a/fixtures/pages-functions-app/package.json +++ b/fixtures/pages-functions-app/package.json @@ -15,7 +15,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "pages-plugin-example": "workspace:*", "typescript": "catalog:default", "undici": "catalog:default", diff --git a/fixtures/pages-functions-with-routes-app/package.json b/fixtures/pages-functions-with-routes-app/package.json index 0659113ffd32..497d99d3f7a5 100644 --- a/fixtures/pages-functions-with-routes-app/package.json +++ b/fixtures/pages-functions-with-routes-app/package.json @@ -11,7 +11,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "typescript": "catalog:default", "undici": "catalog:default", "vitest": "catalog:default", diff --git 
a/fixtures/pages-plugin-mounted-on-root-app/package.json b/fixtures/pages-plugin-mounted-on-root-app/package.json index 1cfa3c2c2660..41497ef9b41d 100644 --- a/fixtures/pages-plugin-mounted-on-root-app/package.json +++ b/fixtures/pages-plugin-mounted-on-root-app/package.json @@ -15,7 +15,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "pages-plugin-example": "workspace:*", "typescript": "catalog:default", "undici": "catalog:default", diff --git a/fixtures/pages-redirected-config/.gitignore b/fixtures/pages-redirected-config/.gitignore new file mode 100644 index 000000000000..2cf1da454af0 --- /dev/null +++ b/fixtures/pages-redirected-config/.gitignore @@ -0,0 +1,2 @@ +dist +build \ No newline at end of file diff --git a/fixtures/pages-redirected-config/package.json b/fixtures/pages-redirected-config/package.json new file mode 100644 index 000000000000..6c19f514440f --- /dev/null +++ b/fixtures/pages-redirected-config/package.json @@ -0,0 +1,23 @@ +{ + "name": "pages-redirected-config", + "private": true, + "description": "", + "license": "ISC", + "author": "", + "main": "src/index.js", + "scripts": { + "build": "node -r esbuild-register tools/build.ts", + "check:type": "tsc", + "dev": "pnpm run build && wrangler pages dev", + "test:ci": "pnpm run build && vitest run" + }, + "devDependencies": { + "@cloudflare/workers-tsconfig": "workspace:^", + "undici": "catalog:default", + "vitest": "catalog:default", + "wrangler": "workspace:*" + }, + "volta": { + "extends": "../../package.json" + } +} diff --git a/fixtures/pages-redirected-config/src/index.js b/fixtures/pages-redirected-config/src/index.js new file mode 100644 index 000000000000..e1ce8e64af6b --- /dev/null +++ b/fixtures/pages-redirected-config/src/index.js @@ -0,0 +1,5 @@ +export default { + async fetch(request, env) { + return new Response("Generated: " + env.generated ?? 
false); + }, +}; diff --git a/fixtures/pages-redirected-config/tests/index.test.ts b/fixtures/pages-redirected-config/tests/index.test.ts new file mode 100644 index 000000000000..e30c7fec1f73 --- /dev/null +++ b/fixtures/pages-redirected-config/tests/index.test.ts @@ -0,0 +1,54 @@ +import { rmSync, writeFileSync } from "fs"; +import { resolve } from "path"; +import { fetch } from "undici"; +import { describe, it } from "vitest"; +import { runWranglerPagesDev } from "../../shared/src/run-wrangler-long-lived"; + +const basePath = resolve(__dirname, ".."); + +describe("wrangler pages dev", () => { + it("uses the generated config if there is no wrangler.toml", async ({ + expect, + onTestFinished, + }) => { + const { ip, port, stop } = await runWranglerPagesDev(basePath, undefined, [ + "--port=0", + "--inspector-port=0", + ]); + onTestFinished(async () => await stop?.()); + + // Note that the local protocol defaults to http + const response = await fetch(`http://${ip}:${port}/`); + const text = await response.text(); + expect(response.status).toBe(200); + expect(text).toMatchInlineSnapshot(`"Generated: true"`); + }); + + it("uses the generated config instead of a user wrangler.toml", async ({ + expect, + onTestFinished, + }) => { + writeFileSync( + "wrangler.toml", + [ + `name = "redirected-config-worker"`, + `compatibility_date = "2024-12-01"`, + `pages_build_output_dir = "public"`, + ].join("\n") + ); + const { ip, port, stop } = await runWranglerPagesDev(basePath, undefined, [ + "--port=0", + "--inspector-port=0", + ]); + onTestFinished(async () => { + rmSync("wrangler.toml", { force: true }); + await stop?.(); + }); + + // Note that the local protocol defaults to http + const response = await fetch(`http://${ip}:${port}/`); + const text = await response.text(); + expect(response.status).toBe(200); + expect(text).toMatchInlineSnapshot(`"Generated: true"`); + }); +}); diff --git a/fixtures/pages-redirected-config/tools/build.ts b/fixtures/pages-redirected-config/tools/build.ts new file mode 100644 index 000000000000..01594c49f463 --- /dev/null +++ b/fixtures/pages-redirected-config/tools/build.ts @@ -0,0 +1,24 @@ +import { copyFileSync, mkdirSync, rmSync, writeFileSync } from "fs"; + +// Create a pseudo build directory +rmSync("build", { recursive: true, force: true }); +mkdirSync("build"); +const config = { + name: "redirected-config-worker", + compatibility_date: "2024-12-01", + pages_build_output_dir: "./public", + vars: { generated: true }, +}; +writeFileSync("build/wrangler.json", JSON.stringify(config, undefined, 2)); + +mkdirSync("build/public"); +copyFileSync("src/index.js", "build/public/_worker.js"); + +// Create the redirect file +rmSync(".wrangler/deploy", { recursive: true, force: true }); +mkdirSync(".wrangler/deploy", { recursive: true }); +const redirect = { configPath: "../../build/wrangler.json" }; +writeFileSync( + ".wrangler/deploy/config.json", + JSON.stringify(redirect, undefined, 2) +); diff --git a/fixtures/pages-redirected-config/tsconfig.json b/fixtures/pages-redirected-config/tsconfig.json new file mode 100644 index 000000000000..b901134e4e79 --- /dev/null +++ b/fixtures/pages-redirected-config/tsconfig.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "target": "ES2020", + "esModuleInterop": true, + "module": "CommonJS", + "lib": ["ES2020"], + "types": ["node"], + "skipLibCheck": true, + "moduleResolution": "node", + "noEmit": true + }, + "include": ["tests", "../../node-types.d.ts"] +} diff --git a/fixtures/pages-redirected-config/turbo.json 
b/fixtures/pages-redirected-config/turbo.json new file mode 100644 index 000000000000..3394ff556c71 --- /dev/null +++ b/fixtures/pages-redirected-config/turbo.json @@ -0,0 +1,9 @@ +{ + "$schema": "http://turbo.build/schema.json", + "extends": ["//"], + "tasks": { + "build": { + "outputs": ["build/**"] + } + } +} diff --git a/fixtures/pages-redirected-config/vitest.config.mts b/fixtures/pages-redirected-config/vitest.config.mts new file mode 100644 index 000000000000..846cddc41995 --- /dev/null +++ b/fixtures/pages-redirected-config/vitest.config.mts @@ -0,0 +1,9 @@ +import { defineProject, mergeConfig } from "vitest/config"; +import configShared from "../../vitest.shared"; + +export default mergeConfig( + configShared, + defineProject({ + test: {}, + }) +); diff --git a/fixtures/pages-simple-assets/package.json b/fixtures/pages-simple-assets/package.json index 119bfbf797d2..1f54692ea300 100644 --- a/fixtures/pages-simple-assets/package.json +++ b/fixtures/pages-simple-assets/package.json @@ -12,7 +12,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "typescript": "catalog:default", "undici": "catalog:default", "vitest": "catalog:default", diff --git a/fixtures/pages-workerjs-with-config-file-app/tests/index.test.ts b/fixtures/pages-workerjs-with-config-file-app/tests/index.test.ts index ac712f429daa..35bdc3471716 100644 --- a/fixtures/pages-workerjs-with-config-file-app/tests/index.test.ts +++ b/fixtures/pages-workerjs-with-config-file-app/tests/index.test.ts @@ -66,7 +66,7 @@ describe("Pages Advanced Mode with wrangler.toml", () => { it("has version_metadata binding", async ({ expect }) => { const response = await fetch(`http://${ip}:${port}/version_metadata`); - expect(response.json()).resolves.toMatchObject({ + await expect(response.json()).resolves.toMatchObject({ id: expect.any(String), tag: expect.any(String), }); diff --git a/fixtures/redirected-config-worker/.gitignore b/fixtures/redirected-config-worker/.gitignore new file mode 100644 index 000000000000..2cf1da454af0 --- /dev/null +++ b/fixtures/redirected-config-worker/.gitignore @@ -0,0 +1,2 @@ +dist +build \ No newline at end of file diff --git a/fixtures/redirected-config-worker/package.json b/fixtures/redirected-config-worker/package.json new file mode 100644 index 000000000000..44a6770bed68 --- /dev/null +++ b/fixtures/redirected-config-worker/package.json @@ -0,0 +1,23 @@ +{ + "name": "redirected-config-worker", + "private": true, + "description": "", + "license": "ISC", + "author": "", + "main": "src/index.js", + "scripts": { + "build": "node -r esbuild-register tools/build.ts", + "check:type": "tsc", + "dev": "pnpm run build && wrangler dev", + "test:ci": "pnpm run build && vitest run" + }, + "devDependencies": { + "@cloudflare/workers-tsconfig": "workspace:^", + "undici": "catalog:default", + "vitest": "catalog:default", + "wrangler": "workspace:*" + }, + "volta": { + "extends": "../../package.json" + } +} diff --git a/fixtures/redirected-config-worker/src/index.js b/fixtures/redirected-config-worker/src/index.js new file mode 100644 index 000000000000..e1ce8e64af6b --- /dev/null +++ b/fixtures/redirected-config-worker/src/index.js @@ -0,0 +1,5 @@ +export default { + async fetch(request, env) { + return new Response("Generated: " + env.generated ?? 
false); + }, +}; diff --git a/fixtures/redirected-config-worker/tests/index.test.ts b/fixtures/redirected-config-worker/tests/index.test.ts new file mode 100644 index 000000000000..81e8f5a8c68b --- /dev/null +++ b/fixtures/redirected-config-worker/tests/index.test.ts @@ -0,0 +1,40 @@ +import { resolve } from "path"; +import { fetch } from "undici"; +import { describe, it } from "vitest"; +import { runWranglerDev } from "../../shared/src/run-wrangler-long-lived"; + +const basePath = resolve(__dirname, ".."); + +describe("'wrangler dev' correctly renders pages", () => { + it("uses the generated config", async ({ expect, onTestFinished }) => { + const { ip, port, stop } = await runWranglerDev(basePath, [ + "--port=0", + "--inspector-port=0", + ]); + onTestFinished(async () => await stop?.()); + + // Note that the local protocol defaults to http + const response = await fetch(`http://${ip}:${port}/`); + const text = await response.text(); + expect(response.status).toBe(200); + expect(text).toMatchInlineSnapshot(`"Generated: true"`); + }); + + it("uses a custom config from command line rather than generated config", async ({ + expect, + onTestFinished, + }) => { + const { ip, port, stop } = await runWranglerDev(basePath, [ + "-c=wrangler.toml", + "--port=0", + "--inspector-port=0", + ]); + onTestFinished(async () => await stop?.()); + + // Note that the local protocol defaults to http + const response = await fetch(`http://${ip}:${port}/`); + const text = await response.text(); + expect(response.status).toBe(200); + expect(text).toMatchInlineSnapshot(`"Generated: undefined"`); + }); +}); diff --git a/fixtures/redirected-config-worker/tools/build.ts b/fixtures/redirected-config-worker/tools/build.ts new file mode 100644 index 000000000000..a5b0f354931e --- /dev/null +++ b/fixtures/redirected-config-worker/tools/build.ts @@ -0,0 +1,22 @@ +import { copyFileSync, mkdirSync, rmSync, writeFileSync } from "fs"; + +// Create a pseudo build directory +rmSync("build", { recursive: true, force: true }); +mkdirSync("build"); +const config = { + name: "redirected-config-worker", + compatibility_date: "2024-12-01", + main: "index.js", + vars: { generated: true }, +}; +writeFileSync("build/wrangler.json", JSON.stringify(config, undefined, 2)); +copyFileSync("src/index.js", "build/index.js"); + +// Create the redirect file +rmSync(".wrangler/deploy", { recursive: true, force: true }); +mkdirSync(".wrangler/deploy", { recursive: true }); +const redirect = { configPath: "../../build/wrangler.json" }; +writeFileSync( + ".wrangler/deploy/config.json", + JSON.stringify(redirect, undefined, 2) +); diff --git a/fixtures/redirected-config-worker/tsconfig.json b/fixtures/redirected-config-worker/tsconfig.json new file mode 100644 index 000000000000..b901134e4e79 --- /dev/null +++ b/fixtures/redirected-config-worker/tsconfig.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "target": "ES2020", + "esModuleInterop": true, + "module": "CommonJS", + "lib": ["ES2020"], + "types": ["node"], + "skipLibCheck": true, + "moduleResolution": "node", + "noEmit": true + }, + "include": ["tests", "../../node-types.d.ts"] +} diff --git a/fixtures/redirected-config-worker/turbo.json b/fixtures/redirected-config-worker/turbo.json new file mode 100644 index 000000000000..3394ff556c71 --- /dev/null +++ b/fixtures/redirected-config-worker/turbo.json @@ -0,0 +1,9 @@ +{ + "$schema": "http://turbo.build/schema.json", + "extends": ["//"], + "tasks": { + "build": { + "outputs": ["build/**"] + } + } +} diff --git 
a/fixtures/redirected-config-worker/vitest.config.mts b/fixtures/redirected-config-worker/vitest.config.mts new file mode 100644 index 000000000000..846cddc41995 --- /dev/null +++ b/fixtures/redirected-config-worker/vitest.config.mts @@ -0,0 +1,9 @@ +import { defineProject, mergeConfig } from "vitest/config"; +import configShared from "../../vitest.shared"; + +export default mergeConfig( + configShared, + defineProject({ + test: {}, + }) +); diff --git a/fixtures/redirected-config-worker/wrangler.toml b/fixtures/redirected-config-worker/wrangler.toml new file mode 100644 index 000000000000..449bd0b5b2de --- /dev/null +++ b/fixtures/redirected-config-worker/wrangler.toml @@ -0,0 +1,4 @@ +name = "redirected-config-worker" +compatibility_date = "2024-12-01" + +main = "src/index.js" diff --git a/fixtures/shared/src/run-wrangler-long-lived.ts b/fixtures/shared/src/run-wrangler-long-lived.ts index b16460b41a7e..0eb2439e55e9 100644 --- a/fixtures/shared/src/run-wrangler-long-lived.ts +++ b/fixtures/shared/src/run-wrangler-long-lived.ts @@ -92,11 +92,15 @@ async function runLongLivedWrangler( const chunks: Buffer[] = []; wranglerProcess.stdout?.on("data", (chunk) => { - console.log(`[${command}]`, chunk.toString()); + if (process.env.WRANGLER_LOG === "debug") { + console.log(`[${command}]`, chunk.toString()); + } chunks.push(chunk); }); wranglerProcess.stderr?.on("data", (chunk) => { - console.log(`[${command}]`, chunk.toString()); + if (process.env.WRANGLER_LOG === "debug") { + console.log(`[${command}]`, chunk.toString()); + } chunks.push(chunk); }); const getOutput = () => Buffer.concat(chunks).toString(); diff --git a/fixtures/vitest-pool-workers-examples/basics-integration-auxiliary/wrangler.toml b/fixtures/vitest-pool-workers-examples/basics-integration-auxiliary/wrangler.toml index 30e711fbabe8..cbc5a55606e6 100644 --- a/fixtures/vitest-pool-workers-examples/basics-integration-auxiliary/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/basics-integration-auxiliary/wrangler.toml @@ -1,4 +1,3 @@ name = "basics-integration-auxiliary" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] diff --git a/fixtures/vitest-pool-workers-examples/basics-unit-integration-self/wrangler.toml b/fixtures/vitest-pool-workers-examples/basics-unit-integration-self/wrangler.toml index 0a18c071dba2..e693f6579564 100644 --- a/fixtures/vitest-pool-workers-examples/basics-unit-integration-self/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/basics-unit-integration-self/wrangler.toml @@ -1,4 +1,3 @@ name = "basics-unit-integration-self" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] diff --git a/fixtures/vitest-pool-workers-examples/d1/wrangler.toml b/fixtures/vitest-pool-workers-examples/d1/wrangler.toml index 9f0d148c3f36..b294b5d48d8f 100644 --- a/fixtures/vitest-pool-workers-examples/d1/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/d1/wrangler.toml @@ -1,7 +1,6 @@ name = "d1" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] [[env.production.d1_databases]] binding = "DATABASE" diff --git a/fixtures/vitest-pool-workers-examples/durable-objects/wrangler.toml b/fixtures/vitest-pool-workers-examples/durable-objects/wrangler.toml index 2fe699772c8b..8f7e3134cb6c 100644 --- a/fixtures/vitest-pool-workers-examples/durable-objects/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/durable-objects/wrangler.toml @@ -1,7 +1,6 @@ name = 
"durable-objects" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] [durable_objects] bindings = [ diff --git a/fixtures/vitest-pool-workers-examples/external-package-resolution/wrangler.toml b/fixtures/vitest-pool-workers-examples/external-package-resolution/wrangler.toml index fe144b823a0e..8b21069c7756 100644 --- a/fixtures/vitest-pool-workers-examples/external-package-resolution/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/external-package-resolution/wrangler.toml @@ -1,4 +1,3 @@ name = "external-package-resolution" main = "src/index.ts" compatibility_date = "2024-04-05" -compatibility_flags = ["nodejs_compat"] diff --git a/fixtures/vitest-pool-workers-examples/hyperdrive/wrangler.toml b/fixtures/vitest-pool-workers-examples/hyperdrive/wrangler.toml index 517015ad6bf9..9b6fa3142475 100644 --- a/fixtures/vitest-pool-workers-examples/hyperdrive/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/hyperdrive/wrangler.toml @@ -1,7 +1,6 @@ name = "hyperdrive" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] [[hyperdrive]] binding = "ECHO_SERVER_HYPERDRIVE" diff --git a/fixtures/vitest-pool-workers-examples/internal-module-resolution/wrangler.toml b/fixtures/vitest-pool-workers-examples/internal-module-resolution/wrangler.toml index 90ee33b878f1..e9ac5d285f72 100644 --- a/fixtures/vitest-pool-workers-examples/internal-module-resolution/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/internal-module-resolution/wrangler.toml @@ -1,3 +1,2 @@ name = "internal-module-resolution" compatibility_date = "2024-04-05" -compatibility_flags = ["nodejs_compat"] diff --git a/fixtures/vitest-pool-workers-examples/kv-r2-caches/wrangler.toml b/fixtures/vitest-pool-workers-examples/kv-r2-caches/wrangler.toml index d8d142aedfb5..c145ffba320c 100644 --- a/fixtures/vitest-pool-workers-examples/kv-r2-caches/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/kv-r2-caches/wrangler.toml @@ -1,7 +1,6 @@ name = "kv-r2-caches" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] kv_namespaces = [ { binding = "KV_NAMESPACE", id = "00000000000000000000000000000000" } diff --git a/fixtures/vitest-pool-workers-examples/misc/test/fetch-mock.test.ts b/fixtures/vitest-pool-workers-examples/misc/test/fetch-mock.test.ts index 9c181bcb2f50..4db01193ecca 100644 --- a/fixtures/vitest-pool-workers-examples/misc/test/fetch-mock.test.ts +++ b/fixtures/vitest-pool-workers-examples/misc/test/fetch-mock.test.ts @@ -32,6 +32,35 @@ it("falls through to global fetch() if unmatched", async () => { expect(await response.text()).toBe("fallthrough:GET https://example.com/bad"); }); +it("intercepts URLs with query parameters with repeated keys", async () => { + fetchMock + .get("https://example.com") + .intercept({ path: "/foo", query: { key: "value" } }) + .reply(200, "foo"); + + fetchMock + .get("https://example.com") + .intercept({ path: "/bar?a=1&a=2" }) + .reply(200, "bar"); + + fetchMock + .get("https://example.com") + .intercept({ path: "/baz", query: { key1: ["a", "b"], key2: "c" } }) + .reply(200, "baz"); + + let response1 = await fetch("https://example.com/foo?key=value"); + expect(response1.url).toEqual("https://example.com/foo?key=value"); + expect(await response1.text()).toBe("foo"); + + let response2 = await fetch("https://example.com/bar?a=1&a=2"); + expect(response2.url).toEqual("https://example.com/bar?a=1&a=2"); + expect(await response2.text()).toBe("bar"); 
+ + let response3 = await fetch("https://example.com/baz?key1=a&key2=c&key1=b"); + expect(response3.url).toEqual("https://example.com/baz?key1=a&key2=c&key1=b"); + expect(await response3.text()).toBe("baz"); +}); + describe("AbortSignal", () => { let abortSignalTimeoutMock: MockInstance; diff --git a/fixtures/vitest-pool-workers-examples/misc/wrangler.toml b/fixtures/vitest-pool-workers-examples/misc/wrangler.toml index 79dcd876a212..003ac0655ec7 100644 --- a/fixtures/vitest-pool-workers-examples/misc/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/misc/wrangler.toml @@ -1,5 +1,4 @@ compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] [define] WRANGLER_DEFINED_THING = "\"thing\"" diff --git a/fixtures/vitest-pool-workers-examples/multiple-workers/api-service/wrangler.toml b/fixtures/vitest-pool-workers-examples/multiple-workers/api-service/wrangler.toml index 0ae381a7b00d..8e2850cddcc0 100644 --- a/fixtures/vitest-pool-workers-examples/multiple-workers/api-service/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/multiple-workers/api-service/wrangler.toml @@ -1,7 +1,6 @@ name = "api-service" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] [[services]] binding = "AUTH_SERVICE" diff --git a/fixtures/vitest-pool-workers-examples/multiple-workers/auth-service/wrangler.toml b/fixtures/vitest-pool-workers-examples/multiple-workers/auth-service/wrangler.toml index e50c632d3f91..eec289665530 100644 --- a/fixtures/vitest-pool-workers-examples/multiple-workers/auth-service/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/multiple-workers/auth-service/wrangler.toml @@ -1,7 +1,6 @@ name = "auth-service" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] # `AUTH_PUBLIC_KEY` would be a secret created with `wrangler secret put`, and # stored in `.dev.vars` during development diff --git a/fixtures/vitest-pool-workers-examples/multiple-workers/database-service/wrangler.toml b/fixtures/vitest-pool-workers-examples/multiple-workers/database-service/wrangler.toml index fd027b1ae612..70e5c81ac8a9 100644 --- a/fixtures/vitest-pool-workers-examples/multiple-workers/database-service/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/multiple-workers/database-service/wrangler.toml @@ -1,7 +1,6 @@ name = "database-service" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] kv_namespaces = [ { binding = "KV_NAMESPACE", id = "00000000000000000000000000000000" } diff --git a/fixtures/vitest-pool-workers-examples/package.json b/fixtures/vitest-pool-workers-examples/package.json index e9bdeeeddb91..76250bbcdccf 100644 --- a/fixtures/vitest-pool-workers-examples/package.json +++ b/fixtures/vitest-pool-workers-examples/package.json @@ -12,7 +12,7 @@ }, "devDependencies": { "@cloudflare/vitest-pool-workers": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@types/node": "catalog:default", "ext-dep": "file:./internal-module-resolution/vendor/ext-dep", "jose": "^5.2.2", diff --git a/fixtures/vitest-pool-workers-examples/pages-with-config/wrangler.toml b/fixtures/vitest-pool-workers-examples/pages-with-config/wrangler.toml index 03a5499d8753..d051ad6bb920 100644 --- a/fixtures/vitest-pool-workers-examples/pages-with-config/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/pages-with-config/wrangler.toml @@ -1,6 +1,5 @@ #:schema 
node_modules/wrangler/config-schema.json name = "pages-with-config" compatibility_date = "2024-09-19" -compatibility_flags = ["nodejs_compat"] pages_build_output_dir = "public" diff --git a/fixtures/vitest-pool-workers-examples/queues/wrangler.toml b/fixtures/vitest-pool-workers-examples/queues/wrangler.toml index 222b39986c25..5ba495754104 100644 --- a/fixtures/vitest-pool-workers-examples/queues/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/queues/wrangler.toml @@ -1,7 +1,6 @@ name = "queues" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] kv_namespaces = [ { binding = "QUEUE_RESULTS", id = "00000000000000000000000000000000" } diff --git a/fixtures/vitest-pool-workers-examples/request-mocking/wrangler.toml b/fixtures/vitest-pool-workers-examples/request-mocking/wrangler.toml index c772b0ba7a1a..5b59826b38fb 100644 --- a/fixtures/vitest-pool-workers-examples/request-mocking/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/request-mocking/wrangler.toml @@ -1,4 +1,3 @@ name = "request-mocking" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] diff --git a/fixtures/vitest-pool-workers-examples/rpc/wrangler.toml b/fixtures/vitest-pool-workers-examples/rpc/wrangler.toml index a221e63747a9..05571efda950 100644 --- a/fixtures/vitest-pool-workers-examples/rpc/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/rpc/wrangler.toml @@ -1,7 +1,7 @@ name = "rpc" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat", "rpc"] +compatibility_flags = ["rpc"] kv_namespaces = [ { binding = "KV_NAMESPACE", id = "00000000000000000000000000000000" } diff --git a/fixtures/vitest-pool-workers-examples/web-assembly/wrangler.toml b/fixtures/vitest-pool-workers-examples/web-assembly/wrangler.toml index 3febb0123c29..fe0eae38a5fc 100644 --- a/fixtures/vitest-pool-workers-examples/web-assembly/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/web-assembly/wrangler.toml @@ -1,4 +1,3 @@ name = "web-assembly" main = "src/index.ts" compatibility_date = "2024-01-01" -compatibility_flags = ["nodejs_compat"] diff --git a/fixtures/vitest-pool-workers-examples/workers-assets-only/wrangler.toml b/fixtures/vitest-pool-workers-examples/workers-assets-only/wrangler.toml index d58bc3afe630..bac61bddf65a 100644 --- a/fixtures/vitest-pool-workers-examples/workers-assets-only/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/workers-assets-only/wrangler.toml @@ -1,7 +1,6 @@ #:schema node_modules/wrangler/config-schema.json name = "workers-static-assets-only" compatibility_date = "2024-09-19" -compatibility_flags = ["nodejs_compat"] [assets] directory = "./public" diff --git a/fixtures/vitest-pool-workers-examples/workers-assets/wrangler.toml b/fixtures/vitest-pool-workers-examples/workers-assets/wrangler.toml index f05b48b3e975..5f9ff0563709 100644 --- a/fixtures/vitest-pool-workers-examples/workers-assets/wrangler.toml +++ b/fixtures/vitest-pool-workers-examples/workers-assets/wrangler.toml @@ -2,7 +2,6 @@ name = "workers-static-assets-with-user-worker" main = "src/index.ts" compatibility_date = "2024-09-19" -compatibility_flags = ["nodejs_compat"] [assets] directory = "./public" diff --git a/fixtures/worker-ts/package.json b/fixtures/worker-ts/package.json index 591c0bdbaa56..1f266750d58f 100644 --- a/fixtures/worker-ts/package.json +++ b/fixtures/worker-ts/package.json @@ -6,7 +6,7 @@ "start": "wrangler dev" }, "devDependencies": { - 
"@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "wrangler": "workspace:*" }, "volta": { diff --git a/fixtures/workers-with-assets-only/package.json b/fixtures/workers-with-assets-only/package.json index 934984b91196..aec408b7200c 100644 --- a/fixtures/workers-with-assets-only/package.json +++ b/fixtures/workers-with-assets-only/package.json @@ -9,7 +9,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "typescript": "catalog:default", "undici": "catalog:default", "vitest": "catalog:default", diff --git a/fixtures/workers-with-assets-serve-directly/package.json b/fixtures/workers-with-assets-serve-directly/package.json index 52e597e8e5a0..690871d78acd 100644 --- a/fixtures/workers-with-assets-serve-directly/package.json +++ b/fixtures/workers-with-assets-serve-directly/package.json @@ -10,7 +10,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "typescript": "catalog:default", "undici": "catalog:default", "vitest": "catalog:default", diff --git a/fixtures/workers-with-assets/package.json b/fixtures/workers-with-assets/package.json index bdb550788e88..6c2e8b619c32 100644 --- a/fixtures/workers-with-assets/package.json +++ b/fixtures/workers-with-assets/package.json @@ -10,7 +10,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "typescript": "catalog:default", "undici": "catalog:default", "vitest": "catalog:default", diff --git a/fixtures/workflow-multiple/package.json b/fixtures/workflow-multiple/package.json index b9355a2413d9..81826bf5506b 100644 --- a/fixtures/workflow-multiple/package.json +++ b/fixtures/workflow-multiple/package.json @@ -7,7 +7,7 @@ "test:ci": "vitest run" }, "devDependencies": { - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "typescript": "catalog:default", "undici": "catalog:default", "vitest": "catalog:default", diff --git a/fixtures/workflow/package.json b/fixtures/workflow/package.json index 0f8d252922e2..26d59b42f53d 100644 --- a/fixtures/workflow/package.json +++ b/fixtures/workflow/package.json @@ -7,7 +7,7 @@ "test:ci": "vitest run" }, "devDependencies": { - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "typescript": "catalog:default", "undici": "catalog:default", "vitest": "catalog:default", diff --git a/package.json b/package.json index 2b01bf28ba73..f55ce9f5970c 100644 --- a/package.json +++ b/package.json @@ -33,7 +33,7 @@ "@changesets/changelog-github": "^0.5.0", "@changesets/cli": "^2.27.1", "@changesets/parse": "^0.4.0", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@ianvs/prettier-plugin-sort-imports": "4.2.1", "@manypkg/cli": "^0.21.4", "@turbo/gen": "^1.10.13", diff --git a/packages/chrome-devtools-patches/Makefile b/packages/chrome-devtools-patches/Makefile index cb74249b9099..95be899561a8 100644 --- a/packages/chrome-devtools-patches/Makefile +++ b/packages/chrome-devtools-patches/Makefile @@ -1,7 +1,7 @@ ROOT = $(realpath .) 
PATH_WITH_DEPOT = $(PATH):$(ROOT)/depot/ # The upstream devtools commit upon which our patches are based -HEAD = 279239c4c670edbde12345aca4fadb7f07d503e8 +HEAD = 538f92a49ba5cbc615bcaa063214fca38ab87813 PATCHES = $(shell ls ${PWD}/patches/*.patch) depot: git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git depot @@ -28,3 +28,8 @@ cleanup: test: git -C devtools-frontend am $(PATCHES) + +dev: + cd devtools-frontend && PATH="$(PATH_WITH_DEPOT)" npm run install-deps + cd devtools-frontend && PATH="$(PATH_WITH_DEPOT)" gn gen out/Default --args="devtools_skip_typecheck=true" + cd devtools-frontend && PATH="$(PATH_WITH_DEPOT)" npm run watch diff --git a/packages/chrome-devtools-patches/patches/0001-Expand-Browser-support-make-it-work-in-Firefox-Safar.patch b/packages/chrome-devtools-patches/patches/0001-Expand-Browser-support-make-it-work-in-Firefox-Safar.patch index a7f38d212e53..3471b5294e30 100644 --- a/packages/chrome-devtools-patches/patches/0001-Expand-Browser-support-make-it-work-in-Firefox-Safar.patch +++ b/packages/chrome-devtools-patches/patches/0001-Expand-Browser-support-make-it-work-in-Firefox-Safar.patch @@ -1,4 +1,4 @@ -From f33a1c016aca11dd75e912ba3373be01fc7480da Mon Sep 17 00:00:00 2001 +From 308d29672a61ba0cd0e4751460ae15c0f0930666 Mon Sep 17 00:00:00 2001 From: Workers DevProd Date: Fri, 25 Oct 2024 14:15:43 +0100 Subject: [PATCH 1/8] Expand Browser support (make it work in Firefox & Safari) @@ -23,7 +23,7 @@ If updating the commit of devtools upon which these patches are based, make sure 4 files changed, 46 insertions(+), 2 deletions(-) diff --git a/front_end/core/dom_extension/DOMExtension.ts b/front_end/core/dom_extension/DOMExtension.ts -index 940ce81924..e9542743ff 100644 +index 5060011dba..1ca615e641 100644 --- a/front_end/core/dom_extension/DOMExtension.ts +++ b/front_end/core/dom_extension/DOMExtension.ts @@ -132,7 +132,7 @@ Node.prototype.getComponentSelection = function(): Selection|null { @@ -100,7 +100,7 @@ index 45028f436a..6a154030b6 100644 static instance(opts: {forceNew: boolean|null} = {forceNew: null}): JsMainImpl { const {forceNew} = opts; diff --git a/front_end/ui/legacy/components/data_grid/DataGrid.ts b/front_end/ui/legacy/components/data_grid/DataGrid.ts -index 93a117e746..d9e37e3d7a 100644 +index 24f6b09d0c..8b9ca969ae 100644 --- a/front_end/ui/legacy/components/data_grid/DataGrid.ts +++ b/front_end/ui/legacy/components/data_grid/DataGrid.ts @@ -217,6 +217,8 @@ export class DataGridImpl extends Common.ObjectWrapper.ObjectWrapper *under* the + (this.dataTableBody as HTMLElement).style.height = 'calc(100% - 27px)'; - this.topFillerRow = (this.dataTableBody.createChild('tr', 'data-grid-filler-row revealed') as HTMLElement); + this.topFillerRow = this.dataTableBody.createChild('tr', 'data-grid-filler-row revealed'); UI.ARIAUtils.setHidden(this.topFillerRow, true); - this.bottomFillerRow = (this.dataTableBody.createChild('tr', 'data-grid-filler-row revealed') as HTMLElement); + this.bottomFillerRow = this.dataTableBody.createChild('tr', 'data-grid-filler-row revealed'); @@ -505,6 +507,9 @@ export class DataGridImpl extends Common.ObjectWrapper.ObjectWrapper Date: Fri, 25 Oct 2024 16:06:06 +0100 Subject: [PATCH 2/8] Setup Cloudflare devtools target type diff --git a/packages/chrome-devtools-patches/patches/0003-Add-ping-to-improve-connection-stability.-Without-th.patch b/packages/chrome-devtools-patches/patches/0003-Add-ping-to-improve-connection-stability.-Without-th.patch index f480c65b59f1..2f7e77c4a6c7 100644 --- 
a/packages/chrome-devtools-patches/patches/0003-Add-ping-to-improve-connection-stability.-Without-th.patch +++ b/packages/chrome-devtools-patches/patches/0003-Add-ping-to-improve-connection-stability.-Without-th.patch @@ -1,4 +1,4 @@ -From 233ae8633d7a84f662ebdea564070fcbee2f42d4 Mon Sep 17 00:00:00 2001 +From 202a5226a8f850a955b07c7abc301f3031d9a72d Mon Sep 17 00:00:00 2001 From: Workers DevProd Date: Fri, 25 Oct 2024 15:04:17 +0100 Subject: [PATCH 3/8] Add ping to improve connection stability. Without this, diff --git a/packages/chrome-devtools-patches/patches/0004-Support-viewing-source-files-over-the-network.-This-.patch b/packages/chrome-devtools-patches/patches/0004-Support-viewing-source-files-over-the-network.-This-.patch index 76468150b98c..8daf3007eca7 100644 --- a/packages/chrome-devtools-patches/patches/0004-Support-viewing-source-files-over-the-network.-This-.patch +++ b/packages/chrome-devtools-patches/patches/0004-Support-viewing-source-files-over-the-network.-This-.patch @@ -1,4 +1,4 @@ -From f55dc8200dda3025eb7866f2706a08851aa0d3dd Mon Sep 17 00:00:00 2001 +From 79eef448a1e63c2295d67938f92a83093289ffe6 Mon Sep 17 00:00:00 2001 From: Workers DevProd Date: Fri, 25 Oct 2024 15:26:38 +0100 Subject: [PATCH 4/8] Support viewing source files over the network. This @@ -23,7 +23,7 @@ Subject: [PATCH 4/8] Support viewing source files over the network. This 6 files changed, 15 insertions(+), 40 deletions(-) diff --git a/front_end/core/common/ParsedURL.ts b/front_end/core/common/ParsedURL.ts -index 832d76c19c..968439ade4 100644 +index 100d9ec45f..d75d6e4f2f 100644 --- a/front_end/core/common/ParsedURL.ts +++ b/front_end/core/common/ParsedURL.ts @@ -366,7 +366,7 @@ export class ParsedURL { @@ -88,20 +88,20 @@ index 1a6644df92..1724a33bd4 100644 persistence: UI.ViewManager.ViewPersistence.PERMANENT, async loadView() { diff --git a/front_end/entrypoints/main/MainImpl.ts b/front_end/entrypoints/main/MainImpl.ts -index 978092dea5..fc463ec0c9 100644 +index fbc6a1bd71..e6bc57bbb0 100644 --- a/front_end/entrypoints/main/MainImpl.ts +++ b/front_end/entrypoints/main/MainImpl.ts -@@ -421,6 +421,8 @@ export class MainImpl { - Root.Runtime.ExperimentName.TIMELINE_ANNOTATIONS, +@@ -414,6 +414,8 @@ export class MainImpl { Root.Runtime.ExperimentName.NETWORK_PANEL_FILTER_BAR_REDESIGN, Root.Runtime.ExperimentName.FLOATING_ENTRY_POINTS_FOR_AI_ASSISTANCE, + Root.Runtime.ExperimentName.TIMELINE_ALTERNATIVE_NAVIGATION, + Root.Runtime.ExperimentName.AUTHORED_DEPLOYED_GROUPING, + Root.Runtime.ExperimentName.JUST_MY_CODE, ...(Root.Runtime.Runtime.queryParam('isChromeForTesting') ? 
['protocol-monitor'] : []), ]); diff --git a/front_end/panels/sources/NavigatorView.ts b/front_end/panels/sources/NavigatorView.ts -index 6f4c52f3e7..2f5ada52e6 100644 +index d9da41ab84..73f42a22be 100644 --- a/front_end/panels/sources/NavigatorView.ts +++ b/front_end/panels/sources/NavigatorView.ts @@ -795,8 +795,9 @@ export class NavigatorView extends UI.Widget.VBox implements SDK.TargetManager.O @@ -117,7 +117,7 @@ index 6f4c52f3e7..2f5ada52e6 100644 const parsedURL = new Common.ParsedURL.ParsedURL(projectOrigin); diff --git a/front_end/panels/sources/sources-meta.ts b/front_end/panels/sources/sources-meta.ts -index 42a2e17f07..c41a8d2c7a 100644 +index f4284da33b..76bb296abb 100644 --- a/front_end/panels/sources/sources-meta.ts +++ b/front_end/panels/sources/sources-meta.ts @@ -495,32 +495,6 @@ UI.ViewManager.registerViewExtension({ diff --git a/packages/chrome-devtools-patches/patches/0005-Support-forcing-the-devtools-theme-via-a-query-param.patch b/packages/chrome-devtools-patches/patches/0005-Support-forcing-the-devtools-theme-via-a-query-param.patch index 0c956636dc76..e0ca73f5256f 100644 --- a/packages/chrome-devtools-patches/patches/0005-Support-forcing-the-devtools-theme-via-a-query-param.patch +++ b/packages/chrome-devtools-patches/patches/0005-Support-forcing-the-devtools-theme-via-a-query-param.patch @@ -1,4 +1,4 @@ -From 0f7a1adad6b0bfbafe60fbc4c161fa9a7ef0381f Mon Sep 17 00:00:00 2001 +From 94301beb8effbc0b3a2d11934369d4c6071fe73c Mon Sep 17 00:00:00 2001 From: Workers DevProd Date: Fri, 25 Oct 2024 15:05:56 +0100 Subject: [PATCH 5/8] Support forcing the devtools theme via a query parameter, diff --git a/packages/chrome-devtools-patches/patches/0006-All-about-the-network-tab.patch b/packages/chrome-devtools-patches/patches/0006-All-about-the-network-tab.patch index a9dee1b74b0c..bb4a15ce9e61 100644 --- a/packages/chrome-devtools-patches/patches/0006-All-about-the-network-tab.patch +++ b/packages/chrome-devtools-patches/patches/0006-All-about-the-network-tab.patch @@ -1,4 +1,4 @@ -From 3b484138dabb583349363df0e2d69ffe3c1dc039 Mon Sep 17 00:00:00 2001 +From c0a3154c9483ac4390d8459cb5869b58afa8d503 Mon Sep 17 00:00:00 2001 From: Workers DevProd Date: Fri, 25 Oct 2024 16:05:12 +0100 Subject: [PATCH 6/8] All about the network tab! @@ -16,7 +16,7 @@ Subject: [PATCH 6/8] All about the network tab! 4 files changed, 9 insertions(+), 28 deletions(-) diff --git a/front_end/core/sdk/NetworkManager.ts b/front_end/core/sdk/NetworkManager.ts -index 8735116e61..953d3fc7b1 100644 +index 4093f80cf8..1eb3199a53 100644 --- a/front_end/core/sdk/NetworkManager.ts +++ b/front_end/core/sdk/NetworkManager.ts @@ -34,6 +34,7 @@ @@ -73,7 +73,7 @@ index 1724a33bd4..884c6264d2 100644 import * as Common from '../../core/common/common.js'; diff --git a/front_end/panels/network/NetworkPanel.ts b/front_end/panels/network/NetworkPanel.ts -index 71c40a6873..abf30cc4e8 100644 +index 7d44f68bf3..f4b7ec0973 100644 --- a/front_end/panels/network/NetworkPanel.ts +++ b/front_end/panels/network/NetworkPanel.ts @@ -75,14 +75,6 @@ const UIStrings = { @@ -91,7 +91,7 @@ index 71c40a6873..abf30cc4e8 100644 /** *@description Tooltip text that appears when hovering over the largeicon settings gear in show settings pane setting in network panel of the network panel */ -@@ -158,10 +150,6 @@ const UIStrings = { +@@ -180,10 +172,6 @@ const UIStrings = { *@description Text in Network Panel that is displayed when frames are being fetched. 
*/ fetchingFrames: 'Fetching frames...', @@ -102,7 +102,7 @@ index 71c40a6873..abf30cc4e8 100644 }; const str_ = i18n.i18n.registerUIStrings('panels/network/NetworkPanel.ts', UIStrings); const i18nString = i18n.i18n.getLocalizedString.bind(undefined, str_); -@@ -422,21 +410,6 @@ export class NetworkPanel extends UI.Panel.Panel implements +@@ -444,21 +432,6 @@ export class NetworkPanel extends UI.Panel.Panel implements this.panelToolbar.appendToolbarItem(new UI.Toolbar.ToolbarSettingCheckbox( this.preserveLogSetting, i18nString(UIStrings.doNotClearLogOnPageReload), i18nString(UIStrings.preserveLog))); diff --git a/packages/chrome-devtools-patches/patches/0007-Limit-heap-profiling-modes-available.patch b/packages/chrome-devtools-patches/patches/0007-Limit-heap-profiling-modes-available.patch index 8fa7a21e1e28..8d75040692a5 100644 --- a/packages/chrome-devtools-patches/patches/0007-Limit-heap-profiling-modes-available.patch +++ b/packages/chrome-devtools-patches/patches/0007-Limit-heap-profiling-modes-available.patch @@ -1,4 +1,4 @@ -From 01012531546fccc62536fba7d1bfde6c5cc1a6c2 Mon Sep 17 00:00:00 2001 +From 6bca6c563e616596aa75a88f1d52b8f9150ba93c Mon Sep 17 00:00:00 2001 From: Workers DevProd Date: Fri, 25 Oct 2024 16:07:24 +0100 Subject: [PATCH 7/8] Limit heap profiling modes available diff --git a/packages/chrome-devtools-patches/patches/0008-Use-the-worker-name-as-the-title-for-the-Javascript-.patch b/packages/chrome-devtools-patches/patches/0008-Use-the-worker-name-as-the-title-for-the-Javascript-.patch index f5b5fb15a81d..654eb21be1fd 100644 --- a/packages/chrome-devtools-patches/patches/0008-Use-the-worker-name-as-the-title-for-the-Javascript-.patch +++ b/packages/chrome-devtools-patches/patches/0008-Use-the-worker-name-as-the-title-for-the-Javascript-.patch @@ -1,4 +1,4 @@ -From 57b100b2455fc3adb794218c4d15c44052a16f52 Mon Sep 17 00:00:00 2001 +From 25c22c6909222b58a576a04c29c2baf517ecf69c Mon Sep 17 00:00:00 2001 From: Workers DevProd Date: Fri, 25 Oct 2024 16:11:10 +0100 Subject: [PATCH 8/8] Use the worker name as the title for the Javascript diff --git a/packages/create-cloudflare/CHANGELOG.md b/packages/create-cloudflare/CHANGELOG.md index 68775fcecfe9..49cded8219be 100644 --- a/packages/create-cloudflare/CHANGELOG.md +++ b/packages/create-cloudflare/CHANGELOG.md @@ -1,5 +1,127 @@ # create-cloudflare +## 2.35.2 + +### Patch Changes + +- [#7617](https://github.com/cloudflare/workers-sdk/pull/7617) [`75393d6`](https://github.com/cloudflare/workers-sdk/commit/75393d67d04fae1d674b7b2fc0b751384b02103b) Thanks [@dependabot](https://github.com/apps/dependabot)! - chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | ----------- | ----- | ----- | + | create-vite | 6.0.1 | 6.1.0 | + +- [#7618](https://github.com/cloudflare/workers-sdk/pull/7618) [`e5b9a9a`](https://github.com/cloudflare/workers-sdk/commit/e5b9a9a9d7363acf8ac7fa45f88294d879a3e996) Thanks [@dependabot](https://github.com/apps/dependabot)! - chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | ---------- | ----- | ----- | + | sv | 0.6.7 | 0.6.9 | + +- [#7619](https://github.com/cloudflare/workers-sdk/pull/7619) [`9989022`](https://github.com/cloudflare/workers-sdk/commit/998902238552c860cf9dd9f629e8f351352ae3c4) Thanks [@dependabot](https://github.com/apps/dependabot)! 
- chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | ---------- | ------ | ------ | + | gatsby | 5.14.0 | 5.14.1 | + +- [#7620](https://github.com/cloudflare/workers-sdk/pull/7620) [`4a38b9b`](https://github.com/cloudflare/workers-sdk/commit/4a38b9b1f9d4249f0d6756c22593a168ab4149bc) Thanks [@dependabot](https://github.com/apps/dependabot)! - chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | ---------- | ------ | ------ | + | nuxi | 3.16.0 | 3.17.1 | + +- [#7621](https://github.com/cloudflare/workers-sdk/pull/7621) [`05cc188`](https://github.com/cloudflare/workers-sdk/commit/05cc18889d604145d68ff0a306926ebb9380ea3b) Thanks [@dependabot](https://github.com/apps/dependabot)! - chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | --------------- | ------ | ------ | + | @angular/create | 19.0.5 | 19.0.6 | + +- [#7622](https://github.com/cloudflare/workers-sdk/pull/7622) [`a29b1c2`](https://github.com/cloudflare/workers-sdk/commit/a29b1c2c58f59a0e9891f1d30eb1c2c0fd68e42d) Thanks [@dependabot](https://github.com/apps/dependabot)! - chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | ------------ | ------ | ------ | + | create-remix | 2.15.1 | 2.15.2 | + +- [#7623](https://github.com/cloudflare/workers-sdk/pull/7623) [`c448a57`](https://github.com/cloudflare/workers-sdk/commit/c448a57a23052d0fb75f06a282b735f452ddb8c5) Thanks [@dependabot](https://github.com/apps/dependabot)! - chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | ----------- | ------ | ------ | + | create-qwik | 1.11.0 | 1.12.0 | + +- [#7624](https://github.com/cloudflare/workers-sdk/pull/7624) [`d4d8df1`](https://github.com/cloudflare/workers-sdk/commit/d4d8df1356df66b09f18abff09b201537373b839) Thanks [@dependabot](https://github.com/apps/dependabot)! - chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | --------------- | ------ | ------ | + | create-next-app | 15.1.0 | 15.1.2 | + +- [#7643](https://github.com/cloudflare/workers-sdk/pull/7643) [`ab7204a`](https://github.com/cloudflare/workers-sdk/commit/ab7204a6eb8f5bab00172a6a2623251ba2d757ba) Thanks [@dependabot](https://github.com/apps/dependabot)! - chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | ---------- | ------ | ------ | + | create-vue | 3.12.2 | 3.13.0 | + +- [#7644](https://github.com/cloudflare/workers-sdk/pull/7644) [`42e8ba1`](https://github.com/cloudflare/workers-sdk/commit/42e8ba1d5a130742d7d94ee9b0b42e0cba78a352) Thanks [@dependabot](https://github.com/apps/dependabot)! 
- chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | ----------- | ----- | ----- | + | create-vite | 6.1.0 | 6.1.1 | + +- [#7645](https://github.com/cloudflare/workers-sdk/pull/7645) [`8b6f6e6`](https://github.com/cloudflare/workers-sdk/commit/8b6f6e633eda4c85f7e84144846bb83f0450d955) Thanks [@dependabot](https://github.com/apps/dependabot)! - chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | --------------- | ------ | ------ | + | create-next-app | 15.1.2 | 15.1.3 | + +- [#7646](https://github.com/cloudflare/workers-sdk/pull/7646) [`b9533a3`](https://github.com/cloudflare/workers-sdk/commit/b9533a375e25407251145dd94a0496faf5a16a4a) Thanks [@dependabot](https://github.com/apps/dependabot)! - chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | ---------- | ------ | ------ | + | nuxi | 3.17.1 | 3.17.2 | + +- [#7647](https://github.com/cloudflare/workers-sdk/pull/7647) [`df06a95`](https://github.com/cloudflare/workers-sdk/commit/df06a9592e8ce721da9b9b048f1db759826a6983) Thanks [@dependabot](https://github.com/apps/dependabot)! - chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | ---------- | ----- | ------ | + | sv | 0.6.9 | 0.6.10 | + +- [#7673](https://github.com/cloudflare/workers-sdk/pull/7673) [`8ab13b2`](https://github.com/cloudflare/workers-sdk/commit/8ab13b2d7b397010532939bb9ba804c7dc84ea37) Thanks [@dependabot](https://github.com/apps/dependabot)! - chore: update dependencies of "create-cloudflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | ----------------- | ----- | ----- | + | create-docusaurus | 3.6.3 | 3.7.0 | + +- [#7638](https://github.com/cloudflare/workers-sdk/pull/7638) [`acbea32`](https://github.com/cloudflare/workers-sdk/commit/acbea32c6df82269b435e34d8c51fe8a26c973f1) Thanks [@dario-piotrowicz](https://github.com/dario-piotrowicz)! - fix and update experimental Next.js template to work on `@opennextjs/cloudflare@0.3.x` + +- [#7664](https://github.com/cloudflare/workers-sdk/pull/7664) [`5cc3bb1`](https://github.com/cloudflare/workers-sdk/commit/5cc3bb10ae3027fa40c77f42e94e6fb0eae710ce) Thanks [@dario-piotrowicz](https://github.com/dario-piotrowicz)! 
- Consistently use full official full stack framework names + + Rename the `Solid` framework entry to `SolidStart` and `Next` to `Next.js` so that these alongside `SvelteKit` present the full official unshortened names of full stack frameworks + ## 2.35.1 ### Patch Changes diff --git a/packages/create-cloudflare/package.json b/packages/create-cloudflare/package.json index 31fb01707350..b1140391bf73 100644 --- a/packages/create-cloudflare/package.json +++ b/packages/create-cloudflare/package.json @@ -1,6 +1,6 @@ { "name": "create-cloudflare", - "version": "2.35.1", + "version": "2.35.2", "description": "A CLI for creating and deploying new applications to Cloudflare.", "keywords": [ "cloudflare", @@ -49,7 +49,7 @@ "@cloudflare/cli": "workspace:*", "@cloudflare/eslint-config-worker": "workspace:*", "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@iarna/toml": "^3.0.0", "@types/command-exists": "^1.2.0", "@types/cross-spawn": "^6.0.2", diff --git a/packages/create-cloudflare/src/frameworks/package.json b/packages/create-cloudflare/src/frameworks/package.json index 1842e749198c..0f65ae7035d2 100644 --- a/packages/create-cloudflare/src/frameworks/package.json +++ b/packages/create-cloudflare/src/frameworks/package.json @@ -8,7 +8,7 @@ "create-astro": "4.11.0", "create-analog": "1.8.1", "@angular/create": "19.0.6", - "create-docusaurus": "3.6.3", + "create-docusaurus": "3.7.0", "create-hono": "0.14.3", "create-next-app": "15.1.3", "create-qwik": "1.12.0", diff --git a/packages/create-cloudflare/src/helpers/compatDate.ts b/packages/create-cloudflare/src/helpers/compatDate.ts index 1a4bca731924..6f83faa44278 100644 --- a/packages/create-cloudflare/src/helpers/compatDate.ts +++ b/packages/create-cloudflare/src/helpers/compatDate.ts @@ -27,7 +27,7 @@ export async function getWorkerdCompatibilityDate() { const match = latestWorkerdVersion.match(/\d+\.(\d{4})(\d{2})(\d{2})\.\d+/); if (match) { - const [, year, month, date] = match ?? 
[]; + const [, year, month, date] = match; const compatDate = `${year}-${month}-${date}`; s.stop(`${brandColor("compatibility date")} ${dim(compatDate)}`); diff --git a/packages/create-cloudflare/templates-experimental/next/c3.ts b/packages/create-cloudflare/templates-experimental/next/c3.ts index 80f0959f68ac..86f2bf7f9251 100644 --- a/packages/create-cloudflare/templates-experimental/next/c3.ts +++ b/packages/create-cloudflare/templates-experimental/next/c3.ts @@ -38,7 +38,7 @@ export default { // is not yet fully ready for Next.js 15, once it is we should remove the following frameworkCliPinnedVersion: "14.2.5", platform: "workers", - displayName: "Next (using Node.js compat + Workers Assets)", + displayName: "Next.js (using Node.js compat + Workers Assets)", path: "templates-experimental/next", copyFiles: { path: "./templates", diff --git a/packages/create-cloudflare/templates-experimental/solid/c3.ts b/packages/create-cloudflare/templates-experimental/solid/c3.ts index 6f481f60e6fe..088774a570f0 100644 --- a/packages/create-cloudflare/templates-experimental/solid/c3.ts +++ b/packages/create-cloudflare/templates-experimental/solid/c3.ts @@ -72,7 +72,7 @@ const config: TemplateConfig = { configVersion: 1, id: "solid", frameworkCli: "create-solid", - displayName: "Solid", + displayName: "SolidStart", platform: "workers", copyFiles: { path: "./templates", diff --git a/packages/create-cloudflare/templates/hello-world/js/wrangler.toml b/packages/create-cloudflare/templates/hello-world/js/wrangler.toml index 5ce580a43254..9626e4b81f8e 100644 --- a/packages/create-cloudflare/templates/hello-world/js/wrangler.toml +++ b/packages/create-cloudflare/templates/hello-world/js/wrangler.toml @@ -2,7 +2,6 @@ name = "" main = "src/index.js" compatibility_date = "" -compatibility_flags = ["nodejs_compat"] # Workers Logs # Docs: https://developers.cloudflare.com/workers/observability/logs/workers-logs/ diff --git a/packages/create-cloudflare/templates/hello-world/ts/wrangler.toml b/packages/create-cloudflare/templates/hello-world/ts/wrangler.toml index 16655fdac667..36520f8670cb 100644 --- a/packages/create-cloudflare/templates/hello-world/ts/wrangler.toml +++ b/packages/create-cloudflare/templates/hello-world/ts/wrangler.toml @@ -2,7 +2,6 @@ name = "" main = "src/index.ts" compatibility_date = "" -compatibility_flags = ["nodejs_compat"] # Workers Logs # Docs: https://developers.cloudflare.com/workers/observability/logs/workers-logs/ diff --git a/packages/create-cloudflare/templates/next/c3.ts b/packages/create-cloudflare/templates/next/c3.ts index 61447f4972e5..71cb7b480c68 100644 --- a/packages/create-cloudflare/templates/next/c3.ts +++ b/packages/create-cloudflare/templates/next/c3.ts @@ -176,7 +176,7 @@ export default { id: "next", frameworkCli: "create-next-app", platform: "pages", - displayName: "Next", + displayName: "Next.js", generate, configure, copyFiles: { diff --git a/packages/create-cloudflare/templates/solid/c3.ts b/packages/create-cloudflare/templates/solid/c3.ts index 6898b7a39bad..e406ad86e30d 100644 --- a/packages/create-cloudflare/templates/solid/c3.ts +++ b/packages/create-cloudflare/templates/solid/c3.ts @@ -66,7 +66,7 @@ const config: TemplateConfig = { configVersion: 1, id: "solid", frameworkCli: "create-solid", - displayName: "Solid", + displayName: "SolidStart", platform: "pages", copyFiles: { path: "./templates", diff --git a/packages/edge-preview-authenticated-proxy/package.json b/packages/edge-preview-authenticated-proxy/package.json index ce0940d97bea..cf5527319039 100644 
--- a/packages/edge-preview-authenticated-proxy/package.json +++ b/packages/edge-preview-authenticated-proxy/package.json @@ -12,7 +12,7 @@ }, "devDependencies": { "@cloudflare/eslint-config-worker": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@types/cookie": "^0.6.0", "cookie": "^0.6.0", "promjs": "^0.4.2", diff --git a/packages/format-errors/package.json b/packages/format-errors/package.json index 0838789dcda6..5db78cbb4415 100644 --- a/packages/format-errors/package.json +++ b/packages/format-errors/package.json @@ -11,7 +11,7 @@ }, "devDependencies": { "@cloudflare/eslint-config-worker": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "mustache": "^4.2.0", "promjs": "^0.4.2", "toucan-js": "^3.3.1", diff --git a/packages/kv-asset-handler/package.json b/packages/kv-asset-handler/package.json index b0e277877ccf..b5846483da8c 100644 --- a/packages/kv-asset-handler/package.json +++ b/packages/kv-asset-handler/package.json @@ -42,7 +42,7 @@ }, "devDependencies": { "@ava/typescript": "^4.1.0", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@types/mime": "^3.0.4", "@types/node": "catalog:default", "@types/service-worker-mock": "^2.0.1", diff --git a/packages/miniflare/CHANGELOG.md b/packages/miniflare/CHANGELOG.md index 048dbcc42a4f..3b263ebc1291 100644 --- a/packages/miniflare/CHANGELOG.md +++ b/packages/miniflare/CHANGELOG.md @@ -1,5 +1,18 @@ # miniflare +## 3.20241230.0 + +### Patch Changes + +- [#7652](https://github.com/cloudflare/workers-sdk/pull/7652) [`b4e0af1`](https://github.com/cloudflare/workers-sdk/commit/b4e0af163548ee8cc0aefc9165f67a0f83ea94d4) Thanks [@dependabot](https://github.com/apps/dependabot)! 
- chore: update dependencies of "miniflare" package + + The following dependency versions have been updated: + + | Dependency | From | To | + | ------------------------- | ------------- | ------------- | + | workerd | 1.20241218.0 | 1.20241230.0 | + | @cloudflare/workers-types | ^4.20241218.0 | ^4.20241230.0 | + ## 3.20241218.0 ### Patch Changes diff --git a/packages/miniflare/package.json b/packages/miniflare/package.json index be6716c02d15..fe21557f1bed 100644 --- a/packages/miniflare/package.json +++ b/packages/miniflare/package.json @@ -1,6 +1,6 @@ { "name": "miniflare", - "version": "3.20241218.0", + "version": "3.20241230.0", "description": "Fun, full-featured, fully-local simulator for Cloudflare Workers", "keywords": [ "cloudflare", @@ -50,7 +50,7 @@ "glob-to-regexp": "^0.4.1", "stoppable": "^1.1.0", "undici": "catalog:default", - "workerd": "1.20241218.0", + "workerd": "1.20241230.0", "ws": "^8.18.0", "youch": "^3.2.2", "zod": "^3.22.3" @@ -59,7 +59,7 @@ "@ava/typescript": "^4.1.0", "@cloudflare/kv-asset-handler": "workspace:*", "@cloudflare/workers-shared": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@cloudflare/workflows-shared": "workspace:*", "@microsoft/api-extractor": "^7.47.0", "@types/debug": "^4.1.7", diff --git a/packages/miniflare/src/http/server.ts b/packages/miniflare/src/http/server.ts index 1792a621d10f..76f617255f3d 100644 --- a/packages/miniflare/src/http/server.ts +++ b/packages/miniflare/src/http/server.ts @@ -2,15 +2,9 @@ import fs from "fs/promises"; import { z } from "zod"; import { CORE_PLUGIN } from "../plugins"; import { HttpOptions, Socket_Https } from "../runtime"; -import { Awaitable, CoreHeaders } from "../workers"; +import { Awaitable } from "../workers"; import { CERT, KEY } from "./cert"; -export const ENTRY_SOCKET_HTTP_OPTIONS: HttpOptions = { - // Even though we inject a `cf` object in the entry worker, allow it to - // be customised via `dispatchFetch` - cfBlobHeader: CoreHeaders.CF_BLOB, -}; - export async function getEntrySocketHttpOptions( coreOpts: z.infer ): Promise<{ http: HttpOptions } | { https: Socket_Https }> { @@ -34,7 +28,6 @@ export async function getEntrySocketHttpOptions( if (privateKey && certificateChain) { return { https: { - options: ENTRY_SOCKET_HTTP_OPTIONS, tlsOptions: { keypair: { privateKey: privateKey, @@ -44,7 +37,7 @@ export async function getEntrySocketHttpOptions( }, }; } else { - return { http: ENTRY_SOCKET_HTTP_OPTIONS }; + return { http: {} }; } } diff --git a/packages/miniflare/src/index.ts b/packages/miniflare/src/index.ts index 2c5987a4c216..398456c1055c 100644 --- a/packages/miniflare/src/index.ts +++ b/packages/miniflare/src/index.ts @@ -28,7 +28,6 @@ import { coupleWebSocket, DispatchFetch, DispatchFetchDispatcher, - ENTRY_SOCKET_HTTP_OPTIONS, fetch, getAccessibleHosts, getEntrySocketHttpOptions, @@ -1117,7 +1116,7 @@ export class Miniflare { sockets.push({ name: SOCKET_ENTRY_LOCAL, service: { name: SERVICE_ENTRY }, - http: ENTRY_SOCKET_HTTP_OPTIONS, + http: {}, address: "127.0.0.1:0", }); } diff --git a/packages/miniflare/src/workers/core/entry.worker.ts b/packages/miniflare/src/workers/core/entry.worker.ts index 884c818adc7c..402b71e75e5e 100644 --- a/packages/miniflare/src/workers/core/entry.worker.ts +++ b/packages/miniflare/src/workers/core/entry.worker.ts @@ -35,7 +35,8 @@ const encoder = new TextEncoder(); function getUserRequest( request: Request, - env: Env + env: Env, + clientIp: string | undefined ) { // The ORIGINAL_URL header is 
added to outbound requests from Miniflare, // triggered either by calling Miniflare.#dispatchFetch(request), @@ -89,15 +90,6 @@ function getUserRequest( // special handling to allow this if a `Request` instance is passed. // See https://github.com/cloudflare/workerd/issues/1122 for more details. request = new Request(url, request); - if (request.cf === undefined) { - const cf: IncomingRequestCfProperties = { - ...env[CoreBindings.JSON_CF_BLOB], - // Defaulting to empty string to preserve undefined `Accept-Encoding` - // through Wrangler's proxy worker. - clientAcceptEncoding: request.headers.get("Accept-Encoding") ?? "", - }; - request = new Request(request, { cf }); - } // `Accept-Encoding` is always set to "br, gzip" in Workers: // https://developers.cloudflare.com/fundamentals/reference/http-request-headers/#accept-encoding @@ -107,6 +99,18 @@ function getUserRequest( request.headers.set("Host", url.host); } + if (clientIp && !request.headers.get("CF-Connecting-IP")) { + const ipv4Regex = /(?<ip>.*?):\d+/; + const ipv6Regex = /\[(?<ip>.*?)\]:\d+/; + const ip = + clientIp.match(ipv6Regex)?.groups?.ip ?? + clientIp.match(ipv4Regex)?.groups?.ip; + + if (ip) { + request.headers.set("CF-Connecting-IP", ip); + } + } + request.headers.delete(CoreHeaders.PROXY_SHARED_SECRET); request.headers.delete(CoreHeaders.ORIGINAL_URL); request.headers.delete(CoreHeaders.DISABLE_PRETTY_ERROR); @@ -343,6 +347,22 @@ export default <ExportedHandler<Env>>{ async fetch(request, env, ctx) { const startTime = Date.now(); + const clientIp = request.cf?.clientIp as string; + + // Parse this manually (rather than using the `cfBlobHeader` config property in workerd to parse it into request.cf) + // This is because we want to have access to the clientIp, which workerd puts in request.cf if no cfBlobHeader is provided + const clientCfBlobHeader = request.headers.get(CoreHeaders.CF_BLOB); + + const cf: IncomingRequestCfProperties = clientCfBlobHeader + ? JSON.parse(clientCfBlobHeader) + : { + ...env[CoreBindings.JSON_CF_BLOB], + // Defaulting to empty string to preserve undefined `Accept-Encoding` + // through Wrangler's proxy worker. + clientAcceptEncoding: request.headers.get("Accept-Encoding") ??
"", + }; + request = new Request(request, { cf }); + // The proxy client will always specify an operation const isProxy = request.headers.get(CoreHeaders.OP) !== null; if (isProxy) return handleProxy(request, env); @@ -356,7 +376,7 @@ export default >{ const clientAcceptEncoding = request.headers.get("Accept-Encoding"); try { - request = getUserRequest(request, env); + request = getUserRequest(request, env, clientIp); } catch (e) { if (e instanceof HttpError) { return e.toResponse(); diff --git a/packages/miniflare/src/workers/d1/dumpSql.ts b/packages/miniflare/src/workers/d1/dumpSql.ts index 652c755c017b..b3cfed757fce 100644 --- a/packages/miniflare/src/workers/d1/dumpSql.ts +++ b/packages/miniflare/src/workers/d1/dumpSql.ts @@ -71,7 +71,7 @@ export function* dumpSql( return outputQuotedEscapedString(cell); } else if (cell instanceof ArrayBuffer) { return `X'${Array.prototype.map - .call(new Uint8Array(cell), (b) => b.toString(16)) + .call(new Uint8Array(cell), (b) => b.toString(16).padStart(2, "0")) .join("")}'`; } else { console.log({ colType, cellType, cell, column: columns[i] }); diff --git a/packages/miniflare/test/index.spec.ts b/packages/miniflare/test/index.spec.ts index ab3a872c05c8..f19177b2f06c 100644 --- a/packages/miniflare/test/index.spec.ts +++ b/packages/miniflare/test/index.spec.ts @@ -2610,6 +2610,85 @@ test("Miniflare: getCf() returns a user provided cf object", async (t) => { t.deepEqual(cf, { myFakeField: "test" }); }); +test("Miniflare: dispatchFetch() can override cf", async (t) => { + const mf = new Miniflare({ + script: + "export default { fetch(request) { return Response.json(request.cf) } }", + modules: true, + cf: { + myFakeField: "test", + }, + }); + t.teardown(() => mf.dispose()); + + const cf = await mf.dispatchFetch("http://example.com/", { + cf: { myFakeField: "test2" }, + }); + const cfJson = (await cf.json()) as { myFakeField: string }; + t.deepEqual(cfJson.myFakeField, "test2"); +}); + +test("Miniflare: CF-Connecting-IP is injected", async (t) => { + const mf = new Miniflare({ + script: + "export default { fetch(request) { return new Response(request.headers.get('CF-Connecting-IP')) } }", + modules: true, + cf: { + myFakeField: "test", + }, + }); + t.teardown(() => mf.dispose()); + + const ip = await mf.dispatchFetch("http://example.com/"); + // Tracked in https://github.com/cloudflare/workerd/issues/3310 + if (!isWindows) { + t.deepEqual(await ip.text(), "127.0.0.1"); + } else { + t.deepEqual(await ip.text(), ""); + } +}); + +test("Miniflare: CF-Connecting-IP is injected (ipv6)", async (t) => { + const mf = new Miniflare({ + script: + "export default { fetch(request) { return new Response(request.headers.get('CF-Connecting-IP')) } }", + modules: true, + cf: { + myFakeField: "test", + }, + host: "::1", + }); + t.teardown(() => mf.dispose()); + + const ip = await mf.dispatchFetch("http://example.com/"); + + // Tracked in https://github.com/cloudflare/workerd/issues/3310 + if (!isWindows) { + t.deepEqual(await ip.text(), "::1"); + } else { + t.deepEqual(await ip.text(), ""); + } +}); + +test("Miniflare: CF-Connecting-IP is preserved when present", async (t) => { + const mf = new Miniflare({ + script: + "export default { fetch(request) { return new Response(request.headers.get('CF-Connecting-IP')) } }", + modules: true, + cf: { + myFakeField: "test", + }, + }); + t.teardown(() => mf.dispose()); + + const ip = await mf.dispatchFetch("http://example.com/", { + headers: { + "CF-Connecting-IP": "128.0.0.1", + }, + }); + t.deepEqual(await ip.text(), 
"128.0.0.1"); +}); + test("Miniflare: can use module fallback service", async (t) => { const modulesRoot = "/"; const modules: Record> = { diff --git a/packages/pages-shared/CHANGELOG.md b/packages/pages-shared/CHANGELOG.md index f474d1cb0a24..f624c212d5e1 100644 --- a/packages/pages-shared/CHANGELOG.md +++ b/packages/pages-shared/CHANGELOG.md @@ -1,5 +1,12 @@ # @cloudflare/pages-shared +## 0.12.1 + +### Patch Changes + +- Updated dependencies [[`b4e0af1`](https://github.com/cloudflare/workers-sdk/commit/b4e0af163548ee8cc0aefc9165f67a0f83ea94d4)]: + - miniflare@3.20241230.0 + ## 0.12.0 ### Minor Changes diff --git a/packages/pages-shared/__tests__/asset-server/handler.test.ts b/packages/pages-shared/__tests__/asset-server/handler.test.ts index b97c1a8e91f0..905ffaed1a08 100644 --- a/packages/pages-shared/__tests__/asset-server/handler.test.ts +++ b/packages/pages-shared/__tests__/asset-server/handler.test.ts @@ -510,6 +510,87 @@ describe("asset-server handler", () => { ); }); + test("early hints should cache empty link headers", async () => { + const deploymentId = "deployment-" + Math.random(); + const metadata = createMetadataObject({ deploymentId }) as Metadata; + + const findAssetEntryForPath = async (path: string) => { + if (path === "/index.html") { + return "index.html"; + } + + return null; + }; + + // Create cache storage to reuse between requests + const { caches } = createCacheStorage(); + + const getResponse = async () => + getTestResponse({ + request: new Request("https://example.com/"), + metadata, + findAssetEntryForPath, + caches, + fetchAsset: () => + Promise.resolve( + Object.assign( + new Response(` + + + +

+ <html>
+ <body>
+ I'm a teapot
+ </body>
+ </html>

+ + `), + { contentType: "text/html" } + ) + ), + }); + + const { response, spies } = await getResponse(); + expect(response.status).toBe(200); + // waitUntil should be called twice: once for asset-preservation, once for early hints + expect(spies.waitUntil.length).toBe(2); + + await Promise.all(spies.waitUntil); + + const earlyHintsCache = await caches.open(`eh:${deploymentId}`); + const earlyHintsRes = await earlyHintsCache.match("https://example.com/"); + + if (!earlyHintsRes) { + throw new Error( + "Did not match early hints cache on https://example.com/" + ); + } + + expect(earlyHintsRes.headers.get("link")).toBeNull(); + + // Do it again, but this time ensure that we didn't write to cache again + const { response: response2, spies: spies2 } = await getResponse(); + + expect(response2.status).toBe(200); + // waitUntil should only be called for asset-preservation + expect(spies2.waitUntil.length).toBe(1); + + await Promise.all(spies2.waitUntil); + + const earlyHintsRes2 = await earlyHintsCache.match("https://example.com/"); + + if (!earlyHintsRes2) { + throw new Error( + "Did not match early hints cache on https://example.com/" + ); + } + + expect(earlyHintsRes2.headers.get("link")).toBeNull(); + }); + + test.todo( + "early hints should temporarily cache failures to parse links", + async () => { + // I couldn't figure out a way to make HTMLRewriter error out + } + ); + describe("should serve deleted assets from preservation cache", async () => { beforeEach(() => { vi.useFakeTimers(); diff --git a/packages/pages-shared/asset-server/handler.ts b/packages/pages-shared/asset-server/handler.ts index d1f68e33e7c9..7e7db890c2e8 100644 --- a/packages/pages-shared/asset-server/handler.ts +++ b/packages/pages-shared/asset-server/handler.ts @@ -436,22 +436,30 @@ export async function generateHandler< }); const linkHeader = preEarlyHintsHeaders.get("Link"); + const earlyHintsHeaders = new Headers({ + "Cache-Control": "max-age=2592000", // 30 days + }); if (linkHeader) { - await earlyHintsCache.put( - earlyHintsCacheKey, - new Response(null, { - headers: { - Link: linkHeader, - "Cache-Control": "max-age=2592000", // 30 days - }, - }) - ); + earlyHintsHeaders.append("Link", linkHeader); } + await earlyHintsCache.put( + earlyHintsCacheKey, + new Response(null, { headers: earlyHintsHeaders }) + ); } catch (err) { // Nbd if we fail here in the deferred 'waitUntil' work. We're probably trying to parse a malformed page or something. // Totally fine to skip over any errors. // If we need to debug something, you can uncomment the following: // logError(err) + // In any case, let's not bother checking again for another day. 
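+ // (i.e. cache an empty early-hints entry with a shorter, 1-day TTL rather than the usual 30 days)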
+ await earlyHintsCache.put( + earlyHintsCacheKey, + new Response(null, { + headers: { + "Cache-Control": "max-age=86400", // 1 day + }, + }) + ); } })() ); diff --git a/packages/pages-shared/package.json b/packages/pages-shared/package.json index aee1ff835fd0..9a75604198d4 100644 --- a/packages/pages-shared/package.json +++ b/packages/pages-shared/package.json @@ -1,6 +1,6 @@ { "name": "@cloudflare/pages-shared", - "version": "0.12.0", + "version": "0.12.1", "repository": { "type": "git", "url": "https://github.com/cloudflare/workers-sdk.git", @@ -23,7 +23,7 @@ }, "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@miniflare/cache": "^2.14.2", "@miniflare/core": "^2.14.2", "@miniflare/html-rewriter": "^2.14.2", diff --git a/packages/playground-preview-worker/package.json b/packages/playground-preview-worker/package.json index 44379d2c9fff..f804f277618d 100644 --- a/packages/playground-preview-worker/package.json +++ b/packages/playground-preview-worker/package.json @@ -19,7 +19,7 @@ }, "devDependencies": { "@cloudflare/eslint-config-worker": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@types/cookie": "^0.6.0", "cookie": "^0.6.0", "itty-router": "^4.0.13", diff --git a/packages/prerelease-registry/package.json b/packages/prerelease-registry/package.json index 52a137406025..25537de1acf1 100644 --- a/packages/prerelease-registry/package.json +++ b/packages/prerelease-registry/package.json @@ -18,7 +18,7 @@ "devDependencies": { "@cloudflare/eslint-config-worker": "workspace:*", "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "wrangler": "workspace:*" }, "volta": { diff --git a/packages/quick-edit-extension/package.json b/packages/quick-edit-extension/package.json index 4bfa6629d29e..d01ce7e113f7 100644 --- a/packages/quick-edit-extension/package.json +++ b/packages/quick-edit-extension/package.json @@ -43,7 +43,7 @@ ], "devDependencies": { "@cloudflare/workers-tsconfig": "workspace:^", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "esbuild": "0.17.19", "esbuild-register": "^3.5.0" }, diff --git a/packages/unenv-preset/build.config.ts b/packages/unenv-preset/build.config.ts index f73862c567ff..6ec0a78c5e61 100644 --- a/packages/unenv-preset/build.config.ts +++ b/packages/unenv-preset/build.config.ts @@ -1,19 +1,7 @@ import { defineBuildConfig } from "unbuild"; export default defineBuildConfig({ - declaration: true, - rollup: { - emitCJS: true, - }, entries: [ - "src/index", - { input: "src/runtime/", outDir: "dist/runtime", format: "esm" }, - { - input: "src/runtime/", - outDir: "dist/runtime", - format: "cjs", - ext: "cjs", - declaration: false, - }, + { input: "src/", outDir: "dist/src", format: "esm", declaration: true }, ], }); diff --git a/packages/unenv-preset/package.json b/packages/unenv-preset/package.json index 538d198473f1..f24e69ce7f5a 100644 --- a/packages/unenv-preset/package.json +++ b/packages/unenv-preset/package.json @@ -21,43 +21,49 @@ }, "license": "MIT OR Apache-2.0", "sideEffects": false, + "type": "module", "exports": { ".": { - "types": "./dist/index.d.ts", - "require": "./dist/index.cjs", - "import": "./dist/index.mjs" + "types": "./dist/src/index.d.ts", + "import": "./dist/src/index.mjs" }, "./runtime/*": { - "types": "./dist/runtime/*.d.ts", - 
"require": "./dist/runtime/*.cjs", - "import": "./dist/runtime/*.mjs" + "types": "./dist/src/runtime/*.d.ts", + "import": "./dist/src/runtime/*.mjs" } }, - "main": "./dist/index.cjs", - "types": "./dist/index.d.ts", + "main": "./dist/src/index.mjs", + "types": "./dist/src/index.d.ts", "files": [ - "dist", - "runtime" + "dist/src" ], "scripts": { "build": "unbuild", "check:lint": "eslint", - "check:type": "tsc --noEmit" + "check:type": "tsc --noEmit", + "test:ci": "vitest run", + "test:watch": "vitest" }, "devDependencies": { "@types/node": "*", "typescript": "catalog:default", - "unbuild": "^2.0.0" + "unbuild": "^2.0.0", + "undici": "catalog:default", + "vitest": "catalog:default", + "wrangler": "workspace:*" }, "peerDependencies": { "unenv": "npm:unenv-nightly@*", - "workerd": "^1.20241216.0" + "workerd": "^1.20241230.0" }, "peerDependenciesMeta": { "workerd": { "optional": true } }, + "publishConfig": { + "access": "public" + }, "workers-sdk": { "prerelease": true } diff --git a/packages/unenv-preset/tests/index.test.ts b/packages/unenv-preset/tests/index.test.ts new file mode 100644 index 000000000000..2f730e3bdb62 --- /dev/null +++ b/packages/unenv-preset/tests/index.test.ts @@ -0,0 +1,64 @@ +import fs from "node:fs"; +import path from "node:path"; +import { platform } from "node:process"; +import { fileURLToPath } from "node:url"; +import { fetch } from "undici"; +import { afterAll, beforeAll, describe, expect, test } from "vitest"; +import { runWranglerDev } from "../../../fixtures/shared/src/run-wrangler-long-lived"; +import { TESTS } from "./worker/index"; + +// Root of the current package +const pkgDir = path.resolve(fileURLToPath(import.meta.url), "../.."); +// workerd binary +const localWorkerdPath = path.join(pkgDir, "node_modules/.bin/workerd"); +// Base path for resolving `@cloudflare/unenv-preset` files +const localPresetResolveBaseDir = path.join(pkgDir, "package.json"); +// Base path for resolving `unjs/unenv` files +const localUnenvResolveBaseDir = path.join( + pkgDir, + "node_modules/unenv/package.json" +); + +// `runWranglerDev` + `MINIFLARE_WORKERD_PATH` is not supported on Windows +// See https://github.com/nodejs/node/issues/52554 +describe.skipIf(platform === "win32")( + `@cloudflare/unenv-preset ${platform} ${localWorkerdPath} ${fs.existsSync(localWorkerdPath)}`, + () => { + let wrangler: Awaited> | undefined; + + beforeAll(async () => { + // Use workerd binary install in `@cloudflare/unenv-preset` + // rather than the one bundled with wrangler. + const MINIFLARE_WORKERD_PATH = localWorkerdPath; + + // Use the preset from the local `@cloudflare/unenv-preset` and `unjs/unenv` + // rather than the one bundled with wrangler. 
+ const WRANGLER_UNENV_RESOLVE_PATHS = [ + localPresetResolveBaseDir, + localUnenvResolveBaseDir, + ].join(","); + + wrangler = await runWranglerDev( + path.join(__dirname, "worker"), + ["--port=0", "--inspector-port=0"], + { + MINIFLARE_WORKERD_PATH, + WRANGLER_UNENV_RESOLVE_PATHS, + } + ); + }); + + afterAll(async () => { + await wrangler?.stop(); + wrangler = undefined; + }); + + test.for(Object.keys(TESTS))("%s", async (testName) => { + expect(wrangler).toBeDefined(); + const { ip, port } = wrangler!; + const response = await fetch(`http://${ip}:${port}/${testName}`); + const body = await response.text(); + expect(body).toMatch("OK!"); + }); + } +); diff --git a/fixtures/nodejs-hybrid-app/src/unenv-preset.ts b/packages/unenv-preset/tests/worker/index.ts similarity index 74% rename from fixtures/nodejs-hybrid-app/src/unenv-preset.ts rename to packages/unenv-preset/tests/worker/index.ts index 6b5bed213d0d..62cf9085a80c 100644 --- a/fixtures/nodejs-hybrid-app/src/unenv-preset.ts +++ b/packages/unenv-preset/tests/worker/index.ts @@ -1,21 +1,41 @@ import assert from "node:assert"; -// TODO: move to `@cloudflare/unenv-preset` -// See: https://github.com/cloudflare/workers-sdk/issues/7579 -export async function testUnenvPreset() { - try { - await testCryptoGetRandomValues(); - await testWorkerdImplementsBuffer(); - await testWorkerdModules(); - await testUtilImplements(); - await testWorkerdPath(); - await testWorkerdDns(); - } catch (e) { - return new Response(String(e)); - } +// List all the test functions. +// The test can be executing by fetching the `/${testName}` url. +export const TESTS = { + testCryptoGetRandomValues, + testImplementsBuffer, + testModules, + testUtilImplements, + testPath, + testDns, +}; - return new Response("OK!"); -} +export default { + async fetch(request: Request): Promise { + const url = new URL(request.url); + const testName = url.pathname.slice(1); + const test = TESTS[testName]; + if (!test) { + return new Response( + `

<h1>${testName ? `${testName} not found!` : `Pick a test to run`}</h1>
+ <ul>
+ ${Object.keys(TESTS)
+   .map((name) => `<li><a href="/${name}">${name}</a></li>`)
+   .join("")}
+ </ul>
`, + { headers: { "Content-Type": "text/html; charset=utf-8" } } + ); + } + try { + await test(); + } catch (e) { + return new Response(String(e)); + } + + return new Response("OK!"); + }, +}; async function testCryptoGetRandomValues() { const crypto = await import("node:crypto"); @@ -26,7 +46,7 @@ async function testCryptoGetRandomValues() { assert(array.every((v) => v >= 0 && v <= 0xff_ff_ff_ff)); } -async function testWorkerdImplementsBuffer() { +async function testImplementsBuffer() { const encoder = new TextEncoder(); const buffer = await import("node:buffer"); const Buffer = buffer.Buffer; @@ -53,7 +73,7 @@ async function testWorkerdImplementsBuffer() { assert.strictEqual(typeof buffer.resolveObjectURL, "function"); } -async function testWorkerdModules() { +async function testModules() { const module = await import("node:module"); // @ts-expect-error exposed by workerd const require = module.createRequire("/"); @@ -89,7 +109,7 @@ async function testUtilImplements() { assert.strictEqual(types.isAnyArrayBuffer(new ArrayBuffer(0)), true); } -async function testWorkerdPath() { +async function testPath() { const pathWin32 = await import("node:path/win32"); assert.strictEqual(pathWin32.sep, "\\"); assert.strictEqual(pathWin32.delimiter, ";"); @@ -98,7 +118,7 @@ async function testWorkerdPath() { assert.strictEqual(pathPosix.delimiter, ":"); } -async function testWorkerdDns() { +async function testDns() { const dns = await import("node:dns"); await new Promise((resolve, reject) => { dns.resolveTxt("nodejs.org", (error, results) => { diff --git a/packages/unenv-preset/tests/worker/wrangler.json b/packages/unenv-preset/tests/worker/wrangler.json new file mode 100644 index 000000000000..be599a2d4c4f --- /dev/null +++ b/packages/unenv-preset/tests/worker/wrangler.json @@ -0,0 +1,6 @@ +{ + "name": "cloudflare-unenv-preset-test", + "main": "./index.ts", + "compatibility_date": "2024-12-16", + "compatibility_flags": ["nodejs_compat"] +} diff --git a/packages/vitest-pool-workers/CHANGELOG.md b/packages/vitest-pool-workers/CHANGELOG.md index af5be2daa0d8..f04ae7fb8292 100644 --- a/packages/vitest-pool-workers/CHANGELOG.md +++ b/packages/vitest-pool-workers/CHANGELOG.md @@ -1,5 +1,17 @@ # @cloudflare/vitest-pool-workers +## 0.5.41 + +### Patch Changes + +- [#7668](https://github.com/cloudflare/workers-sdk/pull/7668) [`94f650e`](https://github.com/cloudflare/workers-sdk/commit/94f650eaa1c25b626a05c1a0c2b107910656dcf7) Thanks [@romeupalos](https://github.com/romeupalos)! - fix: Add support interception of URLs with repeated key/name in its query params. 
+ + e.g., `https://example.com/foo/bar?a=1&a=2` + +- Updated dependencies [[`cac7fa6`](https://github.com/cloudflare/workers-sdk/commit/cac7fa6160ecc70d8f188de1f494a07c0e1e9626), [`d8fb032`](https://github.com/cloudflare/workers-sdk/commit/d8fb032ba24ac284147dc481c28ab8dbcf7a9d72), [`b4e0af1`](https://github.com/cloudflare/workers-sdk/commit/b4e0af163548ee8cc0aefc9165f67a0f83ea94d4), [`6c2f173`](https://github.com/cloudflare/workers-sdk/commit/6c2f17341037962bdf675e7008a4d91059465e16), [`755a27c`](https://github.com/cloudflare/workers-sdk/commit/755a27c7a5d7f35cb5f05ab2e12af6d64ce323fb), [`8abb43f`](https://github.com/cloudflare/workers-sdk/commit/8abb43fcdf0c506fa6268a7f07aa31b398b7daf2)]: + - wrangler@3.100.0 + - miniflare@3.20241230.0 + ## 0.5.40 ### Patch Changes diff --git a/packages/vitest-pool-workers/package.json b/packages/vitest-pool-workers/package.json index bd1740da036b..987884f3d8d5 100644 --- a/packages/vitest-pool-workers/package.json +++ b/packages/vitest-pool-workers/package.json @@ -1,6 +1,6 @@ { "name": "@cloudflare/vitest-pool-workers", - "version": "0.5.40", + "version": "0.5.41", "description": "Workers Vitest integration for writing Vitest unit and integration tests that run inside the Workers runtime", "keywords": [ "cloudflare", @@ -64,7 +64,7 @@ "devDependencies": { "@cloudflare/eslint-config-worker": "workspace:*", "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@types/node": "catalog:default", "@types/semver": "^7.5.1", "@vitest/runner": "catalog:default", diff --git a/packages/vitest-pool-workers/src/pool/index.ts b/packages/vitest-pool-workers/src/pool/index.ts index 19ec83497c36..cd68091bb267 100644 --- a/packages/vitest-pool-workers/src/pool/index.ts +++ b/packages/vitest-pool-workers/src/pool/index.ts @@ -9,6 +9,7 @@ import { createBirpc } from "birpc"; import * as devalue from "devalue"; import { compileModuleRules, + getNodeCompat, kCurrentWorker, kUnsafeEphemeralUniqueKey, Log, @@ -355,11 +356,6 @@ function buildProjectWorkerOptions( disableFlag: "export_commonjs_namespace", defaultOnDate: "2022-10-31", }), - () => - flagAssertions.assertAtLeastOneFlagExists([ - "nodejs_compat", - "nodejs_compat_v2", - ]), ]; for (const assertion of assertions) { @@ -369,6 +365,18 @@ function buildProjectWorkerOptions( } } + const { mode } = getNodeCompat( + runnerWorker.compatibilityDate, + runnerWorker.compatibilityFlags + ); + + if (mode !== "v1" && mode !== "v2") { + runnerWorker.compatibilityFlags.push( + "nodejs_compat", + "no_nodejs_compat_v2" + ); + } + // Required for `workerd:unsafe` module. We don't require this flag to be set // as it's experimental, so couldn't be deployed by users. if (!runnerWorker.compatibilityFlags.includes("unsafe_module")) { diff --git a/packages/vitest-pool-workers/src/worker/fetch-mock.ts b/packages/vitest-pool-workers/src/worker/fetch-mock.ts index e0b89b0b995a..3c5e64e0cd30 100644 --- a/packages/vitest-pool-workers/src/worker/fetch-mock.ts +++ b/packages/vitest-pool-workers/src/worker/fetch-mock.ts @@ -93,11 +93,10 @@ globalThis.fetch = async (input, init) => { const bodyText = bodyArray === null ? 
"" : DECODER.decode(bodyArray); const dispatchOptions: Dispatcher.DispatchOptions = { origin: url.origin, - path: url.pathname, + path: url.pathname + url.search, method: request.method as Dispatcher.HttpMethod, body: bodyText, headers: requestHeaders, - query: Object.fromEntries(url.searchParams), }; requests.set(dispatchOptions, { request, body: bodyArray }); diff --git a/packages/workers-shared/CHANGELOG.md b/packages/workers-shared/CHANGELOG.md index 2e44f3416542..adafa14eb562 100644 --- a/packages/workers-shared/CHANGELOG.md +++ b/packages/workers-shared/CHANGELOG.md @@ -1,5 +1,11 @@ # @cloudflare/workers-shared +## 0.11.2 + +### Patch Changes + +- [#7612](https://github.com/cloudflare/workers-sdk/pull/7612) [`2e78812`](https://github.com/cloudflare/workers-sdk/commit/2e78812ade7cd7361b023c90afe06221a52b79eb) Thanks [@Cherry](https://github.com/Cherry)! - fix: resolves an issue where a malformed path such as `https://example.com/%A0` would cause an unhandled error + ## 0.11.1 ### Patch Changes diff --git a/packages/workers-shared/package.json b/packages/workers-shared/package.json index be368edff415..83e21716c369 100644 --- a/packages/workers-shared/package.json +++ b/packages/workers-shared/package.json @@ -1,6 +1,6 @@ { "name": "@cloudflare/workers-shared", - "version": "0.11.1", + "version": "0.11.2", "description": "Package that is used at Cloudflare to power some internal features of Cloudflare Workers.", "keywords": [ "cloudflare", @@ -50,7 +50,7 @@ "@cloudflare/eslint-config-worker": "workspace:*", "@cloudflare/vitest-pool-workers": "^0.5.31", "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@types/mime": "^3.0.4", "concurrently": "^8.2.2", "esbuild": "0.17.19", diff --git a/packages/workers.new/package.json b/packages/workers.new/package.json index 817a80a229df..c9dc02f673e9 100644 --- a/packages/workers.new/package.json +++ b/packages/workers.new/package.json @@ -11,7 +11,7 @@ }, "devDependencies": { "@cloudflare/vitest-pool-workers": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@types/node": "catalog:default", "miniflare": "workspace:*", "typescript": "catalog:default", diff --git a/packages/workflows-shared/package.json b/packages/workflows-shared/package.json index 7750ebd6e0b9..ba8e5fea429a 100644 --- a/packages/workflows-shared/package.json +++ b/packages/workflows-shared/package.json @@ -41,7 +41,7 @@ "devDependencies": { "@cloudflare/eslint-config-worker": "workspace:*", "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@types/mime": "^3.0.4", "esbuild": "0.17.19", "rimraf": "^6.0.1", diff --git a/packages/wrangler/CHANGELOG.md b/packages/wrangler/CHANGELOG.md index f60800d3f8bd..c243026c5fb2 100644 --- a/packages/wrangler/CHANGELOG.md +++ b/packages/wrangler/CHANGELOG.md @@ -1,5 +1,42 @@ # wrangler +## 3.100.0 + +### Minor Changes + +- [#7604](https://github.com/cloudflare/workers-sdk/pull/7604) [`6c2f173`](https://github.com/cloudflare/workers-sdk/commit/6c2f17341037962bdf675e7008a4d91059465e16) Thanks [@CarmenPopoviciu](https://github.com/CarmenPopoviciu)! 
- feat: Capture Workers with static assets in the telemetry data + + We want to measure accurately what this number of Workers + Assets projects running in remote mode is, as this number will be a very helpful data point down the road, when more decisions around remote mode will have to be taken. + + These changes add this kind of insight to our telemetry data, by capturing whether the command running is in the context of a Workers + Assets project. + + N.B. With these changes in place we will be capturing the Workers + Assets context for all commands, not just wrangler dev --remote. + +### Patch Changes + +- [#7581](https://github.com/cloudflare/workers-sdk/pull/7581) [`cac7fa6`](https://github.com/cloudflare/workers-sdk/commit/cac7fa6160ecc70d8f188de1f494a07c0e1e9626) Thanks [@vicb](https://github.com/vicb)! - chore(wrangler): update unenv dependency version + + unenv now uses the workerd implementation on node:dns + See the [unjs/unenv#376](https://github.com/unjs/unenv/pull/376) + +- [#7625](https://github.com/cloudflare/workers-sdk/pull/7625) [`d8fb032`](https://github.com/cloudflare/workers-sdk/commit/d8fb032ba24ac284147dc481c28ab8dbcf7a9d72) Thanks [@vicb](https://github.com/vicb)! - feat(wrangler): use unenv builtin dependency resolution + + Moving away from `require.resolve()` to handle unenv aliased packages. + Using the unenv builtin resolution will allow us to drop the .cjs file from the preset + and to override the base path so that we can test the dev version of the preset. + +- [#7533](https://github.com/cloudflare/workers-sdk/pull/7533) [`755a27c`](https://github.com/cloudflare/workers-sdk/commit/755a27c7a5d7f35cb5f05ab2e12af6d64ce323fb) Thanks [@danielgek](https://github.com/danielgek)! - Add warning about the browser rendering not available on local + +- [#7614](https://github.com/cloudflare/workers-sdk/pull/7614) [`8abb43f`](https://github.com/cloudflare/workers-sdk/commit/8abb43fcdf0c506fa6268a7f07aa31b398b7daf2) Thanks [@vicb](https://github.com/vicb)! - chore(wrangler): update unenv dependency version + + The updated unenv contains a fix for the module resolution, + see . + That bug prevented us from using unenv module resolution, + see . + +- Updated dependencies [[`b4e0af1`](https://github.com/cloudflare/workers-sdk/commit/b4e0af163548ee8cc0aefc9165f67a0f83ea94d4)]: + - miniflare@3.20241230.0 + ## 3.99.0 ### Minor Changes diff --git a/packages/wrangler/CONTRIBUTING.md b/packages/wrangler/CONTRIBUTING.md index aa8a4f80e484..66a1f8a6da0d 100644 --- a/packages/wrangler/CONTRIBUTING.md +++ b/packages/wrangler/CONTRIBUTING.md @@ -152,11 +152,9 @@ feat: implement the `wrangler versions deploy` command This command allows users to deploy a multiple versions of their Worker. -Note: while in open-beta, the `--experimental-versions` flag is required. 
- For interactive use (to be prompted for all options), run: -- `wrangler versions deploy --x-versions` +- `wrangler versions deploy` For non-interactive use, run with CLI args (and `--yes` to accept defaults): diff --git a/packages/wrangler/e2e/dev-registry.test.ts b/packages/wrangler/e2e/dev-registry.test.ts index 273cf7cb041f..77da2d88f3af 100644 --- a/packages/wrangler/e2e/dev-registry.test.ts +++ b/packages/wrangler/e2e/dev-registry.test.ts @@ -99,27 +99,25 @@ describe("unstable_dev()", () => { const childWorker = await unstable_dev( "${child.replaceAll("\\", "/")}/src/index.ts", { - configPath: "${child.replaceAll("\\", "/")}/wrangler.toml", experimental: { disableExperimentalWarning: true, }, } ); + // Wait long enough for the child to register itself on the Worker Registry + // before we boot up the parent that needs to know about it. await setTimeout(2000) const parentWorker = await unstable_dev( "src/index.ts", { - configPath: "wrangler.toml", experimental: { disableExperimentalWarning: true, }, } ); - await setTimeout(2000) - console.log(await parentWorker.fetch("/").then(r => r.text())) process.exit(0); diff --git a/packages/wrangler/e2e/dev-with-resources.test.ts b/packages/wrangler/e2e/dev-with-resources.test.ts index 1910d82e7d18..f464c54b9ab2 100644 --- a/packages/wrangler/e2e/dev-with-resources.test.ts +++ b/packages/wrangler/e2e/dev-with-resources.test.ts @@ -8,6 +8,9 @@ import WebSocket from "ws"; import { WranglerE2ETestHelper } from "./helpers/e2e-wrangler-test"; import { generateResourceName } from "./helpers/generate-resource-name"; +const port = await getPort(); +const inspectorPort = await getPort(); + const RUNTIMES = [ { flags: "", runtime: "local" }, { flags: "--remote", runtime: "remote" }, @@ -60,7 +63,9 @@ describe.sequential.each(RUNTIMES)("Core: $flags", ({ runtime, flags }) => { } `, }); - const worker = helper.runLongLived(`wrangler dev ${flags}`); + const worker = helper.runLongLived( + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` + ); const { url } = await worker.waitForReady(); let res = await fetch(url); @@ -98,7 +103,9 @@ describe.sequential.each(RUNTIMES)("Core: $flags", ({ runtime, flags }) => { }); `, }); - const worker = helper.runLongLived(`wrangler dev ${flags}`); + const worker = helper.runLongLived( + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` + ); const { url } = await worker.waitForReady(); let res = await fetch(url); expect(await res.text()).toBe("service worker"); @@ -147,7 +154,9 @@ describe.sequential.each(RUNTIMES)("Core: $flags", ({ runtime, flags }) => { } `, }); - const worker = helper.runLongLived(`wrangler dev ${flags}`); + const worker = helper.runLongLived( + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` + ); const { url } = await worker.waitForReady(); const res = await fetch(url); expect(await res.json()).toEqual({ @@ -158,8 +167,6 @@ describe.sequential.each(RUNTIMES)("Core: $flags", ({ runtime, flags }) => { }); it("starts inspector and allows debugging", async () => { - const inspectorPort = await getPort(); - await helper.seed({ "wrangler.toml": dedent` name = "${workerName}" @@ -173,7 +180,7 @@ describe.sequential.each(RUNTIMES)("Core: $flags", ({ runtime, flags }) => { `, }); const worker = helper.runLongLived( - `wrangler dev ${flags} --inspector-port=${inspectorPort}` + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` ); await worker.waitForReady(); const inspectorUrl = new 
URL(`ws://127.0.0.1:${inspectorPort}`); @@ -198,7 +205,7 @@ describe.sequential.each(RUNTIMES)("Core: $flags", ({ runtime, flags }) => { `, }); const worker = helper.runLongLived( - `wrangler dev ${flags} --local-protocol=https` + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort} --local-protocol=https` ); const { url } = await worker.waitForReady(); const parsedURL = new URL(url); @@ -224,7 +231,7 @@ describe.sequential.each(RUNTIMES)("Core: $flags", ({ runtime, flags }) => { }); // TODO(soon): explore using `--host` for remote mode in this test const worker = helper.runLongLived( - `wrangler dev ${flags} --local-upstream=example.com` + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort} --local-upstream=example.com` ); const { url } = await worker.waitForReady(); const res = await fetch(url); @@ -270,7 +277,9 @@ describe.sequential.each(RUNTIMES)("Bindings: $flags", ({ runtime, flags }) => { }); `, }); - const worker = helper.runLongLived(`wrangler dev ${flags}`); + const worker = helper.runLongLived( + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` + ); const { url } = await worker.waitForReady(); const res = await fetch(url); expect(await res.json()).toEqual({ @@ -298,7 +307,9 @@ describe.sequential.each(RUNTIMES)("Bindings: $flags", ({ runtime, flags }) => { }); `, }); - const worker = helper.runLongLived(`wrangler dev ${flags}`); + const worker = helper.runLongLived( + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` + ); const { url } = await worker.waitForReady(); const res = await fetch(url); expect(await res.text()).toBe("3"); @@ -330,7 +341,9 @@ describe.sequential.each(RUNTIMES)("Bindings: $flags", ({ runtime, flags }) => { } `, }); - const worker = helper.runLongLived(`wrangler dev ${flags}`); + const worker = helper.runLongLived( + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` + ); const { url } = await worker.waitForReady(); const res = await fetch(url); expect(await res.text()).toBe("existing-value"); @@ -399,7 +412,9 @@ describe.sequential.each(RUNTIMES)("Bindings: $flags", ({ runtime, flags }) => { `, }); - const worker = helper.runLongLived(`wrangler dev ${flags}`); + const worker = helper.runLongLived( + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` + ); const { url } = await worker.waitForReady(); const res = await fetch(url); expect(await res.text()).toBe("
👋
"); @@ -432,7 +447,9 @@ describe.sequential.each(RUNTIMES)("Bindings: $flags", ({ runtime, flags }) => { } `, }); - const worker = helper.runLongLived(`wrangler dev ${flags}`); + const worker = helper.runLongLived( + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` + ); const { url } = await worker.waitForReady(); const res = await fetch(url); expect(await res.text()).toBe("existing-value"); @@ -483,7 +500,9 @@ describe.sequential.each(RUNTIMES)("Bindings: $flags", ({ runtime, flags }) => { `wrangler d1 execute ${d1ResourceFlags} DB --file schema.sql` ); // D1 defaults to `--local`, so we deliberately use `flags`, not `resourceFlags` - const worker = helper.runLongLived(`wrangler dev ${flags}`); + const worker = helper.runLongLived( + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` + ); const { url } = await worker.waitForReady(); const res = await fetch(url); expect(await res.json()).toEqual([{ key: "key1", value: "value1" }]); @@ -551,7 +570,7 @@ describe.sequential.each(RUNTIMES)("Bindings: $flags", ({ runtime, flags }) => { }); const worker = helper.runLongLived( - `wrangler dev ${flags} --experimental-vectorize-bind-to-prod` + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort} --experimental-vectorize-bind-to-prod` ); const { url } = await worker.waitForReady(); const res = await fetch(url); @@ -585,7 +604,9 @@ describe.sequential.each(RUNTIMES)("Bindings: $flags", ({ runtime, flags }) => { `, }); - const worker = helper.runLongLived(`wrangler dev ${flags}`); + const worker = helper.runLongLived( + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` + ); const { url } = await worker.waitForReady(); const res = await fetch(url); @@ -619,7 +640,9 @@ describe.sequential.each(RUNTIMES)("Bindings: $flags", ({ runtime, flags }) => { } `, }); - const worker = helper.runLongLived(`wrangler dev ${flags}`); + const worker = helper.runLongLived( + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` + ); const { url } = await worker.waitForReady(); await fetch(url); await worker.readUntil(/✉️/); @@ -659,7 +682,9 @@ describe.sequential.each(RUNTIMES)("Bindings: $flags", ({ runtime, flags }) => { } `, }); - const worker = helper.runLongLived(`wrangler dev ${flags}`); + const worker = helper.runLongLived( + `wrangler dev ${flags} --port ${port} --inspector-port ${inspectorPort}` + ); const { url } = await worker.waitForReady(); const res = await fetch(url); diff --git a/packages/wrangler/e2e/pages-dev.test.ts b/packages/wrangler/e2e/pages-dev.test.ts index 6045bd0e1949..6971cae67d82 100644 --- a/packages/wrangler/e2e/pages-dev.test.ts +++ b/packages/wrangler/e2e/pages-dev.test.ts @@ -9,115 +9,117 @@ import { WranglerE2ETestHelper } from "./helpers/e2e-wrangler-test"; import { fetchText } from "./helpers/fetch-text"; import { normalizeOutput } from "./helpers/normalize"; -describe.each([{ cmd: "wrangler pages dev" }])("Pages $cmd", ({ cmd }) => { - it("should warn if no [--compatibility_date] command line arg was specified", async () => { - const helper = new WranglerE2ETestHelper(); - await helper.seed({ - "_worker.js": dedent` +const port = await getPort(); +const inspectorPort = await getPort(); + +describe.sequential.each([{ cmd: "wrangler pages dev" }])( + "Pages $cmd", + ({ cmd }) => { + it("should warn if no [--compatibility_date] command line arg was specified", async () => { + const helper = new WranglerE2ETestHelper(); + await helper.seed({ + "_worker.js": dedent` export default { 
fetch(request, env) { return new Response("Testing [--compatibility_date]") } }`, + }); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} .` + ); + const { url } = await worker.waitForReady(); + + const currentDate = new Date().toISOString().substring(0, 10); + const output = worker.currentOutput.replaceAll( + currentDate, + "" + ); + expect(output).toContain( + `No compatibility_date was specified. Using today's date: .` + ); + expect(output).toContain( + `❯❯ Add one to your Wrangler configuration file: compatibility_date = "", or` + ); + expect(output).toContain( + `❯❯ Pass it in your terminal: wrangler pages dev [] --compatibility-date=` + ); + + const text = await fetchText(url); + expect(text).toBe("Testing [--compatibility_date]"); }); - const port = await getPort(); - const worker = helper.runLongLived(`${cmd} --port ${port} .`); - const { url } = await worker.waitForReady(); - - const currentDate = new Date().toISOString().substring(0, 10); - const output = worker.currentOutput.replaceAll( - currentDate, - "" - ); - expect(output).toContain( - `No compatibility_date was specified. Using today's date: .` - ); - expect(output).toContain( - `❯❯ Add one to your Wrangler configuration file: compatibility_date = "", or` - ); - expect(output).toContain( - `❯❯ Pass it in your terminal: wrangler pages dev [] --compatibility-date=` - ); - - const text = await fetchText(url); - expect(text).toBe("Testing [--compatibility_date]"); - }); - - it("should warn that [--experimental-local] is no longer required, if specified", async () => { - const helper = new WranglerE2ETestHelper(); - const port = await getPort(); - const worker = helper.runLongLived( - `${cmd} --port ${port} . --experimental-local` - ); - await helper.seed({ - "_worker.js": dedent` + + it("should warn that [--experimental-local] is no longer required, if specified", async () => { + const helper = new WranglerE2ETestHelper(); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} . --experimental-local` + ); + await helper.seed({ + "_worker.js": dedent` export default { fetch(request, env) { return new Response("Testing [--experimental-local]") } }`, + }); + const { url } = await worker.waitForReady(); + const text = await fetchText(url); + expect(text).toBe("Testing [--experimental-local]"); + expect(await worker.currentOutput).toContain( + `--experimental-local is no longer required and will be removed in a future version` + ); + }); + + it("should show [--service] related warnings if specified as arg in the command line", async () => { + const helper = new WranglerE2ETestHelper(); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} . --service STAGING_SERVICE=test-worker@staging` + ); + + await worker.readUntil( + /Support for service binding environments is experimental/ + ); }); - const { url } = await worker.waitForReady(); - const text = await fetchText(url); - expect(text).toBe("Testing [--experimental-local]"); - expect(await worker.currentOutput).toContain( - `--experimental-local is no longer required and will be removed in a future version` - ); - }); - - it("should show [--service] related warnings if specified as arg in the command line", async () => { - const helper = new WranglerE2ETestHelper(); - const port = await getPort(); - const worker = helper.runLongLived( - `${cmd} --port ${port} . 
--service STAGING_SERVICE=test-worker@staging` - ); - - await worker.readUntil( - /Support for service binding environments is experimental/ - ); - }); - - it("should warn if bindings specified as args in the command line are invalid", async () => { - const helper = new WranglerE2ETestHelper(); - const port = await getPort(); - const worker = helper.runLongLived( - `${cmd} . --port ${port} --service test --kv = --do test --d1 = --r2 =` - ); - await worker.waitForReady(); - expect(await worker.currentOutput).toContain( - `Could not parse Service binding: test` - ); - expect(await worker.currentOutput).toContain( - `Could not parse KV binding: =` - ); - expect(await worker.currentOutput).toContain( - `Could not parse Durable Object binding: test` - ); - expect(await worker.currentOutput).toContain( - `Could not parse R2 binding: =` - ); - expect(await worker.currentOutput).toContain( - `Could not parse D1 binding: =` - ); - }); - - it("should use bindings specified as args in the command line", async () => { - const helper = new WranglerE2ETestHelper(); - await helper.seed({ - "_worker.js": dedent` + + it("should warn if bindings specified as args in the command line are invalid", async () => { + const helper = new WranglerE2ETestHelper(); + const worker = helper.runLongLived( + `${cmd} --inspector-port ${inspectorPort} . --port ${port} --service test --kv = --do test --d1 = --r2 =` + ); + await worker.waitForReady(); + expect(await worker.currentOutput).toContain( + `Could not parse Service binding: test` + ); + expect(await worker.currentOutput).toContain( + `Could not parse KV binding: =` + ); + expect(await worker.currentOutput).toContain( + `Could not parse Durable Object binding: test` + ); + expect(await worker.currentOutput).toContain( + `Could not parse R2 binding: =` + ); + expect(await worker.currentOutput).toContain( + `Could not parse D1 binding: =` + ); + }); + + it("should use bindings specified as args in the command line", async () => { + const helper = new WranglerE2ETestHelper(); + await helper.seed({ + "_worker.js": dedent` export default { fetch(request, env) { return new Response("Hello world") } }`, - }); - const port = await getPort(); - const worker = helper.runLongLived( - `${cmd} . --port ${port} --service TEST_SERVICE=test-worker --kv TEST_KV --do TEST_DO=TestDurableObject@a --d1 TEST_D1 --r2 TEST_R2` - ); - await worker.waitForReady(); - expect(normalizeOutput(worker.currentOutput)).toContain( - dedent`Your worker has access to the following bindings: + }); + const worker = helper.runLongLived( + `${cmd} --inspector-port ${inspectorPort} . 
--port ${port} --service TEST_SERVICE=test-worker --kv TEST_KV --do TEST_DO=TestDurableObject@a --d1 TEST_D1 --r2 TEST_R2` + ); + await worker.waitForReady(); + expect(normalizeOutput(worker.currentOutput)).toContain( + dedent`Your worker has access to the following bindings: - Durable Objects: - TEST_DO: TestDurableObject (defined in a [not connected]) - KV Namespaces: @@ -129,176 +131,180 @@ describe.each([{ cmd: "wrangler pages dev" }])("Pages $cmd", ({ cmd }) => { - Services: - TEST_SERVICE: test-worker [not connected] ` - ); - }); + ); + }); - it("should support wrangler.toml", async () => { - const helper = new WranglerE2ETestHelper(); - await helper.seed({ - "public/_worker.js": dedent` + it("should support wrangler.toml", async () => { + const helper = new WranglerE2ETestHelper(); + await helper.seed({ + "public/_worker.js": dedent` export default { async fetch(request, env) { return new Response("Pages supports wrangler.toml ⚡️⚡️") } }`, - "wrangler.toml": dedent` + "wrangler.toml": dedent` name = "pages-project" pages_build_output_dir = "public" compatibility_date = "2023-01-01" `, - }); - const port = await getPort(); - const worker = helper.runLongLived(`${cmd} --port ${port}`); - const { url } = await worker.waitForReady(); + }); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort}` + ); + const { url } = await worker.waitForReady(); - const text = await fetchText(url); - expect(text).toBe("Pages supports wrangler.toml ⚡️⚡️"); - }); + const text = await fetchText(url); + expect(text).toBe("Pages supports wrangler.toml ⚡️⚡️"); + }); - it("should recover from syntax error during dev session (_worker)", async () => { - const helper = new WranglerE2ETestHelper(); - const worker = helper.runLongLived(`${cmd} .`); + it("should recover from syntax error during dev session (_worker)", async () => { + const helper = new WranglerE2ETestHelper(); + const worker = helper.runLongLived(`${cmd} .`); - await helper.seed({ - "_worker.js": dedent` + await helper.seed({ + "_worker.js": dedent` export default { fetch(request, env) { return new Response("Hello World!") } }`, - }); + }); - const { url } = await worker.waitForReady(); + const { url } = await worker.waitForReady(); - await expect(fetch(url).then((r) => r.text())).resolves.toBe( - "Hello World!" - ); + await expect(fetch(url).then((r) => r.text())).resolves.toBe( + "Hello World!" + ); - await helper.seed({ - "_worker.js": dedent` + await helper.seed({ + "_worker.js": dedent` export default { fetch(request, env) { return new Response("Updated Worker!") } // Syntax Error } }`, - }); + }); - await setTimeout(5_000); + await setTimeout(5_000); - await worker.readUntil(/Failed to build/); + await worker.readUntil(/Failed to build/); - // And then make sure Wrangler hasn't crashed - await helper.seed({ - "_worker.js": dedent` + // And then make sure Wrangler hasn't crashed + await helper.seed({ + "_worker.js": dedent` export default { fetch(request, env) { return new Response("Updated Worker!") } }`, - }); - await worker.waitForReload(); + }); + await worker.waitForReload(); - await expect(fetch(url).then((r) => r.text())).resolves.toBe( - "Updated Worker!" - ); - }); + await expect(fetch(url).then((r) => r.text())).resolves.toBe( + "Updated Worker!" 
+ ); + }); - it("should recover from syntax error during dev session (Functions)", async () => { - const helper = new WranglerE2ETestHelper(); - const port = await getPort(); - const worker = helper.runLongLived(`${cmd} --port ${port} .`); + it("should recover from syntax error during dev session (Functions)", async () => { + const helper = new WranglerE2ETestHelper(); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} .` + ); - await helper.seed({ - "functions/_middleware.js": dedent` + await helper.seed({ + "functions/_middleware.js": dedent` export async function onRequest() { return new Response("Hello World!") }`, - }); + }); - const { url } = await worker.waitForReady(); + const { url } = await worker.waitForReady(); - await expect(fetch(url).then((r) => r.text())).resolves.toBe( - "Hello World!" - ); + await expect(fetch(url).then((r) => r.text())).resolves.toBe( + "Hello World!" + ); - await helper.seed({ - "functions/_middleware.js": dedent` + await helper.seed({ + "functions/_middleware.js": dedent` export async function onRequest() { return new Response("Updated Worker!") } // Syntax Error }`, - }); + }); - await setTimeout(5_000); + await setTimeout(5_000); - await worker.readUntil(/Failed to build Functions/); + await worker.readUntil(/Failed to build Functions/); - // And then make sure Wrangler hasn't crashed - await helper.seed({ - "functions/_middleware.js": dedent` + // And then make sure Wrangler hasn't crashed + await helper.seed({ + "functions/_middleware.js": dedent` export async function onRequest() { return new Response("Updated Worker!") }`, - }); - await worker.waitForReload(); + }); + await worker.waitForReload(); - await expect(fetch(url).then((r) => r.text())).resolves.toBe( - "Updated Worker!" - ); - }); + await expect(fetch(url).then((r) => r.text())).resolves.toBe( + "Updated Worker!" 
+ ); + }); - it("should validate _routes.json during dev session, and fallback to default value", async () => { - const helper = new WranglerE2ETestHelper(); - await helper.seed({ - "functions/foo.ts": dedent` + it("should validate _routes.json during dev session, and fallback to default value", async () => { + const helper = new WranglerE2ETestHelper(); + await helper.seed({ + "functions/foo.ts": dedent` export async function onRequest() { return new Response("FOO"); }`, - "_routes.json": dedent` + "_routes.json": dedent` { "version": 1, "include": ["/foo"], "exclude": [] } `, - }); - const port = await getPort(); - const worker = helper.runLongLived(`${cmd} --port ${port} .`); + }); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} .` + ); - const { url } = await worker.waitForReady(); + const { url } = await worker.waitForReady(); - const foo = await fetchText(`${url}/foo`); + const foo = await fetchText(`${url}/foo`); - expect(foo).toBe("FOO"); + expect(foo).toBe("FOO"); - // invalid _routes.json because include rule does not start with `/` - await helper.seed({ - "_routes.json": dedent` + // invalid _routes.json because include rule does not start with `/` + await helper.seed({ + "_routes.json": dedent` { "version": 1, "include": ["foo"], "exclude": [] } `, - }); + }); - await worker.readUntil(/FatalError: Invalid _routes.json file found/); - await worker.readUntil(/All rules must start with '\/'/); - }); + await worker.readUntil(/FatalError: Invalid _routes.json file found/); + await worker.readUntil(/All rules must start with '\/'/); + }); - it("should use top-level configuration specified in `wrangler.toml`", async () => { - const helper = new WranglerE2ETestHelper(); - const port = await getPort(); - const worker = helper.runLongLived(`${cmd} --port ${port}`); - await helper.seed({ - "public/_worker.js": dedent` + it("should use top-level configuration specified in `wrangler.toml`", async () => { + const helper = new WranglerE2ETestHelper(); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort}` + ); + await helper.seed({ + "public/_worker.js": dedent` export default { async fetch(request, env) { return new Response(env.PAGES + " " + "supports wrangler.toml") } }`, - "wrangler.toml": dedent` + "wrangler.toml": dedent` name = "pages-project" pages_build_output_dir = "public" # commenting this out would result in a warning. 
If there is no "compatibility_date" @@ -312,45 +318,45 @@ describe.each([{ cmd: "wrangler pages dev" }])("Pages $cmd", ({ cmd }) => { binding = "KV_BINDING_TOML" id = "KV_ID_TOML" `, - }); - const { url } = await worker.waitForReady(); + }); + const { url } = await worker.waitForReady(); - const text = await fetchText(url); + const text = await fetchText(url); - expect(text).toBe("⚡️ Pages ⚡️ supports wrangler.toml"); - expect(normalizeOutput(worker.currentOutput)).toContain( - dedent`Your worker has access to the following bindings: + expect(text).toBe("⚡️ Pages ⚡️ supports wrangler.toml"); + expect(normalizeOutput(worker.currentOutput)).toContain( + dedent`Your worker has access to the following bindings: - KV Namespaces: - KV_BINDING_TOML: KV_ID_TOML (local) - Vars: - PAGES: "⚡️ Pages ⚡️" ` - ); - }); - - it("should merge (with override) `wrangler.toml` configuration with configuration provided via the command line, with command line args taking precedence", async () => { - const helper = new WranglerE2ETestHelper(); - const port = await getPort(); - - const flags = [ - ` --binding VAR1=NEW_VAR_1 VAR3=VAR_3_ARGS`, - ` --kv KV_BINDING_1_TOML=NEW_KV_ID_1 KV_BINDING_3_ARGS=KV_ID_3_ARGS`, - ` --do DO_BINDING_1_TOML=NEW_DO_1@NEW_DO_SCRIPT_1 DO_BINDING_3_ARGS=DO_3_ARGS@DO_SCRIPT_3_ARGS`, - ` --d1 D1_BINDING_1_TOML=NEW_D1_NAME_1 D1_BINDING_3_ARGS=D1_NAME_3_ARGS`, - ` --r2 R2_BINDING_1_TOML=NEW_R2_BUCKET_1 R2_BINDING_3_TOML=R2_BUCKET_3_ARGS`, - ` --service SERVICE_BINDING_1_TOML=NEW_SERVICE_NAME_1 SERVICE_BINDING_3_TOML=SERVICE_NAME_3_ARGS`, - ` --ai AI_BINDING_2_TOML`, - ` --port ${port}`, - ]; - const worker = helper.runLongLived(`${cmd} ${flags.join("")}`); - await helper.seed({ - "public/_worker.js": dedent` + ); + }); + + it("should merge (with override) `wrangler.toml` configuration with configuration provided via the command line, with command line args taking precedence", async () => { + const helper = new WranglerE2ETestHelper(); + + const flags = [ + ` --binding VAR1=NEW_VAR_1 VAR3=VAR_3_ARGS`, + ` --kv KV_BINDING_1_TOML=NEW_KV_ID_1 KV_BINDING_3_ARGS=KV_ID_3_ARGS`, + ` --do DO_BINDING_1_TOML=NEW_DO_1@NEW_DO_SCRIPT_1 DO_BINDING_3_ARGS=DO_3_ARGS@DO_SCRIPT_3_ARGS`, + ` --d1 D1_BINDING_1_TOML=NEW_D1_NAME_1 D1_BINDING_3_ARGS=D1_NAME_3_ARGS`, + ` --r2 R2_BINDING_1_TOML=NEW_R2_BUCKET_1 R2_BINDING_3_TOML=R2_BUCKET_3_ARGS`, + ` --service SERVICE_BINDING_1_TOML=NEW_SERVICE_NAME_1 SERVICE_BINDING_3_TOML=SERVICE_NAME_3_ARGS`, + ` --ai AI_BINDING_2_TOML`, + ]; + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} ${flags.join("")}` + ); + await helper.seed({ + "public/_worker.js": dedent` export default { async fetch(request, env) { return new Response("Pages supports wrangler.toml ⚡️") } }`, - "wrangler.toml": dedent` + "wrangler.toml": dedent` name = "pages-project" pages_build_output_dir = "public" compatibility_date = "2023-01-01" @@ -417,268 +423,275 @@ describe.each([{ cmd: "wrangler pages dev" }])("Pages $cmd", ({ cmd }) => { [ai] binding = "AI_BINDING_1_TOML" `, - }); - await worker.waitForReady(); + }); + await worker.waitForReady(); - // We only care about the list of bindings and warnings, so strip other output - const [prestartOutput] = normalizeOutput(worker.currentOutput).split( - "⎔ Starting local server..." - ); + // We only care about the list of bindings and warnings, so strip other output + const [prestartOutput] = normalizeOutput(worker.currentOutput).split( + "⎔ Starting local server..." 
+ ); - expect(prestartOutput).toMatchSnapshot(); - }); + expect(prestartOutput).toMatchSnapshot(); + }); - it("should pick up wrangler.toml configuration even in cases when `pages_build_output_dir` was not specified, but the command argument was", async () => { - const helper = new WranglerE2ETestHelper(); + it("should pick up wrangler.toml configuration even in cases when `pages_build_output_dir` was not specified, but the command argument was", async () => { + const helper = new WranglerE2ETestHelper(); - await helper.seed({ - "public/_worker.js": dedent` + await helper.seed({ + "public/_worker.js": dedent` export default { async fetch(request, env) { return new Response(env.PAGES_EMOJI + " Pages supports wrangler.toml" + " " + env.PAGES_EMOJI) } }`, - "wrangler.toml": dedent` + "wrangler.toml": dedent` name = "pages-project" compatibility_date = "2023-01-01" [vars] PAGES_EMOJI = "⚡️" `, - }); + }); - const port = await getPort(); - const worker = helper.runLongLived(`${cmd} --port ${port} public`); - const { url } = await worker.waitForReady(); - await expect(fetchText(url)).resolves.toBe( - "⚡️ Pages supports wrangler.toml ⚡️" - ); - }); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} public` + ); + const { url } = await worker.waitForReady(); + await expect(fetchText(url)).resolves.toBe( + "⚡️ Pages supports wrangler.toml ⚡️" + ); + }); - describe("watch mode", () => { - it("should modify worker during dev session (Functions)", async () => { - const helper = new WranglerE2ETestHelper(); + describe("watch mode", () => { + it("should modify worker during dev session (Functions)", async () => { + const helper = new WranglerE2ETestHelper(); - await helper.seed({ - "functions/_middleware.js": dedent` + await helper.seed({ + "functions/_middleware.js": dedent` export async function onRequest() { return new Response("Hello World!") }`, - }); + }); - const port = await getPort(); - const worker = helper.runLongLived(`wrangler pages dev --port ${port} .`); - const { url } = await worker.waitForReady(); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} .` + ); + const { url } = await worker.waitForReady(); - let text = await fetchText(url); - expect(text).toBe("Hello World!"); + let text = await fetchText(url); + expect(text).toBe("Hello World!"); - await helper.seed({ - "functions/_middleware.js": dedent` + await helper.seed({ + "functions/_middleware.js": dedent` export async function onRequest() { return new Response("Updated Worker!") }`, - }); + }); - await worker.waitForReload(); + await worker.waitForReload(); - text = await fetchText(url); - expect(text).toBe("Updated Worker!"); + text = await fetchText(url); + expect(text).toBe("Updated Worker!"); - // Ensure Wrangler doesn't write tmp files in the functions directory—regression test for https://github.com/cloudflare/workers-sdk/issues/7440 - expect( - existsSync(path.join(helper.tmpPath, "functions/.wrangler")) - ).toBeFalsy(); - }); + // Ensure Wrangler doesn't write tmp files in the functions directory—regression test for https://github.com/cloudflare/workers-sdk/issues/7440 + expect( + existsSync(path.join(helper.tmpPath, "functions/.wrangler")) + ).toBeFalsy(); + }); - it("should support modifying dependencies during dev session (Functions)", async () => { - const helper = new WranglerE2ETestHelper(); + it("should support modifying dependencies during dev session (Functions)", async () => { + const helper = new WranglerE2ETestHelper(); 
- await helper.seed({ - "utils/greetings.js": dedent` + await helper.seed({ + "utils/greetings.js": dedent` export const hello = "Hello World!" export const hi = "Hi there!" `, - "functions/greetings/_middleware.js": dedent` + "functions/greetings/_middleware.js": dedent` import { hello } from "../../utils/greetings" export async function onRequest() { return new Response(hello) }`, - "functions/hi.js": dedent` + "functions/hi.js": dedent` import { hi } from "../utils/greetings" export async function onRequest() { return new Response(hi) }`, - }); + }); - const port = await getPort(); - const worker = helper.runLongLived(`wrangler pages dev --port ${port} .`); - const { url } = await worker.waitForReady(); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} .` + ); + const { url } = await worker.waitForReady(); - let hello = await fetchText(`${url}/greetings/hello`); - expect(hello).toBe("Hello World!"); + let hello = await fetchText(`${url}/greetings/hello`); + expect(hello).toBe("Hello World!"); - let hi = await fetchText(`${url}/hi`); - expect(hi).toBe("Hi there!"); + let hi = await fetchText(`${url}/hi`); + expect(hi).toBe("Hi there!"); - await helper.seed({ - "utils/greetings.js": dedent` + await helper.seed({ + "utils/greetings.js": dedent` export const hello = "Hey World!" export const hi = "Hey there!" `, - }); + }); - await worker.waitForReload(); + await worker.waitForReload(); - hello = await fetchText(`${url}/greetings/hello`); - expect(hello).toBe("Hey World!"); + hello = await fetchText(`${url}/greetings/hello`); + expect(hello).toBe("Hey World!"); - hi = await fetchText(`${url}/hi`); - expect(hi).toBe("Hey there!"); - }); + hi = await fetchText(`${url}/hi`); + expect(hi).toBe("Hey there!"); + }); - it("should support modifying external modules during dev session (Functions)", async () => { - const helper = new WranglerE2ETestHelper(); + it("should support modifying external modules during dev session (Functions)", async () => { + const helper = new WranglerE2ETestHelper(); - await helper.seed({ - "modules/my-html.html": dedent` + await helper.seed({ + "modules/my-html.html": dedent`
Hello HTML World!
`, - "functions/hello.js": dedent` + "functions/hello.js": dedent` import html from "../modules/my-html.html"; export async function onRequest() { return new Response(html); }`, - }); + }); - const port = await getPort(); - const worker = helper.runLongLived(`wrangler pages dev --port ${port} .`); - const { url } = await worker.waitForReady(); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} .` + ); + const { url } = await worker.waitForReady(); - let hello = await fetchText(`${url}/hello`); - expect(hello).toBe("
Hello HTML World!
"); + let hello = await fetchText(`${url}/hello`); + expect(hello).toBe("
Hello HTML World!
"); - await helper.seed({ - "modules/my-html.html": dedent` + await helper.seed({ + "modules/my-html.html": dedent`
Updated HTML!
`, - }); + }); - await worker.waitForReload(); + await worker.waitForReload(); - hello = await fetchText(`${url}/hello`); - expect(hello).toBe("
Updated HTML!
"); - }); + hello = await fetchText(`${url}/hello`); + expect(hello).toBe("
Updated HTML!
"); + }); - it("should modify worker during dev session (_worker)", async () => { - const helper = new WranglerE2ETestHelper(); + it("should modify worker during dev session (_worker)", async () => { + const helper = new WranglerE2ETestHelper(); - await helper.seed({ - "_worker.js": dedent` + await helper.seed({ + "_worker.js": dedent` export default { fetch(request, env) { return new Response("Hello World!") } }`, - }); + }); - const port = await getPort(); - const worker = helper.runLongLived(`wrangler pages dev --port ${port} .`); - const { url } = await worker.waitForReady(); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} .` + ); + const { url } = await worker.waitForReady(); - let hello = await fetchText(url); - expect(hello).toBe("Hello World!"); + let hello = await fetchText(url); + expect(hello).toBe("Hello World!"); - await helper.seed({ - "_worker.js": dedent` + await helper.seed({ + "_worker.js": dedent` export default { fetch(request, env) { return new Response("Updated Worker!") } }`, - }); + }); - await worker.waitForReload(); + await worker.waitForReload(); - hello = await fetchText(url); - expect(hello).toBe("Updated Worker!"); - }); + hello = await fetchText(url); + expect(hello).toBe("Updated Worker!"); + }); - it("should support modifying dependencies during dev session (_worker)", async () => { - const helper = new WranglerE2ETestHelper(); + it("should support modifying dependencies during dev session (_worker)", async () => { + const helper = new WranglerE2ETestHelper(); - await helper.seed({ - "pets/bear.js": dedent` + await helper.seed({ + "pets/bear.js": dedent` export const bear = "BEAR!" `, - "_worker.js": dedent` + "_worker.js": dedent` import { bear } from "./pets/bear" export default { fetch(request, env) { return new Response(bear) } }`, - }); + }); - const port = await getPort(); - const worker = helper.runLongLived(`wrangler pages dev --port ${port} .`); - const { url } = await worker.waitForReady(); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} .` + ); + const { url } = await worker.waitForReady(); - let bear = await fetchText(url); - expect(bear).toBe("BEAR!"); + let bear = await fetchText(url); + expect(bear).toBe("BEAR!"); - await helper.seed({ - "pets/bear.js": dedent` + await helper.seed({ + "pets/bear.js": dedent` export const bear = "We love BEAR!" `, - }); + }); - await worker.waitForReload(); + await worker.waitForReload(); - bear = await fetchText(url); - expect(bear).toBe("We love BEAR!"); - }); + bear = await fetchText(url); + expect(bear).toBe("We love BEAR!"); + }); - it("should support modifying external modules during dev session (_worker)", async () => { - const helper = new WranglerE2ETestHelper(); + it("should support modifying external modules during dev session (_worker)", async () => { + const helper = new WranglerE2ETestHelper(); - await helper.seed({ - "graham.html": dedent` + await helper.seed({ + "graham.html": dedent`
Graham the dog
`, - "_worker.js": dedent` + "_worker.js": dedent` import html from "./graham.html" export default { fetch(request, env) { return new Response(html) } }`, - }); + }); - const port = await getPort(); - const worker = helper.runLongLived(`wrangler pages dev --port ${port} .`); - const { url } = await worker.waitForReady(); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} .` + ); + const { url } = await worker.waitForReady(); - let graham = await fetchText(url); - expect(graham).toBe("
Graham the dog
"); + let graham = await fetchText(url); + expect(graham).toBe("
Graham the dog
"); - await helper.seed({ - "graham.html": dedent` + await helper.seed({ + "graham.html": dedent`
Graham is the bestest doggo
`, - }); + }); - await worker.waitForReload(); + await worker.waitForReload(); - graham = await fetchText(url); - expect(graham).toBe("
Graham is the bestest doggo
"); - }); + graham = await fetchText(url); + expect(graham).toBe("
Graham is the bestest doggo
"); + }); - it("should support modifying _routes.json during dev session", async () => { - const helper = new WranglerE2ETestHelper(); + it("should support modifying _routes.json during dev session", async () => { + const helper = new WranglerE2ETestHelper(); - await helper.seed({ - "_worker.js": dedent` + await helper.seed({ + "_worker.js": dedent` export default { async fetch(request, env) { const url = new URL(request.url); @@ -691,43 +704,45 @@ describe.each([{ cmd: "wrangler pages dev" }])("Pages $cmd", ({ cmd }) => { return new Response("Hello _routes.json") } }`, - "_routes.json": dedent` + "_routes.json": dedent` { "version": 1, "include": ["/foo", "/bar"], "exclude": [] } `, - "index.html": dedent` + "index.html": dedent` hello world `, - }); - const port = await getPort(); - const worker = helper.runLongLived(`wrangler pages dev --port ${port} .`); - const { url } = await worker.waitForReady(); + }); + const worker = helper.runLongLived( + `${cmd} --port ${port} --inspector-port ${inspectorPort} .` + ); + const { url } = await worker.waitForReady(); - const foo = await fetchText(`${url}/foo`); - expect(foo).toBe("foo"); + const foo = await fetchText(`${url}/foo`); + expect(foo).toBe("foo"); - const bar = await fetchText(`${url}/bar`); - expect(bar).toBe("bar"); + const bar = await fetchText(`${url}/bar`); + expect(bar).toBe("bar"); - await helper.seed({ - "_routes.json": dedent` + await helper.seed({ + "_routes.json": dedent` { "version": 1, "include": ["/foo"], "exclude": ["/bar"] } `, - }); - await worker.waitForReload(); + }); + await worker.waitForReload(); - const foo2 = await fetchText(`${url}/foo`); - expect(foo2).toBe("foo"); + const foo2 = await fetchText(`${url}/foo`); + expect(foo2).toBe("foo"); - const bar2 = await fetchText(`${url}/bar`); - expect(bar2).toBe("hello world"); + const bar2 = await fetchText(`${url}/bar`); + expect(bar2).toBe("hello world"); + }); }); - }); -}); + } +); diff --git a/packages/wrangler/e2e/versions.test.ts b/packages/wrangler/e2e/versions.test.ts index 67a460d60ce5..d741f55dd4ce 100644 --- a/packages/wrangler/e2e/versions.test.ts +++ b/packages/wrangler/e2e/versions.test.ts @@ -56,7 +56,7 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { it("should upload 1st Worker version", async () => { const upload = await helper.run( - `wrangler versions upload --message "Upload via e2e test" --tag "e2e-upload" --x-versions` + `wrangler versions upload --message "Upload via e2e test" --tag "e2e-upload"` ); versionId1 = matchVersionId(upload.stdout); @@ -74,7 +74,7 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { }); it("should list 1 version", async () => { - const list = await helper.run(`wrangler versions list --x-versions`); + const list = await helper.run(`wrangler versions list`); expect(normalize(list.stdout)).toMatchInlineSnapshot(` "Version ID: 00000000-0000-0000-0000-000000000000 @@ -97,7 +97,7 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { it("should deploy 1st Worker version", async () => { const deploy = await helper.run( - `wrangler versions deploy ${versionId1}@100% --message "Deploy via e2e test" --yes --x-versions` + `wrangler versions deploy ${versionId1}@100% --message "Deploy via e2e test" --yes` ); expect(normalize(deploy.stdout)).toMatchInlineSnapshot(` @@ -137,7 +137,7 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { }); it("should list 1 deployment", async () => { - const list = await helper.run(`wrangler deployments list --x-versions`); + const list = await helper.run(`wrangler 
deployments list`); expect(normalize(list.stdout)).toMatchInlineSnapshot(` "Created: TIMESTAMP @@ -172,7 +172,7 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { }); const upload = await helper.run( - `wrangler versions upload --message "Upload AGAIN via e2e test" --tag "e2e-upload-AGAIN" --x-versions` + `wrangler versions upload --message "Upload AGAIN via e2e test" --tag "e2e-upload-AGAIN"` ); versionId2 = matchVersionId(upload.stdout); @@ -188,9 +188,7 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { Changes to triggers (routes, custom domains, cron schedules, etc) must be applied with the command wrangler triggers deploy" `); - const versionsList = await helper.run( - `wrangler versions list --x-versions` - ); + const versionsList = await helper.run(`wrangler versions list`); expect(normalize(versionsList.stdout)).toMatchInlineSnapshot(` "Version ID: 00000000-0000-0000-0000-000000000000 @@ -219,12 +217,10 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { it("should deploy 2nd Worker version", async () => { const deploy = await helper.run( - `wrangler versions deploy ${versionId2}@100% --message "Deploy AGAIN via e2e test" --yes --x-versions` + `wrangler versions deploy ${versionId2}@100% --message "Deploy AGAIN via e2e test" --yes` ); - const deploymentsList = await helper.run( - `wrangler deployments list --x-versions` - ); + const deploymentsList = await helper.run(`wrangler deployments list`); expect(normalize(deploy.stdout)).toMatchInlineSnapshot(` "╭ Deploy Worker Versions by splitting traffic between multiple versions @@ -296,16 +292,12 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { it("should rollback to implicit Worker version (1st version)", async () => { const rollback = await helper.run( - `wrangler rollback --message "Rollback via e2e test" --yes --x-versions` + `wrangler rollback --message "Rollback via e2e test" --yes` ); - const versionsList = await helper.run( - `wrangler versions list --x-versions` - ); + const versionsList = await helper.run(`wrangler versions list`); - const deploymentsList = await helper.run( - `wrangler deployments list --x-versions` - ); + const deploymentsList = await helper.run(`wrangler deployments list`); expect(normalize(rollback.stdout)).toMatchInlineSnapshot(` "├ Fetching latest deployment @@ -410,16 +402,12 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { it("should rollback to specific Worker version (0th version)", async () => { const rollback = await helper.run( - `wrangler rollback ${versionId0} --message "Rollback to old version" --yes --x-versions` + `wrangler rollback ${versionId0} --message "Rollback to old version" --yes` ); - const versionsList = await helper.run( - `wrangler versions list --x-versions` - ); + const versionsList = await helper.run(`wrangler versions list`); - const deploymentsList = await helper.run( - `wrangler deployments list --x-versions` - ); + const deploymentsList = await helper.run(`wrangler deployments list`); expect(normalize(rollback.stdout)).toMatchInlineSnapshot(` "├ Fetching latest deployment @@ -551,7 +539,7 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { }); const upload = await helper.run( - `wrangler versions upload --legacy-assets='./public' --x-versions` + `wrangler versions upload --legacy-assets='./public'` ); expect(normalize(upload.output)).toMatchInlineSnapshot(` @@ -586,7 +574,7 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { `, }); - const upload = await helper.run(`wrangler versions upload 
--x-versions`); + const upload = await helper.run(`wrangler versions upload`); expect(normalize(upload.output)).toMatchInlineSnapshot(` "X [ERROR] Workers Sites does not support uploading versions through \`wrangler versions upload\`. You must use \`wrangler deploy\` instead. @@ -614,7 +602,7 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { }); const upload = await helper.run( - `wrangler versions upload --message "Upload via e2e test" --tag "e2e-upload-assets" --x-versions` + `wrangler versions upload --message "Upload via e2e test" --tag "e2e-upload-assets"` ); expect(normalize(upload.stdout)).toMatchInlineSnapshot(` @@ -638,7 +626,7 @@ describe("versions deploy", { timeout: TIMEOUT }, () => { it("should include version preview url in output file", async () => { const outputFile = path.join(helper.tmpPath, "output.jsonnd"); const upload = await helper.run( - `wrangler versions upload --message "Upload via e2e test" --tag "e2e-upload" --x-versions`, + `wrangler versions upload --message "Upload via e2e test" --tag "e2e-upload"`, { env: { ...process.env, diff --git a/packages/wrangler/package.json b/packages/wrangler/package.json index faba4178544b..59b00a16dcb9 100644 --- a/packages/wrangler/package.json +++ b/packages/wrangler/package.json @@ -1,6 +1,6 @@ { "name": "wrangler", - "version": "3.99.0", + "version": "3.100.0", "description": "Command-line interface for all things Cloudflare Workers", "keywords": [ "wrangler", @@ -61,10 +61,10 @@ "generate-json-schema": "pnpm exec ts-json-schema-generator --no-type-check --path src/config/config.ts --type RawConfig --out config-schema.json", "prepublishOnly": "SOURCEMAPS=false pnpm run -w build", "start": "pnpm run bundle && cross-env NODE_OPTIONS=--enable-source-maps ./bin/wrangler.js", - "test": "pnpm run assert-git-version && vitest", + "test": "dotenv -- pnpm run assert-git-version && dotenv -- vitest", "test:ci": "pnpm run test run", "test:debug": "pnpm run test --silent=false --verbose=true", - "test:e2e": "vitest -c ./e2e/vitest.config.mts", + "test:e2e": "dotenv -- vitest -c ./e2e/vitest.config.mts", "test:watch": "pnpm run test --testTimeout=50000 --watch", "type:tests": "tsc -p ./src/__tests__/tsconfig.json && tsc -p ./e2e/tsconfig.json" }, @@ -85,7 +85,7 @@ "selfsigned": "^2.0.1", "source-map": "^0.6.1", "unenv": "npm:unenv-nightly@2.0.0-20241218-183400-5d6aec3", - "workerd": "1.20241218.0", + "workerd": "1.20241230.0", "xxhash-wasm": "^1.0.1" }, "devDependencies": { @@ -95,7 +95,7 @@ "@cloudflare/types": "6.18.4", "@cloudflare/workers-shared": "workspace:*", "@cloudflare/workers-tsconfig": "workspace:*", - "@cloudflare/workers-types": "^4.20241218.0", + "@cloudflare/workers-types": "^4.20241230.0", "@cspotcode/source-map-support": "0.8.1", "@iarna/toml": "^3.0.0", "@microsoft/api-extractor": "^7.47.0", @@ -164,7 +164,7 @@ "yargs": "^17.7.2" }, "peerDependencies": { - "@cloudflare/workers-types": "^4.20241218.0" + "@cloudflare/workers-types": "^4.20241230.0" }, "peerDependenciesMeta": { "@cloudflare/workers-types": { diff --git a/packages/wrangler/src/__tests__/configuration.pages.test.ts b/packages/wrangler/src/__tests__/config/configuration.pages.test.ts similarity index 98% rename from packages/wrangler/src/__tests__/configuration.pages.test.ts rename to packages/wrangler/src/__tests__/config/configuration.pages.test.ts index 9b7ae8155208..42063bf5954b 100644 --- a/packages/wrangler/src/__tests__/configuration.pages.test.ts +++ b/packages/wrangler/src/__tests__/config/configuration.pages.test.ts @@ -1,10 +1,10 @@ 
import path from "node:path"; -import { normalizeAndValidateConfig } from "../config/validation"; +import { normalizeAndValidateConfig } from "../../config/validation"; import { generateRawConfigForPages, generateRawEnvConfigForPages, -} from "./helpers/generate-wrangler-config"; -import type { RawConfig, RawEnvironment } from "../config"; +} from "../helpers/generate-wrangler-config"; +import type { RawConfig, RawEnvironment } from "../../config"; describe("normalizeAndValidateConfig()", () => { describe("Pages configuration", () => { @@ -30,6 +30,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( pagesRawConfig, undefined, + undefined, { env: undefined, } @@ -130,6 +131,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( pagesRawConfig, undefined, + undefined, { env: "preview", } @@ -230,6 +232,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( pagesRawConfig, undefined, + undefined, { env: "production", } @@ -349,6 +352,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( pagesRawConfig, undefined, + undefined, { env: "unsupported-env-name", } @@ -457,6 +461,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( pagesRawConfig, undefined, + undefined, { env: "preview", } @@ -565,6 +570,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( pagesRawConfig, undefined, + undefined, { env: "production", } @@ -671,6 +677,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( pagesRawConfig, undefined, + undefined, { env: "unsupported-env-name", } @@ -780,6 +787,7 @@ describe("normalizeAndValidateConfig()", () => { ], }, undefined, + undefined, { env: undefined } ); @@ -800,6 +808,7 @@ describe("normalizeAndValidateConfig()", () => { }, }, undefined, + undefined, { env: "preview", } diff --git a/packages/wrangler/src/__tests__/configuration.test.ts b/packages/wrangler/src/__tests__/config/configuration.test.ts similarity index 97% rename from packages/wrangler/src/__tests__/configuration.test.ts rename to packages/wrangler/src/__tests__/config/configuration.test.ts index 5083e0a84602..f7462f378852 100644 --- a/packages/wrangler/src/__tests__/configuration.test.ts +++ b/packages/wrangler/src/__tests__/config/configuration.test.ts @@ -1,17 +1,17 @@ import * as fs from "fs"; import path from "node:path"; -import { experimental_readRawConfig, readConfig } from "../config"; -import { normalizeAndValidateConfig } from "../config/validation"; -import { run } from "../experimental-flags"; -import { normalizeString } from "./helpers/normalize"; -import { runInTempDir } from "./helpers/run-in-tmp"; -import { writeWranglerConfig } from "./helpers/write-wrangler-config"; +import { experimental_readRawConfig, readConfig } from "../../config"; +import { normalizeAndValidateConfig } from "../../config/validation"; +import { run } from "../../experimental-flags"; +import { normalizeString } from "../helpers/normalize"; +import { runInTempDir } from "../helpers/run-in-tmp"; +import { writeWranglerConfig } from "../helpers/write-wrangler-config"; import type { ConfigFields, RawConfig, RawDevConfig, RawEnvironment, -} from "../config"; +} from "../../config"; describe("readConfig()", () => { 
runInTempDir(); @@ -49,9 +49,14 @@ describe("readConfig()", () => { describe("normalizeAndValidateConfig()", () => { it("should use defaults for empty configuration", () => { - const { config, diagnostics } = normalizeAndValidateConfig({}, undefined, { - env: undefined, - }); + const { config, diagnostics } = normalizeAndValidateConfig( + {}, + undefined, + undefined, + { + env: undefined, + } + ); expect(config).toEqual({ account_id: undefined, @@ -157,6 +162,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: undefined } ); @@ -181,6 +187,8 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); @@ -210,6 +218,8 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); @@ -231,6 +241,8 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); @@ -251,6 +263,8 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); @@ -272,6 +286,8 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); @@ -294,6 +310,8 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, + { env: undefined } ); @@ -307,9 +325,14 @@ describe("normalizeAndValidateConfig()", () => { compatibility_date: "2024–10–01", // en-dash }; - const result = normalizeAndValidateConfig(expectedConfig, undefined, { - env: undefined, - }); + const result = normalizeAndValidateConfig( + expectedConfig, + undefined, + undefined, + { + env: undefined, + } + ); expect(result.config).toEqual(expect.objectContaining(expectedConfig)); expect(result.diagnostics.hasWarnings()).toBe(false); @@ -327,9 +350,14 @@ describe("normalizeAndValidateConfig()", () => { compatibility_date: "2024—10—01", // em-dash }; - const result = normalizeAndValidateConfig(expectedConfig, undefined, { - env: undefined, - }); + const result = normalizeAndValidateConfig( + expectedConfig, + undefined, + undefined, + { + env: undefined, + } + ); expect(result.config).toEqual(expect.objectContaining(expectedConfig)); expect(result.diagnostics.hasWarnings()).toBe(false); @@ -350,6 +378,8 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, + { env: undefined } ); @@ -368,9 +398,14 @@ describe("normalizeAndValidateConfig()", () => { compatibility_date: "2024—100—01", // invalid date + em-dash }; - const result = normalizeAndValidateConfig(expectedConfig, undefined, { - env: undefined, - }); + const result = normalizeAndValidateConfig( + expectedConfig, + undefined, + undefined, + { + env: undefined, + } + ); expect(result.config).toEqual(expect.objectContaining(expectedConfig)); expect(result.diagnostics.hasWarnings()).toBe(false); @@ -399,6 +434,8 @@ 
describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, + { env: undefined } ); @@ -430,6 +467,8 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, + { env: undefined } ); @@ -466,6 +505,8 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); @@ -500,6 +541,8 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); @@ -533,6 +576,8 @@ describe("normalizeAndValidateConfig()", () => { alias: "some silly string", } as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); @@ -553,6 +598,8 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); @@ -573,6 +620,8 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); @@ -594,6 +643,8 @@ describe("normalizeAndValidateConfig()", () => { legacy_assets: "path/to/assets", } as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); @@ -622,6 +673,8 @@ describe("normalizeAndValidateConfig()", () => { legacy_assets: 123, } as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); expect(config.legacy_assets).toBeUndefined(); @@ -651,6 +704,8 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, + { env: undefined } ); @@ -685,6 +740,8 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, undefined, + undefined, + { env: undefined } ); @@ -718,6 +775,8 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, "project/wrangler.toml", + "project/wrangler.toml", + { env: undefined } ); @@ -746,6 +805,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, "project/wrangler.toml", + "project/wrangler.toml", { env: undefined } ); @@ -778,6 +838,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, "project/wrangler.toml", + "project/wrangler.toml", { env: undefined } ); @@ -805,6 +866,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, "project/wrangler.toml", + "project/wrangler.toml", { env: undefined } ); @@ -831,6 +893,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, "project/wrangler.toml", + "project/wrangler.toml", { env: undefined } ); @@ -858,6 +921,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, "project/wrangler.toml", + "project/wrangler.toml", { env: undefined } ); @@ -884,6 +948,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, "project/wrangler.toml", + 
"project/wrangler.toml", { env: undefined } ); @@ -908,6 +973,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, "project/wrangler.toml", + "project/wrangler.toml", { env: undefined } ); @@ -931,6 +997,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: undefined } ); @@ -968,6 +1035,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, "project/wrangler.toml", + "project/wrangler.toml", { env: undefined } ); @@ -1122,12 +1190,17 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, "wrangler.toml", + "wrangler.toml", { env: undefined } ); // eslint-disable-next-line @typescript-eslint/no-non-null-assertion expect({ ...config, tsconfig: normalizePath(config.tsconfig!) }).toEqual( - expect.objectContaining({ ...expectedConfig, main: resolvedMain }) + expect.objectContaining({ + ...expectedConfig, + main: resolvedMain, + topLevelName: expectedConfig.name, + }) ); expect(diagnostics.hasErrors()).toBe(false); expect(diagnostics.renderWarnings()).toMatchInlineSnapshot(` @@ -1206,6 +1279,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: undefined } ); @@ -1287,6 +1361,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: undefined } ); @@ -1307,6 +1382,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: undefined } ); @@ -1323,6 +1399,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: undefined } ); @@ -1342,6 +1419,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: undefined } ); @@ -1360,6 +1438,7 @@ describe("normalizeAndValidateConfig()", () => { main: "index.js", }, undefined, + undefined, { env: undefined, "dispatch-namespace": "test-namespace" } ); expect(diagnostics.hasWarnings()).toBe(false); @@ -1383,6 +1462,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, path.resolve("project/wrangler.toml"), + path.resolve("project/wrangler.toml"), { env: undefined } ); @@ -1423,6 +1503,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: undefined } ); @@ -1448,6 +1529,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, "project/wrangler.toml", + "project/wrangler.toml", { env: undefined } ); @@ -1473,6 +1555,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, "project/wrangler.toml", + "project/wrangler.toml", { env: undefined } ); @@ -1510,6 +1593,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, 
"project/wrangler.toml", + "project/wrangler.toml", { env: undefined } ); @@ -1543,6 +1627,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { durable_objects: [] } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1557,6 +1642,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { durable_objects: "BAD" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1571,6 +1657,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { durable_objects: 999 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1585,6 +1672,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { durable_objects: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1599,6 +1687,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { durable_objects: {} } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1613,6 +1702,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { durable_objects: { bindings: {} } } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1627,6 +1717,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { durable_objects: { bindings: "BAD" } } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1641,6 +1732,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { durable_objects: { bindings: 999 } } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1655,6 +1747,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { durable_objects: { bindings: null } } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1698,6 +1791,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1763,6 +1857,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: undefined } ); @@ -1792,6 +1887,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1835,6 +1931,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1863,6 +1960,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: undefined } ); @@ -1882,6 +1980,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1909,6 +2008,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1937,6 +2037,7 @@ 
describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: undefined } ); @@ -1957,6 +2058,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1974,6 +2076,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { browser: [] } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -1988,6 +2091,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { browser: "BAD" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2002,6 +2106,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { browser: 999 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2016,6 +2121,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { browser: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2033,6 +2139,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { vectorize: {} } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2058,6 +2165,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2076,6 +2184,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { vectorize: "BAD" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2090,6 +2199,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { vectorize: 999 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2104,6 +2214,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { vectorize: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2121,6 +2232,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { ai: [] } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2135,6 +2247,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { ai: "BAD" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2149,6 +2262,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { ai: 999 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2163,6 +2277,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { ai: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2180,6 +2295,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { version_metadata: [] } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2194,6 +2310,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { version_metadata: "BAD" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2208,6 +2325,7 @@ 
describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { version_metadata: 999 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2222,6 +2340,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { version_metadata: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2238,6 +2357,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { cloudchamber: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2252,6 +2372,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { cloudchamber: [] } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2266,6 +2387,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { cloudchamber: "test" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2280,6 +2402,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { cloudchamber: 22 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2302,6 +2425,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2322,6 +2446,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { kv_namespaces: {} } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2336,6 +2461,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { kv_namespaces: "BAD" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2350,6 +2476,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { kv_namespaces: 999 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2364,6 +2491,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { kv_namespaces: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2390,6 +2518,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2419,6 +2548,7 @@ describe("normalizeAndValidateConfig()", () => { kv_namespaces: [{ binding: "VALID" }], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ) ); @@ -2447,6 +2577,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2466,6 +2597,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { d1_databases: {} } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2480,6 +2612,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { d1_databases: "BAD" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2494,6 +2627,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { d1_databases: 999 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2508,6 +2642,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { 
d1_databases: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2534,6 +2669,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); expect(diagnostics.renderWarnings()).toMatchInlineSnapshot(` @@ -2568,6 +2704,7 @@ describe("normalizeAndValidateConfig()", () => { d1_databases: [{ binding: "VALID" }], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ) ); @@ -2582,6 +2719,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { hyperdrive: {} } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2596,6 +2734,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { hyperdrive: "BAD" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2610,6 +2749,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { hyperdrive: 999 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2624,6 +2764,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { hyperdrive: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2642,6 +2783,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2658,6 +2800,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); expect(diagnostics.renderWarnings()).toMatchInlineSnapshot(` @@ -2680,6 +2823,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( { queues: [] } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2707,6 +2851,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2753,6 +2898,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2786,6 +2932,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { r2_buckets: {} } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2800,6 +2947,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { r2_buckets: "BAD" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2814,6 +2962,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { r2_buckets: 999 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2828,6 +2977,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { r2_buckets: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2854,6 +3004,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2883,6 +3034,7 @@ describe("normalizeAndValidateConfig()", () => { d1_databases: [{ binding: "VALID" }], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ) ); @@ -2897,6 +3049,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { services: {} } as unknown as 
RawConfig, undefined, + undefined, { env: undefined } ); @@ -2916,6 +3069,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { services: "BAD" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2935,6 +3089,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { services: 999 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -2954,6 +3109,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { services: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3002,6 +3158,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3038,6 +3195,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { analytics_engine_datasets: {} } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3052,6 +3210,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { analytics_engine_datasets: "BAD" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3066,6 +3225,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { analytics_engine_datasets: 999 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3080,6 +3240,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { analytics_engine_datasets: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3104,6 +3265,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3126,6 +3288,7 @@ describe("normalizeAndValidateConfig()", () => { dispatch_namespaces: "just a string", } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3167,6 +3330,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); expect(diagnostics.hasWarnings()).toBe(false); @@ -3265,6 +3429,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); expect(diagnostics.hasWarnings()).toBe(false); @@ -3299,6 +3464,7 @@ describe("normalizeAndValidateConfig()", () => { mtls_certificates: "just a string", } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3341,6 +3507,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3373,6 +3540,7 @@ describe("normalizeAndValidateConfig()", () => { // @ts-expect-error purposely using an invalid value { pipelines: {} }, undefined, + undefined, { env: undefined } ); @@ -3388,6 +3556,7 @@ describe("normalizeAndValidateConfig()", () => { // @ts-expect-error purposely using an invalid value { pipelines: "BAD" }, undefined, + undefined, { env: undefined } ); @@ -3403,6 +3572,7 @@ describe("normalizeAndValidateConfig()", () => { // @ts-expect-error purposely using an invalid value { pipelines: 999 }, undefined, + undefined, { env: undefined } ); @@ -3418,6 +3588,7 @@ describe("normalizeAndValidateConfig()", () => { // @ts-expect-error purposely using an invalid value { pipelines: null }, undefined, + undefined, { env: 
undefined } ); @@ -3439,6 +3610,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3458,6 +3630,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); expect(diagnostics.renderWarnings()).toMatchInlineSnapshot(` @@ -3480,6 +3653,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: [] } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3497,6 +3671,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: "BAD" } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3514,6 +3689,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: 999 } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3531,6 +3707,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: null } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3548,6 +3725,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: {} } satisfies RawConfig, undefined, + undefined, { env: undefined } ); @@ -3569,6 +3747,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3587,6 +3766,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: { bindings: {} } } as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3604,6 +3784,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: { bindings: "BAD" } } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3621,6 +3802,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: { bindings: 999 } } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3638,6 +3820,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: { bindings: null } } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3668,6 +3851,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3695,6 +3879,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: { metadata: [] } } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3712,6 +3897,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: { metadata: "BAD" } } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3729,6 +3915,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: { metadata: 999 } } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3746,6 +3933,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { unsafe: { metadata: null } } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3765,6 +3953,7 @@ describe("normalizeAndValidateConfig()", () => 
{ const { diagnostics } = normalizeAndValidateConfig( { unsafe: { bindings: [] } } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -3778,6 +3967,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { placement: { mode: "off", hint: "wnam" } }, undefined, + undefined, { env: undefined } ); @@ -3791,6 +3981,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { placement: { mode: "smart", hint: "wnam" } }, undefined, + undefined, { env: undefined } ); @@ -3814,6 +4005,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: undefined } ); @@ -3842,6 +4034,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: undefined } ); @@ -3862,6 +4055,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics, config } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: "dev", } @@ -3891,9 +4085,14 @@ describe("normalizeAndValidateConfig()", () => { it("should error if we specify an environment that does not match the named environments", () => { const rawConfig: RawConfig = { env: { ENV1: {} } }; - const { diagnostics } = normalizeAndValidateConfig(rawConfig, undefined, { - env: "DEV", - }); + const { diagnostics } = normalizeAndValidateConfig( + rawConfig, + undefined, + undefined, + { + env: "DEV", + } + ); expect(diagnostics.renderErrors()).toMatchInlineSnapshot(` "Processing wrangler configuration: - No environment found in configuration with name \\"DEV\\". @@ -3948,6 +4147,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( { ...rawConfig, env: { dev: {} } }, undefined, + undefined, { env: "dev" } ); @@ -3956,6 +4156,7 @@ describe("normalizeAndValidateConfig()", () => { ...rawConfig, main: resolvedMain, name: "mock-name-dev", + topLevelName: "mock-name", }) ); expect(diagnostics.hasErrors()).toBe(false); @@ -4028,11 +4229,16 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: "ENV1" } ); expect(config).toEqual( - expect.objectContaining({ ...rawEnv, main: resolvedMain }) + expect.objectContaining({ + ...rawEnv, + main: resolvedMain, + topLevelName: "mock-name", + }) ); expect(diagnostics.hasErrors()).toBe(false); expect(diagnostics.hasWarnings()).toBe(false); @@ -4049,10 +4255,12 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: "DEV" } ); expect(config.name).toEqual("mock-name"); + expect(config.topLevelName).toEqual("mock-name"); expect(diagnostics.hasErrors()).toBe(false); expect(diagnostics.hasWarnings()).toBe(true); expect(diagnostics.renderWarnings()).toMatchInlineSnapshot(` @@ -4074,6 +4282,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: "DEV" } ); @@ -4107,6 +4316,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: "DEV" } ); @@ -4139,6 +4349,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, 
undefined, + undefined, { env: "DEV" } ); @@ -4171,6 +4382,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: "DEV" } ); @@ -4225,6 +4437,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4299,6 +4512,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( { env: { ENV1: expectedConfig } }, undefined, + undefined, { env: "ENV1" } ); @@ -4347,6 +4561,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: undefined } ); @@ -4363,6 +4578,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: undefined } ); @@ -4395,6 +4611,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: undefined } ); @@ -4429,6 +4646,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4454,6 +4672,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4489,6 +4708,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4538,6 +4758,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4563,6 +4784,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { durable_objects: [] } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4579,6 +4801,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { durable_objects: "BAD" } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4595,6 +4818,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { durable_objects: 999 } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4611,6 +4835,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { durable_objects: null } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4627,6 +4852,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { durable_objects: {} } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4645,6 +4871,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { durable_objects: { bindings: {} } } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4663,6 +4890,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { durable_objects: { bindings: "BAD" } } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4681,6 +4909,7 @@ 
describe("normalizeAndValidateConfig()", () => { env: { ENV1: { durable_objects: { bindings: 999 } } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4699,6 +4928,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { durable_objects: { bindings: null } } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4732,6 +4962,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4782,6 +5013,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( { env: { ENV1: expectedConfig } }, undefined, + undefined, { env: "ENV1" } ); @@ -4811,6 +5043,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( { env: { ENV1: expectedConfig as unknown as RawConfig } }, undefined, + undefined, { env: "ENV1" } ); @@ -4856,6 +5089,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( { env: { ENV1: expectedConfig as unknown as RawConfig } }, undefined, + undefined, { env: "ENV1" } ); @@ -4881,6 +5115,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { kv_namespaces: {} } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4897,6 +5132,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { kv_namespaces: "BAD" } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4913,6 +5149,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { kv_namespaces: 999 } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4929,6 +5166,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { kv_namespaces: null } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4960,6 +5198,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4983,6 +5222,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { r2_buckets: {} } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -4999,6 +5239,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { r2_buckets: "BAD" } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5015,6 +5256,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { r2_buckets: 999 } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5031,6 +5273,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { r2_buckets: null } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5062,6 +5305,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5087,6 +5331,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { analytics_engine_datasets: {} } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5105,6 +5350,7 @@ 
describe("normalizeAndValidateConfig()", () => { env: { ENV1: { analytics_engine_datasets: "BAD" } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5123,6 +5369,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { analytics_engine_datasets: 999 } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5141,6 +5388,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { analytics_engine_datasets: null } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5171,6 +5419,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5193,6 +5442,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { unsafe: [] } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5214,6 +5464,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { unsafe: "BAD" } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5235,6 +5486,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { unsafe: 999 } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5256,6 +5508,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { unsafe: null } } } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5277,6 +5530,7 @@ describe("normalizeAndValidateConfig()", () => { const { diagnostics } = normalizeAndValidateConfig( { env: { ENV1: { unsafe: {} } } } as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5298,6 +5552,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { unsafe: { bindings: [] } } }, } as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5316,6 +5571,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { unsafe: { invalid: true } } }, } as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5338,6 +5594,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { unsafe: { bindings: {} } } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5361,6 +5618,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { unsafe: { bindings: "BAD" } } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5384,6 +5642,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { unsafe: { bindings: 999 } } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5407,6 +5666,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { unsafe: { bindings: null } } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5445,6 +5705,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5478,6 +5739,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { unsafe: { metadata: [] } } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5501,6 +5763,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { unsafe: { metadata: "BAD" } } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5524,6 +5787,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { unsafe: { metadata: 999 } } 
}, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5547,6 +5811,7 @@ describe("normalizeAndValidateConfig()", () => { env: { ENV1: { unsafe: { metadata: null } } }, } as unknown as RawConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5572,6 +5837,7 @@ describe("normalizeAndValidateConfig()", () => { tail_consumers: "this sure isn't an array", } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -5604,6 +5870,7 @@ describe("normalizeAndValidateConfig()", () => { ], } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -5630,6 +5897,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -5656,6 +5924,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -5682,6 +5951,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -5705,6 +5975,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -5725,6 +5996,7 @@ describe("normalizeAndValidateConfig()", () => { }, } satisfies RawConfig, undefined, + undefined, { env: undefined } ); @@ -5746,6 +6018,7 @@ describe("normalizeAndValidateConfig()", () => { }, } as unknown as RawConfig, undefined, + undefined, { env: undefined } ); @@ -5779,6 +6052,7 @@ describe("normalizeAndValidateConfig()", () => { }, }, undefined, + undefined, { env: "ENV1" } ); @@ -5835,6 +6109,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5883,6 +6158,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5928,6 +6204,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: "ENV1" } ); @@ -5968,6 +6245,7 @@ describe("normalizeAndValidateConfig()", () => { const { config, diagnostics } = normalizeAndValidateConfig( expectedConfig, undefined, + undefined, { env: "ENV1" } ); @@ -6018,17 +6296,27 @@ describe("normalizeAndValidateConfig()", () => { }, }; - const result1 = normalizeAndValidateConfig(expectedConfig, undefined, { - env: "ENV1", - }); + const result1 = normalizeAndValidateConfig( + expectedConfig, + undefined, + undefined, + { + env: "ENV1", + } + ); expect(result1.config).toEqual(expect.objectContaining(environment1)); expect(result1.diagnostics.hasErrors()).toBe(false); expect(result1.diagnostics.hasWarnings()).toBe(false); - const result2 = normalizeAndValidateConfig(expectedConfig, undefined, { - env: "ENV2", - }); + const result2 = normalizeAndValidateConfig( + expectedConfig, + undefined, + undefined, + { + env: "ENV2", + } + ); expect(result2.config).toEqual(expect.objectContaining(environment2)); expect(result2.diagnostics.hasErrors()).toBe(false); diff --git a/packages/wrangler/src/__tests__/config/findWranglerConfig.test.ts b/packages/wrangler/src/__tests__/config/findWranglerConfig.test.ts new file mode 100644 index 000000000000..792b5c3d3585 --- /dev/null +++ b/packages/wrangler/src/__tests__/config/findWranglerConfig.test.ts @@ -0,0 +1,278 @@ +import path from "node:path"; +import { findWranglerConfig } from 
"../../config/config-helpers"; +import { mockConsoleMethods } from "../helpers/mock-console"; +import { normalizeString } from "../helpers/normalize"; +import { runInTempDir } from "../helpers/run-in-tmp"; +import { seed } from "../helpers/seed"; + +describe("config findWranglerConfig()", () => { + runInTempDir(); + const std = mockConsoleMethods(); + const NO_LOGS = { debug: "", err: "", info: "", out: "", warn: "" }; + + describe("(useRedirectIfAvailable: false)", () => { + it.each(["toml", "json", "jsonc"])( + "should find the nearest wrangler.%s to the reference directory", + async (ext) => { + await seed({ + [`wrangler.${ext}`]: "DUMMY", + [`foo/wrangler.${ext}`]: "DUMMY", + [`foo/bar/wrangler.${ext}`]: "DUMMY", + [`foo/bar/qux/holder.txt`]: "DUMMY", + }); + expect(findWranglerConfig(".")).toEqual({ + configPath: path.resolve(`wrangler.${ext}`), + userConfigPath: path.resolve(`wrangler.${ext}`), + }); + expect(findWranglerConfig("./foo")).toEqual({ + configPath: path.resolve(`foo/wrangler.${ext}`), + userConfigPath: path.resolve(`foo/wrangler.${ext}`), + }); + expect(findWranglerConfig("./foo/bar")).toEqual({ + configPath: path.resolve(`foo/bar/wrangler.${ext}`), + userConfigPath: path.resolve(`foo/bar/wrangler.${ext}`), + }); + expect(findWranglerConfig("./foo/bar/qux")).toEqual({ + configPath: path.resolve(`foo/bar/wrangler.${ext}`), + userConfigPath: path.resolve(`foo/bar/wrangler.${ext}`), + }); + expect(std).toEqual(NO_LOGS); + } + ); + + describe.each([ + ["json", "jsonc"], + ["json", "toml"], + ["jsonc", "toml"], + ])("should prefer the wrangler.%s over wrangler.%s", (ext1, ext2) => { + it("in the same directory", async () => { + await seed({ + [`wrangler.${ext1}`]: "DUMMY", + [`wrangler.${ext2}`]: "DUMMY", + }); + expect(findWranglerConfig(".")).toEqual({ + configPath: path.resolve(`wrangler.${ext1}`), + userConfigPath: path.resolve(`wrangler.${ext1}`), + }); + expect(std).toEqual(NO_LOGS); + }); + + it("in different directories", async () => { + await seed({ + [`wrangler.${ext1}`]: "DUMMY", + [`foo/wrangler.${ext2}`]: "DUMMY", + }); + expect(findWranglerConfig("./foo")).toEqual({ + configPath: path.resolve(`wrangler.${ext1}`), + userConfigPath: path.resolve(`wrangler.${ext1}`), + }); + expect(std).toEqual(NO_LOGS); + }); + }); + + it("should return user config path even if a deploy config is found", async () => { + await seed({ + [`wrangler.toml`]: "DUMMY", + [".wrangler/deploy/config.json"]: `{"configPath": "../../dist/wrangler.json" }`, + [`dist/wrangler.json`]: "DUMMY", + }); + expect(findWranglerConfig(".")).toEqual({ + configPath: path.resolve(`wrangler.toml`), + userConfigPath: path.resolve(`wrangler.toml`), + }); + expect(std).toEqual(NO_LOGS); + }); + }); + + describe("(useRedirectIfAvailable: true)", () => { + it("should return redirected config path if no user config and a deploy config is found", async () => { + await seed({ + [".wrangler/deploy/config.json"]: `{"configPath": "../../dist/wrangler.json" }`, + [`dist/wrangler.json`]: "DUMMY", + ["foo/holder.txt"]: "DUMMY", + }); + expect(findWranglerConfig(".", { useRedirectIfAvailable: true })).toEqual( + { + configPath: path.resolve(`dist/wrangler.json`), + } + ); + expect( + findWranglerConfig("./foo", { useRedirectIfAvailable: true }) + ).toEqual({ + configPath: path.resolve(`dist/wrangler.json`), + }); + expect(std).toMatchInlineSnapshot(` + Object { + "debug": "", + "err": "", + "info": "", + "out": "", + "warn": "▲ [WARNING] Using redirected Wrangler configuration. 
+ + Configuration being used: \\"dist/wrangler.json\\" + Original user's configuration: \\"\\" + Deploy configuration file: \\".wrangler/deploy/config.json\\" + + + ▲ [WARNING] Using redirected Wrangler configuration. + + Configuration being used: \\"dist/wrangler.json\\" + Original user's configuration: \\"\\" + Deploy configuration file: \\".wrangler/deploy/config.json\\" + + ", + } + `); + }); + + it("should return redirected config path if matching user config and a deploy config is found", async () => { + await seed({ + [`wrangler.toml`]: "DUMMY", + [".wrangler/deploy/config.json"]: `{"configPath": "../../dist/wrangler.json" }`, + [`dist/wrangler.json`]: "DUMMY", + ["foo/holder.txt"]: "DUMMY", + }); + expect(findWranglerConfig(".", { useRedirectIfAvailable: true })).toEqual( + { + configPath: path.resolve(`dist/wrangler.json`), + userConfigPath: path.resolve(`wrangler.toml`), + } + ); + expect( + findWranglerConfig("./foo", { useRedirectIfAvailable: true }) + ).toEqual({ + configPath: path.resolve(`dist/wrangler.json`), + userConfigPath: path.resolve(`wrangler.toml`), + }); + expect(std).toMatchInlineSnapshot(` + Object { + "debug": "", + "err": "", + "info": "", + "out": "", + "warn": "▲ [WARNING] Using redirected Wrangler configuration. + + Configuration being used: \\"dist/wrangler.json\\" + Original user's configuration: \\"wrangler.toml\\" + Deploy configuration file: \\".wrangler/deploy/config.json\\" + + + ▲ [WARNING] Using redirected Wrangler configuration. + + Configuration being used: \\"dist/wrangler.json\\" + Original user's configuration: \\"wrangler.toml\\" + Deploy configuration file: \\".wrangler/deploy/config.json\\" + + ", + } + `); + }); + + it("should error if deploy config is not valid JSON", async () => { + await seed({ + [".wrangler/deploy/config.json"]: `INVALID JSON`, + }); + + let error; + try { + findWranglerConfig(".", { useRedirectIfAvailable: true }); + } catch (e) { + error = e; + } + + expect(normalizeString(`${error}`)).toMatchInlineSnapshot(` + "Error: Failed to parse the deploy configuration file at .wrangler/deploy/config.json + X [ERROR] InvalidSymbol + + /.wrangler/deploy/config.json:1:0: +  1 │ INVALID JSON + ╵ ~~~~~~~ + + " + `); + expect(std).toEqual(NO_LOGS); + }); + + it("should error if deploy config does not contain a `configPath` property", async () => { + await seed({ + [".wrangler/deploy/config.json"]: `{}`, + }); + + let error; + try { + findWranglerConfig(".", { useRedirectIfAvailable: true }); + } catch (e) { + error = e; + } + + expect(normalizeString(`${error}`)).toMatchInlineSnapshot(` + "Error: A deploy configuration file was found at \\".wrangler/deploy/config.json\\". + But this is not valid - the required \\"configPath\\" property was not found. + Instead this file contains: + \`\`\` + {} + \`\`\`" + `); + expect(std).toEqual(NO_LOGS); + }); + + it("should error if redirected config file does not exist", async () => { + await seed({ + [".wrangler/deploy/config.json"]: `{ "configPath": "missing/wrangler.json" }`, + }); + + let error; + try { + findWranglerConfig(".", { useRedirectIfAvailable: true }); + } catch (e) { + error = e; + } + + expect(normalizeString(`${error}`)).toMatchInlineSnapshot(` + "Error: There is a deploy configuration at \\".wrangler/deploy/config.json\\". + But the redirected configuration path it points to, \\".wrangler/deploy/missing/wrangler.json\\", does not exist." 
+ `); + expect(std).toEqual(NO_LOGS); + }); + + it("should error if deploy config file and user config file do not have the same base path", async () => { + await seed({ + [`foo/wrangler.toml`]: "DUMMY", + ["foo/bar/.wrangler/deploy/config.json"]: `{ "configPath": "../../dist/wrangler.json" }`, + [`foo/bar/dist/wrangler.json`]: "DUMMY", + + [`bar/foo/wrangler.toml`]: "DUMMY", + ["bar/.wrangler/deploy/config.json"]: `{ "configPath": "../../dist/wrangler.json" }`, + [`bar/dist/wrangler.json`]: "DUMMY", + }); + + let error; + try { + findWranglerConfig("foo/bar", { useRedirectIfAvailable: true }); + } catch (e) { + error = e; + } + + expect(normalizeString(`${error}`)).toMatchInlineSnapshot(` + "Error: Found both a user configuration file at \\"foo/wrangler.toml\\" + and a deploy configuration file at \\"foo/bar/.wrangler/deploy/config.json\\". + But these do not share the same base path so it is not clear which should be used." + `); + expect(std).toEqual(NO_LOGS); + + try { + error = undefined; + findWranglerConfig("bar/foo", { useRedirectIfAvailable: true }); + } catch (e) { + error = e; + } + + expect(normalizeString(`${error}`)).toMatchInlineSnapshot(` + "Error: Found both a user configuration file at \\"bar/foo/wrangler.toml\\" + and a deploy configuration file at \\"bar/.wrangler/deploy/config.json\\". + But these do not share the same base path so it is not clear which should be used." + `); + expect(std).toEqual(NO_LOGS); + }); + }); +}); diff --git a/packages/wrangler/src/__tests__/d1/export.test.ts b/packages/wrangler/src/__tests__/d1/export.test.ts index 4b9809f936e2..2e60d524f5bb 100644 --- a/packages/wrangler/src/__tests__/d1/export.test.ts +++ b/packages/wrangler/src/__tests__/d1/export.test.ts @@ -18,11 +18,17 @@ describe("export", () => { const { setIsTTY } = useMockIsTTY(); it("should throw if output is missing", async () => { - await expect(runWrangler("d1 export db --local")).rejects.toThrowError( + await expect(runWrangler("d1 export db")).rejects.toThrowError( `Missing required argument: output` ); }); + it("should throw if local and remote are both set", async () => { + await expect( + runWrangler("d1 export db --local --remote --output test-local.sql") + ).rejects.toThrowError("Arguments local and remote are mutually exclusive"); + }); + it("should handle local", async () => { setIsTTY(false); writeWranglerConfig({ @@ -32,7 +38,7 @@ describe("export", () => { }); // Verify the basic command works with an empty DB - await runWrangler("d1 export db --local --output test-local.sql"); + await runWrangler("d1 export db --output test-local.sql"); expect(fs.readFileSync("test-local.sql", "utf8")).toBe( "PRAGMA defer_foreign_keys=TRUE;" ); @@ -47,7 +53,7 @@ describe("export", () => { INSERT INTO bar (value) VALUES ('aaa'),('bbb'),('ccc'); ` ); - await runWrangler("d1 execute db --local --file data.sql"); + await runWrangler("d1 execute db --file data.sql"); // SQL output expectations const create_foo = "CREATE TABLE foo(id INTEGER PRIMARY KEY, value TEXT);"; @@ -64,7 +70,7 @@ describe("export", () => { ]; // Full export - await runWrangler("d1 export db --local --output test-full.sql"); + await runWrangler("d1 export db --output test-full.sql"); expect(fs.readFileSync("test-full.sql", "utf8")).toBe( [ "PRAGMA defer_foreign_keys=TRUE;", @@ -76,17 +82,13 @@ describe("export", () => { ); // Schema only - await runWrangler( - "d1 export db --local --output test-schema.sql --no-data" - ); + await runWrangler("d1 export db --output test-schema.sql --no-data"); 
expect(fs.readFileSync("test-schema.sql", "utf8")).toBe( ["PRAGMA defer_foreign_keys=TRUE;", create_foo, create_bar].join("\n") ); // Data only - await runWrangler( - "d1 export db --local --output test-data.sql --no-schema" - ); + await runWrangler("d1 export db --output test-data.sql --no-schema"); expect(fs.readFileSync("test-data.sql", "utf8")).toBe( ["PRAGMA defer_foreign_keys=TRUE;", ...insert_foo, ...insert_bar].join( "\n" @@ -94,9 +96,7 @@ describe("export", () => { ); // Foo only - await runWrangler( - "d1 export db --local --output test-data.sql --table foo" - ); + await runWrangler("d1 export db --output test-data.sql --table foo"); expect(fs.readFileSync("test-data.sql", "utf8")).toBe( ["PRAGMA defer_foreign_keys=TRUE;", create_foo, ...insert_foo].join("\n") ); diff --git a/packages/wrangler/src/__tests__/d1/migrate.test.ts b/packages/wrangler/src/__tests__/d1/migrate.test.ts index 47c7412494c9..9d858a7f320b 100644 --- a/packages/wrangler/src/__tests__/d1/migrate.test.ts +++ b/packages/wrangler/src/__tests__/d1/migrate.test.ts @@ -1,4 +1,3 @@ -import { cwd } from "process"; import { http, HttpResponse } from "msw"; import { reinitialiseAuthTokens } from "../../user"; import { mockAccountId, mockApiToken } from "../helpers/mock-account-id"; @@ -47,9 +46,7 @@ describe("migrate", () => { // If we get to the point where we are checking for migrations then we have not been asked to log in. await expect( runWrangler("d1 migrations apply DATABASE") - ).rejects.toThrowError( - `No migrations present at ${cwd().replaceAll("\\", "/")}/migrations.` - ); + ).rejects.toThrowError(`No migrations present at /migrations.`); }); it("should try to read D1 config from wrangler.toml", async () => { @@ -68,9 +65,7 @@ describe("migrate", () => { // If we get to the point where we are checking for migrations then we have not checked wrangler.toml. await expect( runWrangler("d1 migrations apply DATABASE") - ).rejects.toThrowError( - `No migrations present at ${cwd().replaceAll("\\", "/")}/migrations.` - ); + ).rejects.toThrowError(`No migrations present at /migrations.`); }); it("should reject the use of --preview with --local", async () => { @@ -221,9 +216,7 @@ Your database may not be available to serve requests during the migration, conti // If we get to the point where we are checking for migrations then we have not been asked to log in. await expect( runWrangler("d1 migrations list --local DATABASE") - ).rejects.toThrowError( - `No migrations present at ${cwd().replaceAll("\\", "/")}/migrations.` - ); + ).rejects.toThrowError(`No migrations present at /migrations.`); }); it("should use the custom migrations folder when provided", async () => { @@ -241,10 +234,7 @@ Your database may not be available to serve requests during the migration, conti await expect( runWrangler("d1 migrations list --local DATABASE") ).rejects.toThrowError( - `No migrations present at ${cwd().replaceAll( - "\\", - "/" - )}/my-migrations-go-here.` + `No migrations present at /my-migrations-go-here.` ); }); @@ -276,9 +266,7 @@ Your database may not be available to serve requests during the migration, conti // If we get to the point where we are checking for migrations then we have not checked wrangler.toml. 
await expect( runWrangler("d1 migrations list DATABASE") - ).rejects.toThrowError( - `No migrations present at ${cwd().replaceAll("\\", "/")}/migrations.` - ); + ).rejects.toThrowError(`No migrations present at /migrations.`); }); }); }); diff --git a/packages/wrangler/src/__tests__/deploy.test.ts b/packages/wrangler/src/__tests__/deploy.test.ts index ee73d119c714..eea34c238892 100644 --- a/packages/wrangler/src/__tests__/deploy.test.ts +++ b/packages/wrangler/src/__tests__/deploy.test.ts @@ -10420,7 +10420,7 @@ export default{ fs.writeFileSync( "index.js", ` - import path from 'path'; + import path from 'node:path'; console.log(path); export default {} ` @@ -10441,7 +10441,7 @@ export default{ } `); expect(fs.readFileSync("dist/index.js", { encoding: "utf-8" })).toContain( - `import path from "path";` + `import path from "node:path";` ); }); diff --git a/packages/wrangler/src/__tests__/deployments.test.ts b/packages/wrangler/src/__tests__/deployments.test.ts index 4590d0c8bcca..0506d07b9416 100644 --- a/packages/wrangler/src/__tests__/deployments.test.ts +++ b/packages/wrangler/src/__tests__/deployments.test.ts @@ -1,12 +1,10 @@ import fs from "node:fs"; -import { http, HttpResponse } from "msw"; import { mockAccountId, mockApiToken } from "./helpers/mock-account-id"; import { mockConsoleMethods } from "./helpers/mock-console"; -import { clearDialogs, mockConfirm, mockPrompt } from "./helpers/mock-dialogs"; -import { useMockIsTTY } from "./helpers/mock-istty"; +import { clearDialogs } from "./helpers/mock-dialogs"; import { - createFetchResult, msw, + mswListNewDeployments, mswSuccessDeploymentDetails, mswSuccessDeployments, mswSuccessDeploymentScriptMetadata, @@ -37,6 +35,7 @@ describe("deployments", () => { beforeEach(() => { msw.use( + mswListNewDeployments, ...mswSuccessDeployments, ...mswSuccessOauthHandlers, ...mswSuccessUserHandlers, @@ -74,82 +73,8 @@ describe("deployments", () => { }); describe("deployments subcommands", () => { - describe("deployments list", () => { - it("should log deployments", async () => { - writeWranglerConfig(); - - await runWrangler("deployments list --no-x-versions"); - expect(std.out).toMatchInlineSnapshot(` - " - Version ID: Constitution-Class-tag:test-name - Created on: 2021-01-01T00:00:00.000000Z - Author: Jean-Luc-Picard@federation.org - Source: Upload from Wrangler 🤠 - - Version ID: Intrepid-Class-tag:test-name - Created on: 2021-02-02T00:00:00.000000Z - Author: Kathryn-Janeway@federation.org - Source: Rollback from Wrangler 🤠 - Rollback from: MOCK-DEPLOYMENT-ID-1111 - Message: Rolled back for this version - - Version ID: 3mEgaU1T-Intrepid-someThing-tag:test-name - Created on: 2021-02-03T00:00:00.000000Z - Author: Kathryn-Janeway@federation.org - Source: Wrangler 🤠 - - Version ID: Galaxy-Class-tag:test-name - Created on: 2021-01-04T00:00:00.000000Z - Author: Jean-Luc-Picard@federation.org - Source: Rollback from Wrangler 🤠 - Rollback from: MOCK-DEPLOYMENT-ID-2222 - 🟩 Active" - `); - }); - - it("should log deployments for script with passed in name option", async () => { - await runWrangler( - "deployments list --name something-else --no-x-versions" - ); - expect(std.out).toMatchInlineSnapshot(` - " - Version ID: Constitution-Class-tag:something-else - Created on: 2021-01-01T00:00:00.000000Z - Author: Jean-Luc-Picard@federation.org - Source: Upload from Wrangler 🤠 - - Version ID: Intrepid-Class-tag:something-else - Created on: 2021-02-02T00:00:00.000000Z - Author: Kathryn-Janeway@federation.org - Source: Rollback from Wrangler 🤠 - Rollback from: 
MOCK-DEPLOYMENT-ID-1111 - Message: Rolled back for this version - - Version ID: 3mEgaU1T-Intrepid-someThing-tag:something-else - Created on: 2021-02-03T00:00:00.000000Z - Author: Kathryn-Janeway@federation.org - Source: Wrangler 🤠 - - Version ID: Galaxy-Class-tag:something-else - Created on: 2021-01-04T00:00:00.000000Z - Author: Jean-Luc-Picard@federation.org - Source: Rollback from Wrangler 🤠 - Rollback from: MOCK-DEPLOYMENT-ID-2222 - 🟩 Active" - `); - }); - - it("should error on missing script name", async () => { - await expect( - runWrangler("deployments list --no-x-versions") - ).rejects.toMatchInlineSnapshot( - `[Error: Required Worker name missing. Please specify the Worker name in your Wrangler configuration file, or pass it as an argument with \`--name\`]` - ); - }); - }); - describe("deployment view", () => { - it("should error with no --no-x-versions flag", async () => { + it("should error with no flag", async () => { writeWranglerConfig(); await expect( @@ -158,257 +83,6 @@ describe("deployments", () => { `[Error: \`wrangler deployments view \` has been renamed \`wrangler versions view [version-id]\`. Please use that command instead.]` ); }); - - it("should log deployment details", async () => { - writeWranglerConfig(); - - await runWrangler("deployments view 1701-E --no-x-versions"); - - expect(std.out).toMatchInlineSnapshot(` - " - Version ID: 1701-E - Created on: 2021-01-01T00:00:00.000000Z - Author: Jean-Luc-Picard@federation.org - Source: Wrangler 🤠 - ------------------------------------------------------------ - Author ID: Picard-Gamma-6-0-7-3 - Usage Model: bundled - Handlers: fetch - --------------------------bindings-------------------------- - None - " - `); - }); - - it("should log deployment details with bindings", async () => { - writeWranglerConfig(); - - await runWrangler("deployments view bindings-tag --no-x-versions"); - - expect(std.out).toMatchInlineSnapshot(` - " - Version ID: 1701-E - Created on: 2021-01-01T00:00:00.000000Z - Author: Jean-Luc-Picard@federation.org - Source: Wrangler 🤠 - ------------------------------------------------------------ - Author ID: Picard-Gamma-6-0-7-3 - Usage Model: bundled - Handlers: fetch - --------------------------bindings-------------------------- - [[r2_buckets]] - binding = \\"MY_BUCKET\\" - bucket_name = \\"testr2\\" - - " - `); - }); - - it("should automatically log latest deployment details", async () => { - writeWranglerConfig(); - - await runWrangler("deployments view --no-x-versions"); - - expect(std.out).toMatchInlineSnapshot(` - " - Version ID: 1701-E - Created on: 2021-01-01T00:00:00.000000Z - Author: Jean-Luc-Picard@federation.org - Source: Wrangler 🤠 - ------------------------------------------------------------ - Author ID: Picard-Gamma-6-0-7-3 - Usage Model: bundled - Handlers: fetch - --------------------------bindings-------------------------- - None - " - `); - }); - }); - - describe("rollback", () => { - const { setIsTTY } = useMockIsTTY(); - const requests = { count: 0 }; - beforeEach(() => { - setIsTTY(true); - requests.count = 0; - msw.use( - http.put( - "*/accounts/:accountID/workers/scripts/:scriptName", - ({ request }) => { - const url = new URL(request.url); - - expect(url.searchParams.get("rollback_to")).toMatch( - /^3mEgaU1T-Intrepid-someThing-tag:/ - ); - - requests.count++; - - return HttpResponse.json( - createFetchResult({ - created_on: "2222-11-18T16:40:48.50545Z", - modified_on: "2222-01-20T18:08:47.464024Z", - id: "space_craft_1", - tag: "alien_tech_001", - tags: ["hyperdrive", 
"laser_cannons", "shields"], - deployment_id: "galactic_mission_alpha", - logpush: true, - etag: "13a3240e8fb414561b0366813b0b8f42b3e6cfa0d9e70e99835dae83d0d8a794", - handlers: [ - "interstellar_communication", - "hyperspace_navigation", - ], - last_deployed_from: "spaceport_alpha", - usage_model: "intergalactic", - script: `addEventListener('interstellar_communication', event =\u003e - { event.respondWith(transmit(event.request)) } - )`, - size: "1 light-year", - }) - ); - }, - { once: true } - ) - ); - }); - - it("should successfully rollback and output a success message", async () => { - mockConfirm({ - text: "This deployment 3mEgaU1T will immediately replace the current deployment and become the active deployment across all your deployed routes and domains. However, your local development environment will not be affected by this rollback. Note: Rolling back to a previous deployment will not rollback any of the bound resources (Durable Object, D1, R2, KV, etc).", - result: true, - }); - - mockPrompt({ - text: "Please provide a message for this rollback (120 characters max)", - result: "", - }); - - writeWranglerConfig(); - await runWrangler( - "rollback 3mEgaU1T-Intrepid-someThing-tag:test-name --no-x-versions" - ); - expect(std.out).toMatchInlineSnapshot(` - " - Successfully rolled back to Deployment ID: 3mEgaU1T-Intrepid-someThing-tag:test-name - Current Version ID: galactic_mission_alpha" - `); - - expect(requests.count).toEqual(1); - }); - - it("should early exit from rollback if user denies continuing", async () => { - mockConfirm({ - text: "This deployment 3mEgaU1T will immediately replace the current deployment and become the active deployment across all your deployed routes and domains. However, your local development environment will not be affected by this rollback. Note: Rolling back to a previous deployment will not rollback any of the bound resources (Durable Object, D1, R2, KV, etc).", - result: false, - }); - - writeWranglerConfig(); - await runWrangler( - "rollback 3mEgaU1T-Intrpid-someThing-tag:test-name --no-x-versions" - ); - expect(std.out).toMatchInlineSnapshot(`""`); - - expect(requests.count).toEqual(0); - }); - - it("should skip prompt automatically in rollback if in a non-TTY environment", async () => { - setIsTTY(false); - - writeWranglerConfig(); - await runWrangler( - "rollback 3mEgaU1T-Intrepid-someThing-tag:test-name --no-x-versions" - ); - expect(std.out).toMatchInlineSnapshot(` - "? This deployment 3mEgaU1T will immediately replace the current deployment and become the active deployment across all your deployed routes and domains. However, your local development environment will not be affected by this rollback. Note: Rolling back to a previous deployment will not rollback any of the bound resources (Durable Object, D1, R2, KV, etc). - 🤖 Using fallback value in non-interactive context: yes - ? 
Please provide a message for this rollback (120 characters max) - 🤖 Using default value in non-interactive context: - - Successfully rolled back to Deployment ID: 3mEgaU1T-Intrepid-someThing-tag:test-name - Current Version ID: galactic_mission_alpha" - `); - - expect(requests.count).toEqual(1); - }); - - it("should skip prompt automatically in rollback if message flag is provided", async () => { - writeWranglerConfig(); - await runWrangler( - `rollback 3mEgaU1T-Intrepid-someThing-tag:test-name --message "test" --no-x-versions` - ); - expect(std.out).toMatchInlineSnapshot(` - " - Successfully rolled back to Deployment ID: 3mEgaU1T-Intrepid-someThing-tag:test-name - Current Version ID: galactic_mission_alpha" - `); - - expect(requests.count).toEqual(1); - }); - - it("should skip prompt automatically in rollback with empty message", async () => { - writeWranglerConfig(); - await runWrangler( - `rollback 3mEgaU1T-Intrepid-someThing-tag:test-name --message "test" --no-x-versions` - ); - expect(std.out).toMatchInlineSnapshot(` - " - Successfully rolled back to Deployment ID: 3mEgaU1T-Intrepid-someThing-tag:test-name - Current Version ID: galactic_mission_alpha" - `); - - expect(requests.count).toEqual(1); - }); - - it("should automatically rollback to previous deployment when id is not specified", async () => { - mockConfirm({ - text: "This deployment 3mEgaU1T will immediately replace the current deployment and become the active deployment across all your deployed routes and domains. However, your local development environment will not be affected by this rollback. Note: Rolling back to a previous deployment will not rollback any of the bound resources (Durable Object, D1, R2, KV, etc).", - result: true, - }); - - mockPrompt({ - text: "Please provide a message for this rollback (120 characters max)", - result: "", - }); - - writeWranglerConfig(); - await runWrangler("rollback --no-x-versions"); - expect(std.out).toMatchInlineSnapshot(` - " - Successfully rolled back to Deployment ID: 3mEgaU1T-Intrepid-someThing-tag:test-name - Current Version ID: galactic_mission_alpha" - `); - - expect(requests.count).toEqual(1); - }); - - it("should require a worker name", async () => { - await expect(runWrangler("rollback")).rejects.toMatchInlineSnapshot( - `[Error: You need to provide a name for your Worker. Either pass it as a cli arg with \`--name \` or in your configuration file as \`name = ""\`]` - ); - - expect(requests.count).toEqual(0); - }); - - it("should automatically rollback to previous deployment with specified name", async () => { - mockConfirm({ - text: "This deployment 3mEgaU1T will immediately replace the current deployment and become the active deployment across all your deployed routes and domains. However, your local development environment will not be affected by this rollback. 
Note: Rolling back to a previous deployment will not rollback any of the bound resources (Durable Object, D1, R2, KV, etc).", - result: true, - }); - - mockPrompt({ - text: "Please provide a message for this rollback (120 characters max)", - result: "", - }); - - await runWrangler("rollback --name something-else --no-x-versions"); - expect(std.out).toMatchInlineSnapshot(` - " - Successfully rolled back to Deployment ID: 3mEgaU1T-Intrepid-someThing-tag:something-else - Current Version ID: galactic_mission_alpha" - `); - - expect(requests.count).toEqual(1); - }); }); }); }); diff --git a/packages/wrangler/src/__tests__/dev.test.ts b/packages/wrangler/src/__tests__/dev.test.ts index a3d063658acb..b53e14a1f226 100644 --- a/packages/wrangler/src/__tests__/dev.test.ts +++ b/packages/wrangler/src/__tests__/dev.test.ts @@ -1879,6 +1879,23 @@ describe.sequential("wrangler dev", () => { }); }); + describe("`browser rendering binding", () => { + it("should show error when running locally", async () => { + writeWranglerConfig({ + browser: { + binding: "MYBROWSER", + }, + }); + fs.writeFileSync("index.js", `export default {};`); + + await expect( + runWrangler("dev index.js") + ).rejects.toThrowErrorMatchingInlineSnapshot( + "[Error: Browser Rendering is not supported locally. Please use `wrangler dev --remote` instead.]" + ); + }); + }); + it("should error helpfully if pages_build_output_dir is set", async () => { writeWranglerConfig({ pages_build_output_dir: "dist", name: "test" }); await expect(runWrangler("dev")).rejects.toThrowErrorMatchingInlineSnapshot( diff --git a/packages/wrangler/src/__tests__/get-entry.test.ts b/packages/wrangler/src/__tests__/get-entry.test.ts index aa938d5054dc..6801f867b11e 100644 --- a/packages/wrangler/src/__tests__/get-entry.test.ts +++ b/packages/wrangler/src/__tests__/get-entry.test.ts @@ -131,6 +131,7 @@ describe("getEntry()", () => { ...defaultWranglerConfig, main: "src/index.ts", configPath: "other-worker/wrangler.toml", + userConfigPath: "other-worker/wrangler.toml", }, "deploy" ); diff --git a/packages/wrangler/src/__tests__/helpers/normalize.ts b/packages/wrangler/src/__tests__/helpers/normalize.ts index b07664de31b7..fe0c4473549e 100644 --- a/packages/wrangler/src/__tests__/helpers/normalize.ts +++ b/packages/wrangler/src/__tests__/helpers/normalize.ts @@ -5,7 +5,7 @@ export function normalizeString(input: string): string { return normalizeErrorMarkers( replaceByte( stripTrailingWhitespace( - normalizeSlashes(normalizeTempDirs(stripTimings(input))) + normalizeSlashes(normalizeCwd(normalizeTempDirs(stripTimings(input)))) ) ) ); @@ -29,6 +29,13 @@ function normalizeSlashes(str: string): string { return str.replace(/\\/g, "/"); } +/** + * Replace any use of the current working directory with `` to avoid cross OS issues. + */ +function normalizeCwd(str: string): string { + return str.replaceAll(process.cwd(), ""); +} + /** * Strip "timing data" out of the `stdout` string, since this is not always deterministic. 
* diff --git a/packages/wrangler/src/__tests__/helpers/write-wrangler-config.ts b/packages/wrangler/src/__tests__/helpers/write-wrangler-config.ts index b81b8d091f67..24cb803d9b1a 100644 --- a/packages/wrangler/src/__tests__/helpers/write-wrangler-config.ts +++ b/packages/wrangler/src/__tests__/helpers/write-wrangler-config.ts @@ -1,4 +1,5 @@ -import * as fs from "fs"; +import * as fs from "node:fs"; +import { dirname } from "node:path"; import { formatConfigSnippet } from "../../config"; import type { RawConfig } from "../../config"; @@ -7,6 +8,7 @@ export function writeWranglerConfig( config: RawConfig = {}, path = "./wrangler.toml" ) { + fs.mkdirSync(dirname(path), { recursive: true }); fs.writeFileSync( path, formatConfigSnippet( diff --git a/packages/wrangler/src/__tests__/metrics.test.ts b/packages/wrangler/src/__tests__/metrics.test.ts index 4dfa9f580013..d0b5ac3d18b9 100644 --- a/packages/wrangler/src/__tests__/metrics.test.ts +++ b/packages/wrangler/src/__tests__/metrics.test.ts @@ -197,8 +197,6 @@ describe("metrics", () => { args: { xJsonConfig: true, j: true, - xVersions: true, - xGradualRollouts: true, search: [""], }, }; diff --git a/packages/wrangler/src/__tests__/pages/pages-build-env.test.ts b/packages/wrangler/src/__tests__/pages/pages-build-env.test.ts index 8895ac532b7f..fb5035049d3d 100644 --- a/packages/wrangler/src/__tests__/pages/pages-build-env.test.ts +++ b/packages/wrangler/src/__tests__/pages/pages-build-env.test.ts @@ -1,11 +1,12 @@ /* eslint-disable turbo/no-undeclared-env-vars */ -import { readFileSync, writeFileSync } from "node:fs"; +import { mkdirSync, readFileSync, writeFileSync } from "node:fs"; import { logger } from "../../logger"; import { EXIT_CODE_INVALID_PAGES_CONFIG, EXIT_CODE_NO_CONFIG_FOUND, } from "../../pages/errors"; import { mockConsoleMethods } from "../helpers/mock-console"; +import { normalizeString } from "../helpers/normalize"; import { runInTempDir } from "../helpers/run-in-tmp"; import { runWrangler } from "../helpers/run-wrangler"; import { writeWranglerConfig } from "../helpers/write-wrangler-config"; @@ -391,4 +392,34 @@ describe("pages build env", () => { `"{\\"vars\\":{\\"VAR1\\":\\"PREVIEW_VALUE1\\",\\"VAR2\\":\\"PREVIEW_VALUE2\\",\\"PREVIEW_VAR3\\":\\"PREVIEW_VALUE3\\"},\\"pages_build_output_dir\\":\\"dist\\"}"` ); }); + + it("should render output directory path relative to project directory, even if wrangler config is redirected", async () => { + vi.stubEnv("PAGES_ENVIRONMENT", ""); + writeWranglerConfig( + { + // Note this path is relative to the "generated" wrangler.json + pages_build_output_dir: "./dist", + }, + "build/wrangler.json" + ); + mkdirSync(".wrangler/deploy", { recursive: true }); + writeFileSync( + ".wrangler/deploy/config.json", + JSON.stringify({ configPath: "../../build/wrangler.json" }) + ); + + await runWrangler("pages functions build-env . --outfile data.json"); + expect(std.out).toMatchInlineSnapshot(` + "Checking for configuration in a Wrangler configuration file (BETA) + + Found wrangler.json file. Reading build configuration... 
+ pages_build_output_dir: build/dist + Build environment variables: (none found)" + `); + expect( + normalizeString( + JSON.parse(readFileSync("data.json", "utf8")).pages_build_output_dir + ) + ).toEqual("build/dist"); + }); }); diff --git a/packages/wrangler/src/__tests__/pipelines.test.ts b/packages/wrangler/src/__tests__/pipelines.test.ts index 6e07edbf3b1a..41179d8106de 100644 --- a/packages/wrangler/src/__tests__/pipelines.test.ts +++ b/packages/wrangler/src/__tests__/pipelines.test.ts @@ -292,12 +292,9 @@ describe("pipelines", () => { OPTIONS --secret-access-key The R2 service token Access Key to write data [string] --access-key-id The R2 service token Secret Key to write data [string] - --batch-max-mb The approximate maximum size of a batch before flush in megabytes - Default: 10 [number] - --batch-max-rows The approximate maximum size of a batch before flush in rows - Default: 10000 [number] - --batch-max-seconds The approximate maximum duration of a batch before flush in seconds - Default: 15 [number] + --batch-max-mb The approximate maximum size (in megabytes) for each batch before flushing (range: 1 - 100) [number] + --batch-max-rows The approximate maximum number of rows in a batch before flushing (range: 100 - 1000000) [number] + --batch-max-seconds The approximate maximum age (in seconds) of a batch before flushing (range: 1 - 300) [number] --transform The worker and entrypoint of the PipelineTransform implementation in the format \\"worker.entrypoint\\" Default: No transformation worker [string] --compression Sets the compression format of output files diff --git a/packages/wrangler/src/__tests__/rollback.test.ts b/packages/wrangler/src/__tests__/rollback.test.ts index f3702c098770..3ceed3f5c8e6 100644 --- a/packages/wrangler/src/__tests__/rollback.test.ts +++ b/packages/wrangler/src/__tests__/rollback.test.ts @@ -131,7 +131,7 @@ describe("rollback", () => { }); await runWrangler( - "rollback --name script-name --version-id rollback-version --x-versions" + "rollback --name script-name --version-id rollback-version" ); // Unable to test stdout as the output has weird whitespace. Causing lint to fail with "no-irregular-whitespace" @@ -188,7 +188,7 @@ describe("rollback", () => { }); await runWrangler( - "rollback --name script-name --version-id rollback-version --x-versions" + "rollback --name script-name --version-id rollback-version" ); // Unable to test stdout as the output has weird whitespace. Causing lint to fail with "no-irregular-whitespace" @@ -228,7 +228,7 @@ describe("rollback", () => { mockPostDeployment(true); await runWrangler( - "rollback --name script-name --version-id rollback-version --x-versions" + "rollback --name script-name --version-id rollback-version" ); // Unable to test stdout as the output has weird whitespace. 
Causing lint to fail with "no-irregular-whitespace" diff --git a/packages/wrangler/src/__tests__/type-generation.test.ts b/packages/wrangler/src/__tests__/type-generation.test.ts index 79268ed7dbc9..435f178a15bc 100644 --- a/packages/wrangler/src/__tests__/type-generation.test.ts +++ b/packages/wrangler/src/__tests__/type-generation.test.ts @@ -256,13 +256,12 @@ describe("generateTypes()", () => { `); }); - it("should show a warning when no custom config file is detected", async () => { - await runWrangler("types -c hello.toml"); - expect(std.warn).toMatchInlineSnapshot(` - "▲ [WARNING] No config file detected (at hello.toml), aborting - - " - `); + it("should error when a specified custom config file is missing", async () => { + await expect(() => + runWrangler("types -c hello.toml") + ).rejects.toMatchInlineSnapshot( + `[ParseError: Could not read file: hello.toml]` + ); }); it("should respect the top level -c|--config flag", async () => { diff --git a/packages/wrangler/src/__tests__/versions/deployments/deployments.list.test.ts b/packages/wrangler/src/__tests__/versions/deployments/deployments.list.test.ts index d79c96ecec91..637982b763df 100644 --- a/packages/wrangler/src/__tests__/versions/deployments/deployments.list.test.ts +++ b/packages/wrangler/src/__tests__/versions/deployments/deployments.list.test.ts @@ -20,7 +20,7 @@ describe("deployments list", () => { describe("without wrangler.toml", () => { test("fails with no args", async () => { - const result = runWrangler("deployments list --experimental-versions"); + const result = runWrangler("deployments list"); await expect(result).rejects.toMatchInlineSnapshot( `[Error: You need to provide a name for your Worker. Either pass it as a cli arg with \`--name \` or in your configuration file as \`name = ""\`]` @@ -32,9 +32,7 @@ describe("deployments list", () => { }); test("prints deployments to stdout", async () => { - const result = runWrangler( - "deployments list --name test-name --experimental-versions" - ); + const result = runWrangler("deployments list --name test-name"); await expect(result).resolves.toBeUndefined(); @@ -101,9 +99,7 @@ describe("deployments list", () => { }); test("prints deployments to stdout as --json", async () => { - const result = runWrangler( - "deployments list --name test-name --json --experimental-versions" - ); + const result = runWrangler("deployments list --name test-name --json"); await expect(result).resolves.toBeUndefined(); @@ -199,7 +195,7 @@ describe("deployments list", () => { beforeEach(() => writeWranglerConfig()); test("prints deployments to stdout", async () => { - const result = runWrangler("deployments list --experimental-versions"); + const result = runWrangler("deployments list"); await expect(result).resolves.toBeUndefined(); @@ -266,9 +262,7 @@ describe("deployments list", () => { }); test("prints deployments to stdout as --json", async () => { - const result = runWrangler( - "deployments list --json --experimental-versions" - ); + const result = runWrangler("deployments list --json"); await expect(result).resolves.toBeUndefined(); diff --git a/packages/wrangler/src/__tests__/versions/deployments/deployments.status.test.ts b/packages/wrangler/src/__tests__/versions/deployments/deployments.status.test.ts index ec4564fb93fd..8fe477c2caf3 100644 --- a/packages/wrangler/src/__tests__/versions/deployments/deployments.status.test.ts +++ b/packages/wrangler/src/__tests__/versions/deployments/deployments.status.test.ts @@ -20,9 +20,7 @@ describe("deployments list", () => { 
describe("without wrangler.toml", () => { test("fails with no args", async () => { - const result = runWrangler( - "deployments status --experimental-gradual-rollouts" - ); + const result = runWrangler("deployments status"); await expect(result).rejects.toMatchInlineSnapshot( `[Error: You need to provide a name for your Worker. Either pass it as a cli arg with \`--name \` or in your configuration file as \`name = ""\`]` @@ -34,9 +32,7 @@ describe("deployments list", () => { }); test("prints latest deployment to stdout", async () => { - const result = runWrangler( - "deployments status --name test-name --experimental-gradual-rollouts" - ); + const result = runWrangler("deployments status --name test-name"); await expect(result).resolves.toBeUndefined(); @@ -61,9 +57,7 @@ describe("deployments list", () => { }); test("prints latest deployment to stdout as --json", async () => { - const result = runWrangler( - "deployments status --name test-name --json --experimental-versions" - ); + const result = runWrangler("deployments status --name test-name --json"); await expect(result).resolves.toBeUndefined(); @@ -98,9 +92,7 @@ describe("deployments list", () => { beforeEach(() => writeWranglerConfig()); test("prints latest deployment to stdout", async () => { - const result = runWrangler( - "deployments status --experimental-gradual-rollouts" - ); + const result = runWrangler("deployments status"); await expect(result).resolves.toBeUndefined(); @@ -125,9 +117,7 @@ describe("deployments list", () => { }); test("prints latest deployment to stdout as --json", async () => { - const result = runWrangler( - "deployments status --json --experimental-versions" - ); + const result = runWrangler("deployments status --json"); await expect(result).resolves.toBeUndefined(); diff --git a/packages/wrangler/src/__tests__/versions/deployments/deployments.view.test.ts b/packages/wrangler/src/__tests__/versions/deployments/deployments.view.test.ts index eb3f28ab27da..e136b10e0dbd 100644 --- a/packages/wrangler/src/__tests__/versions/deployments/deployments.view.test.ts +++ b/packages/wrangler/src/__tests__/versions/deployments/deployments.view.test.ts @@ -6,7 +6,7 @@ describe("deployments view", () => { mockConsoleMethods(); test("error when run with no args", async () => { - const result = runWrangler("deployments view --x-versions"); + const result = runWrangler("deployments view"); await expect(result).rejects.toMatchInlineSnapshot( `[Error: \`wrangler deployments view\` has been renamed \`wrangler deployments status\`. Please use that command instead.]` @@ -15,7 +15,7 @@ describe("deployments view", () => { }); test("error when run with positional arg", async () => { - const result = runWrangler("deployments view dummy-id --x-versions"); + const result = runWrangler("deployments view dummy-id"); await expect(result).rejects.toMatchInlineSnapshot( `[Error: \`wrangler deployments view \` has been renamed \`wrangler versions view [version-id]\`. 
Please use that command instead.]` diff --git a/packages/wrangler/src/__tests__/versions/secrets/bulk.test.ts b/packages/wrangler/src/__tests__/versions/secrets/bulk.test.ts index 8444cd96fb1c..659268bdc4dc 100644 --- a/packages/wrangler/src/__tests__/versions/secrets/bulk.test.ts +++ b/packages/wrangler/src/__tests__/versions/secrets/bulk.test.ts @@ -22,7 +22,7 @@ describe("versions secret bulk", () => { vi.spyOn(readline, "createInterface").mockImplementation( () => null as unknown as Interface ); - await runWrangler(`versions secret bulk --name script-name --x-versions`); + await runWrangler(`versions secret bulk --name script-name`); expect(std.out).toMatchInlineSnapshot( `"🌀 Creating the secrets for the Worker \\"script-name\\" "` ); @@ -59,9 +59,7 @@ describe("versions secret bulk", () => { expect(metadata.keep_assets).toBeTruthy(); }); - await runWrangler( - `versions secret bulk secrets.json --name script-name --x-versions` - ); + await runWrangler(`versions secret bulk secrets.json --name script-name`); expect(std.out).toMatchInlineSnapshot( ` "🌀 Creating the secrets for the Worker \\"script-name\\" @@ -80,9 +78,7 @@ describe("versions secret bulk", () => { await writeFile("wrangler.json", JSON.stringify({ invalid_field: true })); mockSetupApiCalls(); mockPostVersion(); - await runWrangler( - `versions secret bulk secrets.json --name script-name --x-versions` - ); + await runWrangler(`versions secret bulk secrets.json --name script-name`); expect(std.warn).toMatchInlineSnapshot(`""`); expect(std.err).toMatchInlineSnapshot(`""`); }); @@ -112,7 +108,7 @@ describe("versions secret bulk", () => { expect(metadata.keep_assets).toBeTruthy(); }); - await runWrangler(`versions secret bulk --name script-name --x-versions`); + await runWrangler(`versions secret bulk --name script-name`); expect(std.out).toMatchInlineSnapshot( ` "🌀 Creating the secrets for the Worker \\"script-name\\" @@ -129,9 +125,7 @@ describe("versions secret bulk", () => { test("should error on invalid json file", async () => { await writeFile("secrets.json", "not valid json :(", { encoding: "utf8" }); - await runWrangler( - `versions secret bulk secrets.json --name script-name --x-versions` - ); + await runWrangler(`versions secret bulk secrets.json --name script-name`); expect(std.out).toMatchInlineSnapshot( `"🌀 Creating the secrets for the Worker \\"script-name\\" "` ); @@ -163,7 +157,7 @@ describe("versions secret bulk", () => { expect(metadata.keep_assets).toBeTruthy(); }); - await runWrangler(`versions secret bulk --name script-name --x-versions`); + await runWrangler(`versions secret bulk --name script-name`); expect(std.out).toMatchInlineSnapshot( `"🌀 Creating the secrets for the Worker \\"script-name\\" "` ); diff --git a/packages/wrangler/src/__tests__/versions/secrets/delete.test.ts b/packages/wrangler/src/__tests__/versions/secrets/delete.test.ts index dd063c3c8fca..53a6b3d60d21 100644 --- a/packages/wrangler/src/__tests__/versions/secrets/delete.test.ts +++ b/packages/wrangler/src/__tests__/versions/secrets/delete.test.ts @@ -38,9 +38,7 @@ describe("versions secret delete", () => { // We will not be inherting secret_text as that would bring back SECRET expect(metadata.keep_bindings).toStrictEqual(["secret_key"]); }); - await runWrangler( - "versions secret delete SECRET --name script-name --x-versions" - ); + await runWrangler("versions secret delete SECRET --name script-name"); expect(std.out).toMatchInlineSnapshot(` "🌀 Deleting the secret SECRET on the Worker script-name @@ -64,9 +62,7 @@ 
describe("versions secret delete", () => { expect(metadata.keep_bindings).toStrictEqual(["secret_key"]); }); - await runWrangler( - "versions secret delete SECRET --name script-name --x-versions" - ); + await runWrangler("versions secret delete SECRET --name script-name"); expect(std.out).toMatchInlineSnapshot(` "? Are you sure you want to permanently delete the secret SECRET on the Worker script-name? @@ -93,7 +89,7 @@ describe("versions secret delete", () => { expect(metadata.keep_bindings).toStrictEqual(["secret_key"]); }); - await runWrangler("versions secret delete SECRET --x-versions"); + await runWrangler("versions secret delete SECRET"); expect(std.out).toMatchInlineSnapshot(` "? Are you sure you want to permanently delete the secret SECRET on the Worker script-name? @@ -113,9 +109,7 @@ describe("versions secret delete", () => { mockGetVersion(); mockPostVersion(); - await runWrangler( - "versions secret delete SECRET --name script-name --x-versions" - ); + await runWrangler("versions secret delete SECRET --name script-name"); expect(std.warn).toMatchInlineSnapshot(`""`); expect(std.err).toMatchInlineSnapshot(`""`); diff --git a/packages/wrangler/src/__tests__/versions/secrets/list.test.ts b/packages/wrangler/src/__tests__/versions/secrets/list.test.ts index bda2e6150550..dea533321dec 100644 --- a/packages/wrangler/src/__tests__/versions/secrets/list.test.ts +++ b/packages/wrangler/src/__tests__/versions/secrets/list.test.ts @@ -101,7 +101,7 @@ describe("versions secret list", () => { mockGetDeployments(); mockGetVersion("version-id-1"); - await runWrangler("versions secret list --name script-name --x-versions"); + await runWrangler("versions secret list --name script-name"); expect(std.out).toMatchInlineSnapshot(` "-- Version version-id-1 (100%) secrets -- @@ -118,7 +118,7 @@ describe("versions secret list", () => { mockGetVersion("version-id-1"); mockGetVersion("version-id-2"); - await runWrangler("versions secret list --name script-name --x-versions"); + await runWrangler("versions secret list --name script-name"); expect(std.out).toMatchInlineSnapshot(` "-- Version version-id-1 (50%) secrets -- @@ -141,7 +141,7 @@ describe("versions secret list", () => { mockGetDeployments(); mockGetVersion("version-id-1"); - await runWrangler("versions secret list --x-versions"); + await runWrangler("versions secret list"); expect(std.out).toMatchInlineSnapshot(` "-- Version version-id-1 (100%) secrets -- @@ -245,7 +245,7 @@ describe("versions secret list", () => { mockGetDeployments(); mockGetVersion("version-id-1"); - await runWrangler("versions secret list --latest-version --x-versions"); + await runWrangler("versions secret list --latest-version"); expect(std.out).toMatchInlineSnapshot(` "-- Version version-id-3 (0%) secrets -- @@ -262,7 +262,7 @@ describe("versions secret list", () => { mockGetDeployments(); mockGetVersion("version-id-1"); - await runWrangler("versions secret list --name script-name --x-versions"); + await runWrangler("versions secret list --name script-name"); expect(std.warn).toMatchInlineSnapshot(`""`); expect(std.err).toMatchInlineSnapshot(`""`); diff --git a/packages/wrangler/src/__tests__/versions/secrets/put.test.ts b/packages/wrangler/src/__tests__/versions/secrets/put.test.ts index 7682c0113d2e..eca243dfa91e 100644 --- a/packages/wrangler/src/__tests__/versions/secrets/put.test.ts +++ b/packages/wrangler/src/__tests__/versions/secrets/put.test.ts @@ -43,9 +43,7 @@ describe("versions secret put", () => { ]); expect(metadata.keep_assets).toBeTruthy(); }); - await 
runWrangler( - "versions secret put NEW_SECRET --name script-name --x-versions" - ); + await runWrangler("versions secret put NEW_SECRET --name script-name"); expect(std.out).toMatchInlineSnapshot(` "🌀 Creating the secret for the Worker \\"script-name\\" @@ -67,10 +65,7 @@ describe("versions secret put", () => { mockSetupApiCalls(); mockPostVersion(); - await runWrangler( - "versions secret put NEW_SECRET --name script-name --x-versions" - ); - + await runWrangler("versions secret put NEW_SECRET --name script-name"); expect(std.warn).toMatchInlineSnapshot(`""`); expect(std.err).toMatchInlineSnapshot(`""`); }); @@ -96,9 +91,7 @@ describe("versions secret put", () => { `secret ` // whitespace & newline being removed ); - await runWrangler( - "versions secret put NEW_SECRET --name script-name --x-versions" - ); + await runWrangler("versions secret put NEW_SECRET --name script-name"); expect(std.out).toMatchInlineSnapshot(` "🌀 Creating the secret for the Worker \\"script-name\\" @@ -131,7 +124,7 @@ describe("versions secret put", () => { ]); expect(metadata.keep_assets).toBeTruthy(); }); - await runWrangler("versions secret put NEW_SECRET --x-versions"); + await runWrangler("versions secret put NEW_SECRET"); expect(std.out).toMatchInlineSnapshot(` "🌀 Creating the secret for the Worker \\"script-name\\" @@ -167,7 +160,7 @@ describe("versions secret put", () => { ).toBe("Deploy a new secret"); }); await runWrangler( - "versions secret put NEW_SECRET --name script-name --message 'Deploy a new secret' --x-versions" + "versions secret put NEW_SECRET --name script-name --message 'Deploy a new secret'" ); expect(std.out).toMatchInlineSnapshot(` @@ -207,7 +200,7 @@ describe("versions secret put", () => { ).toBe("v1"); }); await runWrangler( - "versions secret put NEW_SECRET --name script-name --message 'Deploy a new secret' --tag v1 --x-versions" + "versions secret put NEW_SECRET --name script-name --message 'Deploy a new secret' --tag v1" ); expect(std.out).toMatchInlineSnapshot(` @@ -244,7 +237,7 @@ describe("versions secret put", () => { ).toBe("Deploy a new secret"); }); await runWrangler( - "versions secret put SECRET --name script-name --message 'Deploy a new secret' --x-versions" + "versions secret put SECRET --name script-name --message 'Deploy a new secret'" ); expect(std.out).toMatchInlineSnapshot(` @@ -320,7 +313,7 @@ describe("versions secret put", () => { ).toBe("Deploy a new secret"); }); await runWrangler( - "versions secret put SECRET --name script-name --message 'Deploy a new secret' --x-versions" + "versions secret put SECRET --name script-name --message 'Deploy a new secret'" ); expect(std.out).toMatchInlineSnapshot(` diff --git a/packages/wrangler/src/__tests__/versions/versions.deploy.test.ts b/packages/wrangler/src/__tests__/versions/versions.deploy.test.ts index 29f93f21c26c..1da5f4b14c16 100644 --- a/packages/wrangler/src/__tests__/versions/versions.deploy.test.ts +++ b/packages/wrangler/src/__tests__/versions/versions.deploy.test.ts @@ -89,7 +89,7 @@ describe("versions deploy", () => { describe("without wrangler.toml", () => { test("succeeds with --name arg", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 --name named-worker --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 --name named-worker --yes" ); await expect(result).resolves.toMatchInlineSnapshot(`undefined`); @@ -140,7 +140,7 @@ describe("versions deploy", () => { test("fails without --name arg", async () => { const result 
= runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 --yes" ); await expect(result).rejects.toMatchInlineSnapshot( @@ -153,9 +153,7 @@ describe("versions deploy", () => { beforeEach(() => writeWranglerConfig()); test("no args", async () => { - const result = runWrangler( - "versions deploy --yes --experimental-gradual-rollouts" - ); + const result = runWrangler("versions deploy --yes"); await expect(result).rejects.toMatchInlineSnapshot( `[Error: You must select at least 1 version to deploy.]` @@ -188,7 +186,7 @@ describe("versions deploy", () => { test("1 version @ (implicit) 100%", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 --yes" ); await expect(result).resolves.toBeUndefined(); @@ -235,7 +233,7 @@ describe("versions deploy", () => { test("1 version @ (explicit) 100%", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000@100% --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000@100% --yes" ); await expect(result).resolves.toBeUndefined(); @@ -282,7 +280,7 @@ describe("versions deploy", () => { test("2 versions @ (implicit) 50% each", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 20000000-0000-0000-0000-000000000000 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 20000000-0000-0000-0000-000000000000 --yes" ); await expect(result).resolves.toBeUndefined(); @@ -337,7 +335,7 @@ describe("versions deploy", () => { test("1 version @ (explicit) 100%", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000@100% --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000@100% --yes" ); await expect(result).resolves.toBeUndefined(); @@ -384,7 +382,7 @@ describe("versions deploy", () => { test("2 versions @ (explicit) 30% + (implicit) 70%", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000@30% 20000000-0000-0000-0000-000000000000 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000@30% 20000000-0000-0000-0000-000000000000 --yes" ); await expect(result).resolves.toBeUndefined(); @@ -439,7 +437,7 @@ describe("versions deploy", () => { test("2 versions @ (explicit) 40% + (explicit) 60%", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000@40% 20000000-0000-0000-0000-000000000000@60% --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000@40% 20000000-0000-0000-0000-000000000000@60% --yes" ); await expect(result).resolves.toBeUndefined(); @@ -495,7 +493,7 @@ describe("versions deploy", () => { describe("max versions restrictions (temp)", () => { test("2+ versions fails", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 20000000-0000-0000-0000-000000000000 30000000-0000-0000-0000-000000000000 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 20000000-0000-0000-0000-000000000000 30000000-0000-0000-0000-000000000000 --yes" ); await expect(result).rejects.toMatchInlineSnapshot( @@ -544,7 +542,7 @@ describe("versions deploy", 
() => { test("--max-versions allows > 2 versions", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 20000000-0000-0000-0000-000000000000 30000000-0000-0000-0000-000000000000 --max-versions=3 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 20000000-0000-0000-0000-000000000000 30000000-0000-0000-0000-000000000000 --max-versions=3 --yes" ); await expect(result).resolves.toBeUndefined(); @@ -610,7 +608,7 @@ describe("versions deploy", () => { test("with a message", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 --message 'My versioned deployment message' --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 --message 'My versioned deployment message' --yes" ); await expect(result).resolves.toBeUndefined(); @@ -662,7 +660,7 @@ describe("versions deploy", () => { }); const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 --yes" ); await expect(result).resolves.toBeUndefined(); @@ -720,7 +718,7 @@ describe("versions deploy", () => { }); const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 --yes" ); await expect(result).resolves.toBeUndefined(); @@ -785,7 +783,7 @@ describe("versions deploy", () => { }); const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 --yes" ); await expect(result).resolves.toBeUndefined(); @@ -840,7 +838,7 @@ describe("versions deploy", () => { test("fails for non-existent versionId", async () => { const result = runWrangler( - "versions deploy ffffffff-ffff-ffff-ffff-ffffffffffff --yes --experimental-gradual-rollouts" + "versions deploy ffffffff-ffff-ffff-ffff-ffffffffffff --yes" ); // TODO: could do with a better error message but this will suffice for now (this error isn't possible in the interactive flow) @@ -872,7 +870,7 @@ describe("versions deploy", () => { test("fails if --percentage > 100", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 --percentage 101 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 --percentage 101 --yes" ); await expect(result).rejects.toMatchInlineSnapshot( @@ -884,7 +882,7 @@ describe("versions deploy", () => { test("fails if --percentage < 0", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 --percentage -1 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 --percentage -1 --yes" ); await expect(result).rejects.toMatchInlineSnapshot( @@ -896,7 +894,7 @@ describe("versions deploy", () => { test("fails if version-spec percentage > 100", async () => { const result = runWrangler( - "versions deploy 10000000-0000-0000-0000-000000000000 --percentage 101 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 --percentage 101 --yes" ); await expect(result).rejects.toMatchInlineSnapshot( @@ -908,7 +906,7 @@ describe("versions deploy", () => { test("fails if version-spec percentage < 0", async () => { const result = runWrangler( - "versions deploy 
10000000-0000-0000-0000-000000000000 --percentage -1 --yes --experimental-gradual-rollouts" + "versions deploy 10000000-0000-0000-0000-000000000000 --percentage -1 --yes" ); await expect(result).rejects.toMatchInlineSnapshot( diff --git a/packages/wrangler/src/__tests__/versions/versions.help.test.ts b/packages/wrangler/src/__tests__/versions/versions.help.test.ts index 8734ad8ea633..e77a90c88c61 100644 --- a/packages/wrangler/src/__tests__/versions/versions.help.test.ts +++ b/packages/wrangler/src/__tests__/versions/versions.help.test.ts @@ -5,78 +5,6 @@ import { runWrangler } from "../helpers/run-wrangler"; describe("versions --help", () => { const std = mockConsoleMethods(); - test("shows generic help w/ --help flag and --no-experimental-versions flag", async () => { - const result = runWrangler("versions --help --no-experimental-versions"); - - await expect(result).resolves.toBeUndefined(); - - expect(std.out).toMatchInlineSnapshot(` - "wrangler - - COMMANDS - wrangler docs [search..] 📚 Open Wrangler's command documentation in your browser - - wrangler init [name] 📥 Initialize a basic Worker - wrangler dev [script] 👂 Start a local server for developing your Worker - wrangler deploy [script] 🆙 Deploy a Worker to Cloudflare [aliases: publish] - wrangler deployments 🚢 List and view the current and past deployments for your Worker - wrangler rollback [deployment-id] 🔙 Rollback a deployment for a Worker - wrangler delete [script] 🗑 Delete a Worker from Cloudflare - wrangler tail [worker] 🦚 Start a log tailing session for a Worker - wrangler secret 🤫 Generate a secret that can be referenced in a Worker - wrangler types [path] 📝 Generate types from bindings and module rules in configuration - - wrangler kv 🗂️ Manage Workers KV Namespaces - wrangler queues 🇶 Manage Workers Queues - wrangler r2 📦 Manage R2 buckets & objects - wrangler d1 🗄 Manage Workers D1 databases - wrangler vectorize 🧮 Manage Vectorize indexes [open beta] - wrangler hyperdrive 🚀 Manage Hyperdrive databases - wrangler pages ⚡️ Configure Cloudflare Pages - wrangler mtls-certificate 🪪 Manage certificates used for mTLS connections - wrangler pubsub 📮 Manage Pub/Sub brokers [private beta] - wrangler dispatch-namespace 🏗️ Manage dispatch namespaces - wrangler ai 🤖 Manage AI models - wrangler workflows 🔁 Manage Workflows [open-beta] - wrangler login 🔓 Login to Cloudflare - wrangler logout 🚪 Logout from Cloudflare - wrangler whoami 🕵️ Retrieve your user information - - GLOBAL FLAGS - -c, --config Path to Wrangler configuration file [string] - -e, --env Environment to use for operations and .env files [string] - -h, --help Show help [boolean] - -v, --version Show version number [boolean] - - Please report any issues to https://github.com/cloudflare/workers-sdk/issues/new/choose" - `); - }); - - test("shows versions help w/ --help and --experimental-versions flag", async () => { - const result = runWrangler("versions --help --experimental-versions"); - - await expect(result).resolves.toBeUndefined(); - - expect(std.out).toMatchInlineSnapshot(` - "wrangler versions - - 🫧 List, view, upload and deploy Versions of your Worker to Cloudflare - - COMMANDS - wrangler versions view View the details of a specific version of your Worker - wrangler versions list List the 10 most recent Versions of your Worker - wrangler versions upload Uploads your Worker code and config as a new Version - wrangler versions deploy [version-specs..] 
Safely roll out new Versions of your Worker by splitting traffic between multiple Versions - wrangler versions secret Generate a secret that can be referenced in a Worker - - GLOBAL FLAGS - -c, --config Path to Wrangler configuration file [string] - -e, --env Environment to use for operations and .env files [string] - -h, --help Show help [boolean] - -v, --version Show version number [boolean]" - `); - }); - test("shows versions help w/ --help", async () => { const result = runWrangler("versions --help"); @@ -106,14 +34,6 @@ describe("versions --help", () => { describe("versions subhelp", () => { const std = mockConsoleMethods(); - test("fails with --no-experimental-versions flag", async () => { - const result = runWrangler("versions --no-experimental-versions"); - - await expect(result).rejects.toMatchInlineSnapshot( - `[Error: Unknown argument: versions]` - ); - }); - test("shows implicit subhelp", async () => { const result = runWrangler("versions"); @@ -139,82 +59,4 @@ describe("versions subhelp", () => { -v, --version Show version number [boolean]" `); }); - - test("shows implicit subhelp with --experimental-versions flag", async () => { - const result = runWrangler("versions --experimental-versions"); - - await expect(result).resolves.toBeUndefined(); - await setImmediate(); // wait for subhelp - - expect(std.out).toMatchInlineSnapshot(` - "wrangler versions - - 🫧 List, view, upload and deploy Versions of your Worker to Cloudflare - - COMMANDS - wrangler versions view View the details of a specific version of your Worker - wrangler versions list List the 10 most recent Versions of your Worker - wrangler versions upload Uploads your Worker code and config as a new Version - wrangler versions deploy [version-specs..] Safely roll out new Versions of your Worker by splitting traffic between multiple Versions - wrangler versions secret Generate a secret that can be referenced in a Worker - - GLOBAL FLAGS - -c, --config Path to Wrangler configuration file [string] - -e, --env Environment to use for operations and .env files [string] - -h, --help Show help [boolean] - -v, --version Show version number [boolean]" - `); - }); - - test("shows implicit subhelp with --x-versions flag", async () => { - const result = runWrangler("versions --x-versions"); - - await expect(result).resolves.toBeUndefined(); - await setImmediate(); // wait for subhelp - - expect(std.out).toMatchInlineSnapshot(` - "wrangler versions - - 🫧 List, view, upload and deploy Versions of your Worker to Cloudflare - - COMMANDS - wrangler versions view View the details of a specific version of your Worker - wrangler versions list List the 10 most recent Versions of your Worker - wrangler versions upload Uploads your Worker code and config as a new Version - wrangler versions deploy [version-specs..] 
Safely roll out new Versions of your Worker by splitting traffic between multiple Versions - wrangler versions secret Generate a secret that can be referenced in a Worker - - GLOBAL FLAGS - -c, --config Path to Wrangler configuration file [string] - -e, --env Environment to use for operations and .env files [string] - -h, --help Show help [boolean] - -v, --version Show version number [boolean]" - `); - }); - - test("shows implicit subhelp with --experimental-gradual-rollouts flag", async () => { - const result = runWrangler("versions --experimental-gradual-rollouts"); - - await expect(result).resolves.toBeUndefined(); - await setImmediate(); // wait for subhelp - - expect(std.out).toMatchInlineSnapshot(` - "wrangler versions - - 🫧 List, view, upload and deploy Versions of your Worker to Cloudflare - - COMMANDS - wrangler versions view View the details of a specific version of your Worker - wrangler versions list List the 10 most recent Versions of your Worker - wrangler versions upload Uploads your Worker code and config as a new Version - wrangler versions deploy [version-specs..] Safely roll out new Versions of your Worker by splitting traffic between multiple Versions - wrangler versions secret Generate a secret that can be referenced in a Worker - - GLOBAL FLAGS - -c, --config Path to Wrangler configuration file [string] - -e, --env Environment to use for operations and .env files [string] - -h, --help Show help [boolean] - -v, --version Show version number [boolean]" - `); - }); }); diff --git a/packages/wrangler/src/__tests__/versions/versions.list.test.ts b/packages/wrangler/src/__tests__/versions/versions.list.test.ts index e10009ed5107..056d966156cb 100644 --- a/packages/wrangler/src/__tests__/versions/versions.list.test.ts +++ b/packages/wrangler/src/__tests__/versions/versions.list.test.ts @@ -24,9 +24,7 @@ describe("versions list", () => { describe("without wrangler.toml", () => { test("fails with no args", async () => { - const result = runWrangler( - "versions list --json --experimental-versions" - ); + const result = runWrangler("versions list --json"); await expect(result).rejects.toMatchInlineSnapshot( `[Error: You need to provide a name of your worker. 
Either pass it as a cli arg with \`--name \` or in your config file as \`name = ""\`]` @@ -38,9 +36,7 @@ describe("versions list", () => { }); test("prints versions to stdout", async () => { - const result = runWrangler( - "versions list --name test-name --experimental-versions" - ); + const result = runWrangler("versions list --name test-name"); await expect(result).resolves.toBeUndefined(); @@ -82,9 +78,7 @@ describe("versions list", () => { }); test("prints versions to stdout as --json", async () => { - const result = runWrangler( - "versions list --name test-name --json --experimental-versions" - ); + const result = runWrangler("versions list --name test-name --json"); await expect(result).resolves.toBeUndefined(); @@ -164,7 +158,7 @@ describe("versions list", () => { beforeEach(() => writeWranglerConfig()); test("prints versions to stdout", async () => { - const result = runWrangler("versions list --experimental-versions"); + const result = runWrangler("versions list"); await expect(result).resolves.toBeUndefined(); @@ -204,9 +198,7 @@ describe("versions list", () => { }); test("prints versions to as --json", async () => { - const result = runWrangler( - "versions list --json --experimental-versions" - ); + const result = runWrangler("versions list --json"); await expect(result).resolves.toBeUndefined(); diff --git a/packages/wrangler/src/__tests__/versions/versions.upload.test.ts b/packages/wrangler/src/__tests__/versions/versions.upload.test.ts index 8a2e46fd12c9..1cdeefa362ac 100644 --- a/packages/wrangler/src/__tests__/versions/versions.upload.test.ts +++ b/packages/wrangler/src/__tests__/versions/versions.upload.test.ts @@ -86,7 +86,7 @@ describe("versions upload", () => { writeWorkerSource(); setIsTTY(false); - const result = runWrangler("versions upload --x-versions"); + const result = runWrangler("versions upload"); await expect(result).resolves.toBeUndefined(); @@ -124,7 +124,7 @@ describe("versions upload", () => { writeWorkerSource(); setIsTTY(false); - const result = runWrangler("versions upload --x-versions"); + const result = runWrangler("versions upload"); await expect(result).resolves.toBeUndefined(); @@ -156,7 +156,7 @@ describe("versions upload", () => { writeWorkerSource(); setIsTTY(false); - const result = runWrangler("versions upload --x-versions"); + const result = runWrangler("versions upload"); await expect(result).resolves.toBeUndefined(); diff --git a/packages/wrangler/src/__tests__/versions/versions.view.test.ts b/packages/wrangler/src/__tests__/versions/versions.view.test.ts index 9213e9672de8..145145185dbf 100644 --- a/packages/wrangler/src/__tests__/versions/versions.view.test.ts +++ b/packages/wrangler/src/__tests__/versions/versions.view.test.ts @@ -20,9 +20,7 @@ describe("versions view", () => { beforeEach(() => msw.use(mswGetVersion())); test("fails with no args", async () => { - const result = runWrangler( - "versions view --experimental-gradual-rollouts" - ); + const result = runWrangler("versions view"); await expect(result).rejects.toMatchInlineSnapshot( `[Error: Not enough non-option arguments: got 0, need at least 1]` @@ -34,9 +32,7 @@ describe("versions view", () => { }); test("fails with --name arg only", async () => { - const result = runWrangler( - "versions view --name test-name --experimental-gradual-rollouts" - ); + const result = runWrangler("versions view --name test-name"); await expect(result).rejects.toMatchInlineSnapshot( `[Error: Not enough non-option arguments: got 0, need at least 1]` @@ -49,7 +45,7 @@ describe("versions view", 
() => { test("fails with positional version-id arg only", async () => { const result = runWrangler( - "versions view 10000000-0000-0000-0000-000000000000 --experimental-gradual-rollouts" + "versions view 10000000-0000-0000-0000-000000000000" ); await expect(result).rejects.toMatchInlineSnapshot( @@ -63,7 +59,7 @@ describe("versions view", () => { test("succeeds with positional version-id arg and --name arg", async () => { const result = runWrangler( - "versions view 10000000-0000-0000-0000-000000000000 --name test-name --experimental-gradual-rollouts" + "versions view 10000000-0000-0000-0000-000000000000 --name test-name" ); await expect(result).resolves.toBeUndefined(); @@ -98,7 +94,7 @@ describe("versions view", () => { test("prints version to stdout as --json", async () => { const result = runWrangler( - "versions view 10000000-0000-0000-0000-000000000000 --name test-name --json --experimental-versions" + "versions view 10000000-0000-0000-0000-000000000000 --name test-name --json" ); await expect(result).resolves.toBeUndefined(); @@ -165,9 +161,7 @@ describe("versions view", () => { }); test("fails with no args", async () => { - const result = runWrangler( - "versions view --experimental-gradual-rollouts" - ); + const result = runWrangler("versions view"); await expect(result).rejects.toMatchInlineSnapshot( `[Error: Not enough non-option arguments: got 0, need at least 1]` @@ -180,7 +174,7 @@ describe("versions view", () => { test("succeeds with positional version-id arg only", async () => { const result = runWrangler( - "versions view 10000000-0000-0000-0000-000000000000 --experimental-gradual-rollouts" + "versions view 10000000-0000-0000-0000-000000000000" ); await expect(result).resolves.toBeUndefined(); @@ -213,7 +207,7 @@ describe("versions view", () => { test("fails with non-existent version-id", async () => { const result = runWrangler( - "versions view ffffffff-ffff-ffff-ffff-ffffffffffff --experimental-gradual-rollouts" + "versions view ffffffff-ffff-ffff-ffff-ffffffffffff" ); await expect(result).rejects.toMatchInlineSnapshot( @@ -227,7 +221,7 @@ describe("versions view", () => { test("prints version to stdout as --json", async () => { const result = runWrangler( - "versions view 10000000-0000-0000-0000-000000000000 --json --experimental-versions" + "versions view 10000000-0000-0000-0000-000000000000 --json" ); await expect(result).resolves.toBeUndefined(); @@ -314,7 +308,7 @@ describe("versions view", () => { ); const result = runWrangler( - "versions view 10000000-0000-0000-0000-000000000000 --name test --experimental-versions" + "versions view 10000000-0000-0000-0000-000000000000 --name test" ); await expect(result).resolves.toBeUndefined(); @@ -361,7 +355,7 @@ describe("versions view", () => { ); const result = runWrangler( - "versions view 10000000-0000-0000-0000-000000000000 --name test --experimental-versions" + "versions view 10000000-0000-0000-0000-000000000000 --name test" ); await expect(result).resolves.toBeUndefined(); @@ -410,7 +404,7 @@ describe("versions view", () => { ); const result = runWrangler( - "versions view 10000000-0000-0000-0000-000000000000 --name test --experimental-versions" + "versions view 10000000-0000-0000-0000-000000000000 --name test" ); await expect(result).resolves.toBeUndefined(); @@ -463,7 +457,7 @@ describe("versions view", () => { ); const result = runWrangler( - "versions view 10000000-0000-0000-0000-000000000000 --name test --experimental-versions" + "versions view 10000000-0000-0000-0000-000000000000 --name test" ); await 
expect(result).resolves.toBeUndefined(); @@ -519,7 +513,7 @@ describe("versions view", () => { ); const result = runWrangler( - "versions view 10000000-0000-0000-0000-000000000000 --name test --experimental-versions" + "versions view 10000000-0000-0000-0000-000000000000 --name test" ); await expect(result).resolves.toBeUndefined(); @@ -651,7 +645,7 @@ describe("versions view", () => { ); const result = runWrangler( - "versions view 10000000-0000-0000-0000-000000000000 --name test --experimental-versions" + "versions view 10000000-0000-0000-0000-000000000000 --name test" ); await expect(result).resolves.toBeUndefined(); diff --git a/packages/wrangler/src/api/dev.ts b/packages/wrangler/src/api/dev.ts index 96ab85891087..78d658a0fb98 100644 --- a/packages/wrangler/src/api/dev.ts +++ b/packages/wrangler/src/api/dev.ts @@ -218,7 +218,6 @@ export async function unstable_dev( logLevel: options?.logLevel ?? defaultLogLevel, port: options?.port ?? 0, experimentalProvision: undefined, - experimentalVersions: undefined, experimentalDevEnv: undefined, experimentalRegistry: fileBasedRegistry, experimentalVectorizeBindToProd: vectorizeBindToProd ?? false, diff --git a/packages/wrangler/src/api/pages/deploy.ts b/packages/wrangler/src/api/pages/deploy.ts index 0ed97aa4ba7d..ed9a88461bef 100644 --- a/packages/wrangler/src/api/pages/deploy.ts +++ b/packages/wrangler/src/api/pages/deploy.ts @@ -160,7 +160,10 @@ export async function deploy({ let config: Config | undefined; try { - config = readPagesConfig({ ...args, env }); + config = readPagesConfig( + { ...args, env }, + { useRedirectIfAvailable: true } + ); } catch (err) { if ( !( diff --git a/packages/wrangler/src/api/startDevWorker/ConfigController.ts b/packages/wrangler/src/api/startDevWorker/ConfigController.ts index 2b085db269e2..23349099b0e0 100644 --- a/packages/wrangler/src/api/startDevWorker/ConfigController.ts +++ b/packages/wrangler/src/api/startDevWorker/ConfigController.ts @@ -67,7 +67,7 @@ async function resolveDevConfig( const localPersistencePath = getLocalPersistencePath( input.dev?.persist, - config.configPath + config ); const { host, routes } = await getHostAndRoutes( @@ -315,6 +315,15 @@ async function resolveConfig( ); } + if ( + extractBindingsOfType("browser", resolved.bindings).length && + !resolved.dev.remote + ) { + throw new UserError( + "Browser Rendering is not supported locally. Please use `wrangler dev --remote` instead." + ); + } + validateAssetsArgsAndConfig(resolved); const services = extractBindingsOfType("service", resolved.bindings); @@ -409,25 +418,29 @@ export class ConfigController extends Controller { const signal = this.#abortController.signal; this.latestInput = input; try { - const fileConfig = readConfig({ - config: input.config, - env: input.env, - "dispatch-namespace": undefined, - "legacy-env": !input.legacy?.enableServiceEnvironments, - remote: input.dev?.remote, - upstreamProtocol: - input.dev?.origin?.secure === undefined - ? undefined - : input.dev?.origin?.secure - ? "https" - : "http", - localProtocol: - input.dev?.server?.secure === undefined - ? undefined - : input.dev?.server?.secure - ? "https" - : "http", - }); + const fileConfig = readConfig( + { + script: input.entrypoint, + config: input.config, + env: input.env, + "dispatch-namespace": undefined, + "legacy-env": !input.legacy?.enableServiceEnvironments, + remote: input.dev?.remote, + upstreamProtocol: + input.dev?.origin?.secure === undefined + ? undefined + : input.dev?.origin?.secure + ? 
"https" + : "http", + localProtocol: + input.dev?.server?.secure === undefined + ? undefined + : input.dev?.server?.secure + ? "https" + : "http", + }, + { useRedirectIfAvailable: true } + ); if (typeof vitest === "undefined") { void this.#ensureWatchingConfig(fileConfig.configPath); diff --git a/packages/wrangler/src/config/config-helpers.ts b/packages/wrangler/src/config/config-helpers.ts index a02b6a47bf26..11201a86116a 100644 --- a/packages/wrangler/src/config/config-helpers.ts +++ b/packages/wrangler/src/config/config-helpers.ts @@ -1,35 +1,139 @@ -import path from "path"; +import fs from "node:fs"; +import path from "node:path"; import { findUpSync } from "find-up"; +import dedent from "ts-dedent"; +import { UserError } from "../errors"; +import { logger } from "../logger"; +import { formatMessage, ParseError, parseJSONC, readFileSync } from "../parse"; + +export type ResolveConfigPathOptions = { + useRedirectIfAvailable?: boolean; +}; + +export type ConfigPaths = { + /** Absolute path to the actual configuration being used (possibly redirected from the user's config). */ + configPath: string | undefined; + /** Absolute path to the user's configuration, which may not be the same as `configPath` if it was redirected. */ + userConfigPath: string | undefined; +}; /** * Resolve the path to the configuration file, given the `config` and `script` optional command line arguments. * `config` takes precedence, then `script`, then we just use the cwd. + * + * Returns an object with two paths: `configPath` and `userConfigPath`. If defined these are absolute file paths. */ -export function resolveWranglerConfigPath({ - config, - script, -}: { - config?: string; - script?: string; -}): string | undefined { +export function resolveWranglerConfigPath( + { + config, + script, + }: { + config?: string; + script?: string; + }, + options: { useRedirectIfAvailable?: boolean } +): ConfigPaths { if (config !== undefined) { - return config; + return { userConfigPath: config, configPath: config }; } const leafPath = script !== undefined ? path.dirname(script) : process.cwd(); - return findWranglerConfig(leafPath); + + return findWranglerConfig(leafPath, options); } /** - * Find the wrangler config file by searching up the file-system + * Find the wrangler configuration file by searching up the file-system * from the current working directory. */ export function findWranglerConfig( - referencePath: string = process.cwd() -): string | undefined { - return ( + referencePath: string = process.cwd(), + { useRedirectIfAvailable = false } = {} +): ConfigPaths { + const userConfigPath = findUpSync(`wrangler.json`, { cwd: referencePath }) ?? findUpSync(`wrangler.jsonc`, { cwd: referencePath }) ?? - findUpSync(`wrangler.toml`, { cwd: referencePath }) - ); + findUpSync(`wrangler.toml`, { cwd: referencePath }); + + return { + userConfigPath, + configPath: useRedirectIfAvailable + ? findRedirectedWranglerConfig(referencePath, userConfigPath) + : userConfigPath, + }; +} + +/** + * Check whether there is a configuration file that indicates that we should redirect the user configuration. 
+ * @param cwd + * @param userConfigPath + * @returns + */ +function findRedirectedWranglerConfig( + cwd: string, + userConfigPath: string | undefined +) { + const PATH_TO_DEPLOY_CONFIG = ".wrangler/deploy/config.json"; + const deployConfigPath = findUpSync(PATH_TO_DEPLOY_CONFIG, { cwd }); + if (deployConfigPath === undefined) { + return userConfigPath; + } + + let redirectedConfigPath: string | undefined; + const deployConfigFile = readFileSync(deployConfigPath); + try { + const deployConfig: { configPath?: string } = parseJSONC( + deployConfigFile, + deployConfigPath + ); + redirectedConfigPath = + deployConfig.configPath && + path.resolve(path.dirname(deployConfigPath), deployConfig.configPath); + } catch (e) { + throw new UserError( + dedent` + Failed to parse the deploy configuration file at ${path.relative(".", deployConfigPath)} + ${e instanceof ParseError ? formatMessage(e) : e} + ` + ); + } + if (!redirectedConfigPath) { + throw new UserError(dedent` + A deploy configuration file was found at "${path.relative(".", deployConfigPath)}". + But this is not valid - the required "configPath" property was not found. + Instead this file contains: + \`\`\` + ${deployConfigFile} + \`\`\` + `); + } + + if (redirectedConfigPath) { + if (!fs.existsSync(redirectedConfigPath)) { + throw new UserError(dedent` + There is a deploy configuration at "${path.relative(".", deployConfigPath)}". + But the redirected configuration path it points to, "${path.relative(".", redirectedConfigPath)}", does not exist. + `); + } + if (userConfigPath) { + if ( + path.join(path.dirname(userConfigPath), PATH_TO_DEPLOY_CONFIG) !== + deployConfigPath + ) { + throw new UserError(dedent` + Found both a user configuration file at "${path.relative(".", userConfigPath)}" + and a deploy configuration file at "${path.relative(".", deployConfigPath)}". + But these do not share the same base path so it is not clear which should be used. + `); + } + } + + logger.warn(dedent` + Using redirected Wrangler configuration. + Configuration being used: "${path.relative(".", redirectedConfigPath)}" + Original user's configuration: "${userConfigPath ? path.relative(".", userConfigPath) : ""}" + Deploy configuration file: "${path.relative(".", deployConfigPath)}" + `); + return redirectedConfigPath; + } } diff --git a/packages/wrangler/src/config/config.ts b/packages/wrangler/src/config/config.ts index 92f0285d6d35..76212c6ff8cb 100644 --- a/packages/wrangler/src/config/config.ts +++ b/packages/wrangler/src/config/config.ts @@ -22,7 +22,10 @@ import type { CamelCaseKey } from "yargs"; * - `@breaking`: the deprecation/optionality is a breaking change from Wrangler v1. * - `@todo`: there's more work to be done (with details attached). */ -export type Config = ConfigFields & PagesConfigFields & Environment; +export type Config = ComputedFields & + ConfigFields & + PagesConfigFields & + Environment; export type RawConfig = Partial> & PagesConfigFields & @@ -30,9 +33,21 @@ export type RawConfig = Partial> & DeprecatedConfigFields & EnvironmentMap & { $schema?: string }; -export interface ConfigFields { +export interface ComputedFields { + /** The path to the Wrangler configuration file (if any, and possibly redirected from the user Wrangler configuration) used to create this configuration. */ configPath: string | undefined; + /** The path to the user's Wrangler configuration file (if any), which may have been redirected to another file that used to create this configuration. 
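To make the redirect mechanism above concrete, here is a hedged sketch of the deploy configuration file it looks for; the project layout, paths, and the idea of a build tool emitting the file are illustrative assumptions, not part of this change.

```ts
import fs from "node:fs";
import path from "node:path";

// Hypothetical layout:
//   /app/wrangler.toml                  <- the user's configuration (userConfigPath)
//   /app/.wrangler/deploy/config.json   <- the redirect file, next to the user's config
//   /app/dist/wrangler.json             <- a generated configuration (configPath)
const projectDir = "/app";
const deployConfigPath = path.join(projectDir, ".wrangler", "deploy", "config.json");

fs.mkdirSync(path.dirname(deployConfigPath), { recursive: true });
fs.writeFileSync(
	deployConfigPath,
	// "configPath" is required and is resolved relative to the directory
	// containing this redirect file.
	JSON.stringify({ configPath: "../../dist/wrangler.json" })
);

// Commands that opt in with `useRedirectIfAvailable: true` will now load
// /app/dist/wrangler.json as `configPath`, keep /app/wrangler.toml as
// `userConfigPath`, and log a warning describing the redirection.
```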
*/ + userConfigPath: string | undefined; + /** + * The original top level name for the Worker in the raw configuration. + * + * When a raw configuration has been flattened to a single environment the worker name may have been replaced or transformed. + * It can be useful to know what the top-level name was before the flattening. + */ + topLevelName: string | undefined; +} +export interface ConfigFields { /** * A boolean to enable "legacy" style wrangler environments (from Wrangler v1). * These have been superseded by Services, but there may be projects that won't @@ -282,6 +297,11 @@ export type OnlyCamelCase> = { }; export const defaultWranglerConfig: Config = { + /* COMPUTED_FIELDS */ + configPath: undefined, + userConfigPath: undefined, + topLevelName: undefined, + /*====================================================*/ /* Fields supported by both Workers & Pages */ /*====================================================*/ @@ -326,7 +346,6 @@ export const defaultWranglerConfig: Config = { /* Fields supported by Workers only */ /*====================================================*/ /* TOP-LEVEL ONLY FIELDS */ - configPath: undefined, legacy_env: true, site: undefined, legacy_assets: undefined, diff --git a/packages/wrangler/src/config/index.ts b/packages/wrangler/src/config/index.ts index b1b73f933ad6..cc02893e564b 100644 --- a/packages/wrangler/src/config/index.ts +++ b/packages/wrangler/src/config/index.ts @@ -1,4 +1,5 @@ import fs from "node:fs"; +import path from "node:path"; import TOML from "@iarna/toml"; import dotenv from "dotenv"; import { FatalError, UserError } from "../errors"; @@ -10,6 +11,7 @@ import { isPagesConfig, normalizeAndValidateConfig } from "./validation"; import { validatePagesConfig } from "./validation-pages"; import type { CommonYargsOptions } from "../yargs-types"; import type { Config, OnlyCamelCase, RawConfig } from "./config"; +import type { ResolveConfigPathOptions } from "./config-helpers"; import type { NormalizeAndValidateConfigArgs } from "./validation"; export type { @@ -62,31 +64,35 @@ export function formatConfigSnippet( } } -type ReadConfigCommandArgs = NormalizeAndValidateConfigArgs & { +export type ReadConfigCommandArgs = NormalizeAndValidateConfigArgs & { config?: string; script?: string; }; +export type ReadConfigOptions = ResolveConfigPathOptions & { + hideWarnings?: boolean; +}; + /** * Get the Wrangler configuration; read it from the give `configPath` if available. 
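A short, hedged sketch of how a caller inside Wrangler might use the widened `readConfig` signature together with the new computed fields; the argument object is abbreviated for illustration and the relative import path is an assumption.

```ts
import { readConfig } from "./config";

// Abbreviated args: real callers pass the full set of parsed yargs arguments.
const config = readConfig(
	{ script: "./src/index.ts", env: "staging" },
	{ useRedirectIfAvailable: true, hideWarnings: false }
);

// Fields now computed on every Config:
console.log(config.configPath); // possibly a redirected (generated) configuration file
console.log(config.userConfigPath); // the user's own configuration file, if one was found
console.log(config.topLevelName); // the Worker name before environment flattening
```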
*/ export function readConfig( args: ReadConfigCommandArgs, - options?: { hideWarnings?: boolean } -): Config; -export function readConfig( - args: ReadConfigCommandArgs, - { hideWarnings = false }: { hideWarnings?: boolean } = {} + options: ReadConfigOptions = {} ): Config { - const { rawConfig, configPath } = experimental_readRawConfig(args); + const { rawConfig, configPath, userConfigPath } = experimental_readRawConfig( + args, + options + ); const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, configPath, + userConfigPath, args ); - if (diagnostics.hasWarnings() && !hideWarnings) { + if (diagnostics.hasWarnings() && !options?.hideWarnings) { logger.warn(diagnostics.renderWarnings()); } if (diagnostics.hasErrors()) { @@ -98,23 +104,27 @@ export function readConfig( export function readPagesConfig( args: ReadConfigCommandArgs, - { hideWarnings = false }: { hideWarnings?: boolean } = {} + options: ReadConfigOptions = {} ): Omit & { pages_build_output_dir: string } { let rawConfig: RawConfig; let configPath: string | undefined; + let userConfigPath: string | undefined; try { - ({ rawConfig, configPath } = experimental_readRawConfig(args)); + ({ rawConfig, configPath, userConfigPath } = experimental_readRawConfig( + args, + options + )); } catch (e) { logger.error(e); throw new FatalError( - `Your ${configFileName(configPath)} file is not a valid Pages config file`, + `Your ${configFileName(configPath)} file is not a valid Pages configuration file`, EXIT_CODE_INVALID_PAGES_CONFIG ); } if (!isPagesConfig(rawConfig)) { throw new FatalError( - `Your ${configFileName(configPath)} file is not a valid Pages config file`, + `Your ${configFileName(configPath)} file is not a valid Pages configuration file`, EXIT_CODE_INVALID_PAGES_CONFIG ); } @@ -122,10 +132,11 @@ export function readPagesConfig( const { config, diagnostics } = normalizeAndValidateConfig( rawConfig, configPath, + userConfigPath, args ); - if (diagnostics.hasWarnings() && !hideWarnings) { + if (diagnostics.hasWarnings() && !options.hideWarnings) { logger.warn(diagnostics.renderWarnings()); } if (diagnostics.hasErrors()) { @@ -153,17 +164,25 @@ export function readPagesConfig( } export const experimental_readRawConfig = ( - args: ReadConfigCommandArgs -): { rawConfig: RawConfig; configPath: string | undefined } => { + args: ReadConfigCommandArgs, + options: ReadConfigOptions = {} +): { + rawConfig: RawConfig; + configPath: string | undefined; + userConfigPath: string | undefined; +} => { // Load the configuration from disk if available - const configPath = resolveWranglerConfigPath(args); + const { configPath, userConfigPath } = resolveWranglerConfigPath( + args, + options + ); let rawConfig: RawConfig = {}; if (configPath?.endsWith("toml")) { rawConfig = parseTOML(readFileSync(configPath), configPath); } else if (configPath?.endsWith("json") || configPath?.endsWith("jsonc")) { rawConfig = parseJSONC(readFileSync(configPath), configPath); } - return { rawConfig, configPath }; + return { rawConfig, configPath, userConfigPath }; }; export function withConfig( @@ -182,29 +201,32 @@ export interface DotEnv { parsed: dotenv.DotenvParseOutput; } -function tryLoadDotEnv(path: string): DotEnv | undefined { +function tryLoadDotEnv(basePath: string): DotEnv | undefined { try { - const parsed = dotenv.parse(fs.readFileSync(path)); - return { path, parsed }; + const parsed = dotenv.parse(fs.readFileSync(basePath)); + return { path: basePath, parsed }; } catch (e) { if ((e as { code: string }).code === "ENOENT") { logger.debug( - 
`.env file not found at "${path}". Continuing... For more details, refer to https://developers.cloudflare.com/workers/wrangler/system-environment-variables/` + `.env file not found at "${path.relative(".", basePath)}". Continuing... For more details, refer to https://developers.cloudflare.com/workers/wrangler/system-environment-variables/` ); } else { - logger.debug(`Failed to load .env file "${path}":`, e); + logger.debug( + `Failed to load .env file "${path.relative(".", basePath)}":`, + e + ); } } } /** - * Loads a dotenv file from , preferring to read . if - * is defined and that file exists. + * Loads a dotenv file from `envPath`, preferring to read `${envPath}.${env}` if + * `env` is defined and that file exists. */ -export function loadDotEnv(path: string, env?: string): DotEnv | undefined { +export function loadDotEnv(envPath: string, env?: string): DotEnv | undefined { if (env === undefined) { - return tryLoadDotEnv(path); + return tryLoadDotEnv(envPath); } else { - return tryLoadDotEnv(`${path}.${env}`) ?? tryLoadDotEnv(path); + return tryLoadDotEnv(`${envPath}.${env}`) ?? tryLoadDotEnv(envPath); } } diff --git a/packages/wrangler/src/config/validation-pages.ts b/packages/wrangler/src/config/validation-pages.ts index a948b7189695..e772ed799b7f 100644 --- a/packages/wrangler/src/config/validation-pages.ts +++ b/packages/wrangler/src/config/validation-pages.ts @@ -36,9 +36,11 @@ const supportedPagesConfigFields = [ "dev", "mtls_certificates", "browser", - // normalizeAndValidateConfig() sets this value - "configPath", "upload_source_maps", + // normalizeAndValidateConfig() sets these values + "configPath", + "userConfigPath", + "topLevelName", ] as const; export function validatePagesConfig( diff --git a/packages/wrangler/src/config/validation.ts b/packages/wrangler/src/config/validation.ts index a36826ed1cc1..581fbb4d2de0 100644 --- a/packages/wrangler/src/config/validation.ts +++ b/packages/wrangler/src/config/validation.ts @@ -61,6 +61,7 @@ export type NormalizeAndValidateConfigArgs = { remote?: boolean; localProtocol?: string; upstreamProtocol?: string; + script?: string; }; const ENGLISH = new Intl.ListFormat("en-US"); @@ -80,6 +81,7 @@ export function isPagesConfig(rawConfig: RawConfig): boolean { export function normalizeAndValidateConfig( rawConfig: RawConfig, configPath: string | undefined, + userConfigPath: string | undefined, args: NormalizeAndValidateConfigArgs ): { config: Config; @@ -266,6 +268,8 @@ export function normalizeAndValidateConfig( // Process the top-level default environment configuration. const config: Config = { configPath, + userConfigPath, + topLevelName: rawConfig.name, pages_build_output_dir: normalizeAndValidatePagesBuildOutputDir( configPath, rawConfig.pages_build_output_dir @@ -328,7 +332,7 @@ function applyPythonConfig( config: Config, args: NormalizeAndValidateConfigArgs ) { - const mainModule = "script" in args ? args.script : config.main; + const mainModule = args.script ?? 
config.main; if (typeof mainModule === "string" && mainModule.endsWith(".py")) { // Workers with a python entrypoint should have bundling turned off, since all of Wrangler's bundling is JS/TS specific config.no_bundle = true; diff --git a/packages/wrangler/src/core/register-yargs-command.ts b/packages/wrangler/src/core/register-yargs-command.ts index 86c0fb9f10fc..8fca4a91b88b 100644 --- a/packages/wrangler/src/core/register-yargs-command.ts +++ b/packages/wrangler/src/core/register-yargs-command.ts @@ -104,6 +104,8 @@ function createHandler(def: CommandDefinition) { def.behaviour?.provideConfig ?? true ? readConfig(args, { hideWarnings: !(def.behaviour?.printConfigWarnings ?? true), + useRedirectIfAvailable: + def.behaviour?.useConfigRedirectIfAvailable, }) : defaultWranglerConfig, errors: { UserError, FatalError }, diff --git a/packages/wrangler/src/core/types.ts b/packages/wrangler/src/core/types.ts index 1edb9784b733..ff0fc6fce83f 100644 --- a/packages/wrangler/src/core/types.ts +++ b/packages/wrangler/src/core/types.ts @@ -116,6 +116,11 @@ export type CommandDefinition< overrideExperimentalFlags?: ( args: HandlerArgs ) => ExperimentalFlags; + + /** + * If true, then look for a redirect file at `.wrangler/deploy/config.json` and use that to find the Wrangler configuration file. + */ + useConfigRedirectIfAvailable?: boolean; }; /** diff --git a/packages/wrangler/src/d1/execute.ts b/packages/wrangler/src/d1/execute.ts index 60a673843ef3..fdc18ee1d006 100644 --- a/packages/wrangler/src/d1/execute.ts +++ b/packages/wrangler/src/d1/execute.ts @@ -19,7 +19,7 @@ import { requireAuth } from "../user"; import * as options from "./options"; import splitSqlQuery from "./splitter"; import { getDatabaseByNameOrBinding, getDatabaseInfoFromConfig } from "./utils"; -import type { Config, ConfigFields, DevConfig, Environment } from "../config"; +import type { Config } from "../config"; import type { CommonYargsArgv, StrictYargsOptionsToInterface, @@ -197,7 +197,7 @@ export async function executeSql({ }: { local: boolean | undefined; remote: boolean | undefined; - config: ConfigFields & Environment; + config: Config; name: string; shouldPrompt: boolean | undefined; persistTo: string | undefined; @@ -276,7 +276,7 @@ async function executeLocally({ } const id = localDB.previewDatabaseUuid ?? 
localDB.uuid; - const persistencePath = getLocalPersistencePath(persistTo, config.configPath); + const persistencePath = getLocalPersistencePath(persistTo, config); const d1Persist = path.join(persistencePath, "v3", "d1"); logger.log( diff --git a/packages/wrangler/src/d1/export.ts b/packages/wrangler/src/d1/export.ts index d83be9b30bd6..536285c94146 100644 --- a/packages/wrangler/src/d1/export.ts +++ b/packages/wrangler/src/d1/export.ts @@ -72,14 +72,10 @@ export function Options(yargs: CommonYargsArgv) { type HandlerOptions = StrictYargsOptionsToInterface; export const Handler = async (args: HandlerOptions): Promise => { - const { local, remote, name, output, schema, data, table } = args; + const { remote, name, output, schema, data, table } = args; await printWranglerBanner(); const config = readConfig(args); - if (!local && !remote) { - throw new UserError(`You must specify either --local or --remote`); - } - if (!schema && !data) { throw new UserError(`You cannot specify both --no-schema and --no-data`); } @@ -91,10 +87,10 @@ export const Handler = async (args: HandlerOptions): Promise => { : [table] : []; - if (local) { - return await exportLocal(config, name, output, tables, !schema, !data); - } else { + if (remote) { return await exportRemotely(config, name, output, tables, !schema, !data); + } else { + return await exportLocal(config, name, output, tables, !schema, !data); } }; @@ -117,7 +113,7 @@ async function exportLocal( // TODO: should we allow customising persistence path? // Should it be --persist-to for consistency (even though this isn't persisting anything)? - const persistencePath = getLocalPersistencePath(undefined, config.configPath); + const persistencePath = getLocalPersistencePath(undefined, config); const d1Persist = path.join(persistencePath, "v3", "d1"); logger.log( diff --git a/packages/wrangler/src/d1/migrations/helpers.ts b/packages/wrangler/src/d1/migrations/helpers.ts index 8b4399fec691..bfaca5f78355 100644 --- a/packages/wrangler/src/d1/migrations/helpers.ts +++ b/packages/wrangler/src/d1/migrations/helpers.ts @@ -7,7 +7,7 @@ import { isNonInteractiveOrCI } from "../../is-interactive"; import { logger } from "../../logger"; import { DEFAULT_MIGRATION_PATH } from "../constants"; import { executeSql } from "../execute"; -import type { ConfigFields, DevConfig, Environment } from "../../config"; +import type { Config } from "../../config"; import type { QueryResult } from "../execute"; import type { Migration } from "../types"; @@ -57,7 +57,7 @@ export async function getUnappliedMigrations({ migrationsPath: string; local: boolean | undefined; remote: boolean | undefined; - config: ConfigFields & Environment; + config: Config; name: string; persistTo: string | undefined; preview: boolean | undefined; @@ -92,7 +92,7 @@ type ListAppliedMigrationsProps = { migrationsTableName: string; local: boolean | undefined; remote: boolean | undefined; - config: ConfigFields & Environment; + config: Config; name: string; persistTo: string | undefined; preview: boolean | undefined; @@ -170,7 +170,7 @@ export const initMigrationsTable = async ({ migrationsTableName: string; local: boolean | undefined; remote: boolean | undefined; - config: ConfigFields & Environment; + config: Config; name: string; persistTo: string | undefined; preview: boolean | undefined; diff --git a/packages/wrangler/src/deploy/deploy.ts b/packages/wrangler/src/deploy/deploy.ts index a7c343b4c1fd..8bb9bb9a9fb4 100644 --- a/packages/wrangler/src/deploy/deploy.ts +++ b/packages/wrangler/src/deploy/deploy.ts 
@@ -107,7 +107,6 @@ type Props = { oldAssetTtl: number | undefined; projectRoot: string | undefined; dispatchNamespace: string | undefined; - experimentalVersions: boolean | undefined; experimentalAutoCreate: boolean; }; @@ -781,7 +780,6 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m } // We can use the new versions/deployments APIs if we: - // * have --x-versions enabled (default, but can be disabled with --no-x-versions) // * are uploading a worker that already exists // * aren't a dispatch namespace deploy // * aren't a service env deploy @@ -789,7 +787,6 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m // * we don't have DO migrations // * we aren't an fpw const canUseNewVersionsDeploymentsApi = - props.experimentalVersions && workerExists && props.dispatchNamespace === undefined && prod && diff --git a/packages/wrangler/src/deploy/index.ts b/packages/wrangler/src/deploy/index.ts index fa8a391815d8..7b4c4fe92689 100644 --- a/packages/wrangler/src/deploy/index.ts +++ b/packages/wrangler/src/deploy/index.ts @@ -2,7 +2,6 @@ import assert from "node:assert"; import path from "node:path"; import { getAssetsOptions, validateAssetsArgsAndConfig } from "../assets"; import { configFileName, readConfig } from "../config"; -import { resolveWranglerConfigPath } from "../config/config-helpers"; import { getEntry } from "../deployment-bundle/entry"; import { UserError } from "../errors"; import { run } from "../experimental-flags"; @@ -271,15 +270,17 @@ async function deployWorker(args: DeployArgs) { ); } - const configPath = resolveWranglerConfigPath(args); - const projectRoot = configPath && path.dirname(configPath); - const config = readConfig(args); + const config = readConfig(args, { useRedirectIfAvailable: true }); if (config.pages_build_output_dir) { throw new UserError( "It looks like you've run a Workers-specific command in a Pages project.\n" + "For Pages, please run `wrangler pages deploy` instead." ); } + // We use the `userConfigPath` to compute the root of a project, + // rather than a redirected (potentially generated) `configPath`. + const projectRoot = + config.userConfigPath && path.dirname(config.userConfigPath); const entry = await getEntry(args, config, "deploy"); @@ -346,11 +347,7 @@ async function deployWorker(args: DeployArgs) { if (!args.dryRun) { assert(accountId, "Missing account ID"); - await verifyWorkerMatchesCITag( - accountId, - name, - path.relative(entry.projectRoot, config.configPath ?? 
"wrangler.toml") - ); + await verifyWorkerMatchesCITag(accountId, name, config.configPath); } const { sourceMapSize, versionId, workerTag, targets } = await deploy({ config, @@ -386,7 +383,6 @@ async function deployWorker(args: DeployArgs) { oldAssetTtl: args.oldAssetTtl, projectRoot, dispatchNamespace: args.dispatchNamespace, - experimentalVersions: args.experimentalVersions, experimentalAutoCreate: args.experimentalAutoCreate, }); diff --git a/packages/wrangler/src/deployment-bundle/bundle.ts b/packages/wrangler/src/deployment-bundle/bundle.ts index c5f920d03d0a..bb35d4519de8 100644 --- a/packages/wrangler/src/deployment-bundle/bundle.ts +++ b/packages/wrangler/src/deployment-bundle/bundle.ts @@ -5,6 +5,7 @@ import * as esbuild from "esbuild"; import { getBuildConditionsFromEnv, getBuildPlatformFromEnv, + getUnenvResolvePathsFromEnv, } from "../environment-variables/misc-variables"; import { UserError } from "../errors"; import { getFlag } from "../experimental-flags"; @@ -390,6 +391,8 @@ export async function bundleWorker( }, }; + const unenvResolvePaths = getUnenvResolvePathsFromEnv()?.split(","); + const buildOptions: esbuild.BuildOptions & { metafile: true } = { // Don't use entryFile here as the file may have been changed when applying the middleware entryPoints: [entry.file], @@ -435,7 +438,10 @@ export async function bundleWorker( plugins: [ aliasPlugin, moduleCollector.plugin, - ...getNodeJSCompatPlugins(nodejsCompatMode ?? null), + ...getNodeJSCompatPlugins({ + mode: nodejsCompatMode ?? null, + unenvResolvePaths, + }), cloudflareInternalPlugin, buildResultPlugin, ...(plugins || []), diff --git a/packages/wrangler/src/deployment-bundle/entry.ts b/packages/wrangler/src/deployment-bundle/entry.ts index 23ff0dce5661..6b3d0ebd072a 100644 --- a/packages/wrangler/src/deployment-bundle/entry.ts +++ b/packages/wrangler/src/deployment-bundle/entry.ts @@ -60,9 +60,9 @@ export async function getEntry( if (args.script) { paths = resolveEntryWithScript(args.script); } else if (config.main !== undefined) { - paths = resolveEntryWithMain(config.main, config.configPath); + paths = resolveEntryWithMain(config.main, config); } else if (entryPoint) { - paths = resolveEntryWithEntryPoint(entryPoint, config.configPath); + paths = resolveEntryWithEntryPoint(entryPoint, config); } else if ( args.legacyAssets || config.legacy_assets || diff --git a/packages/wrangler/src/deployment-bundle/esbuild-plugins/hybrid-nodejs-compat.ts b/packages/wrangler/src/deployment-bundle/esbuild-plugins/hybrid-nodejs-compat.ts index e5a6ff9960f8..052039086953 100644 --- a/packages/wrangler/src/deployment-bundle/esbuild-plugins/hybrid-nodejs-compat.ts +++ b/packages/wrangler/src/deployment-bundle/esbuild-plugins/hybrid-nodejs-compat.ts @@ -1,25 +1,45 @@ import { builtinModules } from "node:module"; import nodePath from "node:path"; import dedent from "ts-dedent"; -import { cloudflare, env, nodeless } from "unenv"; +import { cloudflare, defineEnv } from "unenv"; import { getBasePath } from "../../paths"; import type { Plugin, PluginBuild } from "esbuild"; const REQUIRED_NODE_BUILT_IN_NAMESPACE = "node-built-in-modules"; const REQUIRED_UNENV_ALIAS_NAMESPACE = "required-unenv-alias"; -export const nodejsHybridPlugin: () => Plugin = () => { - const { alias, inject, external } = env(nodeless, cloudflare); +/** + * ESBuild plugin to apply the unenv preset. + * + * @param unenvResolvePaths Root paths used to resolve absolute paths. 
+ * @returns ESBuild plugin + */ +export function nodejsHybridPlugin(unenvResolvePaths?: string[]): Plugin { + // Get the resolved environment. + const { env } = defineEnv({ + nodeCompat: true, + presets: [cloudflare], + resolve: { + paths: unenvResolvePaths, + }, + }); + const { alias, inject, external } = env; + // Get the unresolved alias. + const unresolvedAlias = defineEnv({ + nodeCompat: true, + presets: [cloudflare], + resolve: false, + }).env.alias; return { name: "hybrid-nodejs_compat", setup(build) { errorOnServiceWorkerFormat(build); handleRequireCallsToNodeJSBuiltins(build); - handleUnenvAliasedPackages(build, alias, external); + handleUnenvAliasedPackages(build, unresolvedAlias, alias, external); handleNodeJSGlobals(build, inject); }, }; -}; +} const NODEJS_MODULES_RE = new RegExp(`^(node:)?(${builtinModules.join("|")})$`); @@ -87,45 +107,41 @@ function handleRequireCallsToNodeJSBuiltins(build: PluginBuild) { ); } +/** + * Handles aliased NPM packages. + * + * @param build ESBuild PluginBuild. + * @param unresolvedAlias Unresolved aliases from the presets. + * @param alias Aliases resolved to absolute paths. + * @param external external modules. + */ function handleUnenvAliasedPackages( build: PluginBuild, + unresolvedAlias: Record, alias: Record, external: string[] ) { - // esbuild expects alias paths to be absolute - const aliasAbsolute: Record = {}; - for (const [module, unresolvedAlias] of Object.entries(alias)) { - try { - aliasAbsolute[module] = require - .resolve(unresolvedAlias) - .replace(/\.cjs$/, ".mjs"); - } catch (e) { - // this is an alias for package that is not installed in the current app => ignore - } - } - - const UNENV_ALIAS_RE = new RegExp( - `^(${Object.keys(aliasAbsolute).join("|")})$` - ); + const UNENV_ALIAS_RE = new RegExp(`^(${Object.keys(alias).join("|")})$`); build.onResolve({ filter: UNENV_ALIAS_RE }, (args) => { - const unresolvedAlias = alias[args.path]; + const unresolved = unresolvedAlias[args.path]; // Convert `require()` calls for NPM packages to a virtual ES Module that can be imported avoiding the require calls. // Note: Does not apply to Node.js packages that are handled in `handleRequireCallsToNodeJSBuiltins` if ( args.kind === "require-call" && - (unresolvedAlias.startsWith("unenv/runtime/npm/") || - unresolvedAlias.startsWith("unenv/runtime/mock/")) + (unresolved.startsWith("unenv/runtime/npm/") || + unresolved.startsWith("unenv/runtime/mock/")) ) { return { path: args.path, namespace: REQUIRED_UNENV_ALIAS_NAMESPACE, }; } + // Resolve the alias to its absolute path and potentially mark it as external return { - path: aliasAbsolute[args.path], - external: external.includes(unresolvedAlias), + path: alias[args.path], + external: external.includes(unresolved), }; }); diff --git a/packages/wrangler/src/deployment-bundle/esbuild-plugins/nodejs-plugins.ts b/packages/wrangler/src/deployment-bundle/esbuild-plugins/nodejs-plugins.ts index b59d8fa190d7..30448b347f9f 100644 --- a/packages/wrangler/src/deployment-bundle/esbuild-plugins/nodejs-plugins.ts +++ b/packages/wrangler/src/deployment-bundle/esbuild-plugins/nodejs-plugins.ts @@ -10,7 +10,13 @@ import type { NodeJSCompatMode } from "miniflare"; /** * Returns the list of ESBuild plugins to use for a given compat mode. 
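For orientation, a rough sketch (not taken from this PR) of how the new object-style signature might be consumed in an esbuild build, assuming the `v2` hybrid compat mode; the import paths and file names are illustrative.

```ts
import * as esbuild from "esbuild";
import { getNodeJSCompatPlugins } from "./deployment-bundle/esbuild-plugins/nodejs-plugins";

// Optional override roots, typically sourced from WRANGLER_UNENV_RESOLVE_PATHS.
const unenvResolvePaths = process.env.WRANGLER_UNENV_RESOLVE_PATHS?.split(",");

await esbuild.build({
	entryPoints: ["./src/worker.ts"],
	bundle: true,
	format: "esm",
	// "v2" selects the hybrid Node.js compat plugin, which now receives the
	// resolve paths and forwards them to unenv's defineEnv().
	plugins: getNodeJSCompatPlugins({ mode: "v2", unenvResolvePaths }),
	outfile: "./dist/worker.js",
});
```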
*/ -export function getNodeJSCompatPlugins(mode: NodeJSCompatMode): Plugin[] { +export function getNodeJSCompatPlugins({ + mode, + unenvResolvePaths, +}: { + mode: NodeJSCompatMode; + unenvResolvePaths?: string[]; +}): Plugin[] { switch (mode) { case "als": return [asyncLocalStoragePlugin, nodejsCompatPlugin(mode)]; @@ -24,7 +30,7 @@ export function getNodeJSCompatPlugins(mode: NodeJSCompatMode): Plugin[] { case "v1": return [nodejsCompatPlugin(mode)]; case "v2": - return [nodejsHybridPlugin()]; + return [nodejsHybridPlugin(unenvResolvePaths)]; case null: return [nodejsCompatPlugin(mode)]; } diff --git a/packages/wrangler/src/deployment-bundle/resolve-entry.ts b/packages/wrangler/src/deployment-bundle/resolve-entry.ts index 2da92e262709..cfb5ef54c55d 100644 --- a/packages/wrangler/src/deployment-bundle/resolve-entry.ts +++ b/packages/wrangler/src/deployment-bundle/resolve-entry.ts @@ -1,5 +1,6 @@ import path from "path"; import { getBasePath } from "../paths"; +import type { Config } from "../config"; export function resolveEntryWithScript(script: string): { absolutePath: string; @@ -12,31 +13,36 @@ export function resolveEntryWithScript(script: string): { export function resolveEntryWithMain( main: string, - configPath?: string + config: Config ): { absolutePath: string; relativePath: string; projectRoot: string; } { - const projectRoot = path.resolve(path.dirname(configPath ?? ".")); - const file = path.resolve(projectRoot, main); - const relativePath = path.relative(projectRoot, file) || "."; - return { absolutePath: file, relativePath, projectRoot }; + // The project root is where the user defined the Worker via the Wrangler configuration (or the current working directory). + // The entry root is the base path used in bundling the source code for the Worker, + // which may be different from the project root if the Wrangler was redirected to use a different Wrangler configuration file. + const projectRoot = path.resolve(path.dirname(config.userConfigPath ?? ".")); + const entryRoot = path.resolve(path.dirname(config.configPath ?? ".")); + const absolutePath = path.resolve(entryRoot, main); + const relativePath = path.relative(entryRoot, absolutePath) || "."; + return { absolutePath, relativePath, projectRoot }; } export function resolveEntryWithEntryPoint( entryPoint: string, - configPath?: string + config: Config ): { absolutePath: string; relativePath: string; projectRoot: string; } { - const projectRoot = path.resolve(path.dirname(configPath ?? ".")); + const projectRoot = path.resolve(path.dirname(config.userConfigPath ?? ".")); + const entryRoot = path.resolve(path.dirname(config.configPath ?? ".")); const file = path.extname(entryPoint) ? path.resolve(entryPoint) : path.resolve(entryPoint, "index.js"); - const relativePath = path.relative(projectRoot, file) || "."; + const relativePath = path.relative(entryRoot, file) || "."; return { absolutePath: file, relativePath, projectRoot }; } diff --git a/packages/wrangler/src/deprecated/index.ts b/packages/wrangler/src/deprecated/index.ts index a2baf0f28281..ae0991669dfe 100644 --- a/packages/wrangler/src/deprecated/index.ts +++ b/packages/wrangler/src/deprecated/index.ts @@ -36,7 +36,6 @@ export async function buildHandler(buildArgs: BuildArgs) { "--outdir=dist", ...(buildArgs.env ? ["--env", buildArgs.env] : []), ...(buildArgs.config ? ["--config", buildArgs.config] : []), - ...(buildArgs.experimentalVersions ? 
["--experimental-versions"] : []), ]).parse(); } diff --git a/packages/wrangler/src/dev.ts b/packages/wrangler/src/dev.ts index f2b029641f60..833cc452b10f 100644 --- a/packages/wrangler/src/dev.ts +++ b/packages/wrangler/src/dev.ts @@ -12,7 +12,6 @@ import { } from "./api/startDevWorker/utils"; import { getAssetsOptions } from "./assets"; import { configFileName, formatConfigSnippet } from "./config"; -import { resolveWranglerConfigPath } from "./config/config-helpers"; import { createCommand } from "./core/create-command"; import { validateRoutes } from "./deploy/deploy"; import { validateNodeCompatMode } from "./deployment-bundle/node-compat"; @@ -698,8 +697,6 @@ export async function startDev(args: StartDevOptions) { ); } - const configPath = resolveWranglerConfigPath(args); - const authHook: AsyncHook]> = async ( config ) => { @@ -722,8 +719,8 @@ export async function startDev(args: StartDevOptions) { }; }; - if (Array.isArray(configPath)) { - const runtime = new MultiworkerRuntimeController(configPath.length); + if (Array.isArray(args.config)) { + const runtime = new MultiworkerRuntimeController(args.config.length); const primaryDevEnv = new DevEnv({ runtimes: [runtime] }); @@ -733,7 +730,7 @@ export async function startDev(args: StartDevOptions) { // Set up the primary DevEnv (the one that the ProxyController will connect to) devEnv = [ - await setupDevEnv(primaryDevEnv, configPath[0], authHook, { + await setupDevEnv(primaryDevEnv, args.config[0], authHook, { ...args, disableDevRegistry: true, multiworkerPrimary: true, @@ -743,7 +740,7 @@ export async function startDev(args: StartDevOptions) { // Set up all auxiliary DevEnvs devEnv.push( ...(await Promise.all( - (configPath as string[]).slice(1).map((c) => { + (args.config as string[]).slice(1).map((c) => { return setupDevEnv( new DevEnv({ runtimes: [runtime], @@ -813,7 +810,7 @@ export async function startDev(args: StartDevOptions) { unregisterHotKeys = registerDevHotKeys(devEnv, args); } - await setupDevEnv(devEnv, configPath, authHook, args); + await setupDevEnv(devEnv, args.config, authHook, args); } return { diff --git a/packages/wrangler/src/dev/dev-vars.ts b/packages/wrangler/src/dev/dev-vars.ts index ddd6922f96ba..9d889dfd2598 100644 --- a/packages/wrangler/src/dev/dev-vars.ts +++ b/packages/wrangler/src/dev/dev-vars.ts @@ -12,7 +12,7 @@ import type { Config } from "../config"; * * It is useful during development, to provide these types of variable locally. * When running `wrangler dev` we will look for a file called `.dev.vars`, situated - * next to the Wrangler configuration file (or in the current working directory if there is no + * next to the User's Wrangler configuration file (or in the current working directory if there is no * Wrangler configuration). If the `--env ` option is set, we'll first look for * `.dev.vars.`. * @@ -20,11 +20,13 @@ import type { Config } from "../config"; * bindings provided in the Wrangler configuration file. */ export function getVarsForDev( - config: Pick, + config: Pick, env: string | undefined, silent = false ): Config["vars"] { - const configDir = path.resolve(path.dirname(config.configPath ?? ".")); + const configDir = path.resolve( + config.userConfigPath ? path.dirname(config.userConfigPath) : "." 
+ ); const devVarsPath = path.resolve(configDir, ".dev.vars"); const loaded = loadDotEnv(devVarsPath, env); if (loaded !== undefined) { diff --git a/packages/wrangler/src/dev/get-local-persistence-path.ts b/packages/wrangler/src/dev/get-local-persistence-path.ts index 4263d344703e..591d72c2c899 100644 --- a/packages/wrangler/src/dev/get-local-persistence-path.ts +++ b/packages/wrangler/src/dev/get-local-persistence-path.ts @@ -1,18 +1,15 @@ import path from "node:path"; +import type { Config } from "../config"; +/** + * Get a path to where we shall store persisted state in local dev. + * + * We use the `userConfigPath` rather than the potentially redirected `configPath` + * to decide the path to this directory. + */ export function getLocalPersistencePath( persistTo: string | undefined, - configPath: string | undefined -): string; - -export function getLocalPersistencePath( - persistTo: string | undefined, - configPath: string | undefined -): string | null; - -export function getLocalPersistencePath( - persistTo: string | undefined, - configPath: string | undefined + { userConfigPath }: Config ) { return persistTo ? // If path specified, always treat it as relative to cwd() @@ -20,7 +17,7 @@ export function getLocalPersistencePath( : // Otherwise, treat it as relative to the Wrangler configuration file, // if one can be found, otherwise cwd() path.resolve( - configPath ? path.dirname(configPath) : process.cwd(), + userConfigPath ? path.dirname(userConfigPath) : process.cwd(), ".wrangler/state" ); } diff --git a/packages/wrangler/src/environment-variables/factory.ts b/packages/wrangler/src/environment-variables/factory.ts index 91d481ed3a6f..8a2124386ff7 100644 --- a/packages/wrangler/src/environment-variables/factory.ts +++ b/packages/wrangler/src/environment-variables/factory.ts @@ -25,6 +25,7 @@ type VariableNames = | "WRANGLER_CI_MATCH_TAG" | "WRANGLER_BUILD_CONDITIONS" | "WRANGLER_BUILD_PLATFORM" + | "WRANGLER_UNENV_RESOLVE_PATHS" | "WRANGLER_REGISTRY_PATH"; type DeprecatedNames = diff --git a/packages/wrangler/src/environment-variables/misc-variables.ts b/packages/wrangler/src/environment-variables/misc-variables.ts index a19426a316c0..3af8754f4664 100644 --- a/packages/wrangler/src/environment-variables/misc-variables.ts +++ b/packages/wrangler/src/environment-variables/misc-variables.ts @@ -126,6 +126,19 @@ export const getBuildPlatformFromEnv = getEnvironmentVariableFactory({ variableName: "WRANGLER_BUILD_PLATFORM", }); +/** + * `WRANGLER_UNENV_RESOLVE_PATHS` lists the root paths used to resolve the unenv preset. + * + * Note: multiple comma-separated paths can be specified. + * + * By default, Wrangler uses the unenv preset version installed from its own package.json. + * + * Setting root paths allows a different version of the preset to be used.
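To illustrate the variable documented above, a hedged example of pointing Wrangler at a different copy of the preset; the directory names are invented.

```ts
// Shell usage (illustrative):
//   WRANGLER_UNENV_RESOLVE_PATHS="/repo/vendor/unenv-preview,/repo" wrangler deploy

// Inside the bundler the value is split on commas before being handed to the
// Node.js compat plugins:
const unenvResolvePaths = process.env.WRANGLER_UNENV_RESOLVE_PATHS?.split(",");
// => ["/repo/vendor/unenv-preview", "/repo"]

// unenv's defineEnv({ resolve: { paths: unenvResolvePaths } }) then resolves the
// preset from these roots instead of the copy installed with Wrangler.
```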
+ */ +export const getUnenvResolvePathsFromEnv = getEnvironmentVariableFactory({ + variableName: "WRANGLER_UNENV_RESOLVE_PATHS", +}); + /** * `WRANGLER_REGISTRY_PATH` specifies the file based dev registry folder */ diff --git a/packages/wrangler/src/generate/index.ts b/packages/wrangler/src/generate/index.ts index b2d9331f6261..849930aaf528 100644 --- a/packages/wrangler/src/generate/index.ts +++ b/packages/wrangler/src/generate/index.ts @@ -57,7 +57,6 @@ export async function generateHandler(args: GenerateArgs) { type: undefined, _: args._, $0: args.$0, - experimentalVersions: args.experimentalVersions, experimentalProvision: args.experimentalProvision, }); } diff --git a/packages/wrangler/src/index.ts b/packages/wrangler/src/index.ts index 0a8f6e8d894c..fbdf287b787d 100644 --- a/packages/wrangler/src/index.ts +++ b/packages/wrangler/src/index.ts @@ -25,12 +25,6 @@ import { isBuildFailure, isBuildFailureFromCause, } from "./deployment-bundle/build-failures"; -import { - commonDeploymentCMDSetup, - deployments, - rollbackDeployment, - viewDeployment, -} from "./deployments"; import { buildHandler, buildOptions, @@ -273,14 +267,6 @@ export function getLegacyScriptName( } export function createCLIParser(argv: string[]) { - const experimentalGradualRollouts = - // original flag -- using internal product name (Gradual Rollouts) -- kept for temp back-compat - !argv.includes("--no-experimental-gradual-rollouts") && - // new flag -- using external product name (Versions) - !argv.includes("--no-experimental-versions") && - // new flag -- shorthand - !argv.includes("--no-x-versions"); - // Type check result against CommonYargsOptions to make sure we've included // all common options const wrangler: CommonYargsArgv = makeCLI(argv) @@ -340,13 +326,6 @@ export function createCLIParser(argv: string[]) { } return true; }) - .option("experimental-versions", { - describe: `Experimental: support Worker Versions`, - type: "boolean", - default: true, - hidden: true, - alias: ["x-versions", "experimental-gradual-rollouts"], - }) .check((args) => { // Grab locally specified env params from `.env` file const loaded = loadDotEnv(".env", args.env); @@ -479,170 +458,79 @@ export function createCLIParser(argv: string[]) { deployHandler ); - if (experimentalGradualRollouts) { - registry.define([ - { command: "wrangler deployments", definition: deploymentsNamespace }, - { - command: "wrangler deployments list", - definition: deploymentsListCommand, - }, - { - command: "wrangler deployments status", - definition: deploymentsStatusCommand, - }, - { - command: "wrangler deployments view", - definition: deploymentsViewCommand, - }, - ]); - registry.registerNamespace("deployments"); - } else { - wrangler.command( - "deployments", - "🚢 List and view the current and past deployments for your Worker", - (yargs) => - yargs - .option("name", { - describe: "The name of your Worker", - type: "string", - }) - .command( - "list", - "Displays the 10 most recent deployments for a Worker", - async (listYargs) => listYargs, - async (listYargs) => { - const { accountId, scriptName, config } = - await commonDeploymentCMDSetup(listYargs); - await deployments(accountId, scriptName, config); - } - ) - .command( - "view [deployment-id]", - "View a deployment", - async (viewYargs) => - viewYargs.positional("deployment-id", { - describe: "The ID of the deployment you want to inspect", - type: "string", - demandOption: false, - }), - async (viewYargs) => { - const { accountId, scriptName, config } = - await commonDeploymentCMDSetup(viewYargs); 
- - await viewDeployment( - accountId, - scriptName, - config, - viewYargs.deploymentId - ); - } - ) - .command(subHelp) - ); - } + registry.define([ + { command: "wrangler deployments", definition: deploymentsNamespace }, + { + command: "wrangler deployments list", + definition: deploymentsListCommand, + }, + { + command: "wrangler deployments status", + definition: deploymentsStatusCommand, + }, + { + command: "wrangler deployments view", + definition: deploymentsViewCommand, + }, + ]); + registry.registerNamespace("deployments"); - // rollback - const rollbackDescription = "🔙 Rollback a deployment for a Worker"; - - if (experimentalGradualRollouts) { - registry.define([ - { command: "wrangler rollback", definition: versionsRollbackCommand }, - ]); - registry.registerNamespace("rollback"); - } else { - wrangler.command( - "rollback [deployment-id]", - rollbackDescription, - (rollbackYargs) => - rollbackYargs - .positional("deployment-id", { - describe: "The ID of the deployment to rollback to", - type: "string", - demandOption: false, - }) - .option("message", { - alias: "m", - describe: - "Skip confirmation and message prompts, uses provided argument as message", - type: "string", - default: undefined, - }) - .option("name", { - describe: "The name of your Worker", - type: "string", - }), - async (rollbackYargs) => { - const { accountId, scriptName, config } = - await commonDeploymentCMDSetup(rollbackYargs); - - await rollbackDeployment( - accountId, - scriptName, - config, - rollbackYargs.deploymentId, - rollbackYargs.message - ); - } - ); - } + registry.define([ + { command: "wrangler rollback", definition: versionsRollbackCommand }, + ]); + registry.registerNamespace("rollback"); - // versions - if (experimentalGradualRollouts) { - registry.define([ - { - command: "wrangler versions", - definition: versionsNamespace, - }, - { - command: "wrangler versions view", - definition: versionsViewCommand, - }, - { - command: "wrangler versions list", - definition: versionsListCommand, - }, - { - command: "wrangler versions upload", - definition: versionsUploadCommand, - }, - { - command: "wrangler versions deploy", - definition: versionsDeployCommand, - }, - { - command: "wrangler versions secret", - definition: versionsSecretNamespace, - }, - { - command: "wrangler versions secret put", - definition: versionsSecretPutCommand, - }, - { - command: "wrangler versions secret bulk", - definition: versionsSecretBulkCommand, - }, - { - command: "wrangler versions secret delete", - definition: versionsSecretDeleteCommand, - }, - { - command: "wrangler versions secret list", - definition: versionsSecretsListCommand, - }, - ]); - registry.registerNamespace("versions"); - } + registry.define([ + { + command: "wrangler versions", + definition: versionsNamespace, + }, + { + command: "wrangler versions view", + definition: versionsViewCommand, + }, + { + command: "wrangler versions list", + definition: versionsListCommand, + }, + { + command: "wrangler versions upload", + definition: versionsUploadCommand, + }, + { + command: "wrangler versions deploy", + definition: versionsDeployCommand, + }, + { + command: "wrangler versions secret", + definition: versionsSecretNamespace, + }, + { + command: "wrangler versions secret put", + definition: versionsSecretPutCommand, + }, + { + command: "wrangler versions secret bulk", + definition: versionsSecretBulkCommand, + }, + { + command: "wrangler versions secret delete", + definition: versionsSecretDeleteCommand, + }, + { + command: "wrangler versions secret list", 
+ definition: versionsSecretsListCommand, + }, + ]); + registry.registerNamespace("versions"); - // triggers - if (experimentalGradualRollouts) { - wrangler.command( - "triggers", - "🎯 Updates the triggers of your current deployment", - (yargs) => { - return registerTriggersSubcommands(yargs.command(subHelp)); - } - ); - } + wrangler.command( + "triggers", + "🎯 Updates the triggers of your current deployment", + (yargs) => { + return registerTriggersSubcommands(yargs.command(subHelp)); + } + ); // delete wrangler.command( diff --git a/packages/wrangler/src/kv/helpers.ts b/packages/wrangler/src/kv/helpers.ts index 61d271dd4374..10c708989512 100644 --- a/packages/wrangler/src/kv/helpers.ts +++ b/packages/wrangler/src/kv/helpers.ts @@ -436,11 +436,11 @@ export function getKVNamespaceId( // https://devblogs.microsoft.com/typescript/announcing-typescript-5-2/#using-declarations-and-explicit-resource-management export async function usingLocalNamespace( persistTo: string | undefined, - configPath: string | undefined, + config: Config, namespaceId: string, closure: (namespace: ReplaceWorkersTypes) => Promise ): Promise { - const persist = getLocalPersistencePath(persistTo, configPath); + const persist = getLocalPersistencePath(persistTo, config); const persistOptions = buildPersistOptions(persist); const mf = new Miniflare({ script: diff --git a/packages/wrangler/src/kv/index.ts b/packages/wrangler/src/kv/index.ts index 12c0b779c3f6..28889db2f7f4 100644 --- a/packages/wrangler/src/kv/index.ts +++ b/packages/wrangler/src/kv/index.ts @@ -350,7 +350,7 @@ export const kvKeyPutCommand = createCommand({ if (args.local) { await usingLocalNamespace( args.persistTo, - config.configPath, + config, namespaceId, (namespace) => namespace.put(key, new Blob([value]).stream(), { @@ -434,7 +434,7 @@ export const kvKeyListCommand = createCommand({ if (args.local) { const listResult = await usingLocalNamespace( args.persistTo, - config.configPath, + config, namespaceId, (namespace) => namespace.list({ prefix }) ); @@ -513,7 +513,7 @@ export const kvKeyGetCommand = createCommand({ if (args.local) { const val = await usingLocalNamespace( args.persistTo, - config.configPath, + config, namespaceId, async (namespace) => { const stream = await namespace.get(key, "stream"); @@ -598,7 +598,7 @@ export const kvKeyDeleteCommand = createCommand({ if (args.local) { await usingLocalNamespace( args.persistTo, - config.configPath, + config, namespaceId, (namespace) => namespace.delete(key) ); @@ -730,7 +730,7 @@ export const kvBulkPutCommand = createCommand({ if (args.local) { await usingLocalNamespace( args.persistTo, - config.configPath, + config, namespaceId, async (namespace) => { for (const value of content) { @@ -855,7 +855,7 @@ export const kvBulkDeleteCommand = createCommand({ if (args.local) { await usingLocalNamespace( args.persistTo, - config.configPath, + config, namespaceId, async (namespace) => { for (const key of keysToDelete) { diff --git a/packages/wrangler/src/pages/build-env.ts b/packages/wrangler/src/pages/build-env.ts index 4076f5905675..22bdaa0fa4ef 100644 --- a/packages/wrangler/src/pages/build-env.ts +++ b/packages/wrangler/src/pages/build-env.ts @@ -42,7 +42,9 @@ export const Handler = async (args: PagesBuildEnvArgs) => { "Checking for configuration in a Wrangler configuration file (BETA)\n" ); - const configPath = findWranglerConfig(args.projectDir); + const { configPath } = findWranglerConfig(args.projectDir, { + useRedirectIfAvailable: true, + }); if (!configPath || !existsSync(configPath)) { 
logger.debug("No Wrangler configuration file found. Exiting."); process.exitCode = EXIT_CODE_NO_CONFIG_FOUND; diff --git a/packages/wrangler/src/pages/build.ts b/packages/wrangler/src/pages/build.ts index 5a5c97ee04f7..0bc011c30e40 100644 --- a/packages/wrangler/src/pages/build.ts +++ b/packages/wrangler/src/pages/build.ts @@ -354,7 +354,9 @@ async function maybeReadPagesConfig( if (!args.projectDirectory || !args.buildMetadataPath) { return; } - const configPath = findWranglerConfig(args.projectDirectory); + const { configPath } = findWranglerConfig(args.projectDirectory, { + useRedirectIfAvailable: true, + }); // Fail early if the config file doesn't exist if (!configPath || !existsSync(configPath)) { return undefined; diff --git a/packages/wrangler/src/pages/deploy.ts b/packages/wrangler/src/pages/deploy.ts index b0b325e91aff..985ef008d0b2 100644 --- a/packages/wrangler/src/pages/deploy.ts +++ b/packages/wrangler/src/pages/deploy.ts @@ -115,7 +115,9 @@ export const Handler = async (args: PagesDeployArgs) => { } let config: Config | undefined; - const configPath = findWranglerConfig(process.cwd()); + const { configPath } = findWranglerConfig(process.cwd(), { + useRedirectIfAvailable: true, + }); try { /* diff --git a/packages/wrangler/src/pages/dev.ts b/packages/wrangler/src/pages/dev.ts index bf3e0a258567..9967059f509d 100644 --- a/packages/wrangler/src/pages/dev.ts +++ b/packages/wrangler/src/pages/dev.ts @@ -303,7 +303,10 @@ export const Handler = async (args: PagesDevArguments) => { // for `dev` we always use the top-level config, which means we need // to read the config file with `env` set to `undefined` - const config = readConfig({ ...args, env: undefined }); + const config = readConfig( + { ...args, env: undefined }, + { useRedirectIfAvailable: true } + ); const resolvedDirectory = args.directory ?? 
config.pages_build_output_dir; const [_pages, _dev, ...remaining] = args._; const command = remaining; diff --git a/packages/wrangler/src/pages/secret/index.ts b/packages/wrangler/src/pages/secret/index.ts index 11b0e5b33856..fe12608945f6 100644 --- a/packages/wrangler/src/pages/secret/index.ts +++ b/packages/wrangler/src/pages/secret/index.ts @@ -42,7 +42,7 @@ async function pagesProject( ); } let config: Config | undefined; - const configPath = findWranglerConfig(process.cwd()); + const { configPath } = findWranglerConfig(process.cwd()); try { /* diff --git a/packages/wrangler/src/pipelines/client.ts b/packages/wrangler/src/pipelines/client.ts index e8fbdfbad89c..06a7f7e85f3f 100644 --- a/packages/wrangler/src/pipelines/client.ts +++ b/packages/wrangler/src/pipelines/client.ts @@ -46,7 +46,7 @@ export type PipelineUserConfig = { }; batch: { max_duration_s?: number; - max_mb?: number; + max_bytes?: number; max_rows?: number; }; path: { diff --git a/packages/wrangler/src/pipelines/index.ts b/packages/wrangler/src/pipelines/index.ts index f4229b6c3f0c..0a1a2c211b49 100644 --- a/packages/wrangler/src/pipelines/index.ts +++ b/packages/wrangler/src/pipelines/index.ts @@ -116,19 +116,19 @@ function addCreateAndUpdateOptions(yargs: Argv) { }) .option("batch-max-mb", { describe: - "The approximate maximum size of a batch before flush in megabytes \nDefault: 10", + "The approximate maximum size (in megabytes) for each batch before flushing (range: 1 - 100)", type: "number", demandOption: false, }) .option("batch-max-rows", { describe: - "The approximate maximum size of a batch before flush in rows \nDefault: 10000", + "The approximate maximum number of rows in a batch before flushing (range: 100 - 1000000)", type: "number", demandOption: false, }) .option("batch-max-seconds", { describe: - "The approximate maximum duration of a batch before flush in seconds \nDefault: 15", + "The approximate maximum age (in seconds) of a batch before flushing (range: 1 - 300)", type: "number", demandOption: false, }) @@ -211,7 +211,9 @@ export function pipelines(pipelineYargs: CommonYargsArgv) { args.compression === undefined ? "gzip" : args.compression; const batch = { - max_mb: args["batch-max-mb"], + max_bytes: args["batch-max-mb"] + ? 
args["batch-max-mb"] * 1000 * 1000 // convert to bytes for the API + : undefined, max_duration_s: args["batch-max-seconds"], max_rows: args["batch-max-rows"], }; @@ -401,7 +403,8 @@ export function pipelines(pipelineYargs: CommonYargsArgv) { pipelineConfig.destination.compression.type = args.compression; } if (args["batch-max-mb"]) { - pipelineConfig.destination.batch.max_mb = args["batch-max-mb"]; + pipelineConfig.destination.batch.max_bytes = + args["batch-max-mb"] * 1000 * 1000; // convert to bytes for the API } if (args["batch-max-seconds"]) { pipelineConfig.destination.batch.max_duration_s = diff --git a/packages/wrangler/src/r2/helpers.ts b/packages/wrangler/src/r2/helpers.ts index 25830e4b3fff..43efa2122852 100644 --- a/packages/wrangler/src/r2/helpers.ts +++ b/packages/wrangler/src/r2/helpers.ts @@ -350,14 +350,14 @@ export async function deleteR2Object( export async function usingLocalBucket( persistTo: string | undefined, - configPath: string | undefined, + config: Config, bucketName: string, closure: ( namespace: ReplaceWorkersTypes, mf: Miniflare ) => Promise ): Promise { - const persist = getLocalPersistencePath(persistTo, configPath); + const persist = getLocalPersistencePath(persistTo, config); const persistOptions = buildPersistOptions(persist); const mf = new Miniflare({ modules: true, diff --git a/packages/wrangler/src/r2/object.ts b/packages/wrangler/src/r2/object.ts index fa88dba1896c..dea39df0711f 100644 --- a/packages/wrangler/src/r2/object.ts +++ b/packages/wrangler/src/r2/object.ts @@ -95,7 +95,7 @@ export const r2ObjectGetCommand = createCommand({ if (objectGetYargs.local) { await usingLocalBucket( objectGetYargs.persistTo, - config.configPath, + config, bucket, async (r2Bucket) => { const object = await r2Bucket.get(key); @@ -274,7 +274,7 @@ export const r2ObjectPutCommand = createCommand({ if (local) { await usingLocalBucket( persistTo, - config.configPath, + config, bucket, async (r2Bucket, mf) => { const putOptions: R2PutOptions = { @@ -376,11 +376,8 @@ export const r2ObjectDeleteCommand = createCommand({ logger.log(`Deleting object "${key}" from bucket "${fullBucketName}".`); if (args.local) { - await usingLocalBucket( - args.persistTo, - config.configPath, - bucket, - (r2Bucket) => r2Bucket.delete(key) + await usingLocalBucket(args.persistTo, config, bucket, (r2Bucket) => + r2Bucket.delete(key) ); } else { const accountId = await requireAuth(config); diff --git a/packages/wrangler/src/triggers/deploy.ts b/packages/wrangler/src/triggers/deploy.ts index 946a9d4c74c7..a8e4c06e0ce9 100644 --- a/packages/wrangler/src/triggers/deploy.ts +++ b/packages/wrangler/src/triggers/deploy.ts @@ -28,7 +28,6 @@ type Props = { routes: string[] | undefined; legacyEnv: boolean | undefined; dryRun: boolean | undefined; - experimentalVersions: boolean | undefined; assetsOptions: AssetsOptions | undefined; }; @@ -276,10 +275,7 @@ export default async function triggersDeploy( const deployMs = Date.now() - start - uploadMs; if (deployments.length > 0) { - const msg = props.experimentalVersions - ? 
`Deployed ${workerName} triggers` - : `Published ${workerName}`; - logger.log(msg, formatTime(deployMs)); + logger.log(`Deployed ${workerName} triggers`, formatTime(deployMs)); const flatTargets = targets.flat().map( // Append protocol only on workers.dev domains diff --git a/packages/wrangler/src/triggers/index.ts b/packages/wrangler/src/triggers/index.ts index d19ed35f5af9..50093a60020c 100644 --- a/packages/wrangler/src/triggers/index.ts +++ b/packages/wrangler/src/triggers/index.ts @@ -78,7 +78,6 @@ async function triggersDeployHandler( routes: args.routes, legacyEnv: isLegacyEnv(config), dryRun: args.dryRun, - experimentalVersions: args.experimentalVersions, assetsOptions, }); } diff --git a/packages/wrangler/src/type-generation/index.ts b/packages/wrangler/src/type-generation/index.ts index 347b55f8cec7..b3ef27d136a7 100644 --- a/packages/wrangler/src/type-generation/index.ts +++ b/packages/wrangler/src/type-generation/index.ts @@ -3,7 +3,6 @@ import { basename, dirname, extname, join, relative, resolve } from "node:path"; import { findUpSync } from "find-up"; import { getNodeCompat } from "miniflare"; import { readConfig } from "../config"; -import { resolveWranglerConfigPath } from "../config/config-helpers"; import { getEntry } from "../deployment-bundle/entry"; import { getVarsForDev } from "../dev/dev-vars"; import { CommandLineArgsError, UserError } from "../errors"; @@ -63,11 +62,11 @@ export async function typesHandler( await printWranglerBanner(); - const configPath = resolveWranglerConfigPath(args); + const config = readConfig(args); if ( - !configPath || - !fs.existsSync(configPath) || - fs.statSync(configPath).isDirectory() + !config.configPath || + !fs.existsSync(config.configPath) || + fs.statSync(config.configPath).isDirectory() ) { logger.warn( `No config file detected${ @@ -77,8 +76,6 @@ export async function typesHandler( return; } - const config = readConfig(args); - // args.xRuntime will be a string if the user passes "--x-include-runtime" or "--x-include-runtime=..." if (typeof args.experimentalIncludeRuntime === "string") { logger.log(`Generating runtime types...`); @@ -89,7 +86,7 @@ export async function typesHandler( }); const tsconfigPath = - config.tsconfig ?? join(dirname(configPath), "tsconfig.json"); + config.tsconfig ?? join(dirname(config.configPath), "tsconfig.json"); const tsconfigTypes = readTsconfigTypes(tsconfigPath); const { mode } = getNodeCompat( config.compatibility_date, @@ -108,7 +105,10 @@ export async function typesHandler( } const secrets = getVarsForDev( - { configPath, vars: {} }, + // We do not want `getVarsForDev()` to merge in the standard vars into the dev vars + // because we want to be able to work with secrets differently to vars. + // So we pass in a fake vars object here. 
+ { ...config, vars: {} }, args.env, true ) as Record; diff --git a/packages/wrangler/src/versions/deploy.ts b/packages/wrangler/src/versions/deploy.ts index ab596e0781d6..74cececee0f9 100644 --- a/packages/wrangler/src/versions/deploy.ts +++ b/packages/wrangler/src/versions/deploy.ts @@ -47,6 +47,10 @@ export const versionsDeployCommand = createCommand({ owner: "Workers: Authoring and Testing", status: "stable", }, + behaviour: { + useConfigRedirectIfAvailable: true, + }, + args: { name: { describe: "Name of the worker", diff --git a/packages/wrangler/src/versions/upload.ts b/packages/wrangler/src/versions/upload.ts index 94f6d2e2c56f..6c12e4b1992a 100644 --- a/packages/wrangler/src/versions/upload.ts +++ b/packages/wrangler/src/versions/upload.ts @@ -282,7 +282,7 @@ export const versionsUploadCommand = createCommand({ }, }, behaviour: { - provideConfig: true, + useConfigRedirectIfAvailable: true, }, handler: async function versionsUploadHandler(args, { config }) { const entry = await getEntry(args, config, "versions upload"); @@ -346,11 +346,7 @@ export const versionsUploadCommand = createCommand({ if (!args.dryRun) { assert(accountId, "Missing account ID"); - await verifyWorkerMatchesCITag( - accountId, - name, - path.relative(entry.projectRoot, config.configPath ?? "wrangler.toml") - ); + await verifyWorkerMatchesCITag(accountId, name, config.configPath); } if (!args.dryRun) { diff --git a/packages/wrangler/src/yargs-types.ts b/packages/wrangler/src/yargs-types.ts index 9397389323c7..edb7b63e51c3 100644 --- a/packages/wrangler/src/yargs-types.ts +++ b/packages/wrangler/src/yargs-types.ts @@ -8,7 +8,6 @@ export interface CommonYargsOptions { v: boolean | undefined; config: string | undefined; env: string | undefined; - "experimental-versions": boolean | undefined; "experimental-provision": boolean | undefined; } diff --git a/packages/wrangler/telemetry.md b/packages/wrangler/telemetry.md index d4dff10386b3..6bf9fd86ff6c 100644 --- a/packages/wrangler/telemetry.md +++ b/packages/wrangler/telemetry.md @@ -12,6 +12,8 @@ Telemetry in Wrangler allows us to better identify bugs and gain visibility on u - What command is being run (e.g. `wrangler deploy`, `wrangler dev`) - Anonymized arguments and flags given to Wrangler (e.g. `wrangler deploy ./src/index.ts --dry-run=true --outdir=dist` would be sent as `wrangler deploy REDACTED --dry-run=true --outdir=REDACTED`) +- Anonymized information about your Worker (e.g. whether or not Workers Assets is being used, whether or not TypeScript is being used) +- Information about your connection to Cloudflare's API (e.g. how long it takes Wrangler to deploy your Worker) - The version of the Wrangler client that is sending the event - The package manager that the Wrangler client is using. (e.g. 
npm, yarn) - The major version of Node.js that the Wrangler client is running on diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 37f34b80607a..5e07c15094d5 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -65,8 +65,8 @@ importers: specifier: ^0.4.0 version: 0.4.0 '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@ianvs/prettier-plugin-sort-imports': specifier: 4.2.1 version: 4.2.1(@vue/compiler-sfc@3.3.4)(prettier@3.2.5) @@ -128,8 +128,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 typescript: specifier: catalog:default version: 5.6.3 @@ -170,8 +170,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 run-script-os: specifier: ^1.1.6 version: 1.1.6 @@ -233,8 +233,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 typescript: specifier: catalog:default version: 5.6.3 @@ -303,8 +303,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@types/node': specifier: ^18.19.59 version: 18.19.59 @@ -343,8 +343,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 typescript: specifier: catalog:default version: 5.6.3 @@ -364,8 +364,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 undici: specifier: catalog:default version: 5.28.4 @@ -382,8 +382,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@types/pg': specifier: ^8.11.2 version: 8.11.6 @@ -427,8 +427,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 typescript: specifier: catalog:default version: 5.6.3 @@ -452,8 +452,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 pages-plugin-example: specifier: workspace:* version: link:../pages-plugin-example @@ -512,8 +512,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 typescript: specifier: catalog:default version: 5.6.3 @@ -564,8 +564,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 
4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 pages-plugin-example: specifier: workspace:* version: link:../pages-plugin-example @@ -603,14 +603,29 @@ importers: specifier: workspace:* version: link:../../packages/wrangler + fixtures/pages-redirected-config: + devDependencies: + '@cloudflare/workers-tsconfig': + specifier: workspace:^ + version: link:../../packages/workers-tsconfig + undici: + specifier: catalog:default + version: 5.28.4 + vitest: + specifier: catalog:default + version: 2.1.8(@types/node@18.19.59)(@vitest/ui@2.1.8)(msw@2.4.3(typescript@5.6.3))(supports-color@9.2.2) + wrangler: + specifier: workspace:* + version: link:../../packages/wrangler + fixtures/pages-simple-assets: devDependencies: '@cloudflare/workers-tsconfig': specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 typescript: specifier: catalog:default version: 5.6.3 @@ -771,6 +786,21 @@ importers: specifier: workspace:* version: link:../../packages/wrangler + fixtures/redirected-config-worker: + devDependencies: + '@cloudflare/workers-tsconfig': + specifier: workspace:^ + version: link:../../packages/workers-tsconfig + undici: + specifier: catalog:default + version: 5.28.4 + vitest: + specifier: catalog:default + version: 2.1.8(@types/node@18.19.59)(@vitest/ui@2.1.8)(msw@2.4.3(typescript@5.6.3))(supports-color@9.2.2) + wrangler: + specifier: workspace:* + version: link:../../packages/wrangler + fixtures/routing-app: {} fixtures/rules-app: {} @@ -808,8 +838,8 @@ importers: specifier: workspace:* version: link:../../packages/vitest-pool-workers '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@types/node': specifier: ^18.19.59 version: 18.19.59 @@ -868,8 +898,8 @@ importers: fixtures/worker-ts: devDependencies: '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 wrangler: specifier: workspace:* version: link:../../packages/wrangler @@ -882,8 +912,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 typescript: specifier: catalog:default version: 5.6.3 @@ -903,8 +933,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 typescript: specifier: catalog:default version: 5.6.3 @@ -924,8 +954,8 @@ importers: specifier: workspace:* version: link:../../packages/workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 typescript: specifier: catalog:default version: 5.6.3 @@ -942,8 +972,8 @@ importers: fixtures/workflow: devDependencies: '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 typescript: specifier: catalog:default version: 5.6.3 @@ -960,8 +990,8 @@ importers: fixtures/workflow-multiple: devDependencies: '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 typescript: specifier: catalog:default version: 5.6.3 @@ -1086,8 +1116,8 @@ 
importers: specifier: workspace:* version: link:../workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@iarna/toml': specifier: ^3.0.0 version: 3.0.0 @@ -1215,8 +1245,8 @@ importers: specifier: workspace:* version: link:../eslint-config-worker '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@types/cookie': specifier: ^0.6.0 version: 0.6.0 @@ -1270,8 +1300,8 @@ importers: specifier: workspace:* version: link:../eslint-config-worker '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 mustache: specifier: ^4.2.0 version: 4.2.0 @@ -1301,8 +1331,8 @@ importers: specifier: ^4.1.0 version: 4.1.0 '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@types/mime': specifier: ^3.0.4 version: 3.0.4 @@ -1346,8 +1376,8 @@ importers: specifier: catalog:default version: 5.28.4 workerd: - specifier: 1.20241218.0 - version: 1.20241218.0 + specifier: 1.20241230.0 + version: 1.20241230.0 ws: specifier: ^8.18.0 version: 8.18.0 @@ -1368,8 +1398,8 @@ importers: specifier: workspace:* version: link:../workers-shared '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@cloudflare/workflows-shared': specifier: workspace:* version: link:../workflows-shared @@ -1483,8 +1513,8 @@ importers: specifier: workspace:* version: link:../workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@miniflare/cache': specifier: ^2.14.2 version: 2.14.2 @@ -1517,8 +1547,8 @@ importers: specifier: workspace:* version: link:../eslint-config-worker '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@types/cookie': specifier: ^0.6.0 version: 0.6.0 @@ -1554,8 +1584,8 @@ importers: specifier: workspace:* version: link:../workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 wrangler: specifier: workspace:* version: link:../wrangler @@ -1585,8 +1615,8 @@ importers: specifier: workspace:^ version: link:../workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 esbuild: specifier: 0.17.19 version: 0.17.19 @@ -1621,8 +1651,8 @@ importers: specifier: npm:unenv-nightly@* version: unenv-nightly@2.0.0-20241216-144314-7e05819 workerd: - specifier: ^1.20241216.0 - version: 1.20241216.0 + specifier: ^1.20241230.0 + version: 1.20241230.0 devDependencies: '@types/node': specifier: ^22.10.2 @@ -1633,6 +1663,15 @@ importers: unbuild: specifier: ^2.0.0 version: 2.0.0(typescript@5.6.3)(vue-tsc@2.0.29(typescript@5.6.3)) + undici: + specifier: catalog:default + version: 5.28.4 + vitest: + specifier: catalog:default + version: 2.1.8(@types/node@22.10.2)(@vitest/ui@2.1.8) + wrangler: + specifier: workspace:* + version: link:../wrangler packages/vitest-pool-workers: dependencies: @@ -1668,8 +1707,8 @@ importers: specifier: workspace:* version: link:../workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 
'@types/node': specifier: ^18.19.59 version: 18.19.59 @@ -1879,13 +1918,13 @@ importers: version: link:../eslint-config-worker '@cloudflare/vitest-pool-workers': specifier: ^0.5.31 - version: 0.5.31(@cloudflare/workers-types@4.20241218.0)(@vitest/runner@2.1.8)(@vitest/snapshot@2.1.8)(vitest@2.1.8) + version: 0.5.31(@cloudflare/workers-types@4.20241230.0)(@vitest/runner@2.1.8)(@vitest/snapshot@2.1.8)(vitest@2.1.8) '@cloudflare/workers-tsconfig': specifier: workspace:* version: link:../workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@types/mime': specifier: ^3.0.4 version: 3.0.4 @@ -1916,8 +1955,8 @@ importers: specifier: workspace:* version: link:../vitest-pool-workers '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@types/node': specifier: ^18.19.59 version: 18.19.59 @@ -1956,8 +1995,8 @@ importers: specifier: workspace:* version: link:../workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@types/mime': specifier: ^3.0.4 version: 3.0.4 @@ -2025,8 +2064,8 @@ importers: specifier: npm:unenv-nightly@2.0.0-20241218-183400-5d6aec3 version: unenv-nightly@2.0.0-20241218-183400-5d6aec3 workerd: - specifier: 1.20241218.0 - version: 1.20241218.0 + specifier: 1.20241230.0 + version: 1.20241230.0 xxhash-wasm: specifier: ^1.0.1 version: 1.0.1 @@ -2054,8 +2093,8 @@ importers: specifier: workspace:* version: link:../workers-tsconfig '@cloudflare/workers-types': - specifier: ^4.20241218.0 - version: 4.20241218.0 + specifier: ^4.20241230.0 + version: 4.20241230.0 '@cspotcode/source-map-support': specifier: 0.8.1 version: 0.8.1 @@ -2972,14 +3011,8 @@ packages: cpu: [x64] os: [darwin] - '@cloudflare/workerd-darwin-64@1.20241216.0': - resolution: {integrity: sha512-GreuUuvd1tp34i/I8rv9I6tJTGkLIdUZfPd4Gq7glRntWhZSfeJOlhFHOa/tIil1SrWi1UzXmWeW22DCcUIprA==} - engines: {node: '>=16'} - cpu: [x64] - os: [darwin] - - '@cloudflare/workerd-darwin-64@1.20241218.0': - resolution: {integrity: sha512-8rveQoxtUvlmORKqTWgjv2ycM8uqWox0u9evn3zd2iWKdou5sncFwH517ZRLI3rq9P31ZLmCQBZ0gloFsTeY6w==} + '@cloudflare/workerd-darwin-64@1.20241230.0': + resolution: {integrity: sha512-BZHLg4bbhNQoaY1Uan81O3FV/zcmWueC55juhnaI7NAobiQth9RppadPNpxNAmS9fK2mR5z8xrwMQSQrHmztyQ==} engines: {node: '>=16'} cpu: [x64] os: [darwin] @@ -2990,14 +3023,8 @@ packages: cpu: [arm64] os: [darwin] - '@cloudflare/workerd-darwin-arm64@1.20241216.0': - resolution: {integrity: sha512-wGtzIWx4a8uu4y601Zq/x+5fPoSfJoXEEPkqxhcTs2g22Py60wnG91vAeVa8pZe9SipBozYczXh2OwoeCO1QVg==} - engines: {node: '>=16'} - cpu: [arm64] - os: [darwin] - - '@cloudflare/workerd-darwin-arm64@1.20241218.0': - resolution: {integrity: sha512-be59Ad9nmM9lCkhHqmTs/uZ3JVZt8NJ9Z0PY+B0xnc5z6WwmV2lj0RVLtq7xJhQsQJA189zt5rXqDP6J+2mu7Q==} + '@cloudflare/workerd-darwin-arm64@1.20241230.0': + resolution: {integrity: sha512-lllxycj7EzYoJ0VOJh8M3palUgoonVrILnzGrgsworgWlIpgjfXGS7b41tEGCw6AxSxL9prmTIGtfSPUvn/rjg==} engines: {node: '>=16'} cpu: [arm64] os: [darwin] @@ -3008,14 +3035,8 @@ packages: cpu: [x64] os: [linux] - '@cloudflare/workerd-linux-64@1.20241216.0': - resolution: {integrity: sha512-HRkePwhnb/4r2Bd6SS3n8VWLPnczh2ApKo3j5N0YSVOz/bEJlqEbEnKAUivCb79C3zptTsbsb0tJ4b5uZsaHtw==} - engines: {node: '>=16'} - cpu: [x64] - os: [linux] - - '@cloudflare/workerd-linux-64@1.20241218.0': - resolution: {integrity: 
sha512-MzpSBcfZXRxrYWxQ4pVDYDrUbkQuM62ssl4ZtHH8J35OAeGsWFAYji6MkS2SpVwVcvacPwJXIF4JSzp4xKImKw==} + '@cloudflare/workerd-linux-64@1.20241230.0': + resolution: {integrity: sha512-Y3mHcW0KghOmWdNZyHYpEOG4Ba/ga8tht5vj1a+WXfagEjMO8Y98XhZUlCaYa9yB7Wh5jVcK5LM2jlO/BLgqpA==} engines: {node: '>=16'} cpu: [x64] os: [linux] @@ -3026,14 +3047,8 @@ packages: cpu: [arm64] os: [linux] - '@cloudflare/workerd-linux-arm64@1.20241216.0': - resolution: {integrity: sha512-5U99Iaj18BEJAEpKr+n2kdOyzCITbmzV0Ld4zMpIw5ZW0R2MHCo1swra84Q+bvElVOK6+7KGhjKQSqyZUF1WWA==} - engines: {node: '>=16'} - cpu: [arm64] - os: [linux] - - '@cloudflare/workerd-linux-arm64@1.20241218.0': - resolution: {integrity: sha512-RIuJjPxpNqvwIs52vQsXeRMttvhIjgg9NLjjFa3jK8Ijnj8c3ZDru9Wqi48lJP07yDFIRr4uDMMqh/y29YQi2A==} + '@cloudflare/workerd-linux-arm64@1.20241230.0': + resolution: {integrity: sha512-IAjhsWPlHzhhkJ6I49sDG6XfMnhPvv0szKGXxTWQK/IWMrbGdHm4RSfNKBSoLQm67jGMIzbmcrX9UIkms27Y1g==} engines: {node: '>=16'} cpu: [arm64] os: [linux] @@ -3044,14 +3059,8 @@ packages: cpu: [x64] os: [win32] - '@cloudflare/workerd-windows-64@1.20241216.0': - resolution: {integrity: sha512-6UtbWgZNFuVyq6d3nKsp3Eb53Ghm2EYObCKTs9TSzV2ZHbovgOIU8BKIlbfJvmkEbG4Q8bbfZkb3QJpG/IwchQ==} - engines: {node: '>=16'} - cpu: [x64] - os: [win32] - - '@cloudflare/workerd-windows-64@1.20241218.0': - resolution: {integrity: sha512-tO1VjlvK3F6Yb2d1jgEy/QBYl//9Pyv3K0j+lq8Eu7qdfm0IgKwSRgDWLept84/qmNsQfausZ4JdNGxTf9xsxQ==} + '@cloudflare/workerd-windows-64@1.20241230.0': + resolution: {integrity: sha512-y5SPIk9iOb2gz+yWtHxoeMnjPnkYQswiCJ480oHC6zexnJLlKTpcmBCjDH1nWCT4pQi8F25gaH8thgElf4NvXQ==} engines: {node: '>=16'} cpu: [x64] os: [win32] @@ -3060,8 +3069,8 @@ packages: resolution: {integrity: sha512-1OvFkNtslaMZAJsaocTmbACApgmWv55uLpNj50Pn2MGcxdAjpqykXJFQw5tKc+lGV9TDZh9oO3Rsk17IEQDzIg==} engines: {node: '>=16.7.0'} - '@cloudflare/workers-types@4.20241218.0': - resolution: {integrity: sha512-Y0brjmJHcAZBXOPI7lU5hbiXglQWniA1kQjot2ata+HFimyjPPcz+4QWBRrmWcMPo0OadR2Vmac7WStDLpvz0w==} + '@cloudflare/workers-types@4.20241230.0': + resolution: {integrity: sha512-dtLD4jY35Lb750cCVyO1i/eIfdZJg2Z0i+B1RYX6BVeRPlgaHx/H18ImKAkYmy0g09Ow8R2jZy3hIxMgXun0WQ==} '@colors/colors@1.5.0': resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} @@ -3105,8 +3114,8 @@ packages: cpu: [ppc64] os: [aix] - '@esbuild/aix-ppc64@0.24.0': - resolution: {integrity: sha512-WtKdFM7ls47zkKHFVzMz8opM7LkcsIp9amDUBIAWirg70RM71WRSjdILPsY5Uv1D42ZpUfaPILDlfactHgsRkw==} + '@esbuild/aix-ppc64@0.24.2': + resolution: {integrity: sha512-thpVCb/rhxE/BnMLQ7GReQLLN8q9qbHmI55F4489/ByVg2aQaQ6kbcLb6FHkocZzQhxc4gx0sCk0tJkKBFzDhA==} engines: {node: '>=18'} cpu: [ppc64] os: [aix] @@ -3135,8 +3144,8 @@ packages: cpu: [arm64] os: [android] - '@esbuild/android-arm64@0.24.0': - resolution: {integrity: sha512-Vsm497xFM7tTIPYK9bNTYJyF/lsP590Qc1WxJdlB6ljCbdZKU9SY8i7+Iin4kyhV/KV5J2rOKsBQbB77Ab7L/w==} + '@esbuild/android-arm64@0.24.2': + resolution: {integrity: sha512-cNLgeqCqV8WxfcTIOeL4OAtSmL8JjcN6m09XIgro1Wi7cF4t/THaWEa7eL5CMoMBdjoHOTh/vwTO/o2TRXIyzg==} engines: {node: '>=18'} cpu: [arm64] os: [android] @@ -3165,8 +3174,8 @@ packages: cpu: [arm] os: [android] - '@esbuild/android-arm@0.24.0': - resolution: {integrity: sha512-arAtTPo76fJ/ICkXWetLCc9EwEHKaeya4vMrReVlEIUCAUncH7M4bhMQ+M9Vf+FFOZJdTNMXNBrWwW+OXWpSew==} + '@esbuild/android-arm@0.24.2': + resolution: {integrity: sha512-tmwl4hJkCfNHwFB3nBa8z1Uy3ypZpxqxfTQOcHX+xRByyYgunVbZ9MzUUfb0RxaHIMnbHagwAxuTL+tnNM+1/Q==} engines: {node: 
'>=18'} cpu: [arm] os: [android] @@ -3195,8 +3204,8 @@ packages: cpu: [x64] os: [android] - '@esbuild/android-x64@0.24.0': - resolution: {integrity: sha512-t8GrvnFkiIY7pa7mMgJd7p8p8qqYIz1NYiAoKc75Zyv73L3DZW++oYMSHPRarcotTKuSs6m3hTOa5CKHaS02TQ==} + '@esbuild/android-x64@0.24.2': + resolution: {integrity: sha512-B6Q0YQDqMx9D7rvIcsXfmJfvUYLoP722bgfBlO5cGvNVb5V/+Y7nhBE3mHV9OpxBf4eAS2S68KZztiPaWq4XYw==} engines: {node: '>=18'} cpu: [x64] os: [android] @@ -3225,8 +3234,8 @@ packages: cpu: [arm64] os: [darwin] - '@esbuild/darwin-arm64@0.24.0': - resolution: {integrity: sha512-CKyDpRbK1hXwv79soeTJNHb5EiG6ct3efd/FTPdzOWdbZZfGhpbcqIpiD0+vwmpu0wTIL97ZRPZu8vUt46nBSw==} + '@esbuild/darwin-arm64@0.24.2': + resolution: {integrity: sha512-kj3AnYWc+CekmZnS5IPu9D+HWtUI49hbnyqk0FLEJDbzCIQt7hg7ucF1SQAilhtYpIujfaHr6O0UHlzzSPdOeA==} engines: {node: '>=18'} cpu: [arm64] os: [darwin] @@ -3255,8 +3264,8 @@ packages: cpu: [x64] os: [darwin] - '@esbuild/darwin-x64@0.24.0': - resolution: {integrity: sha512-rgtz6flkVkh58od4PwTRqxbKH9cOjaXCMZgWD905JOzjFKW+7EiUObfd/Kav+A6Gyud6WZk9w+xu6QLytdi2OA==} + '@esbuild/darwin-x64@0.24.2': + resolution: {integrity: sha512-WeSrmwwHaPkNR5H3yYfowhZcbriGqooyu3zI/3GGpF8AyUdsrrP0X6KumITGA9WOyiJavnGZUwPGvxvwfWPHIA==} engines: {node: '>=18'} cpu: [x64] os: [darwin] @@ -3285,8 +3294,8 @@ packages: cpu: [arm64] os: [freebsd] - '@esbuild/freebsd-arm64@0.24.0': - resolution: {integrity: sha512-6Mtdq5nHggwfDNLAHkPlyLBpE5L6hwsuXZX8XNmHno9JuL2+bg2BX5tRkwjyfn6sKbxZTq68suOjgWqCicvPXA==} + '@esbuild/freebsd-arm64@0.24.2': + resolution: {integrity: sha512-UN8HXjtJ0k/Mj6a9+5u6+2eZ2ERD7Edt1Q9IZiB5UZAIdPnVKDoG7mdTVGhHJIeEml60JteamR3qhsr1r8gXvg==} engines: {node: '>=18'} cpu: [arm64] os: [freebsd] @@ -3315,8 +3324,8 @@ packages: cpu: [x64] os: [freebsd] - '@esbuild/freebsd-x64@0.24.0': - resolution: {integrity: sha512-D3H+xh3/zphoX8ck4S2RxKR6gHlHDXXzOf6f/9dbFt/NRBDIE33+cVa49Kil4WUjxMGW0ZIYBYtaGCa2+OsQwQ==} + '@esbuild/freebsd-x64@0.24.2': + resolution: {integrity: sha512-TvW7wE/89PYW+IevEJXZ5sF6gJRDY/14hyIGFXdIucxCsbRmLUcjseQu1SyTko+2idmCw94TgyaEZi9HUSOe3Q==} engines: {node: '>=18'} cpu: [x64] os: [freebsd] @@ -3345,8 +3354,8 @@ packages: cpu: [arm64] os: [linux] - '@esbuild/linux-arm64@0.24.0': - resolution: {integrity: sha512-TDijPXTOeE3eaMkRYpcy3LarIg13dS9wWHRdwYRnzlwlA370rNdZqbcp0WTyyV/k2zSxfko52+C7jU5F9Tfj1g==} + '@esbuild/linux-arm64@0.24.2': + resolution: {integrity: sha512-7HnAD6074BW43YvvUmE/35Id9/NB7BeX5EoNkK9obndmZBUk8xmJJeU7DwmUeN7tkysslb2eSl6CTrYz6oEMQg==} engines: {node: '>=18'} cpu: [arm64] os: [linux] @@ -3375,8 +3384,8 @@ packages: cpu: [arm] os: [linux] - '@esbuild/linux-arm@0.24.0': - resolution: {integrity: sha512-gJKIi2IjRo5G6Glxb8d3DzYXlxdEj2NlkixPsqePSZMhLudqPhtZ4BUrpIuTjJYXxvF9njql+vRjB2oaC9XpBw==} + '@esbuild/linux-arm@0.24.2': + resolution: {integrity: sha512-n0WRM/gWIdU29J57hJyUdIsk0WarGd6To0s+Y+LwvlC55wt+GT/OgkwoXCXvIue1i1sSNWblHEig00GBWiJgfA==} engines: {node: '>=18'} cpu: [arm] os: [linux] @@ -3405,8 +3414,8 @@ packages: cpu: [ia32] os: [linux] - '@esbuild/linux-ia32@0.24.0': - resolution: {integrity: sha512-K40ip1LAcA0byL05TbCQ4yJ4swvnbzHscRmUilrmP9Am7//0UjPreh4lpYzvThT2Quw66MhjG//20mrufm40mA==} + '@esbuild/linux-ia32@0.24.2': + resolution: {integrity: sha512-sfv0tGPQhcZOgTKO3oBE9xpHuUqguHvSo4jl+wjnKwFpapx+vUDcawbwPNuBIAYdRAvIDBfZVvXprIj3HA+Ugw==} engines: {node: '>=18'} cpu: [ia32] os: [linux] @@ -3435,8 +3444,8 @@ packages: cpu: [loong64] os: [linux] - '@esbuild/linux-loong64@0.24.0': - resolution: {integrity: 
sha512-0mswrYP/9ai+CU0BzBfPMZ8RVm3RGAN/lmOMgW4aFUSOQBjA31UP8Mr6DDhWSuMwj7jaWOT0p0WoZ6jeHhrD7g==} + '@esbuild/linux-loong64@0.24.2': + resolution: {integrity: sha512-CN9AZr8kEndGooS35ntToZLTQLHEjtVB5n7dl8ZcTZMonJ7CCfStrYhrzF97eAecqVbVJ7APOEe18RPI4KLhwQ==} engines: {node: '>=18'} cpu: [loong64] os: [linux] @@ -3465,8 +3474,8 @@ packages: cpu: [mips64el] os: [linux] - '@esbuild/linux-mips64el@0.24.0': - resolution: {integrity: sha512-hIKvXm0/3w/5+RDtCJeXqMZGkI2s4oMUGj3/jM0QzhgIASWrGO5/RlzAzm5nNh/awHE0A19h/CvHQe6FaBNrRA==} + '@esbuild/linux-mips64el@0.24.2': + resolution: {integrity: sha512-iMkk7qr/wl3exJATwkISxI7kTcmHKE+BlymIAbHO8xanq/TjHaaVThFF6ipWzPHryoFsesNQJPE/3wFJw4+huw==} engines: {node: '>=18'} cpu: [mips64el] os: [linux] @@ -3495,8 +3504,8 @@ packages: cpu: [ppc64] os: [linux] - '@esbuild/linux-ppc64@0.24.0': - resolution: {integrity: sha512-HcZh5BNq0aC52UoocJxaKORfFODWXZxtBaaZNuN3PUX3MoDsChsZqopzi5UupRhPHSEHotoiptqikjN/B77mYQ==} + '@esbuild/linux-ppc64@0.24.2': + resolution: {integrity: sha512-shsVrgCZ57Vr2L8mm39kO5PPIb+843FStGt7sGGoqiiWYconSxwTiuswC1VJZLCjNiMLAMh34jg4VSEQb+iEbw==} engines: {node: '>=18'} cpu: [ppc64] os: [linux] @@ -3525,8 +3534,8 @@ packages: cpu: [riscv64] os: [linux] - '@esbuild/linux-riscv64@0.24.0': - resolution: {integrity: sha512-bEh7dMn/h3QxeR2KTy1DUszQjUrIHPZKyO6aN1X4BCnhfYhuQqedHaa5MxSQA/06j3GpiIlFGSsy1c7Gf9padw==} + '@esbuild/linux-riscv64@0.24.2': + resolution: {integrity: sha512-4eSFWnU9Hhd68fW16GD0TINewo1L6dRrB+oLNNbYyMUAeOD2yCK5KXGK1GH4qD/kT+bTEXjsyTCiJGHPZ3eM9Q==} engines: {node: '>=18'} cpu: [riscv64] os: [linux] @@ -3555,8 +3564,8 @@ packages: cpu: [s390x] os: [linux] - '@esbuild/linux-s390x@0.24.0': - resolution: {integrity: sha512-ZcQ6+qRkw1UcZGPyrCiHHkmBaj9SiCD8Oqd556HldP+QlpUIe2Wgn3ehQGVoPOvZvtHm8HPx+bH20c9pvbkX3g==} + '@esbuild/linux-s390x@0.24.2': + resolution: {integrity: sha512-S0Bh0A53b0YHL2XEXC20bHLuGMOhFDO6GN4b3YjRLK//Ep3ql3erpNcPlEFed93hsQAjAQDNsvcK+hV90FubSw==} engines: {node: '>=18'} cpu: [s390x] os: [linux] @@ -3585,12 +3594,18 @@ packages: cpu: [x64] os: [linux] - '@esbuild/linux-x64@0.24.0': - resolution: {integrity: sha512-vbutsFqQ+foy3wSSbmjBXXIJ6PL3scghJoM8zCL142cGaZKAdCZHyf+Bpu/MmX9zT9Q0zFBVKb36Ma5Fzfa8xA==} + '@esbuild/linux-x64@0.24.2': + resolution: {integrity: sha512-8Qi4nQcCTbLnK9WoMjdC9NiTG6/E38RNICU6sUNqK0QFxCYgoARqVqxdFmWkdonVsvGqWhmm7MO0jyTqLqwj0Q==} engines: {node: '>=18'} cpu: [x64] os: [linux] + '@esbuild/netbsd-arm64@0.24.2': + resolution: {integrity: sha512-wuLK/VztRRpMt9zyHSazyCVdCXlpHkKm34WUyinD2lzK07FAHTq0KQvZZlXikNWkDGoT6x3TD51jKQ7gMVpopw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + '@esbuild/netbsd-x64@0.17.19': resolution: {integrity: sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q==} engines: {node: '>=12'} @@ -3615,14 +3630,14 @@ packages: cpu: [x64] os: [netbsd] - '@esbuild/netbsd-x64@0.24.0': - resolution: {integrity: sha512-hjQ0R/ulkO8fCYFsG0FZoH+pWgTTDreqpqY7UnQntnaKv95uP5iW3+dChxnx7C3trQQU40S+OgWhUVwCjVFLvg==} + '@esbuild/netbsd-x64@0.24.2': + resolution: {integrity: sha512-VefFaQUc4FMmJuAxmIHgUmfNiLXY438XrL4GDNV1Y1H/RW3qow68xTwjZKfj/+Plp9NANmzbH5R40Meudu8mmw==} engines: {node: '>=18'} cpu: [x64] os: [netbsd] - '@esbuild/openbsd-arm64@0.24.0': - resolution: {integrity: sha512-MD9uzzkPQbYehwcN583yx3Tu5M8EIoTD+tUgKF982WYL9Pf5rKy9ltgD0eUgs8pvKnmizxjXZyLt0z6DC3rRXg==} + '@esbuild/openbsd-arm64@0.24.2': + resolution: {integrity: sha512-YQbi46SBct6iKnszhSvdluqDmxCJA+Pu280Av9WICNwQmMxV7nLRHZfjQzwbPs3jeWnuAhE9Jy0NrnJ12Oz+0A==} engines: {node: 
'>=18'} cpu: [arm64] os: [openbsd] @@ -3651,8 +3666,8 @@ packages: cpu: [x64] os: [openbsd] - '@esbuild/openbsd-x64@0.24.0': - resolution: {integrity: sha512-4ir0aY1NGUhIC1hdoCzr1+5b43mw99uNwVzhIq1OY3QcEwPDO3B7WNXBzaKY5Nsf1+N11i1eOfFcq+D/gOS15Q==} + '@esbuild/openbsd-x64@0.24.2': + resolution: {integrity: sha512-+iDS6zpNM6EnJyWv0bMGLWSWeXGN/HTaF/LXHXHwejGsVi+ooqDfMCCTerNFxEkM3wYVcExkeGXNqshc9iMaOA==} engines: {node: '>=18'} cpu: [x64] os: [openbsd] @@ -3681,8 +3696,8 @@ packages: cpu: [x64] os: [sunos] - '@esbuild/sunos-x64@0.24.0': - resolution: {integrity: sha512-jVzdzsbM5xrotH+W5f1s+JtUy1UWgjU0Cf4wMvffTB8m6wP5/kx0KiaLHlbJO+dMgtxKV8RQ/JvtlFcdZ1zCPA==} + '@esbuild/sunos-x64@0.24.2': + resolution: {integrity: sha512-hTdsW27jcktEvpwNHJU4ZwWFGkz2zRJUz8pvddmXPtXDzVKTTINmlmga3ZzwcuMpUvLw7JkLy9QLKyGpD2Yxig==} engines: {node: '>=18'} cpu: [x64] os: [sunos] @@ -3711,8 +3726,8 @@ packages: cpu: [arm64] os: [win32] - '@esbuild/win32-arm64@0.24.0': - resolution: {integrity: sha512-iKc8GAslzRpBytO2/aN3d2yb2z8XTVfNV0PjGlCxKo5SgWmNXx82I/Q3aG1tFfS+A2igVCY97TJ8tnYwpUWLCA==} + '@esbuild/win32-arm64@0.24.2': + resolution: {integrity: sha512-LihEQ2BBKVFLOC9ZItT9iFprsE9tqjDjnbulhHoFxYQtQfai7qfluVODIYxt1PgdoyQkz23+01rzwNwYfutxUQ==} engines: {node: '>=18'} cpu: [arm64] os: [win32] @@ -3741,8 +3756,8 @@ packages: cpu: [ia32] os: [win32] - '@esbuild/win32-ia32@0.24.0': - resolution: {integrity: sha512-vQW36KZolfIudCcTnaTpmLQ24Ha1RjygBo39/aLkM2kmjkWmZGEJ5Gn9l5/7tzXA42QGIoWbICfg6KLLkIw6yw==} + '@esbuild/win32-ia32@0.24.2': + resolution: {integrity: sha512-q+iGUwfs8tncmFC9pcnD5IvRHAzmbwQ3GPS5/ceCyHdjXubwQWI12MKWSNSMYLJMq23/IUCvJMS76PDqXe1fxA==} engines: {node: '>=18'} cpu: [ia32] os: [win32] @@ -3771,8 +3786,8 @@ packages: cpu: [x64] os: [win32] - '@esbuild/win32-x64@0.24.0': - resolution: {integrity: sha512-7IAFPrjSQIJrGsK6flwg7NFmwBoSTyF3rl7If0hNUFQU4ilTsEPL6GuMuU9BfIWVVGuRnuIidkSMC+c0Otu8IA==} + '@esbuild/win32-x64@0.24.2': + resolution: {integrity: sha512-7VTgWzgMGvup6aSqDPLiW5zHaxYJGTO4OokMjIlrCtf+VpEL+cXKtCvg723iguPYI5oaUNdS+/V7OU2gvXVWEg==} engines: {node: '>=18'} cpu: [x64] os: [win32] @@ -5790,8 +5805,8 @@ packages: config-chain@1.1.13: resolution: {integrity: sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==} - consola@3.2.3: - resolution: {integrity: sha512-I5qxpzLv+sJhTVEoLYNcTW+bThDCPsit0vLNKShZx6rLtpilNpmmeTPaeqJb9ZE9dV3DGaeby6Vuhrw38WjeyQ==} + consola@3.3.3: + resolution: {integrity: sha512-Qil5KwghMzlqd51UXM0b6fyaGHtOC22scxrwrz4A2882LyUMwQjnvaedN1HAeXzphspQ6CpHkzMAWxBTUruDLg==} engines: {node: ^14.18.0 || >=16.10.0} console-control-strings@1.1.0: @@ -6350,8 +6365,8 @@ packages: engines: {node: '>=12'} hasBin: true - esbuild@0.24.0: - resolution: {integrity: sha512-FuLPevChGDshgSicjisSooU0cemp/sGXR841D5LHMB7mTVOmsEHcAxaH3irL53+8YDIeVNQEySh4DaYU/iuPqQ==} + esbuild@0.24.2: + resolution: {integrity: sha512-+9egpBW8I3CD5XPe0n6BfT5fxLzxrlDzqydF3aviG+9ni1lDC/OvMHcxqEFV0+LANZG5R1bFMWfUrjVsdwxJvA==} engines: {node: '>=18'} hasBin: true @@ -7514,12 +7529,12 @@ packages: javascript-time-ago@2.5.7: resolution: {integrity: sha512-EGvh6K4hpJz0S0aZinbW2EfXDqT/JBB84HfMOFDTzGg7yjpjql9feSgtlG1JQ6b6/NkIxl+PoKSUTEMsatTuTg==} - jiti@1.21.6: - resolution: {integrity: sha512-2yTgeWTWzMWkHu6Jp9NKgePDaYHbntiwvYuuJLbbN9vl7DC9DvXKOB2BC3ZZ92D3cvV/aflH0osDfwpHepQ53w==} + jiti@1.21.7: + resolution: {integrity: sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==} hasBin: true - jiti@2.4.1: - resolution: {integrity: 
sha512-yPBThwecp1wS9DmoA4x4KR2h3QoslacnDR8ypuFM962kI4/456Iy1oHx2RAgh4jfZNdn0bctsdadceiBUgpU1g==} + jiti@2.4.2: + resolution: {integrity: sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==} hasBin: true jju@1.4.0: @@ -8625,8 +8640,8 @@ packages: resolution: {integrity: sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==} engines: {node: '>= 0.4'} - postcss-calc@10.0.2: - resolution: {integrity: sha512-DT/Wwm6fCKgpYVI7ZEWuPJ4az8hiEHtCUeYjZXqU7Ou4QqYh1Df2yCQ7Ca6N7xqKPFkxN3fhf+u9KSoOCJNAjg==} + postcss-calc@10.1.0: + resolution: {integrity: sha512-uQ/LDGsf3mgsSUEXmAt3VsCSHR3aKqtEIkmB+4PhzYwRYOW5MZs/GhCCFpsOtJJkP6EC6uGipbrnaTjqaJZcJw==} engines: {node: ^18.12 || ^20.9 || >=22.0} peerDependencies: postcss: ^8.4.38 @@ -8785,6 +8800,10 @@ packages: resolution: {integrity: sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==} engines: {node: '>=4'} + postcss-selector-parser@7.0.0: + resolution: {integrity: sha512-9RbEr1Y7FFfptd/1eEdntyjMwLeghW1bHX9GWjXo19vx4ytPQhANltvVxDggzJl7mnWM+dX28kb6cyS/4iQjlQ==} + engines: {node: '>=4'} + postcss-svgo@7.0.1: resolution: {integrity: sha512-0WBUlSL4lhD9rA5k1e5D8EN5wCEyZD6HJk0jIvRxl+FDVOMlJ7DePHYWGGVc5QRqrJ3/06FTXM0bxjmJpmTPSA==} engines: {node: ^18.12.0 || ^20.9.0 || >= 18} @@ -10432,13 +10451,8 @@ packages: engines: {node: '>=16'} hasBin: true - workerd@1.20241216.0: - resolution: {integrity: sha512-q92hkfZ0ZmH6DrcQ426AqJR0KyG6NRAUNUT3Kvpzk76rLHzw6pvVeU9exATkqnwk5K3LQK6l1asuSsBDdXsPpw==} - engines: {node: '>=16'} - hasBin: true - - workerd@1.20241218.0: - resolution: {integrity: sha512-7Z3D4vOVChMz9mWDffE299oQxUWm/pbkeAWx1btVamPcAK/2IuoNBhwflWo3jyuKuxvYuFAdIucgYxc8ICqXiA==} + workerd@1.20241230.0: + resolution: {integrity: sha512-EgixXP0JGXGq6J9lz17TKIZtfNDUvJNG+cl9paPMfZuYWT920fFpBx+K04YmnbQRLnglsivF1GT9pxh1yrlWhg==} engines: {node: '>=16'} hasBin: true @@ -11978,7 +11992,7 @@ snapshots: lodash.memoize: 4.1.2 marked: 0.3.19 - '@cloudflare/vitest-pool-workers@0.5.31(@cloudflare/workers-types@4.20241218.0)(@vitest/runner@2.1.8)(@vitest/snapshot@2.1.8)(vitest@2.1.8)': + '@cloudflare/vitest-pool-workers@0.5.31(@cloudflare/workers-types@4.20241230.0)(@vitest/runner@2.1.8)(@vitest/snapshot@2.1.8)(vitest@2.1.8)': dependencies: '@vitest/runner': 2.1.8 '@vitest/snapshot': 2.1.8 @@ -11989,7 +12003,7 @@ snapshots: miniflare: 3.20241106.1 semver: 7.5.4 vitest: 2.1.8(@types/node@18.19.59)(@vitest/ui@2.1.8)(msw@2.4.3(typescript@5.6.3))(supports-color@9.2.2) - wrangler: 3.90.0(@cloudflare/workers-types@4.20241218.0) + wrangler: 3.90.0(@cloudflare/workers-types@4.20241230.0) zod: 3.22.3 transitivePeerDependencies: - '@cloudflare/workers-types' @@ -12000,46 +12014,31 @@ snapshots: '@cloudflare/workerd-darwin-64@1.20241106.1': optional: true - '@cloudflare/workerd-darwin-64@1.20241216.0': - optional: true - - '@cloudflare/workerd-darwin-64@1.20241218.0': + '@cloudflare/workerd-darwin-64@1.20241230.0': optional: true '@cloudflare/workerd-darwin-arm64@1.20241106.1': optional: true - '@cloudflare/workerd-darwin-arm64@1.20241216.0': - optional: true - - '@cloudflare/workerd-darwin-arm64@1.20241218.0': + '@cloudflare/workerd-darwin-arm64@1.20241230.0': optional: true '@cloudflare/workerd-linux-64@1.20241106.1': optional: true - '@cloudflare/workerd-linux-64@1.20241216.0': - optional: true - - '@cloudflare/workerd-linux-64@1.20241218.0': + '@cloudflare/workerd-linux-64@1.20241230.0': optional: true '@cloudflare/workerd-linux-arm64@1.20241106.1': 
optional: true - '@cloudflare/workerd-linux-arm64@1.20241216.0': - optional: true - - '@cloudflare/workerd-linux-arm64@1.20241218.0': + '@cloudflare/workerd-linux-arm64@1.20241230.0': optional: true '@cloudflare/workerd-windows-64@1.20241106.1': optional: true - '@cloudflare/workerd-windows-64@1.20241216.0': - optional: true - - '@cloudflare/workerd-windows-64@1.20241218.0': + '@cloudflare/workerd-windows-64@1.20241230.0': optional: true '@cloudflare/workers-shared@0.8.0': @@ -12047,7 +12046,7 @@ snapshots: mime: 3.0.0 zod: 3.22.3 - '@cloudflare/workers-types@4.20241218.0': {} + '@cloudflare/workers-types@4.20241230.0': {} '@colors/colors@1.5.0': optional: true @@ -12087,7 +12086,7 @@ snapshots: '@esbuild/aix-ppc64@0.21.5': optional: true - '@esbuild/aix-ppc64@0.24.0': + '@esbuild/aix-ppc64@0.24.2': optional: true '@esbuild/android-arm64@0.17.19': @@ -12102,7 +12101,7 @@ snapshots: '@esbuild/android-arm64@0.21.5': optional: true - '@esbuild/android-arm64@0.24.0': + '@esbuild/android-arm64@0.24.2': optional: true '@esbuild/android-arm@0.17.19': @@ -12117,7 +12116,7 @@ snapshots: '@esbuild/android-arm@0.21.5': optional: true - '@esbuild/android-arm@0.24.0': + '@esbuild/android-arm@0.24.2': optional: true '@esbuild/android-x64@0.17.19': @@ -12132,7 +12131,7 @@ snapshots: '@esbuild/android-x64@0.21.5': optional: true - '@esbuild/android-x64@0.24.0': + '@esbuild/android-x64@0.24.2': optional: true '@esbuild/darwin-arm64@0.17.19': @@ -12147,7 +12146,7 @@ snapshots: '@esbuild/darwin-arm64@0.21.5': optional: true - '@esbuild/darwin-arm64@0.24.0': + '@esbuild/darwin-arm64@0.24.2': optional: true '@esbuild/darwin-x64@0.17.19': @@ -12162,7 +12161,7 @@ snapshots: '@esbuild/darwin-x64@0.21.5': optional: true - '@esbuild/darwin-x64@0.24.0': + '@esbuild/darwin-x64@0.24.2': optional: true '@esbuild/freebsd-arm64@0.17.19': @@ -12177,7 +12176,7 @@ snapshots: '@esbuild/freebsd-arm64@0.21.5': optional: true - '@esbuild/freebsd-arm64@0.24.0': + '@esbuild/freebsd-arm64@0.24.2': optional: true '@esbuild/freebsd-x64@0.17.19': @@ -12192,7 +12191,7 @@ snapshots: '@esbuild/freebsd-x64@0.21.5': optional: true - '@esbuild/freebsd-x64@0.24.0': + '@esbuild/freebsd-x64@0.24.2': optional: true '@esbuild/linux-arm64@0.17.19': @@ -12207,7 +12206,7 @@ snapshots: '@esbuild/linux-arm64@0.21.5': optional: true - '@esbuild/linux-arm64@0.24.0': + '@esbuild/linux-arm64@0.24.2': optional: true '@esbuild/linux-arm@0.17.19': @@ -12222,7 +12221,7 @@ snapshots: '@esbuild/linux-arm@0.21.5': optional: true - '@esbuild/linux-arm@0.24.0': + '@esbuild/linux-arm@0.24.2': optional: true '@esbuild/linux-ia32@0.17.19': @@ -12237,7 +12236,7 @@ snapshots: '@esbuild/linux-ia32@0.21.5': optional: true - '@esbuild/linux-ia32@0.24.0': + '@esbuild/linux-ia32@0.24.2': optional: true '@esbuild/linux-loong64@0.17.19': @@ -12252,7 +12251,7 @@ snapshots: '@esbuild/linux-loong64@0.21.5': optional: true - '@esbuild/linux-loong64@0.24.0': + '@esbuild/linux-loong64@0.24.2': optional: true '@esbuild/linux-mips64el@0.17.19': @@ -12267,7 +12266,7 @@ snapshots: '@esbuild/linux-mips64el@0.21.5': optional: true - '@esbuild/linux-mips64el@0.24.0': + '@esbuild/linux-mips64el@0.24.2': optional: true '@esbuild/linux-ppc64@0.17.19': @@ -12282,7 +12281,7 @@ snapshots: '@esbuild/linux-ppc64@0.21.5': optional: true - '@esbuild/linux-ppc64@0.24.0': + '@esbuild/linux-ppc64@0.24.2': optional: true '@esbuild/linux-riscv64@0.17.19': @@ -12297,7 +12296,7 @@ snapshots: '@esbuild/linux-riscv64@0.21.5': optional: true - '@esbuild/linux-riscv64@0.24.0': + 
'@esbuild/linux-riscv64@0.24.2': optional: true '@esbuild/linux-s390x@0.17.19': @@ -12312,7 +12311,7 @@ snapshots: '@esbuild/linux-s390x@0.21.5': optional: true - '@esbuild/linux-s390x@0.24.0': + '@esbuild/linux-s390x@0.24.2': optional: true '@esbuild/linux-x64@0.17.19': @@ -12327,7 +12326,10 @@ snapshots: '@esbuild/linux-x64@0.21.5': optional: true - '@esbuild/linux-x64@0.24.0': + '@esbuild/linux-x64@0.24.2': + optional: true + + '@esbuild/netbsd-arm64@0.24.2': optional: true '@esbuild/netbsd-x64@0.17.19': @@ -12342,10 +12344,10 @@ snapshots: '@esbuild/netbsd-x64@0.21.5': optional: true - '@esbuild/netbsd-x64@0.24.0': + '@esbuild/netbsd-x64@0.24.2': optional: true - '@esbuild/openbsd-arm64@0.24.0': + '@esbuild/openbsd-arm64@0.24.2': optional: true '@esbuild/openbsd-x64@0.17.19': @@ -12360,7 +12362,7 @@ snapshots: '@esbuild/openbsd-x64@0.21.5': optional: true - '@esbuild/openbsd-x64@0.24.0': + '@esbuild/openbsd-x64@0.24.2': optional: true '@esbuild/sunos-x64@0.17.19': @@ -12375,7 +12377,7 @@ snapshots: '@esbuild/sunos-x64@0.21.5': optional: true - '@esbuild/sunos-x64@0.24.0': + '@esbuild/sunos-x64@0.24.2': optional: true '@esbuild/win32-arm64@0.17.19': @@ -12390,7 +12392,7 @@ snapshots: '@esbuild/win32-arm64@0.21.5': optional: true - '@esbuild/win32-arm64@0.24.0': + '@esbuild/win32-arm64@0.24.2': optional: true '@esbuild/win32-ia32@0.17.19': @@ -12405,7 +12407,7 @@ snapshots: '@esbuild/win32-ia32@0.21.5': optional: true - '@esbuild/win32-ia32@0.24.0': + '@esbuild/win32-ia32@0.24.2': optional: true '@esbuild/win32-x64@0.17.19': @@ -12420,7 +12422,7 @@ snapshots: '@esbuild/win32-x64@0.21.5': optional: true - '@esbuild/win32-x64@0.24.0': + '@esbuild/win32-x64@0.24.2': optional: true '@eslint-community/eslint-utils@4.4.0(eslint@8.57.0)': @@ -14161,7 +14163,7 @@ snapshots: '@vue/compiler-core': 3.3.4 '@vue/shared': 3.3.4 estree-walker: 2.0.2 - magic-string: 0.30.11 + magic-string: 0.30.14 '@vue/shared@3.3.4': {} @@ -14851,7 +14853,7 @@ snapshots: citty@0.1.6: dependencies: - consola: 3.2.3 + consola: 3.3.3 cjs-module-lexer@1.2.3: {} @@ -14997,7 +14999,7 @@ snapshots: ini: 1.3.8 proto-list: 1.2.4 - consola@3.2.3: {} + consola@3.3.3: {} console-control-strings@1.1.0: {} @@ -15109,7 +15111,7 @@ snapshots: css-declaration-sorter: 7.2.0(postcss@8.4.49) cssnano-utils: 5.0.0(postcss@8.4.49) postcss: 8.4.49 - postcss-calc: 10.0.2(postcss@8.4.49) + postcss-calc: 10.1.0(postcss@8.4.49) postcss-colormin: 7.0.2(postcss@8.4.49) postcss-convert-values: 7.0.4(postcss@8.4.49) postcss-discard-comments: 7.0.3(postcss@8.4.49) @@ -15720,32 +15722,33 @@ snapshots: '@esbuild/win32-ia32': 0.21.5 '@esbuild/win32-x64': 0.21.5 - esbuild@0.24.0: + esbuild@0.24.2: optionalDependencies: - '@esbuild/aix-ppc64': 0.24.0 - '@esbuild/android-arm': 0.24.0 - '@esbuild/android-arm64': 0.24.0 - '@esbuild/android-x64': 0.24.0 - '@esbuild/darwin-arm64': 0.24.0 - '@esbuild/darwin-x64': 0.24.0 - '@esbuild/freebsd-arm64': 0.24.0 - '@esbuild/freebsd-x64': 0.24.0 - '@esbuild/linux-arm': 0.24.0 - '@esbuild/linux-arm64': 0.24.0 - '@esbuild/linux-ia32': 0.24.0 - '@esbuild/linux-loong64': 0.24.0 - '@esbuild/linux-mips64el': 0.24.0 - '@esbuild/linux-ppc64': 0.24.0 - '@esbuild/linux-riscv64': 0.24.0 - '@esbuild/linux-s390x': 0.24.0 - '@esbuild/linux-x64': 0.24.0 - '@esbuild/netbsd-x64': 0.24.0 - '@esbuild/openbsd-arm64': 0.24.0 - '@esbuild/openbsd-x64': 0.24.0 - '@esbuild/sunos-x64': 0.24.0 - '@esbuild/win32-arm64': 0.24.0 - '@esbuild/win32-ia32': 0.24.0 - '@esbuild/win32-x64': 0.24.0 + '@esbuild/aix-ppc64': 0.24.2 + '@esbuild/android-arm': 
0.24.2 + '@esbuild/android-arm64': 0.24.2 + '@esbuild/android-x64': 0.24.2 + '@esbuild/darwin-arm64': 0.24.2 + '@esbuild/darwin-x64': 0.24.2 + '@esbuild/freebsd-arm64': 0.24.2 + '@esbuild/freebsd-x64': 0.24.2 + '@esbuild/linux-arm': 0.24.2 + '@esbuild/linux-arm64': 0.24.2 + '@esbuild/linux-ia32': 0.24.2 + '@esbuild/linux-loong64': 0.24.2 + '@esbuild/linux-mips64el': 0.24.2 + '@esbuild/linux-ppc64': 0.24.2 + '@esbuild/linux-riscv64': 0.24.2 + '@esbuild/linux-s390x': 0.24.2 + '@esbuild/linux-x64': 0.24.2 + '@esbuild/netbsd-arm64': 0.24.2 + '@esbuild/netbsd-x64': 0.24.2 + '@esbuild/openbsd-arm64': 0.24.2 + '@esbuild/openbsd-x64': 0.24.2 + '@esbuild/sunos-x64': 0.24.2 + '@esbuild/win32-arm64': 0.24.2 + '@esbuild/win32-ia32': 0.24.2 + '@esbuild/win32-x64': 0.24.2 escalade@3.1.2: {} @@ -17050,9 +17053,9 @@ snapshots: dependencies: relative-time-format: 1.1.4 - jiti@1.21.6: {} + jiti@1.21.7: {} - jiti@2.4.1: {} + jiti@2.4.2: {} jju@1.4.0: {} @@ -17514,8 +17517,8 @@ snapshots: citty: 0.1.6 cssnano: 7.0.6(postcss@8.4.49) defu: 6.1.4 - esbuild: 0.24.0 - jiti: 1.21.6 + esbuild: 0.24.2 + jiti: 1.21.7 mlly: 1.7.3 pathe: 1.1.2 pkg-types: 1.2.1 @@ -18183,10 +18186,10 @@ snapshots: possible-typed-array-names@1.0.0: {} - postcss-calc@10.0.2(postcss@8.4.49): + postcss-calc@10.1.0(postcss@8.4.49): dependencies: postcss: 8.4.49 - postcss-selector-parser: 6.1.2 + postcss-selector-parser: 7.0.0 postcss-value-parser: 4.2.0 postcss-colormin@7.0.2(postcss@8.4.49): @@ -18331,6 +18334,11 @@ snapshots: cssesc: 3.0.0 util-deprecate: 1.0.2 + postcss-selector-parser@7.0.0: + dependencies: + cssesc: 3.0.0 + util-deprecate: 1.0.2 + postcss-svgo@7.0.1(postcss@8.4.49): dependencies: postcss: 8.4.49 @@ -19734,12 +19742,12 @@ snapshots: '@rollup/pluginutils': 5.1.0(rollup@3.29.5) chalk: 5.3.0 citty: 0.1.6 - consola: 3.2.3 + consola: 3.3.3 defu: 6.1.4 esbuild: 0.19.12 globby: 13.2.2 hookable: 5.5.3 - jiti: 1.21.6 + jiti: 1.21.7 magic-string: 0.30.14 mkdist: 1.6.0(typescript@5.6.3)(vue-tsc@2.0.29(typescript@5.6.3)) mlly: 1.7.3 @@ -19817,7 +19825,7 @@ snapshots: '@babel/types': 7.26.3 citty: 0.1.6 defu: 6.1.4 - jiti: 2.4.1 + jiti: 2.4.2 knitwork: 1.2.0 scule: 1.3.0 transitivePeerDependencies: @@ -19930,6 +19938,23 @@ snapshots: - supports-color - terser + vite-node@2.1.8(@types/node@22.10.2): + dependencies: + cac: 6.7.14 + debug: 4.3.7(supports-color@9.2.2) + es-module-lexer: 1.5.4 + pathe: 1.1.2 + vite: 5.0.12(@types/node@22.10.2) + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + vite-plugin-dts@4.0.1(@types/node@18.19.59)(rollup@4.9.6)(typescript@5.6.3)(vite@5.0.12(@types/node@18.19.59)): dependencies: '@microsoft/api-extractor': 7.47.4(@types/node@18.19.59) @@ -19970,6 +19995,15 @@ snapshots: '@types/node': 18.19.59 fsevents: 2.3.3 + vite@5.0.12(@types/node@22.10.2): + dependencies: + esbuild: 0.19.12 + postcss: 8.4.33 + rollup: 4.9.6 + optionalDependencies: + '@types/node': 22.10.2 + fsevents: 2.3.3 + vitest-websocket-mock@0.4.0(vitest@2.1.8): dependencies: '@vitest/utils': 2.1.3 @@ -20011,6 +20045,41 @@ snapshots: - supports-color - terser + vitest@2.1.8(@types/node@22.10.2)(@vitest/ui@2.1.8): + dependencies: + '@vitest/expect': 2.1.8 + '@vitest/mocker': 2.1.8(msw@2.4.3(typescript@5.6.3))(vite@5.0.12(@types/node@18.19.59)) + '@vitest/pretty-format': 2.1.8 + '@vitest/runner': 2.1.8 + '@vitest/snapshot': 2.1.8 + '@vitest/spy': 2.1.8 + '@vitest/utils': 2.1.8 + chai: 5.1.2 + debug: 4.3.7(supports-color@9.2.2) + expect-type: 1.1.0 + 
magic-string: 0.30.14 + pathe: 1.1.2 + std-env: 3.8.0 + tinybench: 2.9.0 + tinyexec: 0.3.1 + tinypool: 1.0.1 + tinyrainbow: 1.2.0 + vite: 5.0.12(@types/node@22.10.2) + vite-node: 2.1.8(@types/node@22.10.2) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 22.10.2 + '@vitest/ui': 2.1.8(vitest@2.1.8) + transitivePeerDependencies: + - less + - lightningcss + - msw + - sass + - stylus + - sugarss + - supports-color + - terser + vsce@2.15.0: dependencies: azure-devops-node-api: 11.2.0 @@ -20177,25 +20246,17 @@ snapshots: '@cloudflare/workerd-linux-arm64': 1.20241106.1 '@cloudflare/workerd-windows-64': 1.20241106.1 - workerd@1.20241216.0: - optionalDependencies: - '@cloudflare/workerd-darwin-64': 1.20241216.0 - '@cloudflare/workerd-darwin-arm64': 1.20241216.0 - '@cloudflare/workerd-linux-64': 1.20241216.0 - '@cloudflare/workerd-linux-arm64': 1.20241216.0 - '@cloudflare/workerd-windows-64': 1.20241216.0 - - workerd@1.20241218.0: + workerd@1.20241230.0: optionalDependencies: - '@cloudflare/workerd-darwin-64': 1.20241218.0 - '@cloudflare/workerd-darwin-arm64': 1.20241218.0 - '@cloudflare/workerd-linux-64': 1.20241218.0 - '@cloudflare/workerd-linux-arm64': 1.20241218.0 - '@cloudflare/workerd-windows-64': 1.20241218.0 + '@cloudflare/workerd-darwin-64': 1.20241230.0 + '@cloudflare/workerd-darwin-arm64': 1.20241230.0 + '@cloudflare/workerd-linux-64': 1.20241230.0 + '@cloudflare/workerd-linux-arm64': 1.20241230.0 + '@cloudflare/workerd-windows-64': 1.20241230.0 workerpool@6.5.1: {} - wrangler@3.90.0(@cloudflare/workers-types@4.20241218.0): + wrangler@3.90.0(@cloudflare/workers-types@4.20241230.0): dependencies: '@cloudflare/kv-asset-handler': 0.3.4 '@cloudflare/workers-shared': 0.8.0 @@ -20217,7 +20278,7 @@ snapshots: workerd: 1.20241106.1 xxhash-wasm: 1.0.1 optionalDependencies: - '@cloudflare/workers-types': 4.20241218.0 + '@cloudflare/workers-types': 4.20241230.0 fsevents: 2.3.3 transitivePeerDependencies: - bufferutil diff --git a/tools/e2e/__tests__/common.test.ts b/tools/e2e/__tests__/common.test.ts index 264b12b9b8fa..e68ec6db5f0a 100644 --- a/tools/e2e/__tests__/common.test.ts +++ b/tools/e2e/__tests__/common.test.ts @@ -1,10 +1,14 @@ import { getGlobalDispatcher, MockAgent, setGlobalDispatcher } from "undici"; import { afterEach, beforeEach, describe, it } from "vitest"; import { + deleteDatabase, + deleteKVNamespace, deleteProject, deleteWorker, + listTmpDatabases, listTmpE2EProjects, listTmpE2EWorkers, + listTmpKVNamespaces, } from "../common"; const originalAccountID = process.env.CLOUDFLARE_ACCOUNT_ID; @@ -108,6 +112,160 @@ describe("deleteProject()", () => { }); }); +describe("listTmpKVNamespaces()", () => { + it("makes a REST request and returns a filtered list of kv namespaces", async ({ + expect, + }) => { + agent + .get("https://api.cloudflare.com") + .intercept({ + path: `/client/v4/accounts/${MOCK_CLOUDFLARE_ACCOUNT_ID}/storage/kv/namespaces`, + method: "GET", + query: { + per_page: 100, + page: 1, + direction: "asc", + order: "title", + }, + }) + .reply( + 200, + JSON.stringify({ + result: [ + { id: "kv-tmp-e2e", title: "kv-1" }, + { id: "kv-2", title: "kv-2" }, + { id: "tmp_e2e", title: "kv-3" }, + { id: "kv-4", title: "kv-4" }, + { id: "kv-5", title: "kv-5" }, + { id: "kv-6", title: "kv-6" }, + { id: "tmp_e2e_kv", title: "kv-7" }, + { id: "kv-8", title: "kv-8" }, + { id: "kv-9", title: "kv-9" }, + { id: "kv-10", title: "kv-10" }, + ...Array(90).fill({ id: "kv-10", title: "kv-10" }), + ], + }) + ); + agent + .get("https://api.cloudflare.com") + .intercept({ + 
path: `/client/v4/accounts/${MOCK_CLOUDFLARE_ACCOUNT_ID}/storage/kv/namespaces`, + method: "GET", + query: { + per_page: 100, + page: 2, + direction: "asc", + order: "title", + }, + }) + .reply( + 200, + JSON.stringify({ + result: [{ id: "kv-tmp-e2e-11", title: "kv-11" }], + }) + ); + + const result = await listTmpKVNamespaces(); + + expect(result.map((p) => p.id)).toMatchInlineSnapshot(`[]`); + }); +}); + +describe("deleteKVNamespace()", () => { + it("makes a REST request to delete the given KV namespace", async () => { + const MOCK_KV = "tmp_e2e_kv"; + agent + .get("https://api.cloudflare.com") + .intercept({ + path: `/client/v4/accounts/${MOCK_CLOUDFLARE_ACCOUNT_ID}/storage/kv/namespaces/${MOCK_KV}`, + method: "DELETE", + }) + .reply(200, JSON.stringify({ result: [] })); + await deleteKVNamespace(MOCK_KV); + }); +}); + +describe("listTmpDatabases()", () => { + it("makes a REST request and returns a filtered list of d1 databases", async ({ + expect, + }) => { + agent + .get("https://api.cloudflare.com") + .intercept({ + path: `/client/v4/accounts/${MOCK_CLOUDFLARE_ACCOUNT_ID}/d1/database`, + method: "GET", + query: { + per_page: 100, + page: 1, + }, + }) + .reply( + 200, + JSON.stringify({ + result: [ + { uuid: "1", name: "db-1", created_at: nowStr }, + { uuid: "2", name: "db-2", created_at: oldTimeStr }, + { uuid: "3", name: "tmp-e2e-db-1", created_at: nowStr }, + { uuid: "4", name: "tmp-e2e-db-2", created_at: oldTimeStr }, + { uuid: "5", name: "db-3", created_at: nowStr }, + { uuid: "6", name: "db-4", created_at: oldTimeStr }, + { uuid: "7", name: "tmp-e2e-db-3", created_at: nowStr }, + { uuid: "8", name: "tmp-e2e-db-4", created_at: oldTimeStr }, + { uuid: "9", name: "db-5", created_at: nowStr }, + { uuid: "10", name: "db-6", created_at: oldTimeStr }, + ...Array(90).fill({ + uuid: "10", + name: "db-6", + created_at: oldTimeStr, + }), + ], + }) + ); + agent + .get("https://api.cloudflare.com") + .intercept({ + path: `/client/v4/accounts/${MOCK_CLOUDFLARE_ACCOUNT_ID}/d1/database`, + method: "GET", + query: { + per_page: 100, + page: 2, + }, + }) + .reply( + 200, + JSON.stringify({ + result: [ + { uuid: "11", name: "db-11", created_at: nowStr }, + { uuid: "12", name: "db-12", created_at: oldTimeStr }, + ], + }) + ); + + const result = await listTmpDatabases(); + + expect(result.map((p) => p.name)).toMatchInlineSnapshot(` + [ + "tmp-e2e-db-2", + "tmp-e2e-db-4", + ] + `); + }); +}); + +describe("deleteDatabase()", () => { + it("makes a REST request to delete the given database", async () => { + const MOCK_DB = "tmp-e2e-db"; + agent + .get("https://api.cloudflare.com") + .intercept({ + path: `/client/v4/accounts/${MOCK_CLOUDFLARE_ACCOUNT_ID}/d1/database/${MOCK_DB}`, + method: "DELETE", + }) + .reply(200, JSON.stringify({ result: [] })); + await deleteDatabase(MOCK_DB); + }); +}); + describe("listTmpE2EWorkers()", () => { it("makes a REST request and returns a filtered list of workers", async ({ expect, diff --git a/tools/e2e/common.ts b/tools/e2e/common.ts index 4c6288d6adce..dfd8e60310fd 100644 --- a/tools/e2e/common.ts +++ b/tools/e2e/common.ts @@ -19,6 +19,17 @@ export type Worker = { created_on: string; }; +export interface KVNamespaceInfo { + id: string; + title: string; +} + +export type Database = { + created_at: string; + uuid: string; + name: string; +}; + class ApiError extends Error { constructor( readonly url: string, @@ -38,29 +49,45 @@ class FatalError extends Error { const apiFetch = async ( path: string, init = { method: "GET" }, + failSilently = false, queryParams = {} ) => { -
const baseUrl = `https://api.cloudflare.com/client/v4/accounts/${process.env.CLOUDFLARE_ACCOUNT_ID}`; - let queryString = new URLSearchParams(queryParams).toString(); - if (queryString) { - queryString = "?" + queryString; - } - const url = `${baseUrl}${path}${queryString}`; + try { + const baseUrl = `https://api.cloudflare.com/client/v4/accounts/${process.env.CLOUDFLARE_ACCOUNT_ID}`; + let queryString = new URLSearchParams(queryParams).toString(); + if (queryString) { + queryString = "?" + queryString; + } + const url = `${baseUrl}${path}${queryString}`; - const response = await fetch(url, { - ...init, - headers: { - Authorization: `Bearer ${process.env.CLOUDFLARE_API_TOKEN}`, - }, - }); + const response = await fetch(url, { + ...init, + headers: { + Authorization: `Bearer ${process.env.CLOUDFLARE_API_TOKEN}`, + }, + }); - if (response.status >= 400) { - throw { url, init, response }; - } + if (response.status >= 400) { + // Surface HTTP failures as ApiError so the catch below can log the status and body + throw new ApiError(url, init, response); + } - const json = (await response.json()) as ApiSuccessBody; + const json = (await response.json()) as ApiSuccessBody; - return json.result; + return json.result; + } catch (e) { + // failSilently is set by the best-effort delete helpers, which ignore API errors + if (failSilently) { + return; + } + if (e instanceof ApiError) { + console.error(e.url, e.init); + console.error(`(${e.response.status}) ${e.response.statusText}`); + const body = (await e.response.json()) as ApiErrorBody; + console.error(body.errors); + } else { + console.error(e); + } + throw new FatalError(1); + } }; export const listTmpE2EProjects = async () => { @@ -69,31 +96,14 @@ export const listTmpE2EProjects = async () => { const projects: Project[] = []; while (projects.length % pageSize === 0) { - try { - const res = (await apiFetch( - `/pages/projects`, - { method: "GET" }, - { - per_page: pageSize, - page, - } - )) as Project[]; - projects.push(...res); - page++; - if (res.length < pageSize) { - break; - } - } catch (e) { - if (e instanceof ApiError) { - console.error("Failed to fetch project list"); - console.error(e.url, e.init); - console.error(`(${e.response.status}) ${e.response.statusText}`); - const body = (await e.response.json()) as ApiErrorBody; - console.error(body.errors); - } else { - console.error(e); - } - throw new FatalError(1); + const res = (await apiFetch(`/pages/projects`, { method: "GET" }, false, { + per_page: pageSize, + page, + })) as Project[]; + projects.push(...res); + page++; + if (res.length < pageSize) { + break; } } @@ -106,44 +116,108 @@ export const listTmpE2EProjects = async () => { }; export const deleteProject = async (project: string) => { - try { - await apiFetch(`/pages/projects/${project}`, { + await apiFetch( + `/pages/projects/${project}`, + { method: "DELETE", - }); - } catch { - // Ignore errors - } + }, + true + ); }; export const listTmpE2EWorkers = async () => { - try { - const res = (await apiFetch(`/workers/scripts`, { - method: "GET", - })) as Worker[]; - return res.filter( - (p) => - p.id.startsWith("tmp-e2e-") && - // Workers are more than an hour old - Date.now() - new Date(p.created_on).valueOf() > 1000 * 60 * 60 - ); - } catch (e) { - if (e instanceof ApiError) { - console.error("Failed to fetch workers list"); - console.error(e.url, e.init); - console.error(`(${e.response.status}) ${e.response.statusText}`); - const body = (await e.response.json()) as ApiErrorBody; - console.error(body.errors); - } else { - console.error(e); + const res = (await apiFetch(`/workers/scripts`, { + method: "GET", + })) as Worker[]; + return res.filter( + (p) => + p.id.startsWith("tmp-e2e-") && + // Workers are
more than an hour old + Date.now() - new Date(p.created_on).valueOf() > 1000 * 60 * 60 + ); +}; + +export const deleteWorker = async (id: string) => { + await apiFetch( + `/workers/scripts/${id}`, + { + method: "DELETE", + }, + true + ); +}; + +export const listTmpKVNamespaces = async () => { + const pageSize = 100; + let page = 1; + const results: KVNamespaceInfo[] = []; + while (results.length % pageSize === 0) { + const res = (await apiFetch( + `/storage/kv/namespaces`, + { method: "GET" }, + false, + new URLSearchParams({ + per_page: pageSize.toString(), + order: "title", + direction: "asc", + page: page.toString(), + }) + )) as KVNamespaceInfo[]; + page++; + results.push(...res); + if (res.length < pageSize || page > 5) { + break; } - throw new FatalError(1); } + return results.filter( + (kv) => kv.title.includes("tmp-e2e") || kv.title.includes("tmp_e2e") + ); }; -export const deleteWorker = async (id: string) => { - try { - await apiFetch(`/workers/scripts/${id}`, { +export const deleteKVNamespace = async (id: string) => { + await apiFetch( + `/storage/kv/namespaces/${id}`, + { method: "DELETE", - }); - } catch {} + }, + true + ); +}; + +export const listTmpDatabases = async () => { + const pageSize = 100; + let page = 1; + const results: Database[] = []; + while (results.length % pageSize === 0) { + const res = (await apiFetch( + `/d1/database`, + { method: "GET" }, + false, + new URLSearchParams({ + per_page: pageSize.toString(), + page: page.toString(), + }) + )) as Database[]; + page++; + results.push(...res); + + if (res.length < pageSize || page > 5) { + break; + } + } + return results.filter( + (db) => + db.name.includes("tmp-e2e") && // Databases are more than an hour old + Date.now() - new Date(db.created_at).valueOf() > 1000 * 60 * 60 + ); +}; + +export const deleteDatabase = async (id: string) => { + await apiFetch( + `/d1/database/${id}`, + { + method: "DELETE", + }, + true + ); }; diff --git a/tools/e2e/e2eCleanup.ts b/tools/e2e/e2eCleanup.ts index cc0e9d381b53..c6df77c88a7c 100644 --- a/tools/e2e/e2eCleanup.ts +++ b/tools/e2e/e2eCleanup.ts @@ -1,8 +1,12 @@ import { + deleteDatabase, + deleteKVNamespace, deleteProject, deleteWorker, + listTmpDatabases, listTmpE2EProjects, listTmpE2EWorkers, + listTmpKVNamespaces, } from "./common"; if (!process.env.CLOUDFLARE_API_TOKEN) { @@ -22,6 +26,22 @@ run().catch((e) => { }); async function run() { + // KV namespaces don't have a creation timestamp, but deleting one fails if a Worker is still bound to it, + // so delete these first to avoid interrupting running e2e jobs (unless you are very very unlucky) + const kvNamespacesToDelete = await listTmpKVNamespaces(); + for (const kvNamespace of kvNamespacesToDelete) { + console.log("Deleting KV namespace: " + kvNamespace.title); + await deleteKVNamespace(kvNamespace.id); + } + + if (kvNamespacesToDelete.length === 0) { + console.log(`No KV namespaces to delete.`); + } else { + console.log( + `Successfully deleted ${kvNamespacesToDelete.length} KV namespaces` + ); + } + const projectsToDelete = await listTmpE2EProjects(); for (const project of projectsToDelete) { @@ -47,4 +67,17 @@ async function run() { } else { console.log(`Successfully deleted ${workersToDelete.length} workers`); } + + const d1DatabasesToDelete = await listTmpDatabases(); + for (const db of d1DatabasesToDelete) { + console.log("Deleting D1 database: " + db.name); + await deleteDatabase(db.uuid); + } + if (d1DatabasesToDelete.length === 0) { + console.log(`No D1 databases to delete.`); + } else { + console.log(
`Successfully deleted ${d1DatabasesToDelete.length} D1 databases` + ); + } }