From 6508ea21c396beff8a2de987eaf335bb46bdd89c Mon Sep 17 00:00:00 2001 From: Andy Jessop Date: Tue, 19 Nov 2024 11:36:35 +0100 Subject: [PATCH 1/9] fix: ensure Vitest Pool Workers doesn't error with nodejs_compat_v2 flag (#7278) * fix: ensure Vitest Pool Workers doesn't error with nodejs_compat_v2 flag * chore: remove spurious files * chore: add changeset * chore: consolidate two methods into one * chore: better naming * chore: remove unnecessary comment --- .changeset/hot-dolphins-obey.md | 5 + .../src/pool/compatibility-flag-assertions.ts | 164 +++++++++++ .../vitest-pool-workers/src/pool/index.ts | 101 ++----- .../compatibility-flag-assertions.test.ts | 264 ++++++++++++++++++ .../test/validation.test.ts | 83 ------ 5 files changed, 459 insertions(+), 158 deletions(-) create mode 100644 .changeset/hot-dolphins-obey.md create mode 100644 packages/vitest-pool-workers/src/pool/compatibility-flag-assertions.ts create mode 100644 packages/vitest-pool-workers/test/compatibility-flag-assertions.test.ts diff --git a/.changeset/hot-dolphins-obey.md b/.changeset/hot-dolphins-obey.md new file mode 100644 index 000000000000..7bb9fed4c6e0 --- /dev/null +++ b/.changeset/hot-dolphins-obey.md @@ -0,0 +1,5 @@ +--- +"@cloudflare/vitest-pool-workers": patch +--- + +fix: ensures Vitest Pool Workers doesn't error when using nodejs_compat_v2 flag diff --git a/packages/vitest-pool-workers/src/pool/compatibility-flag-assertions.ts b/packages/vitest-pool-workers/src/pool/compatibility-flag-assertions.ts new file mode 100644 index 000000000000..287ec3f53d67 --- /dev/null +++ b/packages/vitest-pool-workers/src/pool/compatibility-flag-assertions.ts @@ -0,0 +1,164 @@ +/** + * The `CompatibilityFlagAssertions` class provides methods to validate compatibility flags and dates + * within a project's configuration. It ensures that specific flags are either present + * or absent and that compatibility dates meet the required criteria. + */ +export class CompatibilityFlagAssertions { + #compatibilityDate?: string; + #compatibilityFlags: string[]; + #optionsPath: string; + #relativeProjectPath: string; + #relativeWranglerConfigPath?: string; + + constructor(options: CommonOptions) { + this.#compatibilityDate = options.compatibilityDate; + this.#compatibilityFlags = options.compatibilityFlags; + this.#optionsPath = options.optionsPath; + this.#relativeProjectPath = options.relativeProjectPath; + this.#relativeWranglerConfigPath = options.relativeWranglerConfigPath; + } + + /** + * Checks if a specific flag is present in the compatibilityFlags array. + */ + #flagExists(flag: string): boolean { + return this.#compatibilityFlags.includes(flag); + } + + /** + * Constructs the base of the error message. + * + * @example + * In project /path/to/project + * + * @example + * In project /path/to/project's configuration file wrangler.toml + */ + #buildErrorMessageBase(): string { + let message = `In project ${this.#relativeProjectPath}`; + if (this.#relativeWranglerConfigPath) { + message += `'s configuration file ${this.#relativeWranglerConfigPath}`; + } + return message; + } + + /** + * Constructs the configuration path part of the error message. 
+ */ + #buildConfigPath(setting: string): string { + if (this.#relativeWranglerConfigPath) { + return `\`${setting}\``; + } + + const camelCaseSetting = setting.replace(/_(\w)/g, (_, letter) => + letter.toUpperCase() + ); + + return `\`${this.#optionsPath}.${camelCaseSetting}\``; + } + + /** + * Ensures that a specific enable flag is present or that the compatibility date meets the required date. + */ + assertIsEnabled({ + enableFlag, + disableFlag, + defaultOnDate, + }: { + enableFlag: string; + disableFlag: string; + defaultOnDate?: string; + }): AssertionResult { + // If it's disabled by this flag, we can return early. + if (this.#flagExists(disableFlag)) { + const errorMessage = `${this.#buildErrorMessageBase()}, ${this.#buildConfigPath( + "compatibility_flags" + )} must not contain "${disableFlag}".\nThis flag is incompatible with \`@cloudflare/vitest-pool-workers\`.`; + return { isValid: false, errorMessage }; + } + + const enableFlagPresent = this.#flagExists(enableFlag); + const dateSufficient = isDateSufficient( + this.#compatibilityDate, + defaultOnDate + ); + + if (!enableFlagPresent && !dateSufficient) { + let errorMessage = `${this.#buildErrorMessageBase()}, ${this.#buildConfigPath( + "compatibility_flags" + )} must contain "${enableFlag}"`; + + if (defaultOnDate) { + errorMessage += `, or ${this.#buildConfigPath( + "compatibility_date" + )} must be >= "${defaultOnDate}".`; + } + + errorMessage += `\nThis flag is required to use \`@cloudflare/vitest-pool-workers\`.`; + + return { isValid: false, errorMessage }; + } + + return { isValid: true }; + } + + /** + * Ensures that a any one of a given set of flags is present in the compatibility_flags array. + */ + assertAtLeastOneFlagExists(flags: string[]): AssertionResult { + if (flags.length === 0 || flags.some((flag) => this.#flagExists(flag))) { + return { isValid: true }; + } + + const errorMessage = `${this.#buildErrorMessageBase()}, ${this.#buildConfigPath( + "compatibility_flags" + )} must contain one of ${flags.map((flag) => `"${flag}"`).join("/")}.\nEither one of these flags is required to use \`@cloudflare/vitest-pool-workers\`.`; + + return { isValid: false, errorMessage }; + } +} + +/** + * Common options used across all assertion methods. + */ +interface CommonOptions { + compatibilityDate?: string; + compatibilityFlags: string[]; + optionsPath: string; + relativeProjectPath: string; + relativeWranglerConfigPath?: string; +} + +/** + * Result of an assertion method. + */ +interface AssertionResult { + isValid: boolean; + errorMessage?: string; +} + +/** + * Parses a date string into a Date object. + */ +function parseDate(dateStr: string): Date { + const date = new Date(dateStr); + if (isNaN(date.getTime())) { + throw new Error(`Invalid date format: "${dateStr}"`); + } + return date; +} + +/** + * Checks if the compatibility date meets or exceeds the required date. 
+ */ +function isDateSufficient( + compatibilityDate?: string, + defaultOnDate?: string +): boolean { + if (!compatibilityDate || !defaultOnDate) { + return false; + } + const compDate = parseDate(compatibilityDate); + const reqDate = parseDate(defaultOnDate); + return compDate >= reqDate; +} diff --git a/packages/vitest-pool-workers/src/pool/index.ts b/packages/vitest-pool-workers/src/pool/index.ts index 4da971af9b34..19ec83497c36 100644 --- a/packages/vitest-pool-workers/src/pool/index.ts +++ b/packages/vitest-pool-workers/src/pool/index.ts @@ -23,6 +23,7 @@ import { import semverSatisfies from "semver/functions/satisfies.js"; import { createMethodsRPC } from "vitest/node"; import { createChunkingSocket } from "../shared/chunking-socket"; +import { CompatibilityFlagAssertions } from "./compatibility-flag-assertions"; import { OPTIONS_PATH, parseProjectOptions } from "./config"; import { getProjectPath, @@ -319,68 +320,6 @@ const SELF_SERVICE_BINDING = "__VITEST_POOL_WORKERS_SELF_SERVICE"; const LOOPBACK_SERVICE_BINDING = "__VITEST_POOL_WORKERS_LOOPBACK_SERVICE"; const RUNNER_OBJECT_BINDING = "__VITEST_POOL_WORKERS_RUNNER_OBJECT"; -const numericCompare = new Intl.Collator("en", { numeric: true }).compare; - -interface CompatibilityFlagCheckOptions { - // Context to check against - compatibilityFlags: string[]; - compatibilityDate?: string; - relativeProjectPath: string | number; - relativeWranglerConfigPath?: string; - - // Details on flag to check - enableFlag: string; - disableFlag?: string; - defaultOnDate?: string; -} -function assertCompatibilityFlagEnabled(opts: CompatibilityFlagCheckOptions) { - const hasWranglerConfig = opts.relativeWranglerConfigPath !== undefined; - - // Check disable flag (if any) not enabled - if ( - opts.disableFlag !== undefined && - opts.compatibilityFlags.includes(opts.disableFlag) - ) { - let message = `In project ${opts.relativeProjectPath}`; - if (hasWranglerConfig) { - message += `'s configuration file ${opts.relativeWranglerConfigPath}, \`compatibility_flags\` must not contain "${opts.disableFlag}".\nSimilarly`; - // Since the config is merged by this point, we don't know where the - // disable flag came from. So we include both possible locations in the - // error message. Note the enable-flag case doesn't have this problem, as - // we're asking the user to add something to *either* of their configs. 
- } - message += - `, \`${OPTIONS_PATH}.miniflare.compatibilityFlags\` must not contain "${opts.disableFlag}".\n` + - "This flag is incompatible with `@cloudflare/vitest-pool-workers`."; - throw new Error(message); - } - - // Check flag enabled or compatibility date enables flag by default - const enabledByFlag = opts.compatibilityFlags.includes(opts.enableFlag); - const enabledByDate = - opts.compatibilityDate !== undefined && - opts.defaultOnDate !== undefined && - numericCompare(opts.compatibilityDate, opts.defaultOnDate) >= 0; - if (!(enabledByFlag || enabledByDate)) { - let message = `In project ${opts.relativeProjectPath}`; - if (hasWranglerConfig) { - message += `'s configuration file ${opts.relativeWranglerConfigPath}, \`compatibility_flags\` must contain "${opts.enableFlag}"`; - } else { - message += `, \`${OPTIONS_PATH}.miniflare.compatibilityFlags\` must contain "${opts.enableFlag}"`; - } - if (opts.defaultOnDate !== undefined) { - if (hasWranglerConfig) { - message += `, or \`compatibility_date\` must be >= "${opts.defaultOnDate}"`; - } else { - message += `, or \`${OPTIONS_PATH}.miniflare.compatibilityDate\` must be >= "${opts.defaultOnDate}"`; - } - } - message += - ".\nThis flag is required to use `@cloudflare/vitest-pool-workers`."; - throw new Error(message); - } -} - function buildProjectWorkerOptions( project: Omit ): ProjectWorkers { @@ -400,24 +339,36 @@ function buildProjectWorkerOptions( // of the libraries it depends on expect `require()` to return // `module.exports` directly, rather than `{ default: module.exports }`. runnerWorker.compatibilityFlags ??= []; - assertCompatibilityFlagEnabled({ - compatibilityFlags: runnerWorker.compatibilityFlags, + + const flagAssertions = new CompatibilityFlagAssertions({ compatibilityDate: runnerWorker.compatibilityDate, - relativeProjectPath: project.relativePath, - relativeWranglerConfigPath, - // https://developers.cloudflare.com/workers/configuration/compatibility-dates/#commonjs-modules-do-not-export-a-module-namespace - enableFlag: "export_commonjs_default", - disableFlag: "export_commonjs_namespace", - defaultOnDate: "2022-10-31", - }); - assertCompatibilityFlagEnabled({ compatibilityFlags: runnerWorker.compatibilityFlags, - compatibilityDate: runnerWorker.compatibilityDate, - relativeProjectPath: project.relativePath, + optionsPath: `${OPTIONS_PATH}.miniflare`, + relativeProjectPath: project.relativePath.toString(), relativeWranglerConfigPath, - enableFlag: "nodejs_compat", }); + const assertions = [ + () => + flagAssertions.assertIsEnabled({ + enableFlag: "export_commonjs_default", + disableFlag: "export_commonjs_namespace", + defaultOnDate: "2022-10-31", + }), + () => + flagAssertions.assertAtLeastOneFlagExists([ + "nodejs_compat", + "nodejs_compat_v2", + ]), + ]; + + for (const assertion of assertions) { + const result = assertion(); + if (!result.isValid) { + throw new Error(result.errorMessage); + } + } + // Required for `workerd:unsafe` module. We don't require this flag to be set // as it's experimental, so couldn't be deployed by users. 
if (!runnerWorker.compatibilityFlags.includes("unsafe_module")) { diff --git a/packages/vitest-pool-workers/test/compatibility-flag-assertions.test.ts b/packages/vitest-pool-workers/test/compatibility-flag-assertions.test.ts new file mode 100644 index 000000000000..35c935791361 --- /dev/null +++ b/packages/vitest-pool-workers/test/compatibility-flag-assertions.test.ts @@ -0,0 +1,264 @@ +import { describe, expect, it } from "vitest"; +import { CompatibilityFlagAssertions } from "../src/pool/compatibility-flag-assertions"; + +describe("FlagAssertions", () => { + const baseOptions = { + optionsPath: "options", + relativeProjectPath: "/path/to/project", + }; + describe("assertDisableFlagNotPresent", () => { + it("returns error message when the flag is present", () => { + const options = { + ...baseOptions, + compatibilityFlags: ["disable-flag", "another-flag"], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertIsEnabled({ + disableFlag: "disable-flag", + enableFlag: "enable-flag", + }); + expect(result.isValid).toBe(false); + expect(result.errorMessage).toBe( + 'In project /path/to/project, `options.compatibilityFlags` must not contain "disable-flag".\nThis flag is incompatible with `@cloudflare/vitest-pool-workers`.' + ); + }); + + it("includes relativeWranglerConfigPath in error message when provided", () => { + const options = { + ...baseOptions, + compatibilityFlags: ["disable-flag"], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertIsEnabled({ + disableFlag: "disable-flag", + enableFlag: "enable-flag", + }); + expect(result.isValid).toBe(false); + expect(result.errorMessage).toBe( + 'In project /path/to/project, `options.compatibilityFlags` must not contain "disable-flag".\nThis flag is incompatible with `@cloudflare/vitest-pool-workers`.' + ); + }); + + it("correctly formats error message when relative Wrangler configPath is present", () => { + const options = { + ...baseOptions, + compatibilityFlags: ["disable-flag"], + relativeWranglerConfigPath: "wrangler.toml", + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertIsEnabled({ + disableFlag: "disable-flag", + enableFlag: "enable-flag", + }); + expect(result.isValid).toBe(false); + expect(result.errorMessage).toBe( + 'In project /path/to/project\'s configuration file wrangler.toml, `compatibility_flags` must not contain "disable-flag".\nThis flag is incompatible with `@cloudflare/vitest-pool-workers`.' 
+ ); + }); + }); + + describe("assertEnableFlagOrCompatibilityDate", () => { + it("returns true when the flag is present", () => { + const options = { + ...baseOptions, + compatibilityDate: "2022-12-31", + compatibilityFlags: ["enable-flag"], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertIsEnabled({ + defaultOnDate: "2023-01-01", + disableFlag: "disable-flag", + enableFlag: "enable-flag", + }); + expect(result.isValid).toBe(true); + }); + + it("returns true when compatibility date is sufficient", () => { + const options = { + ...baseOptions, + compatibilityDate: "2023-01-02", + compatibilityFlags: [], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertIsEnabled({ + disableFlag: "disable-flag", + enableFlag: "enable-flag", + defaultOnDate: "2023-01-01", + }); + expect(result.isValid).toBe(true); + }); + + it("returns error message when neither flag is present nor date is sufficient", () => { + const options = { + ...baseOptions, + compatibilityDate: "2022-12-31", + compatibilityFlags: [], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertIsEnabled({ + disableFlag: "disable-flag", + enableFlag: "enable-flag", + defaultOnDate: "2023-01-01", + }); + expect(result.isValid).toBe(false); + expect(result.errorMessage).toBe( + 'In project /path/to/project, `options.compatibilityFlags` must contain "enable-flag", or `options.compatibilityDate` must be >= "2023-01-01".\nThis flag is required to use `@cloudflare/vitest-pool-workers`.' + ); + }); + + it("returns error message when compatibilityDate is undefined", () => { + const options = { + ...baseOptions, + compatibilityDate: undefined, + compatibilityFlags: [], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertIsEnabled({ + disableFlag: "disable-flag", + enableFlag: "enable-flag", + defaultOnDate: "2023-01-01", + }); + expect(result.isValid).toBe(false); + expect(result.errorMessage).toBe( + 'In project /path/to/project, `options.compatibilityFlags` must contain "enable-flag", or `options.compatibilityDate` must be >= "2023-01-01".\nThis flag is required to use `@cloudflare/vitest-pool-workers`.' 
+ ); + }); + + it("throws error when defaultOnDate is invalid", () => { + const options = { + ...baseOptions, + compatibilityDate: "2023-01-02", + compatibilityFlags: [], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + expect(() => { + flagAssertions.assertIsEnabled({ + disableFlag: "disable-flag", + enableFlag: "enable-flag", + defaultOnDate: "invalid-date", + }); + }).toThrowError('Invalid date format: "invalid-date"'); + }); + + it("throws error when compatibilityDate is invalid", () => { + const options = { + ...baseOptions, + compatibilityDate: "invalid-date", + compatibilityFlags: [], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + expect(() => { + flagAssertions.assertIsEnabled({ + disableFlag: "disable-flag", + enableFlag: "enable-flag", + defaultOnDate: "2023-01-01", + }); + }).toThrowError('Invalid date format: "invalid-date"'); + }); + }); + + describe("assertAtLeastOneFlagExists", () => { + it("returns true when at least one of the flags is present", () => { + const options = { + ...baseOptions, + compatibilityDate: "2020-01-01", + compatibilityFlags: ["flag1", "flag2"], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertAtLeastOneFlagExists(["flag1"]); + expect(result.isValid).toBe(true); + }); + + it("returns true when multiple flags are present", () => { + const options = { + ...baseOptions, + compatibilityDate: "2020-01-01", + compatibilityFlags: ["flag1", "flag2", "flag3"], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertAtLeastOneFlagExists([ + "flag2", + "flag3", + ]); + expect(result.isValid).toBe(true); + }); + + it("returns false when none of the flags are present", () => { + const options = { + ...baseOptions, + compatibilityDate: "2020-01-01", + compatibilityFlags: ["flag1"], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertAtLeastOneFlagExists([ + "flag2", + "flag3", + ]); + expect(result.isValid).toBe(false); + expect(result.errorMessage).toBe( + 'In project /path/to/project, `options.compatibilityFlags` must contain one of "flag2"/"flag3".\nEither one of these flags is required to use `@cloudflare/vitest-pool-workers`.' + ); + }); + + it("includes relativeWranglerConfigPath in error message when provided", () => { + const options = { + ...baseOptions, + compatibilityDate: "2020-01-01", + compatibilityFlags: [], + relativeWranglerConfigPath: "wrangler.toml", + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertAtLeastOneFlagExists([ + "flag2", + "flag3", + ]); + expect(result.isValid).toBe(false); + expect(result.errorMessage).toBe( + 'In project /path/to/project\'s configuration file wrangler.toml, `compatibility_flags` must contain one of "flag2"/"flag3".\nEither one of these flags is required to use `@cloudflare/vitest-pool-workers`.' 
+ ); + }); + + it("returns true when all flags are present", () => { + const options = { + ...baseOptions, + compatibilityDate: "2020-01-01", + compatibilityFlags: ["flag1", "flag2", "flag3"], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertAtLeastOneFlagExists([ + "flag1", + "flag2", + "flag3", + ]); + expect(result.isValid).toBe(true); + }); + + it("returns false when compatibilityFlags is empty", () => { + const options = { + ...baseOptions, + compatibilityDate: "2020-01-01", + compatibilityFlags: [], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertAtLeastOneFlagExists([ + "flag1", + "flag2", + ]); + expect(result.isValid).toBe(false); + expect(result.errorMessage).toBe( + 'In project /path/to/project, `options.compatibilityFlags` must contain one of "flag1"/"flag2".\nEither one of these flags is required to use `@cloudflare/vitest-pool-workers`.' + ); + }); + + it("returns true when flags array is empty", () => { + const options = { + ...baseOptions, + compatibilityDate: "2020-01-01", + compatibilityFlags: ["flag1"], + }; + const flagAssertions = new CompatibilityFlagAssertions(options); + const result = flagAssertions.assertAtLeastOneFlagExists([]); + expect(result.isValid).toBe(true); + }); + }); +}); diff --git a/packages/vitest-pool-workers/test/validation.test.ts b/packages/vitest-pool-workers/test/validation.test.ts index 78357358c23a..fb44bc787aa3 100644 --- a/packages/vitest-pool-workers/test/validation.test.ts +++ b/packages/vitest-pool-workers/test/validation.test.ts @@ -89,89 +89,6 @@ test( { timeout: 45_000 } ); -test( - "requires specific compatibility flags", - async ({ expect, seed, vitestRun, tmpPath }) => { - const tmpPathName = path.basename(tmpPath); - - // Check messages without Wrangler configuration path defined - await seed({ - "vitest.config.mts": dedent` - import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config"; - export default defineWorkersConfig({}); - `, - "index.test.ts": "", - }); - let result = await vitestRun(); - expect(await result.exitCode).toBe(1); - let expected = dedent` - Error: In project ${path.join(tmpPathName, "vitest.config.mts")}, \`test.poolOptions.workers.miniflare.compatibilityFlags\` must contain "export_commonjs_default", or \`test.poolOptions.workers.miniflare.compatibilityDate\` must be >= "2022-10-31". - This flag is required to use \`@cloudflare/vitest-pool-workers\`. - `.replaceAll("\t", " "); - expect(result.stderr).toMatch(expected); - - await seed({ - "vitest.config.mts": dedent` - import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config"; - export default defineWorkersConfig({ - test: { - poolOptions: { - workers: { - miniflare: { compatibilityDate: "2024-01-01" } - }, - }, - } - }); - `, - }); - result = await vitestRun(); - expect(await result.exitCode).toBe(1); - expected = dedent` - Error: In project ${path.join(tmpPathName, "vitest.config.mts")}, \`test.poolOptions.workers.miniflare.compatibilityFlags\` must contain "nodejs_compat". - This flag is required to use \`@cloudflare/vitest-pool-workers\`. 
- `.replaceAll("\t", " "); - expect(result.stderr).toMatch(expected); - - // Check messages with Wrangler configuration path defined - await seed({ - "vitest.config.mts": dedent` - import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config"; - export default defineWorkersConfig({ - test: { - poolOptions: { - workers: { - wrangler: { configPath: "./wrangler.toml" } - }, - }, - } - }); - `, - "wrangler.toml": "", - }); - result = await vitestRun(); - expect(await result.exitCode).toBe(1); - expected = dedent` - Error: In project ${path.join(tmpPathName, "vitest.config.mts")}'s configuration file ${path.join(tmpPathName, "wrangler.toml")}, \`compatibility_flags\` must contain "export_commonjs_default", or \`compatibility_date\` must be >= "2022-10-31". - This flag is required to use \`@cloudflare/vitest-pool-workers\`. - `.replaceAll("\t", " "); - expect(result.stderr).toMatch(expected); - - await seed({ - "wrangler.toml": dedent` - compatibility_date = "2024-01-01" - `, - }); - result = await vitestRun(); - expect(await result.exitCode).toBe(1); - expected = dedent` - Error: In project ${path.join(tmpPathName, "vitest.config.mts")}'s configuration file ${path.join(tmpPathName, "wrangler.toml")}, \`compatibility_flags\` must contain "nodejs_compat". - This flag is required to use \`@cloudflare/vitest-pool-workers\`. - `.replaceAll("\t", " "); - expect(result.stderr).toMatch(expected); - }, - { timeout: 45_000 } -); - test( "requires modules entrypoint to use SELF", async ({ expect, seed, vitestRun, tmpPath }) => { From edec41591dcf37262d459568c0f454820b90dbaa Mon Sep 17 00:00:00 2001 From: Phillip Jones Date: Tue, 19 Nov 2024 05:52:57 -0800 Subject: [PATCH 2/9] Added r2 bucket lifecycle command (list, add, remove, set) (#7207) * Added r2 bucket lifecycle command (list, add, remove, set) * Address PR comments: small copy changes, default option for multiselect --- .changeset/hip-cycles-explain.md | 5 + packages/wrangler/src/__tests__/r2.test.ts | 239 +++++++++++- packages/wrangler/src/dialogs.ts | 44 +++ packages/wrangler/src/r2/helpers.ts | 159 ++++++++ packages/wrangler/src/r2/index.ts | 34 +- packages/wrangler/src/r2/lifecycle.ts | 432 +++++++++++++++++++++ 6 files changed, 911 insertions(+), 2 deletions(-) create mode 100644 .changeset/hip-cycles-explain.md create mode 100644 packages/wrangler/src/r2/lifecycle.ts diff --git a/.changeset/hip-cycles-explain.md b/.changeset/hip-cycles-explain.md new file mode 100644 index 000000000000..d5463f4e21ed --- /dev/null +++ b/.changeset/hip-cycles-explain.md @@ -0,0 +1,5 @@ +--- +"wrangler": minor +--- + +Added r2 bucket lifecycle command to Wrangler including list, add, remove, set diff --git a/packages/wrangler/src/__tests__/r2.test.ts b/packages/wrangler/src/__tests__/r2.test.ts index 8cd3d01e7893..738e05ca0710 100644 --- a/packages/wrangler/src/__tests__/r2.test.ts +++ b/packages/wrangler/src/__tests__/r2.test.ts @@ -1,4 +1,5 @@ import * as fs from "node:fs"; +import { writeFileSync } from "node:fs"; import { http, HttpResponse } from "msw"; import { MAX_UPLOAD_SIZE } from "../r2/constants"; import { actionsForEventCategories } from "../r2/helpers"; @@ -100,6 +101,7 @@ describe("r2", () => { wrangler r2 bucket notification Manage event notification rules for an R2 bucket wrangler r2 bucket domain Manage custom domains for an R2 bucket wrangler r2 bucket dev-url Manage public access via the r2.dev URL for an R2 bucket + wrangler r2 bucket lifecycle Manage lifecycle rules for an R2 bucket GLOBAL FLAGS -j, --experimental-json-config 
Experimental: support wrangler.json [boolean] @@ -137,6 +139,7 @@ describe("r2", () => { wrangler r2 bucket notification Manage event notification rules for an R2 bucket wrangler r2 bucket domain Manage custom domains for an R2 bucket wrangler r2 bucket dev-url Manage public access via the r2.dev URL for an R2 bucket + wrangler r2 bucket lifecycle Manage lifecycle rules for an R2 bucket GLOBAL FLAGS -j, --experimental-json-config Experimental: support wrangler.json [boolean] @@ -1000,7 +1003,7 @@ binding = \\"testBucket\\"" " wrangler r2 bucket notification list - List event notification rules for a bucket + List event notification rules for an R2 bucket POSITIONALS bucket The name of the R2 bucket to get event notification rules for [string] [required] @@ -1869,6 +1872,240 @@ binding = \\"testBucket\\"" }); }); }); + describe("lifecycle", () => { + const { setIsTTY } = useMockIsTTY(); + mockAccountId(); + mockApiToken(); + describe("list", () => { + it("should list lifecycle rules when they exist", async () => { + const bucketName = "my-bucket"; + const lifecycleRules = [ + { + id: "rule-1", + enabled: true, + conditions: { prefix: "images/" }, + deleteObjectsTransition: { + condition: { + type: "Age", + maxAge: 2592000, + }, + }, + }, + ]; + msw.use( + http.get( + "*/accounts/:accountId/r2/buckets/:bucketName/lifecycle", + async ({ params }) => { + const { accountId, bucketName: bucketParam } = params; + expect(accountId).toEqual("some-account-id"); + expect(bucketParam).toEqual(bucketName); + return HttpResponse.json( + createFetchResult({ + rules: lifecycleRules, + }) + ); + }, + { once: true } + ) + ); + await runWrangler(`r2 bucket lifecycle list ${bucketName}`); + expect(std.out).toMatchInlineSnapshot(` + "Listing lifecycle rules for bucket 'my-bucket'... + id: rule-1 + enabled: Yes + prefix: images/ + action: Expire objects after 30 days" + `); + }); + }); + describe("add", () => { + it("it should add a lifecycle rule using command-line arguments", async () => { + const bucketName = "my-bucket"; + const ruleId = "my-rule"; + const prefix = "images/"; + const conditionType = "Age"; + const conditionValue = "30"; + + msw.use( + http.get( + "*/accounts/:accountId/r2/buckets/:bucketName/lifecycle", + async ({ params }) => { + const { accountId, bucketName: bucketParam } = params; + expect(accountId).toEqual("some-account-id"); + expect(bucketParam).toEqual(bucketName); + return HttpResponse.json( + createFetchResult({ + rules: [], + }) + ); + }, + { once: true } + ), + http.put( + "*/accounts/:accountId/r2/buckets/:bucketName/lifecycle", + async ({ request, params }) => { + const { accountId, bucketName: bucketParam } = params; + expect(accountId).toEqual("some-account-id"); + expect(bucketName).toEqual(bucketParam); + const requestBody = await request.json(); + expect(requestBody).toEqual({ + rules: [ + { + id: ruleId, + enabled: true, + conditions: { prefix: prefix }, + deleteObjectsTransition: { + condition: { + type: conditionType, + maxAge: 2592000, + }, + }, + }, + ], + }); + return HttpResponse.json(createFetchResult({})); + }, + { once: true } + ) + ); + await runWrangler( + `r2 bucket lifecycle add ${bucketName} --id ${ruleId} --prefix ${prefix} --expire-days ${conditionValue}` + ); + expect(std.out).toMatchInlineSnapshot(` + "Adding lifecycle rule 'my-rule' to bucket 'my-bucket'... + ✨ Added lifecycle rule 'my-rule' to bucket 'my-bucket'." 
+ `); + }); + }); + describe("remove", () => { + it("should remove a lifecycle rule as expected", async () => { + const bucketName = "my-bucket"; + const ruleId = "my-rule"; + const lifecycleRules = { + rules: [ + { + id: ruleId, + enabled: true, + conditions: {}, + }, + ], + }; + msw.use( + http.get( + "*/accounts/:accountId/r2/buckets/:bucketName/lifecycle", + async ({ params }) => { + const { accountId, bucketName: bucketParam } = params; + expect(accountId).toEqual("some-account-id"); + expect(bucketParam).toEqual(bucketName); + return HttpResponse.json(createFetchResult(lifecycleRules)); + }, + { once: true } + ), + http.put( + "*/accounts/:accountId/r2/buckets/:bucketName/lifecycle", + async ({ request, params }) => { + const { accountId, bucketName: bucketParam } = params; + expect(accountId).toEqual("some-account-id"); + expect(bucketName).toEqual(bucketParam); + const requestBody = await request.json(); + expect(requestBody).toEqual({ + rules: [], + }); + return HttpResponse.json(createFetchResult({})); + }, + { once: true } + ) + ); + await runWrangler( + `r2 bucket lifecycle remove ${bucketName} --id ${ruleId}` + ); + expect(std.out).toMatchInlineSnapshot(` + "Removing lifecycle rule 'my-rule' from bucket 'my-bucket'... + Lifecycle rule 'my-rule' removed from bucket 'my-bucket'." + `); + }); + it("should handle removing non-existant rule ID as expected", async () => { + const bucketName = "my-bucket"; + const ruleId = "my-rule"; + const lifecycleRules = { + rules: [], + }; + msw.use( + http.get( + "*/accounts/:accountId/r2/buckets/:bucketName/lifecycle", + async ({ params }) => { + const { accountId, bucketName: bucketParam } = params; + expect(accountId).toEqual("some-account-id"); + expect(bucketParam).toEqual(bucketName); + return HttpResponse.json(createFetchResult(lifecycleRules)); + }, + { once: true } + ) + ); + await expect(() => + runWrangler( + `r2 bucket lifecycle remove ${bucketName} --id ${ruleId}` + ) + ).rejects.toThrowErrorMatchingInlineSnapshot( + "[Error: Lifecycle rule with ID 'my-rule' not found in configuration for 'my-bucket'.]" + ); + }); + }); + describe("set", () => { + it("should set lifecycle configuration from a JSON file", async () => { + const bucketName = "my-bucket"; + const filePath = "lifecycle-configuration.json"; + const lifecycleRules = { + rules: [ + { + id: "rule-1", + enabled: true, + conditions: {}, + deleteObjectsTransition: { + condition: { + type: "Age", + maxAge: 2592000, + }, + }, + }, + ], + }; + + writeFileSync(filePath, JSON.stringify(lifecycleRules)); + + setIsTTY(true); + mockConfirm({ + text: `Are you sure you want to overwrite all existing lifecycle rules for bucket '${bucketName}'?`, + result: true, + }); + + msw.use( + http.put( + "*/accounts/:accountId/r2/buckets/:bucketName/lifecycle", + async ({ request, params }) => { + const { accountId, bucketName: bucketParam } = params; + expect(accountId).toEqual("some-account-id"); + expect(bucketName).toEqual(bucketParam); + const requestBody = await request.json(); + expect(requestBody).toEqual({ + ...lifecycleRules, + }); + return HttpResponse.json(createFetchResult({})); + }, + { once: true } + ) + ); + + await runWrangler( + `r2 bucket lifecycle set ${bucketName} --file ${filePath}` + ); + expect(std.out).toMatchInlineSnapshot(` + "Setting lifecycle configuration (1 rules) for bucket 'my-bucket'... + ✨ Set lifecycle configuration for bucket 'my-bucket'." 
+ `); + }); + }); + }); }); describe("r2 object", () => { diff --git a/packages/wrangler/src/dialogs.ts b/packages/wrangler/src/dialogs.ts index 49caa2c9339e..f8ecba8a4cf2 100644 --- a/packages/wrangler/src/dialogs.ts +++ b/packages/wrangler/src/dialogs.ts @@ -130,3 +130,47 @@ export async function select( }); return value; } + +interface MultiSelectOptions { + choices: SelectOption[]; + defaultOptions?: number[]; +} + +export async function multiselect( + text: string, + options: MultiSelectOptions +): Promise { + if (isNonInteractiveOrCI()) { + if (options?.defaultOptions === undefined) { + throw new NoDefaultValueProvided(); + } + + const defaultTitles = options.defaultOptions.map( + (index) => options.choices[index].title + ); + logger.log(`? ${text}`); + + logger.log( + `🤖 ${chalk.dim( + "Using default value(s) in non-interactive context:" + )} ${chalk.white.bold(defaultTitles.join(", "))}` + ); + return options.defaultOptions.map((index) => options.choices[index].value); + } + const { value } = await prompts({ + type: "multiselect", + name: "value", + message: text, + choices: options.choices, + instructions: false, + hint: "- Space to select. Return to submit", + onState: (state) => { + if (state.aborted) { + process.nextTick(() => { + process.exit(1); + }); + } + }, + }); + return value; +} diff --git a/packages/wrangler/src/r2/helpers.ts b/packages/wrangler/src/r2/helpers.ts index 5e5a79e76a46..f0594bb228d9 100644 --- a/packages/wrangler/src/r2/helpers.ts +++ b/packages/wrangler/src/r2/helpers.ts @@ -924,6 +924,165 @@ export async function updateR2DevDomain( return result; } +export interface LifecycleCondition { + type: "Age" | "Date"; + maxAge?: number; + date?: string; +} + +export interface LifecycleRule { + id: string; + enabled: boolean; + conditions: { + prefix?: string; + }; + deleteObjectsTransition?: { + condition: LifecycleCondition; + }; + storageClassTransitions?: Array<{ + condition: LifecycleCondition; + storageClass: "InfrequentAccess"; + }>; + abortMultipartUploadsTransition?: { + condition: LifecycleCondition; + }; +} + +function formatCondition(condition: LifecycleCondition): string { + if (condition.type === "Age" && typeof condition.maxAge === "number") { + const days = condition.maxAge / 86400; // Convert seconds to days + return `after ${days} days`; + } else if (condition.type === "Date" && condition.date) { + const date = new Date(condition.date); + const displayDate = date.toISOString().split("T")[0]; + return `on ${displayDate}`; + } + + return ""; +} + +export function tableFromLifecycleRulesResponse(rules: LifecycleRule[]): { + id: string; + enabled: string; + prefix: string; + action: string; +}[] { + const rows = []; + for (const rule of rules) { + const actions = []; + + if (rule.deleteObjectsTransition) { + const action = "Expire objects"; + const condition = formatCondition(rule.deleteObjectsTransition.condition); + actions.push(`${action} ${condition}`); + } + if ( + rule.storageClassTransitions && + rule.storageClassTransitions.length > 0 + ) { + for (const transition of rule.storageClassTransitions) { + const action = "Transition to Infrequent Access"; + const condition = formatCondition(transition.condition); + actions.push(`${action} ${condition}`); + } + } + if (rule.abortMultipartUploadsTransition) { + const action = "Abort incomplete multipart uploads"; + const condition = formatCondition( + rule.abortMultipartUploadsTransition.condition + ); + actions.push(`${action} ${condition}`); + } + + rows.push({ + id: rule.id, + enabled: 
rule.enabled ? "Yes" : "No", + prefix: rule.conditions.prefix || "(all prefixes)", + action: actions.join(", ") || "(none)", + }); + } + return rows; +} + +export async function getLifecycleRules( + accountId: string, + bucket: string, + jurisdiction?: string +): Promise { + const headers: HeadersInit = {}; + if (jurisdiction) { + headers["cf-r2-jurisdiction"] = jurisdiction; + } + + const result = await fetchResult<{ rules: LifecycleRule[] }>( + `/accounts/${accountId}/r2/buckets/${bucket}/lifecycle`, + { + method: "GET", + headers, + } + ); + return result.rules; +} + +export async function putLifecycleRules( + accountId: string, + bucket: string, + rules: LifecycleRule[], + jurisdiction?: string +): Promise { + const headers: HeadersInit = { + "Content-Type": "application/json", + }; + if (jurisdiction) { + headers["cf-r2-jurisdiction"] = jurisdiction; + } + + await fetchResult(`/accounts/${accountId}/r2/buckets/${bucket}/lifecycle`, { + method: "PUT", + headers, + body: JSON.stringify({ rules: rules }), + }); +} + +export function formatActionDescription(action: string): string { + switch (action) { + case "expire": + return "expire objects"; + case "transition": + return "transition to Infrequent Access storage class"; + case "abort-multipart": + return "abort incomplete multipart uploads"; + default: + return action; + } +} + +export function isValidDate(dateString: string): boolean { + const regex = /^\d{4}-\d{2}-\d{2}$/; + if (!regex.test(dateString)) { + return false; + } + const date = new Date(`${dateString}T00:00:00.000Z`); + const timestamp = date.getTime(); + if (isNaN(timestamp)) { + return false; + } + const [year, month, day] = dateString.split("-").map(Number); + return ( + date.getUTCFullYear() === year && + date.getUTCMonth() + 1 === month && + date.getUTCDate() === day + ); +} + +export function isNonNegativeNumber(str: string): boolean { + if (str === "") { + return false; + } + const num = Number(str); + return num >= 0; +} + /** * R2 bucket names must only contain alphanumeric and - characters. 
*/ diff --git a/packages/wrangler/src/r2/index.ts b/packages/wrangler/src/r2/index.ts index 4d4398b833df..3e3e7baf476e 100644 --- a/packages/wrangler/src/r2/index.ts +++ b/packages/wrangler/src/r2/index.ts @@ -23,6 +23,7 @@ import { usingLocalBucket, } from "./helpers"; import * as Info from "./info"; +import * as Lifecycle from "./lifecycle"; import * as List from "./list"; import * as Notification from "./notification"; import * as PublicDevUrl from "./public-dev-url"; @@ -576,7 +577,7 @@ export function r2(r2Yargs: CommonYargsArgv, subHelp: SubHelp) { return r2EvNotifyYargs .command( ["list ", "get "], - "List event notification rules for a bucket", + "List event notification rules for an R2 bucket", Notification.ListOptions, Notification.ListHandler ) @@ -651,6 +652,37 @@ export function r2(r2Yargs: CommonYargsArgv, subHelp: SubHelp) { ); } ); + r2BucketYargs.command( + "lifecycle", + "Manage lifecycle rules for an R2 bucket", + (lifecycleYargs) => { + return lifecycleYargs + .command( + "list ", + "List lifecycle rules for an R2 bucket", + Lifecycle.ListOptions, + Lifecycle.ListHandler + ) + .command( + "add ", + "Add a lifecycle rule to an R2 bucket", + Lifecycle.AddOptions, + Lifecycle.AddHandler + ) + .command( + "remove ", + "Remove a lifecycle rule from an R2 bucket", + Lifecycle.RemoveOptions, + Lifecycle.RemoveHandler + ) + .command( + "set ", + "Set the lifecycle configuration for an R2 bucket from a JSON file", + Lifecycle.SetOptions, + Lifecycle.SetHandler + ); + } + ); return r2BucketYargs; }); } diff --git a/packages/wrangler/src/r2/lifecycle.ts b/packages/wrangler/src/r2/lifecycle.ts new file mode 100644 index 000000000000..0daba23fcf57 --- /dev/null +++ b/packages/wrangler/src/r2/lifecycle.ts @@ -0,0 +1,432 @@ +import { readConfig, withConfig } from "../config"; +import { confirm, multiselect, prompt } from "../dialogs"; +import { UserError } from "../errors"; +import isInteractive from "../is-interactive"; +import { logger } from "../logger"; +import { readFileSync } from "../parse"; +import { printWranglerBanner } from "../update-check"; +import { requireAuth } from "../user"; +import formatLabelledValues from "../utils/render-labelled-values"; +import { + formatActionDescription, + getLifecycleRules, + isNonNegativeNumber, + isValidDate, + putLifecycleRules, + tableFromLifecycleRulesResponse, +} from "./helpers"; +import type { + CommonYargsArgv, + StrictYargsOptionsToInterface, +} from "../yargs-types"; +import type { LifecycleRule } from "./helpers"; + +export function ListOptions(yargs: CommonYargsArgv) { + return yargs + .positional("bucket", { + describe: "The name of the R2 bucket to list lifecycle rules for", + type: "string", + demandOption: true, + }) + .option("jurisdiction", { + describe: "The jurisdiction where the bucket exists", + alias: "J", + requiresArg: true, + type: "string", + }); +} + +export async function ListHandler( + args: StrictYargsOptionsToInterface +) { + await printWranglerBanner(); + const config = readConfig(args.config, args); + const accountId = await requireAuth(config); + + const { bucket, jurisdiction } = args; + + logger.log(`Listing lifecycle rules for bucket '${bucket}'...`); + + const lifecycleRules = await getLifecycleRules( + accountId, + bucket, + jurisdiction + ); + + if (lifecycleRules.length === 0) { + logger.log(`There are no lifecycle rules for bucket '${bucket}'.`); + } else { + const tableOutput = tableFromLifecycleRulesResponse(lifecycleRules); + logger.log(tableOutput.map((x) => 
formatLabelledValues(x)).join("\n\n")); + } +} + +export function AddOptions(yargs: CommonYargsArgv) { + return yargs + .positional("bucket", { + describe: "The name of the R2 bucket to add a lifecycle rule to", + type: "string", + demandOption: true, + }) + .positional("id", { + describe: "A unique identifier for the lifecycle rule", + type: "string", + requiresArg: true, + }) + .positional("prefix", { + describe: + "Prefix condition for the lifecycle rule (leave empty for all prefixes)", + type: "string", + requiresArg: true, + }) + .option("expire-days", { + describe: "Number of days after which objects expire", + type: "number", + requiresArg: true, + }) + .option("expire-date", { + describe: "Date after which objects expire (YYYY-MM-DD)", + type: "number", + requiresArg: true, + }) + .option("ia-transition-days", { + describe: + "Number of days after which objects transition to Infrequent Access storage", + type: "number", + requiresArg: true, + }) + .option("ia-transition-date", { + describe: + "Date after which objects transition to Infrequent Access storage (YYYY-MM-DD)", + type: "string", + requiresArg: true, + }) + .option("abort-multipart-days", { + describe: + "Number of days after which incomplete multipart uploads are aborted", + type: "number", + requiresArg: true, + }) + .option("jurisdiction", { + describe: "The jurisdiction where the bucket exists", + alias: "J", + requiresArg: true, + type: "string", + }) + .option("force", { + describe: "Skip confirmation", + type: "boolean", + alias: "y", + default: false, + }); +} + +export const AddHandler = withConfig< + StrictYargsOptionsToInterface +>( + async ({ + bucket, + expireDays, + expireDate, + iaTransitionDays, + iaTransitionDate, + abortMultipartDays, + jurisdiction, + force, + id, + prefix, + config, + }): Promise => { + await printWranglerBanner(); + const accountId = await requireAuth(config); + + const lifecycleRules = await getLifecycleRules( + accountId, + bucket, + jurisdiction + ); + + if (!id && isInteractive()) { + id = await prompt("Enter a unique identifier for the lifecycle rule"); + } + + if (!id) { + throw new UserError("Must specify a rule ID."); + } + + const newRule: LifecycleRule = { + id: id, + enabled: true, + conditions: {}, + }; + + let selectedActions: string[] = []; + + if (expireDays !== undefined || expireDate !== undefined) { + selectedActions.push("expire"); + } + if (iaTransitionDays !== undefined || iaTransitionDate !== undefined) { + selectedActions.push("transition"); + } + if (abortMultipartDays !== undefined) { + selectedActions.push("abort-multipart"); + } + + if (selectedActions.length === 0 && isInteractive()) { + if (prefix === undefined) { + prefix = await prompt( + "Enter a prefix for the lifecycle rule (leave empty for all prefixes)" + ); + } + const actionChoices = [ + { title: "Expire objects", value: "expire" }, + { + title: "Transition to Infrequent Access storage class", + value: "transition", + }, + { + title: "Abort incomplete multipart uploads", + value: "abort-multipart", + }, + ]; + + selectedActions = await multiselect("Select the actions to apply", { + choices: actionChoices, + }); + } + + if (selectedActions.length === 0) { + throw new UserError("Must specify at least one action."); + } + + for (const action of selectedActions) { + let conditionType: "Age" | "Date"; + let conditionValue: number | string; + + if (action === "abort-multipart") { + if (abortMultipartDays !== undefined) { + conditionValue = abortMultipartDays; + } else { + conditionValue = await prompt( 
+ `Enter the number of days after which to ${formatActionDescription(action)}` + ); + } + if (!isNonNegativeNumber(String(conditionValue))) { + throw new UserError("Must be a positive number."); + } + + conditionType = "Age"; + conditionValue = Number(conditionValue) * 86400; // Convert days to seconds + + newRule.abortMultipartUploadsTransition = { + condition: { + maxAge: conditionValue, + type: conditionType, + }, + }; + } else { + if (expireDays !== undefined) { + conditionType = "Age"; + conditionValue = expireDays; + } else if (iaTransitionDays !== undefined) { + conditionType = "Age"; + conditionValue = iaTransitionDays; + } else if (expireDate !== undefined) { + conditionType = "Date"; + conditionValue = expireDate; + } else if (iaTransitionDate !== undefined) { + conditionType = "Date"; + conditionValue = iaTransitionDate; + } else { + conditionValue = await prompt( + `Enter the number of days or a date (YYYY-MM-DD) after which to ${formatActionDescription(action)}` + ); + if ( + !isNonNegativeNumber(String(conditionValue)) && + !isValidDate(String(conditionValue)) + ) { + throw new UserError( + "Must be a positive number or a valid date in the YYYY-MM-DD format." + ); + } + } + + if (isNonNegativeNumber(String(conditionValue))) { + conditionType = "Age"; + conditionValue = Number(conditionValue) * 86400; // Convert days to seconds + } else if (isValidDate(String(conditionValue))) { + conditionType = "Date"; + const date = new Date(`${conditionValue}T00:00:00.000Z`); + conditionValue = date.toISOString(); + } else { + throw new UserError("Invalid condition input."); + } + + if (action === "expire") { + newRule.deleteObjectsTransition = { + condition: { + [conditionType === "Age" ? "maxAge" : "date"]: conditionValue, + type: conditionType, + }, + }; + } else if (action === "transition") { + newRule.storageClassTransitions = [ + { + condition: { + [conditionType === "Age" ? "maxAge" : "date"]: conditionValue, + type: conditionType, + }, + storageClass: "InfrequentAccess", + }, + ]; + } + } + } + + if (!prefix && !force) { + const confirmedAdd = await confirm( + `Are you sure you want to add lifecycle rule '${id}' to bucket '${bucket}' without a prefix? 
` + + `The lifecycle rule will apply to all objects in your bucket.` + ); + if (!confirmedAdd) { + logger.log("Add cancelled."); + return; + } + } + + if (prefix) { + newRule.conditions.prefix = prefix; + } + + lifecycleRules.push(newRule); + logger.log(`Adding lifecycle rule '${id}' to bucket '${bucket}'...`); + await putLifecycleRules(accountId, bucket, lifecycleRules, jurisdiction); + logger.log(`✨ Added lifecycle rule '${id}' to bucket '${bucket}'.`); + } +); + +export function RemoveOptions(yargs: CommonYargsArgv) { + return yargs + .positional("bucket", { + describe: "The name of the R2 bucket to remove a lifecycle rule from", + type: "string", + demandOption: true, + }) + .option("id", { + describe: "The unique identifier of the lifecycle rule to remove", + type: "string", + demandOption: true, + requiresArg: true, + }) + .option("jurisdiction", { + describe: "The jurisdiction where the bucket exists", + alias: "J", + requiresArg: true, + type: "string", + }); +} + +export async function RemoveHandler( + args: StrictYargsOptionsToInterface +) { + await printWranglerBanner(); + const config = readConfig(args.config, args); + const accountId = await requireAuth(config); + + const { bucket, id, jurisdiction } = args; + + const lifecycleRules = await getLifecycleRules( + accountId, + bucket, + jurisdiction + ); + + const index = lifecycleRules.findIndex((rule) => rule.id === id); + + if (index === -1) { + throw new UserError( + `Lifecycle rule with ID '${id}' not found in configuration for '${bucket}'.` + ); + } + + lifecycleRules.splice(index, 1); + + logger.log(`Removing lifecycle rule '${id}' from bucket '${bucket}'...`); + await putLifecycleRules(accountId, bucket, lifecycleRules, jurisdiction); + logger.log(`Lifecycle rule '${id}' removed from bucket '${bucket}'.`); +} + +export function SetOptions(yargs: CommonYargsArgv) { + return yargs + .positional("bucket", { + describe: "The name of the R2 bucket to set lifecycle configuration for", + type: "string", + demandOption: true, + }) + .option("file", { + describe: "Path to the JSON file containing lifecycle configuration", + type: "string", + demandOption: true, + requiresArg: true, + }) + .option("jurisdiction", { + describe: "The jurisdiction where the bucket exists", + alias: "J", + requiresArg: true, + type: "string", + }) + .option("force", { + describe: "Skip confirmation", + type: "boolean", + alias: "y", + default: false, + }); +} + +export async function SetHandler( + args: StrictYargsOptionsToInterface +) { + await printWranglerBanner(); + const config = readConfig(args.config, args); + const accountId = await requireAuth(config); + + const { bucket, file, jurisdiction, force } = args; + let lifecyclePolicy: { rules: LifecycleRule[] }; + try { + lifecyclePolicy = JSON.parse(readFileSync(file)); + } catch (e) { + if (e instanceof Error) { + throw new UserError( + `Failed to read or parse the lifecycle configuration config file: '${e.message}'` + ); + } else { + throw e; + } + } + + if (!lifecyclePolicy.rules || !Array.isArray(lifecyclePolicy.rules)) { + throw new UserError( + "The lifecycle configuration file must contain a 'rules' array." 
+ ); + } + + if (!force) { + const confirmedRemoval = await confirm( + `Are you sure you want to overwrite all existing lifecycle rules for bucket '${bucket}'?` + ); + if (!confirmedRemoval) { + logger.log("Set cancelled."); + return; + } + } + logger.log( + `Setting lifecycle configuration (${lifecyclePolicy.rules.length} rules) for bucket '${bucket}'...` + ); + await putLifecycleRules( + accountId, + bucket, + lifecyclePolicy.rules, + jurisdiction + ); + logger.log(`✨ Set lifecycle configuration for bucket '${bucket}'.`); +} From 0a9707eb6128c0738687f492fdf8f2f28c18103d Mon Sep 17 00:00:00 2001 From: "workers-devprod@cloudflare.com" <116369605+workers-devprod@users.noreply.github.com> Date: Tue, 19 Nov 2024 17:49:04 +0000 Subject: [PATCH 3/9] Version Packages (#7261) Co-authored-by: github-actions[bot] --- .changeset/clever-grapes-sparkle.md | 15 ---------- .changeset/four-mayflies-smash.md | 5 ---- .changeset/hip-cycles-explain.md | 5 ---- .changeset/hot-dolphins-obey.md | 5 ---- .changeset/mighty-beds-grab.md | 5 ---- .changeset/nasty-monkeys-heal.md | 6 ---- .changeset/shaggy-monkeys-visit.md | 9 ------ .changeset/shy-seas-wave.md | 5 ---- .changeset/tricky-bottles-bow.md | 5 ---- packages/create-cloudflare/CHANGELOG.md | 6 ++++ packages/create-cloudflare/package.json | 2 +- packages/vitest-pool-workers/CHANGELOG.md | 10 +++++++ packages/vitest-pool-workers/package.json | 2 +- packages/workflows-shared/CHANGELOG.md | 7 +++++ packages/workflows-shared/package.json | 2 +- packages/wrangler/CHANGELOG.md | 35 +++++++++++++++++++++++ packages/wrangler/package.json | 2 +- 17 files changed, 62 insertions(+), 64 deletions(-) delete mode 100644 .changeset/clever-grapes-sparkle.md delete mode 100644 .changeset/four-mayflies-smash.md delete mode 100644 .changeset/hip-cycles-explain.md delete mode 100644 .changeset/hot-dolphins-obey.md delete mode 100644 .changeset/mighty-beds-grab.md delete mode 100644 .changeset/nasty-monkeys-heal.md delete mode 100644 .changeset/shaggy-monkeys-visit.md delete mode 100644 .changeset/shy-seas-wave.md delete mode 100644 .changeset/tricky-bottles-bow.md diff --git a/.changeset/clever-grapes-sparkle.md b/.changeset/clever-grapes-sparkle.md deleted file mode 100644 index 9456908d61a7..000000000000 --- a/.changeset/clever-grapes-sparkle.md +++ /dev/null @@ -1,15 +0,0 @@ ---- -"wrangler": minor ---- - -Adds [observability.logs] settings to wrangler. This setting lets developers control the settings for logs as an independent dataset enabling more dataset types in the future. The most specific setting will win if any of the datasets are not enabled. - -It also adds the following setting to the logs config - -- `invocation_logs` - set to false to disable invocation logs. Defaults to true. 
- -```toml -[observability.logs] -enabled = true -invocation_logs = false -``` diff --git a/.changeset/four-mayflies-smash.md b/.changeset/four-mayflies-smash.md deleted file mode 100644 index 5f8dda12833e..000000000000 --- a/.changeset/four-mayflies-smash.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"create-cloudflare": patch ---- - -Adds support for new upcoming bun.lock lockfile diff --git a/.changeset/hip-cycles-explain.md b/.changeset/hip-cycles-explain.md deleted file mode 100644 index d5463f4e21ed..000000000000 --- a/.changeset/hip-cycles-explain.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"wrangler": minor ---- - -Added r2 bucket lifecycle command to Wrangler including list, add, remove, set diff --git a/.changeset/hot-dolphins-obey.md b/.changeset/hot-dolphins-obey.md deleted file mode 100644 index 7bb9fed4c6e0..000000000000 --- a/.changeset/hot-dolphins-obey.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@cloudflare/vitest-pool-workers": patch ---- - -fix: ensures Vitest Pool Workers doesn't error when using nodejs_compat_v2 flag diff --git a/.changeset/mighty-beds-grab.md b/.changeset/mighty-beds-grab.md deleted file mode 100644 index 80503b13836b..000000000000 --- a/.changeset/mighty-beds-grab.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"wrangler": patch ---- - -Include Version Preview URL in Wrangler's output file diff --git a/.changeset/nasty-monkeys-heal.md b/.changeset/nasty-monkeys-heal.md deleted file mode 100644 index faaefe01bea8..000000000000 --- a/.changeset/nasty-monkeys-heal.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"@cloudflare/workflows-shared": patch ---- - -- Fix workflows binding to create a workflow without arguments -- Fix workflows instance.id not working the same way in wrangler local dev as it does in production diff --git a/.changeset/shaggy-monkeys-visit.md b/.changeset/shaggy-monkeys-visit.md deleted file mode 100644 index c390c17c1d8e..000000000000 --- a/.changeset/shaggy-monkeys-visit.md +++ /dev/null @@ -1,9 +0,0 @@ ---- -"wrangler": patch ---- - -fix: only show fetch warning if on old compatibility_date - -Now that we have the `allow_custom_ports` compatibility flag, we only need to show the fetch warnings when that flag is not enabled. - -Fixes https://github.com/cloudflare/workerd/issues/2955 diff --git a/.changeset/shy-seas-wave.md b/.changeset/shy-seas-wave.md deleted file mode 100644 index 8fa075946124..000000000000 --- a/.changeset/shy-seas-wave.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"wrangler": patch ---- - -chore(wrangler): update unenv dependency version diff --git a/.changeset/tricky-bottles-bow.md b/.changeset/tricky-bottles-bow.md deleted file mode 100644 index 4cf91d81f0e7..000000000000 --- a/.changeset/tricky-bottles-bow.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"wrangler": patch ---- - -Default the file based registry (`--x-registry`) to on. This should improve stability of multi-worker development diff --git a/packages/create-cloudflare/CHANGELOG.md b/packages/create-cloudflare/CHANGELOG.md index 17036fa4e0ba..dd9f2d7b3221 100644 --- a/packages/create-cloudflare/CHANGELOG.md +++ b/packages/create-cloudflare/CHANGELOG.md @@ -1,5 +1,11 @@ # create-cloudflare +## 2.32.2 + +### Patch Changes + +- [#6831](https://github.com/cloudflare/workers-sdk/pull/6831) [`0fe018c`](https://github.com/cloudflare/workers-sdk/commit/0fe018c7b85d767de60e9cc88f256f99f17046e3) Thanks [@Cherry](https://github.com/Cherry)! 
- Adds support for new upcoming bun.lock lockfile + ## 2.32.1 ### Patch Changes diff --git a/packages/create-cloudflare/package.json b/packages/create-cloudflare/package.json index d20a0327d8d9..25f1b43a9582 100644 --- a/packages/create-cloudflare/package.json +++ b/packages/create-cloudflare/package.json @@ -1,6 +1,6 @@ { "name": "create-cloudflare", - "version": "2.32.1", + "version": "2.32.2", "description": "A CLI for creating and deploying new applications to Cloudflare.", "keywords": [ "cloudflare", diff --git a/packages/vitest-pool-workers/CHANGELOG.md b/packages/vitest-pool-workers/CHANGELOG.md index 9e076eeae499..07cab782bd0a 100644 --- a/packages/vitest-pool-workers/CHANGELOG.md +++ b/packages/vitest-pool-workers/CHANGELOG.md @@ -1,5 +1,15 @@ # @cloudflare/vitest-pool-workers +## 0.5.29 + +### Patch Changes + +- [#7278](https://github.com/cloudflare/workers-sdk/pull/7278) [`6508ea2`](https://github.com/cloudflare/workers-sdk/commit/6508ea21c396beff8a2de987eaf335bb46bdd89c) Thanks [@andyjessop](https://github.com/andyjessop)! - fix: ensures Vitest Pool Workers doesn't error when using nodejs_compat_v2 flag + +- Updated dependencies [[`b6cbfbd`](https://github.com/cloudflare/workers-sdk/commit/b6cbfbdd10dfbb732ec12a5c69bd4a74b07de8a0), [`edec415`](https://github.com/cloudflare/workers-sdk/commit/edec41591dcf37262d459568c0f454820b90dbaa), [`941d411`](https://github.com/cloudflare/workers-sdk/commit/941d4110ca84510d235b72b3f98692e4188a7ad4), [`e2e6912`](https://github.com/cloudflare/workers-sdk/commit/e2e6912bcb7a1f6b7f8081b889a4e08be8a740a1), [`09e6e90`](https://github.com/cloudflare/workers-sdk/commit/09e6e905d9825d33b8e90acabb8ff7b962cc908b), [`b4a0e74`](https://github.com/cloudflare/workers-sdk/commit/b4a0e74680440084342477fc9373f9f76ab91c0b)]: + - wrangler@3.88.0 + - miniflare@3.20241106.0 + ## 0.5.28 ### Patch Changes diff --git a/packages/vitest-pool-workers/package.json b/packages/vitest-pool-workers/package.json index 1dc477b0fcb5..22c9650726e5 100644 --- a/packages/vitest-pool-workers/package.json +++ b/packages/vitest-pool-workers/package.json @@ -1,6 +1,6 @@ { "name": "@cloudflare/vitest-pool-workers", - "version": "0.5.28", + "version": "0.5.29", "description": "Workers Vitest integration for writing Vitest unit and integration tests that run inside the Workers runtime", "keywords": [ "cloudflare", diff --git a/packages/workflows-shared/CHANGELOG.md b/packages/workflows-shared/CHANGELOG.md index a7cec8cc85f7..b53dd103d3ec 100644 --- a/packages/workflows-shared/CHANGELOG.md +++ b/packages/workflows-shared/CHANGELOG.md @@ -1,5 +1,12 @@ # @cloudflare/workflows-shared +## 0.1.2 + +### Patch Changes + +- [#7225](https://github.com/cloudflare/workers-sdk/pull/7225) [`bb17205`](https://github.com/cloudflare/workers-sdk/commit/bb17205f1cc357cabc857ab5cad61b6a4f3b8b93) Thanks [@bruxodasilva](https://github.com/bruxodasilva)! 
- - Fix workflows binding to create a workflow without arguments + - Fix workflows instance.id not working the same way in wrangler local dev as it does in production + ## 0.1.1 ### Patch Changes diff --git a/packages/workflows-shared/package.json b/packages/workflows-shared/package.json index 4b23ab2657b2..cac2c699b78f 100644 --- a/packages/workflows-shared/package.json +++ b/packages/workflows-shared/package.json @@ -1,6 +1,6 @@ { "name": "@cloudflare/workflows-shared", - "version": "0.1.1", + "version": "0.1.2", "private": true, "description": "Package that is used at Cloudflare to power some internal features of Cloudflare Workflows.", "keywords": [ diff --git a/packages/wrangler/CHANGELOG.md b/packages/wrangler/CHANGELOG.md index 4b00cfca5df3..65394c25c1fe 100644 --- a/packages/wrangler/CHANGELOG.md +++ b/packages/wrangler/CHANGELOG.md @@ -1,5 +1,40 @@ # wrangler +## 3.88.0 + +### Minor Changes + +- [#7173](https://github.com/cloudflare/workers-sdk/pull/7173) [`b6cbfbd`](https://github.com/cloudflare/workers-sdk/commit/b6cbfbdd10dfbb732ec12a5c69bd4a74b07de8a0) Thanks [@Ankcorn](https://github.com/Ankcorn)! - Adds [observability.logs] settings to wrangler. This setting lets developers control the settings for logs as an independent dataset enabling more dataset types in the future. The most specific setting will win if any of the datasets are not enabled. + + It also adds the following setting to the logs config + + - `invocation_logs` - set to false to disable invocation logs. Defaults to true. + + ```toml + [observability.logs] + enabled = true + invocation_logs = false + ``` + +- [#7207](https://github.com/cloudflare/workers-sdk/pull/7207) [`edec415`](https://github.com/cloudflare/workers-sdk/commit/edec41591dcf37262d459568c0f454820b90dbaa) Thanks [@jonesphillip](https://github.com/jonesphillip)! - Added r2 bucket lifecycle command to Wrangler including list, add, remove, set + +### Patch Changes + +- [#7243](https://github.com/cloudflare/workers-sdk/pull/7243) [`941d411`](https://github.com/cloudflare/workers-sdk/commit/941d4110ca84510d235b72b3f98692e4188a7ad4) Thanks [@penalosa](https://github.com/penalosa)! - Include Version Preview URL in Wrangler's output file + +- [#7038](https://github.com/cloudflare/workers-sdk/pull/7038) [`e2e6912`](https://github.com/cloudflare/workers-sdk/commit/e2e6912bcb7a1f6b7f8081b889a4e08be8a740a1) Thanks [@petebacondarwin](https://github.com/petebacondarwin)! - fix: only show fetch warning if on old compatibility_date + + Now that we have the `allow_custom_ports` compatibility flag, we only need to show the fetch warnings when that flag is not enabled. + + Fixes https://github.com/cloudflare/workerd/issues/2955 + +- [#7216](https://github.com/cloudflare/workers-sdk/pull/7216) [`09e6e90`](https://github.com/cloudflare/workers-sdk/commit/09e6e905d9825d33b8e90acabb8ff7b962cc908b) Thanks [@vicb](https://github.com/vicb)! - chore(wrangler): update unenv dependency version + +- [#7081](https://github.com/cloudflare/workers-sdk/pull/7081) [`b4a0e74`](https://github.com/cloudflare/workers-sdk/commit/b4a0e74680440084342477fc9373f9f76ab91c0b) Thanks [@penalosa](https://github.com/penalosa)! - Default the file based registry (`--x-registry`) to on. 
This should improve stability of multi-worker development + +- Updated dependencies []: + - miniflare@3.20241106.0 + ## 3.87.0 ### Minor Changes diff --git a/packages/wrangler/package.json b/packages/wrangler/package.json index 9c049ef70b5a..520c62f804e7 100644 --- a/packages/wrangler/package.json +++ b/packages/wrangler/package.json @@ -1,6 +1,6 @@ { "name": "wrangler", - "version": "3.87.0", + "version": "3.88.0", "description": "Command-line interface for all things Cloudflare Workers", "keywords": [ "wrangler", From f1f508ec1acefd7b409b77ae1070029953cca061 Mon Sep 17 00:00:00 2001 From: Andy Jessop Date: Tue, 19 Nov 2024 19:41:56 +0100 Subject: [PATCH 4/9] chore: change wrangler-devtools name to workers-devtools (#7258) chore: add changeset chore: update lock file chore: update formatting chore: fix snapshot tests --- .changeset/mighty-waves-deny.md | 5 +++++ .github/workflows/deploy-pages-previews.yml | 12 ++++++------ .gitignore | 16 ++++++++-------- .prettierignore | 2 +- CONTRIBUTING.md | 4 ++-- README.md | 14 +++++++------- .../CHANGELOG.md | 2 +- .../Makefile | 0 .../README.md | 10 +++++----- .../package.json | 4 ++-- ...support-make-it-work-in-Firefox-Safar.patch | 0 ...Setup-Cloudflare-devtools-target-type.patch | 0 ...rove-connection-stability.-Without-th.patch | 0 ...-source-files-over-the-network.-This-.patch | 0 ...-the-devtools-theme-via-a-query-param.patch | 0 .../0006-All-about-the-network-tab.patch | 0 ...-Limit-heap-profiling-modes-available.patch | 0 ...name-as-the-title-for-the-Javascript-.patch | 0 pnpm-lock.yaml | 18 +++++++++--------- .../__tests__/deploy-non-npm-packages.test.ts | 2 +- .../__tests__/validate-changesets.test.ts | 2 +- 21 files changed, 48 insertions(+), 43 deletions(-) create mode 100644 .changeset/mighty-waves-deny.md rename packages/{wrangler-devtools => chrome-devtools-patches}/CHANGELOG.md (94%) rename packages/{wrangler-devtools => chrome-devtools-patches}/Makefile (100%) rename packages/{wrangler-devtools => chrome-devtools-patches}/README.md (83%) rename packages/{wrangler-devtools => chrome-devtools-patches}/package.json (75%) rename packages/{wrangler-devtools => chrome-devtools-patches}/patches/0001-Expand-Browser-support-make-it-work-in-Firefox-Safar.patch (100%) rename packages/{wrangler-devtools => chrome-devtools-patches}/patches/0002-Setup-Cloudflare-devtools-target-type.patch (100%) rename packages/{wrangler-devtools => chrome-devtools-patches}/patches/0003-Add-ping-to-improve-connection-stability.-Without-th.patch (100%) rename packages/{wrangler-devtools => chrome-devtools-patches}/patches/0004-Support-viewing-source-files-over-the-network.-This-.patch (100%) rename packages/{wrangler-devtools => chrome-devtools-patches}/patches/0005-Support-forcing-the-devtools-theme-via-a-query-param.patch (100%) rename packages/{wrangler-devtools => chrome-devtools-patches}/patches/0006-All-about-the-network-tab.patch (100%) rename packages/{wrangler-devtools => chrome-devtools-patches}/patches/0007-Limit-heap-profiling-modes-available.patch (100%) rename packages/{wrangler-devtools => chrome-devtools-patches}/patches/0008-Use-the-worker-name-as-the-title-for-the-Javascript-.patch (100%) diff --git a/.changeset/mighty-waves-deny.md b/.changeset/mighty-waves-deny.md new file mode 100644 index 000000000000..99fef3e93152 --- /dev/null +++ b/.changeset/mighty-waves-deny.md @@ -0,0 +1,5 @@ +--- +"@cloudflare/chrome-devtools-patches": patch +--- + +change package name from @cloudflare/wrangler-devtools to @cloudflare/chrome-devtools-patches diff 
--git a/.github/workflows/deploy-pages-previews.yml b/.github/workflows/deploy-pages-previews.yml index 98a83b9c6654..5174ecfae2a0 100644 --- a/.github/workflows/deploy-pages-previews.yml +++ b/.github/workflows/deploy-pages-previews.yml @@ -10,7 +10,7 @@ name: Deploy Pages Previews # # PR Label | Pages Project # --------------------------------------------------------- -# preview:wrangler-devtools | packages/wrangler-devtools +# preview:chrome-devtools-patches | packages/chrome-devtools-patches # preview:quick-edit | packages/quick-edit # preview:workers-playground | packages/workers-playground # @@ -25,7 +25,7 @@ jobs: # Only run this on PRs that are for the "cloudflare" org and not "from" `main` # - non-Cloudflare PRs will not have the secrets needed # - PRs "from" main would accidentally do a production deployment - if: github.repository_owner == 'cloudflare' && github.head_ref != 'main' && (contains(github.event.*.labels.*.name, 'preview:wrangler-devtools') || contains(github.event.*.labels.*.name, 'preview:quick-edit') || contains(github.event.*.labels.*.name, 'preview:workers-playground')) + if: github.repository_owner == 'cloudflare' && github.head_ref != 'main' && (contains(github.event.*.labels.*.name, 'preview:chrome-devtools-patches') || contains(github.event.*.labels.*.name, 'preview:quick-edit') || contains(github.event.*.labels.*.name, 'preview:workers-playground')) timeout-minutes: 60 concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -49,9 +49,9 @@ jobs: CI_OS: ${{ runner.os }} - name: Deploy Wrangler DevTools preview - if: contains(github.event.*.labels.*.name, 'preview:wrangler-devtools') + if: contains(github.event.*.labels.*.name, 'preview:chrome-devtools-patches') run: | - output=$(pnpm --filter @cloudflare/wrangler-devtools run deploy) + output=$(pnpm --filter @cloudflare/chrome-devtools-patches run deploy) echo "Extracting deployed URL from command output" url=$(echo "$output" | grep -oP 'Take a peek over at \K\S+') echo "Extracted URL: $url" @@ -80,7 +80,7 @@ jobs: VITE_DEVTOOLS_PREVIEW_URL: ${{ env.VITE_DEVTOOLS_PREVIEW_URL }} - name: "Comment on PR with Devtools Link" - if: contains(github.event.*.labels.*.name, 'preview:wrangler-devtools') + if: contains(github.event.*.labels.*.name, 'preview:chrome-devtools-patches') uses: marocchino/sticky-pull-request-comment@v2 with: header: ${{ steps.finder.outputs.pr }} @@ -99,7 +99,7 @@ jobs: ``` - name: "Comment on PR with Workers Playground Link" - if: contains(github.event.*.labels.*.name, 'preview:wrangler-devtools') && contains(github.event.*.labels.*.name, 'preview:workers-playground') + if: contains(github.event.*.labels.*.name, 'preview:chrome-devtools-patches') && contains(github.event.*.labels.*.name, 'preview:workers-playground') uses: marocchino/sticky-pull-request-comment@v2 with: header: ${{ steps.finder.outputs.pr }} diff --git a/.gitignore b/.gitignore index cbc4bb8e245c..2d595642f592 100644 --- a/.gitignore +++ b/.gitignore @@ -188,14 +188,14 @@ packages/quick-edit/vscode packages/quick-edit/web packages/wrangler/config-schema.json -packages/wrangler-devtools/built-devtools -packages/wrangler-devtools/.cipd -packages/wrangler-devtools/.gclient -packages/wrangler-devtools/.gclient_entries -packages/wrangler-devtools/.gclient_previous_sync_commits -packages/wrangler-devtools/.gcs_entries -packages/wrangler-devtools/depot -packages/wrangler-devtools/devtools-frontend +packages/chrome-devtools-patches/built-devtools +packages/chrome-devtools-patches/.cipd 
+packages/chrome-devtools-patches/.gclient +packages/chrome-devtools-patches/.gclient_entries +packages/chrome-devtools-patches/.gclient_previous_sync_commits +packages/chrome-devtools-patches/.gcs_entries +packages/chrome-devtools-patches/depot +packages/chrome-devtools-patches/devtools-frontend packages/miniflare/dist-types/ fixtures/remix-pages-app/public/build diff --git a/.prettierignore b/.prettierignore index 8cd1d94f152b..d55c0a6c1427 100644 --- a/.prettierignore +++ b/.prettierignore @@ -24,7 +24,7 @@ packages/create-cloudflare/templates*/**/*.* # but still exclude the worker-configuration.d.ts file, since it's generated !packages/create-cloudflare/templates*/hello-world/**/*.* packages/create-cloudflare/templates*/hello-world/**/worker-configuration.d.ts -packages/wrangler-devtools/devtools-frontend +packages/chrome-devtools-patches/devtools-frontend # dist-functions are generated in the fixtures/vitest-pool-workers-examples/pages-functions-unit-integration-self folder dist-functions diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c467228cd4db..19d31ac68866 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -202,7 +202,7 @@ Changes should be committed to a new local branch, which then gets pushed to you git push -u origin ``` - Once you are happy with your changes, create a Pull Request on GitHub -- The format for Pull Request titles is `[package name] description`, where the package name should indicate which package of the `workers-sdk` monorepo your PR pertains to (e.g. `wrangler`/`pages-shared`/`wrangler-devtools`), and the description should be a succinct summary of the change you're making. +- The format for Pull Request titles is `[package name] description`, where the package name should indicate which package of the `workers-sdk` monorepo your PR pertains to (e.g. `wrangler`/`pages-shared`/`chrome-devtools-patches`), and the description should be a succinct summary of the change you're making. - GitHub will insert a template for the body of your Pull Request—it's important to carefully fill out all the fields, giving as much detail as possible to reviewers. ## PR Review @@ -215,7 +215,7 @@ Every PR will have an associated pre-release build for all releaseable packages It's also possible to generate preview builds for the applications in the repository. 
These aren't generated automatically because they're pretty slow CI jobs, but you can trigger preview builds by adding one of the following labels to your PR: -- `preview:wrangler-devtools` for deploying [wrangler-devtools](packages/wrangler-devtools) +- `preview:chrome-devtools-patches` for deploying [chrome-devtools-patches](packages/chrome-devtools-patches) - `preview:workers-playground` for deploying [workers-playground](packages/workers-playground) - `preview:quick-edit` for deploying [quick-edit](packages/quick-edit) diff --git a/README.md b/README.md index fc6d11130829..d9ec553e962c 100644 --- a/README.md +++ b/README.md @@ -57,13 +57,13 @@ Visit the official Workers documentation [here](https://developers.cloudflare.co ## Directory -| Package | Description | Links | -| ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------- | -| [`wrangler`](https://github.com/cloudflare/workers-sdk/tree/main/packages/wrangler) | A command line tool for building [Cloudflare Workers](https://workers.cloudflare.com/). | [Docs](https://developers.cloudflare.com/workers/wrangler/) | -| [`create-cloudflare` (C3)](https://github.com/cloudflare/workers-sdk/tree/main/packages/create-cloudflare) | A CLI for creating and deploying new applications to Cloudflare. | [Docs](https://developers.cloudflare.com/pages/get-started/c3/) | -| [`miniflare`](https://github.com/cloudflare/workers-sdk/tree/main/packages/miniflare) | A simulator for developing and testing Cloudflare Workers, powered by [workerd](https://github.com/cloudflare/workerd) | [Docs](https://miniflare.dev) | -| [`wrangler-devtools`](https://github.com/cloudflare/workers-sdk/tree/main/packages/wrangler-devtools) | Cloudflare's fork of Chrome DevTools for inspecting your local or remote Workers | | -| [`pages-shared`](https://github.com/cloudflare/workers-sdk/tree/main/packages/pages-shared) | Used internally to power Wrangler and Cloudflare Pages. It contains all the code that is shared between these clients. | | +| Package | Description | Links | +| ----------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------- | +| [`wrangler`](https://github.com/cloudflare/workers-sdk/tree/main/packages/wrangler) | A command line tool for building [Cloudflare Workers](https://workers.cloudflare.com/). | [Docs](https://developers.cloudflare.com/workers/wrangler/) | +| [`create-cloudflare` (C3)](https://github.com/cloudflare/workers-sdk/tree/main/packages/create-cloudflare) | A CLI for creating and deploying new applications to Cloudflare. 
| [Docs](https://developers.cloudflare.com/pages/get-started/c3/) | +| [`miniflare`](https://github.com/cloudflare/workers-sdk/tree/main/packages/miniflare) | A simulator for developing and testing Cloudflare Workers, powered by [workerd](https://github.com/cloudflare/workerd) | [Docs](https://miniflare.dev) | +| [`chrome-devtools-patches`](https://github.com/cloudflare/workers-sdk/tree/main/packages/chrome-devtools-patches) | Cloudflare's fork of Chrome DevTools for inspecting your local or remote Workers | | +| [`pages-shared`](https://github.com/cloudflare/workers-sdk/tree/main/packages/pages-shared) | Used internally to power Wrangler and Cloudflare Pages. It contains all the code that is shared between these clients. | | ## Contributing diff --git a/packages/wrangler-devtools/CHANGELOG.md b/packages/chrome-devtools-patches/CHANGELOG.md similarity index 94% rename from packages/wrangler-devtools/CHANGELOG.md rename to packages/chrome-devtools-patches/CHANGELOG.md index 14c6e9dd761f..59bd120267b5 100644 --- a/packages/wrangler-devtools/CHANGELOG.md +++ b/packages/chrome-devtools-patches/CHANGELOG.md @@ -1,4 +1,4 @@ -# @cloudflare/wrangler-devtools +# @cloudflare/chrome-devtools-patches ## 0.1.0 diff --git a/packages/wrangler-devtools/Makefile b/packages/chrome-devtools-patches/Makefile similarity index 100% rename from packages/wrangler-devtools/Makefile rename to packages/chrome-devtools-patches/Makefile diff --git a/packages/wrangler-devtools/README.md b/packages/chrome-devtools-patches/README.md similarity index 83% rename from packages/wrangler-devtools/README.md rename to packages/chrome-devtools-patches/README.md index e0c5b810e3d1..4ade8e8be8b0 100644 --- a/packages/wrangler-devtools/README.md +++ b/packages/chrome-devtools-patches/README.md @@ -1,6 +1,6 @@ -# Wrangler Devtools Pages Project +# Workers Devtools Pages Project -This package contains a Workers specific version of Chrome Devtools that is used by the Wrangler dev command. It is a customized fork of Chrome DevTools specifically tailored for debugging Cloudflare Workers. This package provides Worker-specific functionality through carefully maintained patches on top of Chrome DevTools. +This package contains a Workers specific version of Chrome Devtools that is used by the Wrangler dev command and other applications. It is a customized fork of Chrome DevTools specifically tailored for debugging Cloudflare Workers. This package provides Worker-specific functionality through carefully maintained patches on top of Chrome DevTools. ## Overview @@ -56,8 +56,8 @@ On any pull request to the repo on GitHub, you can add labels to trigger preview There are two labels you can use: -- `preview:wrangler-devtools` - this will trigger the DevTools preview -- `preview:wrangler-playground` - this will trigger the Playground preview +- `preview:chrome-devtools-patches` - this will trigger the DevTools preview +- `preview:workers-playground` - this will trigger the Playground preview If you add **both** labels, Playground will embed the DevTools preview, so you can test them together. @@ -86,7 +86,7 @@ When making changes: Deployments are managed by GitHub Actions: - deploy-pages-previews.yml: - - Runs on any PR that has the `preview:wrangler-devtools` label. + - Runs on any PR that has the `preview:chrome-devtools-patches` label. - Deploys a preview, which can then be accessed via [https://.cloudflare-devtools.pages.dev/]. 
- changesets.yml: - Runs when a "Version Packages" PR, containing a changeset that touches this package, is merged to `main`. diff --git a/packages/wrangler-devtools/package.json b/packages/chrome-devtools-patches/package.json similarity index 75% rename from packages/wrangler-devtools/package.json rename to packages/chrome-devtools-patches/package.json index 1e968fffa11b..2c888353e49d 100644 --- a/packages/wrangler-devtools/package.json +++ b/packages/chrome-devtools-patches/package.json @@ -1,8 +1,8 @@ { - "name": "@cloudflare/wrangler-devtools", + "name": "@cloudflare/chrome-devtools-patches", "version": "0.1.0", "private": true, - "description": "Chrome Devtools hosted for easy use with Wrangler", + "description": "Chrome Devtools hosted for easy use with Workers tooling and applications (Wrangler, Playground, Quick Editor).", "homepage": "https://github.com/cloudflare/workers-sdk#readme", "bugs": { "url": "https://github.com/cloudflare/workers-sdk/issues" diff --git a/packages/wrangler-devtools/patches/0001-Expand-Browser-support-make-it-work-in-Firefox-Safar.patch b/packages/chrome-devtools-patches/patches/0001-Expand-Browser-support-make-it-work-in-Firefox-Safar.patch similarity index 100% rename from packages/wrangler-devtools/patches/0001-Expand-Browser-support-make-it-work-in-Firefox-Safar.patch rename to packages/chrome-devtools-patches/patches/0001-Expand-Browser-support-make-it-work-in-Firefox-Safar.patch diff --git a/packages/wrangler-devtools/patches/0002-Setup-Cloudflare-devtools-target-type.patch b/packages/chrome-devtools-patches/patches/0002-Setup-Cloudflare-devtools-target-type.patch similarity index 100% rename from packages/wrangler-devtools/patches/0002-Setup-Cloudflare-devtools-target-type.patch rename to packages/chrome-devtools-patches/patches/0002-Setup-Cloudflare-devtools-target-type.patch diff --git a/packages/wrangler-devtools/patches/0003-Add-ping-to-improve-connection-stability.-Without-th.patch b/packages/chrome-devtools-patches/patches/0003-Add-ping-to-improve-connection-stability.-Without-th.patch similarity index 100% rename from packages/wrangler-devtools/patches/0003-Add-ping-to-improve-connection-stability.-Without-th.patch rename to packages/chrome-devtools-patches/patches/0003-Add-ping-to-improve-connection-stability.-Without-th.patch diff --git a/packages/wrangler-devtools/patches/0004-Support-viewing-source-files-over-the-network.-This-.patch b/packages/chrome-devtools-patches/patches/0004-Support-viewing-source-files-over-the-network.-This-.patch similarity index 100% rename from packages/wrangler-devtools/patches/0004-Support-viewing-source-files-over-the-network.-This-.patch rename to packages/chrome-devtools-patches/patches/0004-Support-viewing-source-files-over-the-network.-This-.patch diff --git a/packages/wrangler-devtools/patches/0005-Support-forcing-the-devtools-theme-via-a-query-param.patch b/packages/chrome-devtools-patches/patches/0005-Support-forcing-the-devtools-theme-via-a-query-param.patch similarity index 100% rename from packages/wrangler-devtools/patches/0005-Support-forcing-the-devtools-theme-via-a-query-param.patch rename to packages/chrome-devtools-patches/patches/0005-Support-forcing-the-devtools-theme-via-a-query-param.patch diff --git a/packages/wrangler-devtools/patches/0006-All-about-the-network-tab.patch b/packages/chrome-devtools-patches/patches/0006-All-about-the-network-tab.patch similarity index 100% rename from packages/wrangler-devtools/patches/0006-All-about-the-network-tab.patch rename to 
packages/chrome-devtools-patches/patches/0006-All-about-the-network-tab.patch diff --git a/packages/wrangler-devtools/patches/0007-Limit-heap-profiling-modes-available.patch b/packages/chrome-devtools-patches/patches/0007-Limit-heap-profiling-modes-available.patch similarity index 100% rename from packages/wrangler-devtools/patches/0007-Limit-heap-profiling-modes-available.patch rename to packages/chrome-devtools-patches/patches/0007-Limit-heap-profiling-modes-available.patch diff --git a/packages/wrangler-devtools/patches/0008-Use-the-worker-name-as-the-title-for-the-Javascript-.patch b/packages/chrome-devtools-patches/patches/0008-Use-the-worker-name-as-the-title-for-the-Javascript-.patch similarity index 100% rename from packages/wrangler-devtools/patches/0008-Use-the-worker-name-as-the-title-for-the-Javascript-.patch rename to packages/chrome-devtools-patches/patches/0008-Use-the-worker-name-as-the-title-for-the-Javascript-.patch diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f63655fae88c..73acc46b8003 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -741,6 +741,15 @@ importers: specifier: workspace:* version: link:../../packages/wrangler + packages/chrome-devtools-patches: + devDependencies: + patch-package: + specifier: ^6.5.1 + version: 6.5.1 + wrangler: + specifier: workspace:* + version: link:../wrangler + packages/cli: devDependencies: '@clack/core': @@ -1942,15 +1951,6 @@ importers: specifier: ^17.7.2 version: 17.7.2 - packages/wrangler-devtools: - devDependencies: - patch-package: - specifier: ^6.5.1 - version: 6.5.1 - wrangler: - specifier: workspace:* - version: link:../wrangler - tools: devDependencies: '@cloudflare/eslint-config-worker': diff --git a/tools/deployments/__tests__/deploy-non-npm-packages.test.ts b/tools/deployments/__tests__/deploy-non-npm-packages.test.ts index ee6394b8e94c..1e3265fbf5ca 100644 --- a/tools/deployments/__tests__/deploy-non-npm-packages.test.ts +++ b/tools/deployments/__tests__/deploy-non-npm-packages.test.ts @@ -97,6 +97,7 @@ describe("findDeployablePackageNames()", () => { }) => { expect(findDeployablePackageNames()).toMatchInlineSnapshot(` Set { + "@cloudflare/chrome-devtools-patches", "devprod-status-bot", "edge-preview-authenticated-proxy", "format-errors", @@ -108,7 +109,6 @@ describe("findDeployablePackageNames()", () => { "@cloudflare/workers-shared", "workers.new", "@cloudflare/workflows-shared", - "@cloudflare/wrangler-devtools", } `); }); diff --git a/tools/deployments/__tests__/validate-changesets.test.ts b/tools/deployments/__tests__/validate-changesets.test.ts index c5d830642b0d..97ef51145947 100644 --- a/tools/deployments/__tests__/validate-changesets.test.ts +++ b/tools/deployments/__tests__/validate-changesets.test.ts @@ -21,6 +21,7 @@ describe("findPackageNames()", () => { }) => { expect(findPackageNames()).toMatchInlineSnapshot(` Set { + "@cloudflare/chrome-devtools-patches", "create-cloudflare", "devprod-status-bot", "edge-preview-authenticated-proxy", @@ -40,7 +41,6 @@ describe("findPackageNames()", () => { "workers.new", "@cloudflare/workflows-shared", "wrangler", - "@cloudflare/wrangler-devtools", } `); }); From 97acf07b3e09192b71e81a722029d026a7198b8b Mon Sep 17 00:00:00 2001 From: Maximo Guk <62088388+Maximo-Guk@users.noreply.github.com> Date: Tue, 19 Nov 2024 16:48:37 -0400 Subject: [PATCH 5/9] feat: Add additional fields to pages deploy detailed artifact for wrangler-action pages parity (#7252) --- .changeset/curly-dryers-admire.md | 5 +++++ packages/wrangler/src/__tests__/output.test.ts | 12 ++++++++++++ 
packages/wrangler/src/output.ts | 8 ++++++++ packages/wrangler/src/pages/deploy.ts | 7 +++++++ 4 files changed, 32 insertions(+) create mode 100644 .changeset/curly-dryers-admire.md diff --git a/.changeset/curly-dryers-admire.md b/.changeset/curly-dryers-admire.md new file mode 100644 index 000000000000..fdc93f3efe9b --- /dev/null +++ b/.changeset/curly-dryers-admire.md @@ -0,0 +1,5 @@ +--- +"wrangler": minor +--- + +feat: Add production_branch and deployment_trigger to pages deploy detailed artifact for wrangler-action pages parity diff --git a/packages/wrangler/src/__tests__/output.test.ts b/packages/wrangler/src/__tests__/output.test.ts index 6124c72660eb..c5e0fccd3e1c 100644 --- a/packages/wrangler/src/__tests__/output.test.ts +++ b/packages/wrangler/src/__tests__/output.test.ts @@ -193,6 +193,12 @@ describe("writeOutput()", () => { url: "test.com", alias: "dev.com", environment: "production", + production_branch: "production-branch", + deployment_trigger: { + metadata: { + commit_hash: "bc286bd30cf12b7fdbce046be6e53ce12ae1283d", + }, + }, }); const outputFilePaths = readdirSync("output"); @@ -215,6 +221,12 @@ describe("writeOutput()", () => { url: "test.com", alias: "dev.com", environment: "production", + production_branch: "production-branch", + deployment_trigger: { + metadata: { + commit_hash: "bc286bd30cf12b7fdbce046be6e53ce12ae1283d", + }, + }, }, ]); }); diff --git a/packages/wrangler/src/output.ts b/packages/wrangler/src/output.ts index d4297cc94a9c..4b193031e506 100644 --- a/packages/wrangler/src/output.ts +++ b/packages/wrangler/src/output.ts @@ -118,6 +118,14 @@ interface OutputEntryPagesDeploymentDetailed alias: string | undefined; /** The environment being deployed to */ environment: "production" | "preview"; + /** The production branch of the pages project */ + production_branch: string; + deployment_trigger: { + metadata: { + /** Commit hash of the deployment trigger metadata for the pages project */ + commit_hash: string; + }; + }; } interface OutputEntryVersionUpload extends OutputEntryBase<"version-upload"> { diff --git a/packages/wrangler/src/pages/deploy.ts b/packages/wrangler/src/pages/deploy.ts index e8a3e85298fc..9f5174e24bd3 100644 --- a/packages/wrangler/src/pages/deploy.ts +++ b/packages/wrangler/src/pages/deploy.ts @@ -454,6 +454,13 @@ ${failureMessage}`, url: deploymentResponse.url, alias, environment: deploymentResponse.environment, + production_branch: deploymentResponse.production_branch, + deployment_trigger: { + metadata: { + commit_hash: + deploymentResponse.deployment_trigger?.metadata?.commit_hash ?? "", + }, + }, }); await metrics.sendMetricsEvent("create pages deployment"); From 1b80decfaf56c8782e49dad685c344288629b668 Mon Sep 17 00:00:00 2001 From: Daniel Rivas <1887507+danielrs@users.noreply.github.com> Date: Wed, 20 Nov 2024 04:54:34 -0600 Subject: [PATCH 6/9] fix: wrangler pages deployment (list|tail) env filtering (#7263) Adds the --environment flag to `wrangler pages deployment list`, which should allow users to list deployments of a specific type. When the flag it's not provided it will list the latest 25 deployments regardless of type. Additionally, makes sure we're listing the correct type of deployments when attempting to tail. 
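A minimal usage sketch of the behaviour described above; the project name and deployment ID here are illustrative placeholders that mirror the test fixtures further down, not values prescribed by the patch:

```sh
# Without the flag, the latest 25 deployments are listed regardless of environment.
wrangler pages deployment list --project-name=images

# With the new flag, only deployments for the given environment are listed.
wrangler pages deployment list --project-name=images --environment=production
wrangler pages deployment list --project-name=images --environment=preview

# Tailing now filters the deployment lookup by environment as well (production by default).
wrangler pages deployment tail --project-name=images <deployment-id> --environment=preview
```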
--- .changeset/curly-rice-travel.md | 5 + .../__tests__/pages/deployment-list.test.ts | 116 +++++++++++++++++- .../pages/pages-deployment-tail.test.ts | 97 ++++++++++++--- .../wrangler/src/pages/deployment-tails.ts | 4 +- packages/wrangler/src/pages/deployments.ts | 13 +- 5 files changed, 214 insertions(+), 21 deletions(-) create mode 100644 .changeset/curly-rice-travel.md diff --git a/.changeset/curly-rice-travel.md b/.changeset/curly-rice-travel.md new file mode 100644 index 000000000000..05a806c0117f --- /dev/null +++ b/.changeset/curly-rice-travel.md @@ -0,0 +1,5 @@ +--- +"wrangler": minor +--- + +Fix wrangler pages deployment (list|tail) environment filtering. diff --git a/packages/wrangler/src/__tests__/pages/deployment-list.test.ts b/packages/wrangler/src/__tests__/pages/deployment-list.test.ts index 828e2ad00ca2..8d86f743288a 100644 --- a/packages/wrangler/src/__tests__/pages/deployment-list.test.ts +++ b/packages/wrangler/src/__tests__/pages/deployment-list.test.ts @@ -47,20 +47,128 @@ describe("pages deployment list", () => { expect(requests.count).toBe(1); }); + + it("should pass no environment", async () => { + const deployments: Deployment[] = [ + { + id: "87bbc8fe-16be-45cd-81e0-63d722e82cdf", + url: "https://87bbc8fe.images.pages.dev", + environment: "preview", + created_on: "2021-11-17T14:52:26.133835Z", + latest_stage: { + ended_on: "2021-11-17T14:52:26.133835Z", + status: "success", + }, + deployment_trigger: { + metadata: { + branch: "main", + commit_hash: "c7649364c4cb32ad4f65b530b9424e8be5bec9d6", + }, + }, + project_name: "images", + }, + ]; + + const requests = mockDeploymentListRequest(deployments); + await runWrangler("pages deployment list --project-name=images"); + expect(requests.count).toBe(1); + expect( + requests.queryParams[0].find(([key, _]) => { + return key === "env"; + }) + ).toBeUndefined(); + }); + + it("should pass production environment with flag", async () => { + const deployments: Deployment[] = [ + { + id: "87bbc8fe-16be-45cd-81e0-63d722e82cdf", + url: "https://87bbc8fe.images.pages.dev", + environment: "preview", + created_on: "2021-11-17T14:52:26.133835Z", + latest_stage: { + ended_on: "2021-11-17T14:52:26.133835Z", + status: "success", + }, + deployment_trigger: { + metadata: { + branch: "main", + commit_hash: "c7649364c4cb32ad4f65b530b9424e8be5bec9d6", + }, + }, + project_name: "images", + }, + ]; + + const requests = mockDeploymentListRequest(deployments); + await runWrangler( + "pages deployment list --project-name=images --environment=production" + ); + expect(requests.count).toBe(1); + expect( + requests.queryParams[0].find(([key, _]) => { + return key === "env"; + }) + ).toStrictEqual(["env", "production"]); + }); + + it("should pass preview environment with flag", async () => { + const deployments: Deployment[] = [ + { + id: "87bbc8fe-16be-45cd-81e0-63d722e82cdf", + url: "https://87bbc8fe.images.pages.dev", + environment: "preview", + created_on: "2021-11-17T14:52:26.133835Z", + latest_stage: { + ended_on: "2021-11-17T14:52:26.133835Z", + status: "success", + }, + deployment_trigger: { + metadata: { + branch: "main", + commit_hash: "c7649364c4cb32ad4f65b530b9424e8be5bec9d6", + }, + }, + project_name: "images", + }, + ]; + + const requests = mockDeploymentListRequest(deployments); + await runWrangler( + "pages deployment list --project-name=images --environment=preview" + ); + expect(requests.count).toBe(1); + expect( + requests.queryParams[0].find(([key, _]) => { + return key === "env"; + }) + ).toStrictEqual(["env", "preview"]); + }); 
}); /* -------------------------------------------------- */ /* Helper Functions */ /* -------------------------------------------------- */ -function mockDeploymentListRequest(deployments: unknown[]) { - const requests = { count: 0 }; +/** + * A logger used to check how many times a mock API has been hit. + * Useful as a helper in our testing to check if wrangler is making + * the correct API calls without actually sending any web traffic. + */ +type RequestLogger = { + count: number; + queryParams: [string, string][][]; +}; + +function mockDeploymentListRequest(deployments: unknown[]): RequestLogger { + const requests: RequestLogger = { count: 0, queryParams: [] }; msw.use( http.get( "*/accounts/:accountId/pages/projects/:project/deployments", - ({ params }) => { + ({ request, params }) => { requests.count++; - + const url = new URL(request.url); + requests.queryParams.push(Array.from(url.searchParams.entries())); expect(params.project).toEqual("images"); expect(params.accountId).toEqual("some-account-id"); diff --git a/packages/wrangler/src/__tests__/pages/pages-deployment-tail.test.ts b/packages/wrangler/src/__tests__/pages/pages-deployment-tail.test.ts index f293c051ac33..344792b252f7 100644 --- a/packages/wrangler/src/__tests__/pages/pages-deployment-tail.test.ts +++ b/packages/wrangler/src/__tests__/pages/pages-deployment-tail.test.ts @@ -166,6 +166,63 @@ describe("pages deployment tail", () => { ); await api.closeHelper(); }); + + it("passes default environment to deployments list", async () => { + api = mockTailAPIs(); + expect(api.requests.creation.length).toStrictEqual(0); + + await runWrangler( + "pages deployment tail --project-name mock-project mock-deployment-id" + ); + + await expect(api.ws.connected).resolves.toBeTruthy(); + console.log(api.requests.deployments.queryParams[0]); + expect(api.requests.deployments.count).toStrictEqual(1); + expect( + api.requests.deployments.queryParams[0].find(([key, _]) => { + return key === "env"; + }) + ).toStrictEqual(["env", "production"]); + await api.closeHelper(); + }); + + it("passes production environment to deployments list", async () => { + api = mockTailAPIs(); + expect(api.requests.creation.length).toStrictEqual(0); + + await runWrangler( + "pages deployment tail --project-name mock-project mock-deployment-id --environment production" + ); + + await expect(api.ws.connected).resolves.toBeTruthy(); + console.log(api.requests.deployments.queryParams[0]); + expect(api.requests.deployments.count).toStrictEqual(1); + expect( + api.requests.deployments.queryParams[0].find(([key, _]) => { + return key === "env"; + }) + ).toStrictEqual(["env", "production"]); + await api.closeHelper(); + }); + + it("passes preview environment to deployments list", async () => { + api = mockTailAPIs(); + expect(api.requests.creation.length).toStrictEqual(0); + + await runWrangler( + "pages deployment tail --project-name mock-project mock-deployment-id --environment preview" + ); + + await expect(api.ws.connected).resolves.toBeTruthy(); + console.log(api.requests.deployments.queryParams[0]); + expect(api.requests.deployments.count).toStrictEqual(1); + expect( + api.requests.deployments.queryParams[0].find(([key, _]) => { + return key === "env"; + }) + ).toStrictEqual(["env", "preview"]); + await api.closeHelper(); + }); }); describe("filtering", () => { @@ -783,7 +840,7 @@ function deserializeToJson(message: WebSocket.RawData): string { */ type MockAPI = { requests: { - deployments: RequestCounter; + deployments: RequestLogger; creation: RequestInit[]; 
deletion: RequestCounter; }; @@ -792,17 +849,29 @@ type MockAPI = { closeHelper: () => Promise; }; +/** + * A logger used to check how many times a mock API has been hit. + * Useful as a helper in our testing to check if wrangler is making + * the correct API calls without actually sending any web traffic. + */ +type RequestLogger = { + count: number; + queryParams: [string, string][][]; +}; + /** * Mock out the API hit during Tail creation * * @returns a `RequestCounter` for counting how many times the API is hit */ -function mockListDeployments(): RequestCounter { - const requests: RequestCounter = { count: 0 }; +function mockListDeployments(): RequestLogger { + const requests: RequestLogger = { count: 0, queryParams: [] }; msw.use( http.get( `*/accounts/:accountId/pages/projects/:projectName/deployments`, - () => { + ({ request }) => { + const url = new URL(request.url); + requests.queryParams.push(Array.from(url.searchParams.entries())); requests.count++; return HttpResponse.json( { @@ -839,15 +908,6 @@ function mockListDeployments(): RequestCounter { return requests; } -/** - * A counter used to check how many times a mock API has been hit. - * Useful as a helper in our testing to check if wrangler is making - * the correct API calls without actually sending any web traffic - */ -type RequestCounter = { - count: number; -}; - /** * Mock out the API hit during Tail creation * @@ -911,6 +971,15 @@ const mockEmailEventTo = "to@example.com"; */ const mockEmailEventSize = 45416; +/** + * A counter used to check how many times a mock API has been hit. + * Useful as a helper in our testing to check if wrangler is making + * the correct API calls without actually sending any web traffic + */ +type RequestCounter = { + count: number; +}; + /** * Mock out the API hit during Tail deletion * @@ -950,7 +1019,7 @@ function mockTailAPIs(): MockAPI { requests: { deletion: { count: 0 }, creation: [], - deployments: { count: 0 }, + deployments: { count: 0, queryParams: [] }, }, // eslint-disable-next-line @typescript-eslint/no-non-null-assertion ws: null!, // will be set in the `beforeEach()`. 
diff --git a/packages/wrangler/src/pages/deployment-tails.ts b/packages/wrangler/src/pages/deployment-tails.ts index 96e2a9646302..9e0e9c0cdc57 100644 --- a/packages/wrangler/src/pages/deployment-tails.ts +++ b/packages/wrangler/src/pages/deployment-tails.ts @@ -163,7 +163,9 @@ export async function Handler({ } const deployments: Array = await fetchResult( - `/accounts/${accountId}/pages/projects/${projectName}/deployments` + `/accounts/${accountId}/pages/projects/${projectName}/deployments`, + {}, + new URLSearchParams({ env: environment }) ); const envDeployments = deployments.filter( diff --git a/packages/wrangler/src/pages/deployments.ts b/packages/wrangler/src/pages/deployments.ts index 2de7612ab9f2..6547d8ae1970 100644 --- a/packages/wrangler/src/pages/deployments.ts +++ b/packages/wrangler/src/pages/deployments.ts @@ -23,10 +23,15 @@ export function ListOptions(yargs: CommonYargsArgv) { description: "The name of the project you would like to list deployments for", }, + environment: { + type: "string", + choices: ["production", "preview"], + description: "Environment type to list deployments for", + }, }); } -export async function ListHandler({ projectName }: ListArgs) { +export async function ListHandler({ projectName, environment }: ListArgs) { const config = getConfigCache(PAGES_CONFIG_CACHE_FILENAME); const accountId = await requireAuth(config); @@ -42,7 +47,11 @@ export async function ListHandler({ projectName }: ListArgs) { } const deployments: Array = await fetchResult( - `/accounts/${accountId}/pages/projects/${projectName}/deployments` + `/accounts/${accountId}/pages/projects/${projectName}/deployments`, + {}, + environment + ? new URLSearchParams({ env: environment }) + : new URLSearchParams({}) ); const titleCase = (word: string) => From fa21312c6625680709e05547c13897bc1fa8c9d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Somhairle=20MacLe=C3=B2id?= Date: Wed, 20 Nov 2024 16:57:21 +0000 Subject: [PATCH 7/9] Fix relative project root (#7285) --- .changeset/sixty-news-relax.md | 5 + .../create-cloudflare/e2e-tests/cli.test.ts | 6 +- .../startDevWorker/BundleController.test.ts | 18 +-- .../startDevWorker/ConfigController.test.ts | 10 +- .../LocalRuntimeController.test.ts | 8 +- .../__tests__/find-additional-modules.test.ts | 14 +- .../wrangler/src/__tests__/get-entry.test.ts | 143 ++++++++++++++++++ .../__tests__/navigator-user-agent.test.ts | 4 +- .../api/startDevWorker/BundlerController.ts | 12 +- .../api/startDevWorker/ConfigController.ts | 2 +- .../wrangler/src/api/startDevWorker/types.ts | 2 +- packages/wrangler/src/deploy/index.ts | 2 +- .../wrangler/src/deployment-bundle/bundle.ts | 8 +- .../wrangler/src/deployment-bundle/entry.ts | 12 +- .../find-additional-modules.ts | 2 +- .../src/deployment-bundle/resolve-entry.ts | 16 +- packages/wrangler/src/dev/use-esbuild.ts | 2 +- .../src/pages/functions/buildPlugin.ts | 2 +- .../src/pages/functions/buildWorker.ts | 6 +- packages/wrangler/src/versions/index.ts | 2 +- 20 files changed, 214 insertions(+), 62 deletions(-) create mode 100644 .changeset/sixty-news-relax.md create mode 100644 packages/wrangler/src/__tests__/get-entry.test.ts diff --git a/.changeset/sixty-news-relax.md b/.changeset/sixty-news-relax.md new file mode 100644 index 000000000000..6944105069c3 --- /dev/null +++ b/.changeset/sixty-news-relax.md @@ -0,0 +1,5 @@ +--- +"wrangler": patch +--- + +Rename `directory` to `projectRoot` and ensure it's relative to the `wrangler.toml`. 
This fixes a regression which meant that `.wrangler` temporary folders were inadvertently generated relative to `process.cwd()` rather than the location of the `wrangler.toml` file. It also renames `directory` to `projectRoot`, which affects the `unstable_startWorker() interface. diff --git a/packages/create-cloudflare/e2e-tests/cli.test.ts b/packages/create-cloudflare/e2e-tests/cli.test.ts index 094d86829bc3..7efce879f3b4 100644 --- a/packages/create-cloudflare/e2e-tests/cli.test.ts +++ b/packages/create-cloudflare/e2e-tests/cli.test.ts @@ -199,7 +199,7 @@ describe.skipIf(experimental || frameworkToTest || isQuarantineMode())( const { output } = await runC3( [ project.path, - "--template=https://github.com/cloudflare/templates/worker-router", + "--template=https://github.com/cloudflare/workers-graphql-server", "--no-deploy", "--git=false", ], @@ -208,10 +208,10 @@ describe.skipIf(experimental || frameworkToTest || isQuarantineMode())( ); expect(output).toContain( - `repository https://github.com/cloudflare/templates/worker-router`, + `repository https://github.com/cloudflare/workers-graphql-server`, ); expect(output).toContain( - `Cloning template from: https://github.com/cloudflare/templates/worker-router`, + `Cloning template from: https://github.com/cloudflare/workers-graphql-server`, ); expect(output).toContain(`template cloned and validated`); }, diff --git a/packages/wrangler/src/__tests__/api/startDevWorker/BundleController.test.ts b/packages/wrangler/src/__tests__/api/startDevWorker/BundleController.test.ts index 7a478e3be5cd..7b096e4ce720 100644 --- a/packages/wrangler/src/__tests__/api/startDevWorker/BundleController.test.ts +++ b/packages/wrangler/src/__tests__/api/startDevWorker/BundleController.test.ts @@ -40,7 +40,7 @@ function configDefaults( const persist = path.join(process.cwd(), ".wrangler/persist"); return { entrypoint: "NOT_REAL", - directory: "NOT_REAL", + projectRoot: "NOT_REAL", build: unusable(), legacy: {}, dev: { persist }, @@ -68,7 +68,7 @@ describe("BundleController", () => { legacy: {}, name: "worker", entrypoint: path.resolve("src/index.ts"), - directory: path.resolve("src"), + projectRoot: path.resolve("src"), build: { additionalModules: [], processEntrypoint: false, @@ -141,7 +141,7 @@ describe("BundleController", () => { legacy: {}, name: "worker", entrypoint: path.resolve("src/index.ts"), - directory: path.resolve("src"), + projectRoot: path.resolve("src"), build: { additionalModules: [], processEntrypoint: false, @@ -207,7 +207,7 @@ describe("BundleController", () => { legacy: {}, name: "worker", entrypoint: path.resolve("out.ts"), - directory: path.resolve("."), + projectRoot: path.resolve("."), build: { additionalModules: [], processEntrypoint: false, @@ -287,7 +287,7 @@ describe("BundleController", () => { legacy: {}, name: "worker", entrypoint: path.resolve("src/index.ts"), - directory: path.resolve("src"), + projectRoot: path.resolve("src"), build: { additionalModules: [], processEntrypoint: false, @@ -345,7 +345,7 @@ describe("BundleController", () => { legacy: {}, name: "worker", entrypoint: path.resolve("src/index.ts"), - directory: path.resolve("src"), + projectRoot: path.resolve("src"), build: { additionalModules: [], @@ -391,7 +391,7 @@ describe("BundleController", () => { const configCustom: Partial = { name: "worker", entrypoint: path.resolve("out.ts"), - directory: process.cwd(), + projectRoot: process.cwd(), build: { additionalModules: [], processEntrypoint: false, @@ -464,7 +464,7 @@ describe("BundleController", () => { const 
configCustom: Partial = { name: "worker", entrypoint: path.resolve("out.ts"), - directory: process.cwd(), + projectRoot: process.cwd(), build: { additionalModules: [], @@ -513,7 +513,7 @@ describe("BundleController", () => { legacy: {}, name: "worker", entrypoint: path.resolve("src/index.ts"), - directory: path.resolve("src"), + projectRoot: path.resolve("src"), build: { additionalModules: [], diff --git a/packages/wrangler/src/__tests__/api/startDevWorker/ConfigController.test.ts b/packages/wrangler/src/__tests__/api/startDevWorker/ConfigController.test.ts index 8ebd3d775429..58b6e603f488 100644 --- a/packages/wrangler/src/__tests__/api/startDevWorker/ConfigController.test.ts +++ b/packages/wrangler/src/__tests__/api/startDevWorker/ConfigController.test.ts @@ -51,7 +51,7 @@ describe("ConfigController", () => { moduleRoot: path.join(process.cwd(), "src"), moduleRules: [], }, - directory: process.cwd(), + projectRoot: process.cwd(), entrypoint: path.join(process.cwd(), "src/index.ts"), }, }); @@ -87,7 +87,7 @@ base_dir = \"./some/base_dir\"`, moduleRoot: path.join(process.cwd(), "./some/base_dir"), moduleRules: [], }, - directory: process.cwd(), + projectRoot: process.cwd(), entrypoint: path.join(process.cwd(), "./some/base_dir/nested/index.js"), }, }); @@ -114,7 +114,7 @@ base_dir = \"./some/base_dir\"`, type: "configUpdate", config: { entrypoint: path.join(process.cwd(), "src/index.ts"), - directory: process.cwd(), + projectRoot: process.cwd(), build: { additionalModules: [], define: {}, @@ -138,7 +138,7 @@ base_dir = \"./some/base_dir\"`, type: "configUpdate", config: { entrypoint: path.join(process.cwd(), "src/index.ts"), - directory: process.cwd(), + projectRoot: process.cwd(), build: { additionalModules: [], define: {}, @@ -168,7 +168,7 @@ base_dir = \"./some/base_dir\"`, type: "configUpdate", config: { entrypoint: path.join(process.cwd(), "src/index.ts"), - directory: process.cwd(), + projectRoot: process.cwd(), build: { alias: { foo: "bar", diff --git a/packages/wrangler/src/__tests__/api/startDevWorker/LocalRuntimeController.test.ts b/packages/wrangler/src/__tests__/api/startDevWorker/LocalRuntimeController.test.ts index 8b1530475978..444797925c81 100644 --- a/packages/wrangler/src/__tests__/api/startDevWorker/LocalRuntimeController.test.ts +++ b/packages/wrangler/src/__tests__/api/startDevWorker/LocalRuntimeController.test.ts @@ -88,7 +88,7 @@ function makeEsbuildBundle(testBundle: TestBundle): Bundle { entrypointSource: "", entry: { file: "index.mjs", - directory: "/virtual/", + projectRoot: "/virtual/", format: "modules", moduleRoot: "/virtual", name: undefined, @@ -127,7 +127,7 @@ function configDefaults( ): StartDevWorkerOptions { return { entrypoint: "NOT_REAL", - directory: "NOT_REAL", + projectRoot: "NOT_REAL", build: unusable(), legacy: {}, dev: { persist: "./persist" }, @@ -232,7 +232,7 @@ describe("LocalRuntimeController", () => { `, entry: { file: "esm/index.mjs", - directory: "/virtual/", + projectRoot: "/virtual/", format: "modules", moduleRoot: "/virtual", name: undefined, @@ -346,7 +346,7 @@ describe("LocalRuntimeController", () => { path: "/virtual/index.js", entry: { file: "index.js", - directory: "/virtual/", + projectRoot: "/virtual/", format: "service-worker", moduleRoot: "/virtual", name: undefined, diff --git a/packages/wrangler/src/__tests__/find-additional-modules.test.ts b/packages/wrangler/src/__tests__/find-additional-modules.test.ts index e871002f96aa..dc9df03f2670 100644 --- a/packages/wrangler/src/__tests__/find-additional-modules.test.ts +++ 
b/packages/wrangler/src/__tests__/find-additional-modules.test.ts @@ -39,7 +39,7 @@ describe("traverse module graph", () => { const modules = await findAdditionalModules( { file: path.join(process.cwd(), "./index.js"), - directory: process.cwd(), + projectRoot: process.cwd(), format: "modules", moduleRoot: process.cwd(), exports: [], @@ -75,7 +75,7 @@ describe("traverse module graph", () => { const modules = await findAdditionalModules( { file: path.join(process.cwd(), "./index.js"), - directory: process.cwd(), + projectRoot: process.cwd(), format: "modules", moduleRoot: process.cwd(), exports: [], @@ -109,7 +109,7 @@ describe("traverse module graph", () => { const modules = await findAdditionalModules( { file: path.join(process.cwd(), "./src/nested/index.js"), - directory: path.join(process.cwd(), "./src/nested"), + projectRoot: path.join(process.cwd(), "./src/nested"), format: "modules", // The default module root is dirname(file) moduleRoot: path.join(process.cwd(), "./src/nested"), @@ -144,7 +144,7 @@ describe("traverse module graph", () => { const modules = await findAdditionalModules( { file: path.join(process.cwd(), "./src/nested/index.js"), - directory: path.join(process.cwd(), "./src/nested"), + projectRoot: path.join(process.cwd(), "./src/nested"), format: "modules", // The default module root is dirname(file) moduleRoot: path.join(process.cwd(), "./src"), @@ -179,7 +179,7 @@ describe("traverse module graph", () => { const modules = await findAdditionalModules( { file: path.join(process.cwd(), "./src/nested/index.js"), - directory: path.join(process.cwd(), "./src/nested"), + projectRoot: path.join(process.cwd(), "./src/nested"), format: "modules", // The default module root is dirname(file) moduleRoot: path.join(process.cwd(), "./src"), @@ -214,7 +214,7 @@ describe("traverse module graph", () => { const modules = await findAdditionalModules( { file: path.join(process.cwd(), "./src/index.js"), - directory: path.join(process.cwd(), "./src"), + projectRoot: path.join(process.cwd(), "./src"), format: "modules", // The default module root is dirname(file) moduleRoot: path.join(process.cwd(), "./src"), @@ -249,7 +249,7 @@ describe("traverse module graph", () => { findAdditionalModules( { file: path.join(process.cwd(), "./src/index.js"), - directory: path.join(process.cwd(), "./src"), + projectRoot: path.join(process.cwd(), "./src"), format: "modules", // The default module root is dirname(file) moduleRoot: path.join(process.cwd(), "./src"), diff --git a/packages/wrangler/src/__tests__/get-entry.test.ts b/packages/wrangler/src/__tests__/get-entry.test.ts new file mode 100644 index 000000000000..aa938d5054dc --- /dev/null +++ b/packages/wrangler/src/__tests__/get-entry.test.ts @@ -0,0 +1,143 @@ +import path from "path"; +import dedent from "ts-dedent"; +import { defaultWranglerConfig } from "../config/config"; +import { getEntry } from "../deployment-bundle/entry"; +import { mockConsoleMethods } from "./helpers/mock-console"; +import { runInTempDir } from "./helpers/run-in-tmp"; +import { seed } from "./helpers/seed"; +import type { Entry } from "../deployment-bundle/entry"; + +function normalize(entry: Entry): Entry { + const tmpDir = process.cwd(); + const tmpDirName = path.basename(tmpDir); + + return Object.fromEntries( + Object.entries(entry).map(([k, v]) => [ + k, + typeof v === "string" + ? 
v + .replaceAll("\\", "/") + .replace(new RegExp(`(.*${tmpDirName})`), `/tmp/dir`) + : v, + ]) + ) as Entry; +} + +describe("getEntry()", () => { + runInTempDir(); + mockConsoleMethods(); + + it("--script index.ts", async () => { + await seed({ + "index.ts": dedent/* javascript */ ` + export default { + fetch() { + + } + } + `, + }); + const entry = await getEntry( + { script: "index.ts" }, + defaultWranglerConfig, + "deploy" + ); + expect(normalize(entry)).toMatchObject({ + projectRoot: "/tmp/dir", + file: "/tmp/dir/index.ts", + moduleRoot: "/tmp/dir", + }); + }); + + it("--script src/index.ts", async () => { + await seed({ + "src/index.ts": dedent/* javascript */ ` + export default { + fetch() { + + } + } + `, + }); + const entry = await getEntry( + { script: "src/index.ts" }, + defaultWranglerConfig, + "deploy" + ); + expect(normalize(entry)).toMatchObject({ + projectRoot: "/tmp/dir", + file: "/tmp/dir/src/index.ts", + moduleRoot: "/tmp/dir/src", + }); + }); + + it("main = index.ts", async () => { + await seed({ + "index.ts": dedent/* javascript */ ` + export default { + fetch() { + + } + } + `, + }); + const entry = await getEntry( + {}, + { ...defaultWranglerConfig, main: "index.ts" }, + "deploy" + ); + expect(normalize(entry)).toMatchObject({ + projectRoot: "/tmp/dir", + file: "/tmp/dir/index.ts", + moduleRoot: "/tmp/dir", + }); + }); + + it("main = src/index.ts", async () => { + await seed({ + "src/index.ts": dedent/* javascript */ ` + export default { + fetch() { + + } + } + `, + }); + const entry = await getEntry( + {}, + { ...defaultWranglerConfig, main: "src/index.ts" }, + "deploy" + ); + expect(normalize(entry)).toMatchObject({ + projectRoot: "/tmp/dir", + file: "/tmp/dir/src/index.ts", + moduleRoot: "/tmp/dir/src", + }); + }); + + it("main = src/index.ts w/ configPath", async () => { + await seed({ + "other-worker/src/index.ts": dedent/* javascript */ ` + export default { + fetch() { + + } + } + `, + }); + const entry = await getEntry( + {}, + { + ...defaultWranglerConfig, + main: "src/index.ts", + configPath: "other-worker/wrangler.toml", + }, + "deploy" + ); + expect(normalize(entry)).toMatchObject({ + projectRoot: "/tmp/dir/other-worker", + file: "/tmp/dir/other-worker/src/index.ts", + moduleRoot: "/tmp/dir/other-worker/src", + }); + }); +}); diff --git a/packages/wrangler/src/__tests__/navigator-user-agent.test.ts b/packages/wrangler/src/__tests__/navigator-user-agent.test.ts index 1f3f3288e659..6b66a311a9ab 100644 --- a/packages/wrangler/src/__tests__/navigator-user-agent.test.ts +++ b/packages/wrangler/src/__tests__/navigator-user-agent.test.ts @@ -104,7 +104,7 @@ describe("defineNavigatorUserAgent is respected", () => { await bundleWorker( { file: path.resolve("src/index.js"), - directory: process.cwd(), + projectRoot: process.cwd(), format: "modules", moduleRoot: path.dirname(path.resolve("src/index.js")), exports: [], @@ -167,7 +167,7 @@ describe("defineNavigatorUserAgent is respected", () => { await bundleWorker( { file: path.resolve("src/index.js"), - directory: process.cwd(), + projectRoot: process.cwd(), format: "modules", moduleRoot: path.dirname(path.resolve("src/index.js")), exports: [], diff --git a/packages/wrangler/src/api/startDevWorker/BundlerController.ts b/packages/wrangler/src/api/startDevWorker/BundlerController.ts index bf633397473f..d3b7f91f7784 100644 --- a/packages/wrangler/src/api/startDevWorker/BundlerController.ts +++ b/packages/wrangler/src/api/startDevWorker/BundlerController.ts @@ -51,7 +51,7 @@ export class BundlerController extends 
Controller { // Since `this.#customBuildAborter` will change as new builds are scheduled, store the specific AbortController that will be used for this build const buildAborter = this.#customBuildAborter; const relativeFile = - path.relative(config.directory, config.entrypoint) || "."; + path.relative(config.projectRoot, config.entrypoint) || "."; logger.log(`The file ${filePath} changed, restarting build...`); this.emitBundleStartEvent(config); try { @@ -74,7 +74,7 @@ export class BundlerController extends Controller { const entry: Entry = { file: config.entrypoint, - directory: config.directory, + projectRoot: config.projectRoot, format: config.build.format, moduleRoot: config.build.moduleRoot, exports: config.build.exports, @@ -131,7 +131,7 @@ export class BundlerController extends Controller { // This could potentially cause issues as we no longer have identical behaviour between dev and deploy? targetConsumer: "dev", local: !config.dev?.remote, - projectRoot: config.directory, + projectRoot: config.projectRoot, defineNavigatorUserAgent: isNavigatorDefined( config.compatibilityDate, config.compatibilityFlags @@ -229,7 +229,7 @@ export class BundlerController extends Controller { assert(this.#tmpDir); const entry: Entry = { file: config.entrypoint, - directory: config.directory, + projectRoot: config.projectRoot, format: config.build.format, moduleRoot: config.build.moduleRoot, exports: config.build.exports, @@ -264,7 +264,7 @@ export class BundlerController extends Controller { // startDevWorker only applies to "dev" targetConsumer: "dev", testScheduled: Boolean(config.dev?.testScheduled), - projectRoot: config.directory, + projectRoot: config.projectRoot, onStart: () => { this.emitBundleStartEvent(config); }, @@ -325,7 +325,7 @@ export class BundlerController extends Controller { onConfigUpdate(event: ConfigUpdateEvent) { this.#tmpDir?.remove(); try { - this.#tmpDir = getWranglerTmpDir(event.config.directory, "dev"); + this.#tmpDir = getWranglerTmpDir(event.config.projectRoot, "dev"); } catch (e) { logger.error( "Failed to create temporary directory to store built files." diff --git a/packages/wrangler/src/api/startDevWorker/ConfigController.ts b/packages/wrangler/src/api/startDevWorker/ConfigController.ts index eaffac4e2435..2c6097d950b4 100644 --- a/packages/wrangler/src/api/startDevWorker/ConfigController.ts +++ b/packages/wrangler/src/api/startDevWorker/ConfigController.ts @@ -256,7 +256,7 @@ async function resolveConfig( compatibilityDate: getDevCompatibilityDate(config, input.compatibilityDate), compatibilityFlags: input.compatibilityFlags ?? config.compatibility_flags, entrypoint: entry.file, - directory: entry.directory, + projectRoot: entry.projectRoot, bindings, migrations: input.migrations ?? config.migrations, sendMetrics: input.sendMetrics ?? config.send_metrics, diff --git a/packages/wrangler/src/api/startDevWorker/types.ts b/packages/wrangler/src/api/startDevWorker/types.ts index fba44f65bc62..ba045f56f370 100644 --- a/packages/wrangler/src/api/startDevWorker/types.ts +++ b/packages/wrangler/src/api/startDevWorker/types.ts @@ -174,7 +174,7 @@ export interface StartDevWorkerInput { export type StartDevWorkerOptions = Omit & { /** A worker's directory. 
Usually where the wrangler.toml file is located */ - directory: string; + projectRoot: string; build: StartDevWorkerInput["build"] & { nodejsCompatMode: NodeJSCompatMode; format: CfScriptFormat; diff --git a/packages/wrangler/src/deploy/index.ts b/packages/wrangler/src/deploy/index.ts index 16fc175fca6e..000c3bcf97c5 100644 --- a/packages/wrangler/src/deploy/index.ts +++ b/packages/wrangler/src/deploy/index.ts @@ -329,7 +329,7 @@ export async function deployHandler(args: DeployArgs) { await verifyWorkerMatchesCITag( accountId, name, - path.relative(entry.directory, config.configPath ?? "wrangler.toml") + path.relative(entry.projectRoot, config.configPath ?? "wrangler.toml") ); } const { sourceMapSize, versionId, workerTag, targets } = await deploy({ diff --git a/packages/wrangler/src/deployment-bundle/bundle.ts b/packages/wrangler/src/deployment-bundle/bundle.ts index dd7d16715568..f367363f2a69 100644 --- a/packages/wrangler/src/deployment-bundle/bundle.ts +++ b/packages/wrangler/src/deployment-bundle/bundle.ts @@ -373,7 +373,7 @@ export async function bundleWorker( path: require.resolve(aliasPath, { // From the esbuild alias docs: "Note that when an import path is substituted using an alias, the resulting import path is resolved in the working directory instead of in the directory containing the source file with the import path." // https://esbuild.github.io/api/#alias:~:text=Note%20that%20when%20an%20import%20path%20is%20substituted%20using%20an%20alias%2C%20the%20resulting%20import%20path%20is%20resolved%20in%20the%20working%20directory%20instead%20of%20in%20the%20directory%20containing%20the%20source%20file%20with%20the%20import%20path. - paths: [entry.directory], + paths: [entry.projectRoot], }), }; } @@ -385,7 +385,7 @@ export async function bundleWorker( // Don't use entryFile here as the file may have been changed when applying the middleware entryPoints: [entry.file], bundle, - absWorkingDir: entry.directory, + absWorkingDir: entry.projectRoot, outdir: destination, keepNames: true, entryNames: entryName || path.parse(entryFile).name, @@ -526,7 +526,7 @@ export async function bundleWorker( )[0]; const resolvedEntryPointPath = path.resolve( - entry.directory, + entry.projectRoot, entryPoint.relativePath ); @@ -548,7 +548,7 @@ export async function bundleWorker( sourceMapPath, sourceMapMetadata: { tmpDir: tmpDir.path, - entryDirectory: entry.directory, + entryDirectory: entry.projectRoot, }, }; } diff --git a/packages/wrangler/src/deployment-bundle/entry.ts b/packages/wrangler/src/deployment-bundle/entry.ts index 1be44b13f210..bcc04895423c 100644 --- a/packages/wrangler/src/deployment-bundle/entry.ts +++ b/packages/wrangler/src/deployment-bundle/entry.ts @@ -22,7 +22,7 @@ export type Entry = { /** A worker's entrypoint */ file: string; /** A worker's directory. Usually where the wrangler.toml file is located */ - directory: string; + projectRoot: string; /** Is this a module worker or a service worker? */ format: CfScriptFormat; /** The directory that contains all of a `--no-bundle` worker's modules. Usually `${directory}/src`. 
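Earlier in this patch, `bundle.ts` feeds the renamed field straight into esbuild. A small worked example with made-up paths: the project root becomes esbuild's working directory, and the entry point that esbuild reports (relative to that directory) is resolved back against the same root.

```ts
import path from "node:path";

// Hypothetical entry; in Wrangler this comes from getEntry().
const entry = {
	file: "/repo/my-worker/src/index.ts",
	projectRoot: "/repo/my-worker", // formerly the loosely named `entry.directory`
};

// esbuild runs with the project root as its working directory...
const buildOptions = {
	entryPoints: [entry.file],
	absWorkingDir: entry.projectRoot,
	outdir: "/tmp/bundle-out",
};

// ...so the entry point it reports back is relative to that directory and is resolved
// against the same root to recover an absolute path.
const reportedRelativePath = "src/index.js";
const resolvedEntryPointPath = path.resolve(entry.projectRoot, reportedRelativePath);
console.log(buildOptions.absWorkingDir, resolvedEntryPointPath);
// => "/repo/my-worker" "/repo/my-worker/src/index.js"
```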
Defaults to path.dirname(file) */ @@ -50,10 +50,11 @@ export async function getEntry( config: Config, command: "dev" | "deploy" | "versions upload" | "types" ): Promise { - const directory = process.cwd(); const entryPoint = config.site?.["entry-point"]; - let paths: { absolutePath: string; relativePath: string } | undefined; + let paths: + | { absolutePath: string; relativePath: string; projectRoot?: string } + | undefined; if (args.script) { paths = resolveEntryWithScript(args.script); @@ -81,9 +82,10 @@ export async function getEntry( } await runCustomBuild(paths.absolutePath, paths.relativePath, config.build); + const projectRoot = paths.projectRoot ?? process.cwd(); const { format, exports } = await guessWorkerFormat( paths.absolutePath, - directory, + projectRoot, args.format ?? config.build?.upload?.format, config.tsconfig ); @@ -117,7 +119,7 @@ export async function getEntry( return { file: paths.absolutePath, - directory, + projectRoot, format, moduleRoot: args.moduleRoot ?? config.base_dir ?? path.dirname(paths.absolutePath), diff --git a/packages/wrangler/src/deployment-bundle/find-additional-modules.ts b/packages/wrangler/src/deployment-bundle/find-additional-modules.ts index 1e0ffe9cf68c..21f9c585a921 100644 --- a/packages/wrangler/src/deployment-bundle/find-additional-modules.ts +++ b/packages/wrangler/src/deployment-bundle/find-additional-modules.ts @@ -72,7 +72,7 @@ export async function findAdditionalModules( let pythonRequirements = ""; try { pythonRequirements = await readFile( - path.resolve(entry.directory, "requirements.txt"), + path.resolve(entry.projectRoot, "requirements.txt"), "utf-8" ); } catch (e) { diff --git a/packages/wrangler/src/deployment-bundle/resolve-entry.ts b/packages/wrangler/src/deployment-bundle/resolve-entry.ts index 5a9efb535194..2da92e262709 100644 --- a/packages/wrangler/src/deployment-bundle/resolve-entry.ts +++ b/packages/wrangler/src/deployment-bundle/resolve-entry.ts @@ -16,11 +16,12 @@ export function resolveEntryWithMain( ): { absolutePath: string; relativePath: string; + projectRoot: string; } { - const directory = path.resolve(path.dirname(configPath ?? ".")); - const file = path.resolve(directory, main); - const relativePath = path.relative(directory, file) || "."; - return { absolutePath: file, relativePath }; + const projectRoot = path.resolve(path.dirname(configPath ?? ".")); + const file = path.resolve(projectRoot, main); + const relativePath = path.relative(projectRoot, file) || "."; + return { absolutePath: file, relativePath, projectRoot }; } export function resolveEntryWithEntryPoint( @@ -29,13 +30,14 @@ export function resolveEntryWithEntryPoint( ): { absolutePath: string; relativePath: string; + projectRoot: string; } { - const directory = path.resolve(path.dirname(configPath ?? ".")); + const projectRoot = path.resolve(path.dirname(configPath ?? ".")); const file = path.extname(entryPoint) ? 
path.resolve(entryPoint) : path.resolve(entryPoint, "index.js"); - const relativePath = path.relative(directory, file) || "."; - return { absolutePath: file, relativePath }; + const relativePath = path.relative(projectRoot, file) || "."; + return { absolutePath: file, relativePath, projectRoot }; } export function resolveEntryWithAssets(): { diff --git a/packages/wrangler/src/dev/use-esbuild.ts b/packages/wrangler/src/dev/use-esbuild.ts index c93018ee3062..0e2bdad78008 100644 --- a/packages/wrangler/src/dev/use-esbuild.ts +++ b/packages/wrangler/src/dev/use-esbuild.ts @@ -193,7 +193,7 @@ export function runBuild( // Check whether we need to watch a Python requirements.txt file. const watchPythonRequirements = getBundleType(entry.format, entry.file) === "python" - ? path.resolve(entry.directory, "requirements.txt") + ? path.resolve(entry.projectRoot, "requirements.txt") : undefined; if (watchPythonRequirements) { diff --git a/packages/wrangler/src/pages/functions/buildPlugin.ts b/packages/wrangler/src/pages/functions/buildPlugin.ts index 04bef665e0d6..2ac907f29ae0 100644 --- a/packages/wrangler/src/pages/functions/buildPlugin.ts +++ b/packages/wrangler/src/pages/functions/buildPlugin.ts @@ -29,7 +29,7 @@ export function buildPluginFromFunctions({ }: Options) { const entry: Entry = { file: resolve(getBasePath(), "templates/pages-template-plugin.ts"), - directory: functionsDirectory, + projectRoot: functionsDirectory, format: "modules", moduleRoot: functionsDirectory, exports: [], diff --git a/packages/wrangler/src/pages/functions/buildWorker.ts b/packages/wrangler/src/pages/functions/buildWorker.ts index 0320ba09ead9..95c9e4cc4b5b 100644 --- a/packages/wrangler/src/pages/functions/buildWorker.ts +++ b/packages/wrangler/src/pages/functions/buildWorker.ts @@ -55,7 +55,7 @@ export function buildWorkerFromFunctions({ }: Options) { const entry: Entry = { file: resolve(getBasePath(), "templates/pages-template-worker.ts"), - directory: functionsDirectory, + projectRoot: functionsDirectory, format: "modules", moduleRoot: functionsDirectory, exports: [], @@ -151,7 +151,7 @@ export function buildRawWorker({ }: RawOptions) { const entry: Entry = { file: workerScriptPath, - directory: resolve(directory), + projectRoot: resolve(directory), format: "modules", moduleRoot: resolve(directory), exports: [], @@ -240,7 +240,7 @@ export async function produceWorkerBundleForWorkerJSDirectory({ const additionalModules = await findAdditionalModules( { file: entrypoint, - directory: resolve(workerJSDirectory), + projectRoot: resolve(workerJSDirectory), format: "modules", moduleRoot: resolve(workerJSDirectory), exports: [], diff --git a/packages/wrangler/src/versions/index.ts b/packages/wrangler/src/versions/index.ts index cec8c4793333..fb3009d22e30 100644 --- a/packages/wrangler/src/versions/index.ts +++ b/packages/wrangler/src/versions/index.ts @@ -267,7 +267,7 @@ async function versionsUploadHandler( await verifyWorkerMatchesCITag( accountId, name, - path.relative(entry.directory, config.configPath ?? "wrangler.toml") + path.relative(entry.projectRoot, config.configPath ?? 
"wrangler.toml") ); } From 563439bd02c450921b28d721d36be5a70897690d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lu=C3=ADs=20Duarte?= Date: Wed, 20 Nov 2024 19:33:16 +0000 Subject: [PATCH 8/9] Fix multiple workflows and add proper engine persistance (#7286) * fix: allow multiple workflow definitions * feat: make engine persist logs and status over multiple runs * chore: add persistance test and remove redundant env types * chore: change changeset and remove redundant test options --- .changeset/cyan-geese-smell.md | 6 + fixtures/workflow-multiple/package.json | 17 +++ fixtures/workflow-multiple/src/index.ts | 90 +++++++++++++ .../workflow-multiple/tests/index.test.ts | 127 ++++++++++++++++++ .../workflow-multiple/tests/tsconfig.json | 7 + fixtures/workflow-multiple/tsconfig.json | 13 ++ fixtures/workflow-multiple/vitest.config.mts | 9 ++ fixtures/workflow-multiple/wrangler.toml | 14 ++ fixtures/workflow/tests/index.test.ts | 13 +- fixtures/workflow/worker-configuration.d.ts | 5 - .../miniflare/src/plugins/workflows/index.ts | 20 ++- .../test/plugins/workflows/index.spec.ts | 72 ++++++++++ packages/workflows-shared/src/engine.ts | 69 +++++++--- pnpm-lock.yaml | 12 ++ 14 files changed, 436 insertions(+), 38 deletions(-) create mode 100644 .changeset/cyan-geese-smell.md create mode 100644 fixtures/workflow-multiple/package.json create mode 100644 fixtures/workflow-multiple/src/index.ts create mode 100644 fixtures/workflow-multiple/tests/index.test.ts create mode 100644 fixtures/workflow-multiple/tests/tsconfig.json create mode 100644 fixtures/workflow-multiple/tsconfig.json create mode 100644 fixtures/workflow-multiple/vitest.config.mts create mode 100644 fixtures/workflow-multiple/wrangler.toml delete mode 100644 fixtures/workflow/worker-configuration.d.ts create mode 100644 packages/miniflare/test/plugins/workflows/index.spec.ts diff --git a/.changeset/cyan-geese-smell.md b/.changeset/cyan-geese-smell.md new file mode 100644 index 000000000000..1d3b0fbdbab8 --- /dev/null +++ b/.changeset/cyan-geese-smell.md @@ -0,0 +1,6 @@ +--- +"@cloudflare/workflows-shared": minor +"miniflare": minor +--- + +Add proper engine persistance in .wrangler and fix multiple workflows in miniflare diff --git a/fixtures/workflow-multiple/package.json b/fixtures/workflow-multiple/package.json new file mode 100644 index 000000000000..ca7184f54cc4 --- /dev/null +++ b/fixtures/workflow-multiple/package.json @@ -0,0 +1,17 @@ +{ + "name": "my-workflow-multiple", + "private": true, + "scripts": { + "deploy": "wrangler deploy", + "start": "wrangler dev", + "test:ci": "vitest" + }, + "devDependencies": { + "@cloudflare/workers-types": "^4.20241106.0", + "undici": "catalog:default", + "wrangler": "workspace:*" + }, + "volta": { + "extends": "../../package.json" + } +} diff --git a/fixtures/workflow-multiple/src/index.ts b/fixtures/workflow-multiple/src/index.ts new file mode 100644 index 000000000000..3498d07e2f24 --- /dev/null +++ b/fixtures/workflow-multiple/src/index.ts @@ -0,0 +1,90 @@ +import { + WorkerEntrypoint, + WorkflowEntrypoint, + WorkflowEvent, + WorkflowStep, +} from "cloudflare:workers"; + +type Params = { + name: string; +}; + +export class Demo extends WorkflowEntrypoint<{}, Params> { + async run(event: WorkflowEvent, step: WorkflowStep) { + const { timestamp, payload } = event; + + await step.sleep("Wait", "1 second"); + + const result = await step.do("First step", async function () { + return { + output: "First step result", + }; + }); + + await step.sleep("Wait", "1 second"); + + const result2 = await 
step.do("Second step", async function () { + return { + output: "workflow1", + }; + }); + + return [result, result2, timestamp, payload, "workflow1"]; + } +} + +export class Demo2 extends WorkflowEntrypoint<{}, Params> { + async run(event: WorkflowEvent, step: WorkflowStep) { + const { timestamp, payload } = event; + + await step.sleep("Wait", "1 second"); + + const result = await step.do("First step", async function () { + return { + output: "First step result", + }; + }); + + await step.sleep("Wait", "1 second"); + + const result2 = await step.do("Second step", async function () { + return { + output: "workflow2", + }; + }); + + return [result, result2, timestamp, payload, "workflow2"]; + } +} + +type Env = { + WORKFLOW: Workflow; + WORKFLOW2: Workflow; +}; + +export default class extends WorkerEntrypoint { + async fetch(req: Request) { + const url = new URL(req.url); + const id = url.searchParams.get("id"); + const workflowName = url.searchParams.get("workflowName"); + + if (url.pathname === "/favicon.ico") { + return new Response(null, { status: 404 }); + } + let workflowToUse = + workflowName == "2" ? this.env.WORKFLOW2 : this.env.WORKFLOW; + + let handle: WorkflowInstance; + if (url.pathname === "/create") { + if (id === null) { + handle = await workflowToUse.create(); + } else { + handle = await workflowToUse.create({ id }); + } + } else { + handle = await workflowToUse.get(id); + } + + return Response.json({ status: await handle.status(), id: handle.id }); + } +} diff --git a/fixtures/workflow-multiple/tests/index.test.ts b/fixtures/workflow-multiple/tests/index.test.ts new file mode 100644 index 000000000000..29268cd0ea29 --- /dev/null +++ b/fixtures/workflow-multiple/tests/index.test.ts @@ -0,0 +1,127 @@ +import { rm } from "fs/promises"; +import { resolve } from "path"; +import { fetch } from "undici"; +import { afterAll, beforeAll, describe, it, vi } from "vitest"; +import { runWranglerDev } from "../../shared/src/run-wrangler-long-lived"; + +describe("Workflows", () => { + let ip: string, + port: number, + stop: (() => Promise) | undefined, + getOutput: () => string; + + beforeAll(async () => { + // delete previous run contents because of persistence + await rm(resolve(__dirname, "..") + "/.wrangler", { + force: true, + recursive: true, + }); + ({ ip, port, stop, getOutput } = await runWranglerDev( + resolve(__dirname, ".."), + ["--port=0", "--inspector-port=0"] + )); + }); + + afterAll(async () => { + await stop?.(); + }); + + async function fetchJson(url: string) { + const response = await fetch(url, { + headers: { + "MF-Disable-Pretty-Error": "1", + }, + }); + const text = await response.text(); + + try { + return JSON.parse(text); + } catch (err) { + throw new Error(`Couldn't parse JSON:\n\n${text}`); + } + } + + it("creates two instances with same id in two different workflows", async ({ + expect, + }) => { + const createResult = { + id: "test", + status: { + status: "running", + output: [], + }, + }; + + await Promise.all([ + expect( + fetchJson(`http://${ip}:${port}/create?workflowName=1&id=test`) + ).resolves.toStrictEqual(createResult), + expect( + fetchJson(`http://${ip}:${port}/create?workflowName=2&id=test`) + ).resolves.toStrictEqual(createResult), + ]); + + const firstResult = { + id: "test", + status: { + status: "running", + output: [{ output: "First step result" }], + }, + }; + await Promise.all([ + vi.waitFor( + async () => { + await expect( + fetchJson(`http://${ip}:${port}/status?workflowName=1&id=test`) + ).resolves.toStrictEqual(firstResult); + }, + { 
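The fixture's fetch handler above multiplexes the two workflow bindings behind a `workflowName` query parameter. A rough client-side sketch of that flow; the dev-server URL and the poll interval are placeholders:

```ts
const base = "http://127.0.0.1:8787"; // wherever `wrangler dev` is listening

// Create an instance on one of the two workflows, then poll its status until it completes.
async function createAndAwait(workflowName: "1" | "2", id: string) {
	await fetch(`${base}/create?workflowName=${workflowName}&id=${id}`);

	for (;;) {
		const res = await fetch(`${base}/status?workflowName=${workflowName}&id=${id}`);
		const body = (await res.json()) as {
			id: string;
			status: { status: string; output: unknown[] };
		};
		if (body.status.status === "complete") {
			return body;
		}
		await new Promise((resolve) => setTimeout(resolve, 250));
	}
}

// createAndAwait("1", "test") and createAndAwait("2", "test") can share the same instance
// id because each workflow now gets its own engine namespace.
```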
timeout: 5000 } + ), + vi.waitFor( + async () => { + await expect( + fetchJson(`http://${ip}:${port}/status?workflowName=2&id=test`) + ).resolves.toStrictEqual(firstResult); + }, + { timeout: 5000 } + ), + ]); + + await Promise.all([ + await vi.waitFor( + async () => { + await expect( + fetchJson(`http://${ip}:${port}/status?workflowName=1&id=test`) + ).resolves.toStrictEqual({ + id: "test", + status: { + status: "complete", + output: [ + { output: "First step result" }, + { output: "workflow1" }, + ], + }, + }); + }, + { timeout: 5000 } + ), + await vi.waitFor( + async () => { + await expect( + fetchJson(`http://${ip}:${port}/status?workflowName=2&id=test`) + ).resolves.toStrictEqual({ + id: "test", + status: { + status: "complete", + output: [ + { output: "First step result" }, + { output: "workflow2" }, + ], + }, + }); + }, + { timeout: 5000 } + ), + ]); + }); +}); diff --git a/fixtures/workflow-multiple/tests/tsconfig.json b/fixtures/workflow-multiple/tests/tsconfig.json new file mode 100644 index 000000000000..d2ce7f144694 --- /dev/null +++ b/fixtures/workflow-multiple/tests/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "@cloudflare/workers-tsconfig/tsconfig.json", + "compilerOptions": { + "types": ["node"] + }, + "include": ["**/*.ts", "../../../node-types.d.ts"] +} diff --git a/fixtures/workflow-multiple/tsconfig.json b/fixtures/workflow-multiple/tsconfig.json new file mode 100644 index 000000000000..856398634a5e --- /dev/null +++ b/fixtures/workflow-multiple/tsconfig.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "CommonJS", + "lib": ["ES2020"], + "types": ["@cloudflare/workers-types"], + "moduleResolution": "node", + "noEmit": true, + "skipLibCheck": true + }, + "include": ["**/*.ts"], + "exclude": ["tests"] +} diff --git a/fixtures/workflow-multiple/vitest.config.mts b/fixtures/workflow-multiple/vitest.config.mts new file mode 100644 index 000000000000..846cddc41995 --- /dev/null +++ b/fixtures/workflow-multiple/vitest.config.mts @@ -0,0 +1,9 @@ +import { defineProject, mergeConfig } from "vitest/config"; +import configShared from "../../vitest.shared"; + +export default mergeConfig( + configShared, + defineProject({ + test: {}, + }) +); diff --git a/fixtures/workflow-multiple/wrangler.toml b/fixtures/workflow-multiple/wrangler.toml new file mode 100644 index 000000000000..c984cab94231 --- /dev/null +++ b/fixtures/workflow-multiple/wrangler.toml @@ -0,0 +1,14 @@ +#:schema node_modules/wrangler/config-schema.json +name = "my-workflow-demo" +main = "src/index.ts" +compatibility_date = "2024-10-22" + +[[workflows]] +binding = "WORKFLOW" +name = "my-workflow" +class_name = "Demo" + +[[workflows]] +binding = "WORKFLOW2" +name = "my-workflow-2" +class_name = "Demo2" \ No newline at end of file diff --git a/fixtures/workflow/tests/index.test.ts b/fixtures/workflow/tests/index.test.ts index ce5a11e4983e..592feeef7c0d 100644 --- a/fixtures/workflow/tests/index.test.ts +++ b/fixtures/workflow/tests/index.test.ts @@ -1,3 +1,4 @@ +import { rm } from "fs/promises"; import { resolve } from "path"; import { fetch } from "undici"; import { afterAll, beforeAll, describe, it, vi } from "vitest"; @@ -10,14 +11,14 @@ describe("Workflows", () => { getOutput: () => string; beforeAll(async () => { + // delete previous run contents because of persistence + await rm(resolve(__dirname, "..") + "/.wrangler", { + force: true, + recursive: true, + }); ({ ip, port, stop, getOutput } = await runWranglerDev( resolve(__dirname, ".."), - [ - "--port=0", - "--inspector-port=0", - 
"--upstream-protocol=https", - "--host=prod.example.org", - ] + ["--port=0", "--inspector-port=0"] )); }); diff --git a/fixtures/workflow/worker-configuration.d.ts b/fixtures/workflow/worker-configuration.d.ts deleted file mode 100644 index ad79d683e0fb..000000000000 --- a/fixtures/workflow/worker-configuration.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -// Generated by Wrangler by running `wrangler types` - -interface Env { - WORKFLOW: Workflow; -} diff --git a/packages/miniflare/src/plugins/workflows/index.ts b/packages/miniflare/src/plugins/workflows/index.ts index edd08d2e0be1..4d4a53a10225 100644 --- a/packages/miniflare/src/plugins/workflows/index.ts +++ b/packages/miniflare/src/plugins/workflows/index.ts @@ -62,15 +62,20 @@ export const WORKFLOWS_PLUGIN: Plugin< sharedOptions.workflowsPersist ); await fs.mkdir(persistPath, { recursive: true }); - const storageService: Service = { - name: WORKFLOWS_STORAGE_SERVICE_NAME, + // each workflow should get its own storage service + const storageServices: Service[] = Object.entries( + options.workflows ?? {} + ).map(([_, workflow]) => ({ + name: `${WORKFLOWS_STORAGE_SERVICE_NAME}-${workflow.name}`, disk: { path: persistPath, writable: true }, - }; + })); // this creates one miniflare service per workflow that the user's script has. we should dedupe engine definition later const services = Object.entries(options.workflows ?? {}).map( ([_bindingName, workflow]) => { - const uniqueKey = `miniflare-workflows`; + // NOTE(lduarte): the engine unique namespace key must be unique per workflow definition + // otherwise workerd will crash because there's two equal DO namespaces + const uniqueKey = `miniflare-workflows-${workflow.name}`; const workflowsBinding: Service = { name: `${WORKFLOWS_PLUGIN_NAME}:${workflow.name}`, @@ -90,8 +95,9 @@ export const WORKFLOWS_PLUGIN: Plugin< preventEviction: true, }, ], - // this might conflict between workflows - durableObjectStorage: { localDisk: WORKFLOWS_STORAGE_SERVICE_NAME }, + durableObjectStorage: { + localDisk: `${WORKFLOWS_STORAGE_SERVICE_NAME}-${workflow.name}`, + }, bindings: [ { name: "ENGINE", @@ -116,7 +122,7 @@ export const WORKFLOWS_PLUGIN: Plugin< return []; } - return [storageService, ...services]; + return [...storageServices, ...services]; }, getPersistPath({ workflowsPersist }, tmpPath) { diff --git a/packages/miniflare/test/plugins/workflows/index.spec.ts b/packages/miniflare/test/plugins/workflows/index.spec.ts new file mode 100644 index 000000000000..ac19a418e868 --- /dev/null +++ b/packages/miniflare/test/plugins/workflows/index.spec.ts @@ -0,0 +1,72 @@ +import * as fs from "fs/promises"; +import { scheduler } from "timers/promises"; +import test from "ava"; +import { Miniflare, MiniflareOptions } from "miniflare"; +import { useTmp } from "../../test-shared"; + +const WORKFLOW_SCRIPT = () => ` +import { WorkflowEntrypoint } from "cloudflare:workers"; +export class MyWorkflow extends WorkflowEntrypoint { + async run(event, step) { + await step.do("i'm a step?", async () => "yes you are") + + return "I'm a output string" + } + } + export default { + async fetch(request, env, ctx) { + const workflow = await env.MY_WORKFLOW.create({id: "i'm an id"}) + + return new Response(JSON.stringify(await workflow.status())) + }, + };`; + +test("persists Workflow data on file-system between runs", async (t) => { + const tmp = await useTmp(t); + const opts: MiniflareOptions = { + name: "worker", + compatibilityDate: "2024-11-20", + modules: true, + script: WORKFLOW_SCRIPT(), + workflows: { + MY_WORKFLOW: { + 
className: "MyWorkflow", + name: "MY_WORKFLOW", + }, + }, + workflowsPersist: tmp, + }; + let mf = new Miniflare(opts); + t.teardown(() => mf.dispose()); + + let res = await mf.dispatchFetch("http://localhost"); + t.is(await res.text(), '{"status":"running","output":[]}'); + + // there's no waitUntil in ava haha + const begin = performance.now(); + let success = false; + let test = ""; + while (performance.now() - begin < 2000) { + const res = await mf.dispatchFetch("http://localhost"); + console.log(test); + test = await res.text(); + if (test === '{"status":"complete","output":["yes you are"]}') { + success = true; + break; + } + await scheduler.wait(50); + } + t.true(success, `Condition was not met in 2000ms - output is ${test}`); + + // check if files were commited + const names = await fs.readdir(tmp); + t.deepEqual(names, ["miniflare-workflows-MY_WORKFLOW"]); + + // restart miniflare + await mf.dispose(); + mf = new Miniflare(opts); + + // state should be persisted now + res = await mf.dispatchFetch("http://localhost"); + t.is(await res.text(), '{"status":"complete","output":["yes you are"]}'); +}); diff --git a/packages/workflows-shared/src/engine.ts b/packages/workflows-shared/src/engine.ts index a72f3f5fe68e..4a80f6cdbdc1 100644 --- a/packages/workflows-shared/src/engine.ts +++ b/packages/workflows-shared/src/engine.ts @@ -53,9 +53,10 @@ export type DatabaseInstance = { ended_on: string | null; }; +const ENGINE_STATUS_KEY = "ENGINE_STATUS"; + export class Engine extends DurableObject { logs: Array = []; - status: InstanceStatus = InstanceStatus.Queued; isRunning: boolean = false; accountId: number | undefined; @@ -66,21 +67,32 @@ export class Engine extends DurableObject { constructor(state: DurableObjectState, env: Env) { super(state, env); - void this.ctx.blockConcurrencyWhile(async () => { this.ctx.storage.transactionSync(() => { - this.ctx.storage.sql.exec(` - CREATE TABLE IF NOT EXISTS priority_queue ( - id INTEGER PRIMARY KEY NOT NULL, - created_on TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - target_timestamp INTEGER NOT NULL, - action INTEGER NOT NULL, -- should only be 0 or 1 (1 for added, 0 for deleted), - entryType INTEGER NOT NULL, - hash TEXT NOT NULL, - CHECK (action IN (0, 1)), -- guararentee that action can only be 0 or 1 - UNIQUE (action, entryType, hash) - ) - `); + try { + this.ctx.storage.sql.exec(` + CREATE TABLE IF NOT EXISTS priority_queue ( + id INTEGER PRIMARY KEY NOT NULL, + created_on TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + target_timestamp INTEGER NOT NULL, + action INTEGER NOT NULL, -- should only be 0 or 1 (1 for added, 0 for deleted), + entryType INTEGER NOT NULL, + hash TEXT NOT NULL, + CHECK (action IN (0, 1)), -- guararentee that action can only be 0 or 1 + UNIQUE (action, entryType, hash) + ); + CREATE TABLE IF NOT EXISTS states ( + id INTEGER PRIMARY KEY NOT NULL, + groupKey TEXT, + target TEXT, + metadata TEXT, + event INTEGER NOT NULL + ) + `); + } catch (e) { + console.error(e); + throw e; + } }); }); @@ -96,12 +108,13 @@ export class Engine extends DurableObject { target: string | null = null, metadata: Record ) { - this.logs.push({ + this.ctx.storage.sql.exec( + "INSERT INTO states (event, groupKey, target, metadata) VALUES (?, ?, ?, ?)", event, group, target, - metadata, - }); + JSON.stringify(metadata) + ); } readLogsFromStep(_cacheKey: string): RawInstanceLog[] { @@ -109,9 +122,19 @@ export class Engine extends DurableObject { } readLogs(): InstanceLogsResponse { + const logs = [ + ...this.ctx.storage.sql.exec>( + "SELECT event, groupKey, 
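The `Engine` changes in this hunk move instance state out of memory: step logs are written to the new `states` SQLite table, and (as the rest of the diff shows) the instance status is stored under `ENGINE_STATUS_KEY` in Durable Object storage, so both survive engine restarts. A condensed sketch of that model, using simplified stand-ins for the Durable Object storage types:

```ts
// Simplified stand-ins for the Durable Object storage surfaces the real class uses.
type SqlStorage = { exec<T>(query: string, ...bindings: unknown[]): Iterable<T> };
type KvStorage = {
	get<T>(key: string): Promise<T | undefined>;
	put<T>(key: string, value: T): Promise<void>;
};

const ENGINE_STATUS_KEY = "ENGINE_STATUS";

class EngineStateSketch {
	constructor(private sql: SqlStorage, private kv: KvStorage) {}

	// Logs are appended as rows; metadata is serialized to JSON text.
	writeLog(event: number, group: string | null, target: string | null, metadata: object) {
		this.sql.exec(
			"INSERT INTO states (event, groupKey, target, metadata) VALUES (?, ?, ?, ?)",
			event,
			group,
			target,
			JSON.stringify(metadata)
		);
	}

	// Rows come back with the SQL-safe column name `groupKey` and JSON-encoded metadata,
	// so they are mapped into the `{ group, metadata }` shape callers expect.
	readLogs() {
		const rows = [
			...this.sql.exec<Record<string, unknown>>(
				"SELECT event, groupKey, target, metadata FROM states"
			),
		];
		return rows.map((row) => ({
			...row,
			group: row.groupKey,
			metadata: JSON.parse(String(row.metadata)),
		}));
	}

	// A missing status key means the engine has never run, which is reported as queued.
	async getStatus(): Promise<string> {
		return (await this.kv.get<string>(ENGINE_STATUS_KEY)) ?? "queued";
	}

	async setStatus(status: string): Promise<void> {
		await this.kv.put(ENGINE_STATUS_KEY, status);
	}
}
```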
target, metadata FROM states" + ), + ]; + return { // @ts-expect-error TODO: Fix this - logs: this.logs, + logs: logs.map((log) => ({ + ...log, + metadata: JSON.parse(log.metadata as string), + group: log.groupKey, + })), }; } @@ -119,7 +142,13 @@ export class Engine extends DurableObject { _accountId: number, _instanceId: string ): Promise { - return this.status; + const res = await this.ctx.storage.get(ENGINE_STATUS_KEY); + + // NOTE(lduarte): if status don't exist, means that engine is running for the first time, so we assume queued + if (res === undefined) { + return InstanceStatus.Queued; + } + return res; } async setStatus( @@ -127,7 +156,7 @@ export class Engine extends DurableObject { instanceId: string, status: InstanceStatus ): Promise { - this.status = status; + await this.ctx.storage.put(ENGINE_STATUS_KEY, status); } async abort(_reason: string) { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 73acc46b8003..86cb00ea9c0f 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -741,6 +741,18 @@ importers: specifier: workspace:* version: link:../../packages/wrangler + fixtures/workflow-multiple: + devDependencies: + '@cloudflare/workers-types': + specifier: ^4.20241106.0 + version: 4.20241106.0 + undici: + specifier: catalog:default + version: 5.28.4 + wrangler: + specifier: workspace:* + version: link:../../packages/wrangler + packages/chrome-devtools-patches: devDependencies: patch-package: From a30c8056621f44063082a81d06f10e723844059f Mon Sep 17 00:00:00 2001 From: Thomas Ankcorn Date: Thu, 21 Nov 2024 14:17:21 +0000 Subject: [PATCH 9/9] Fix/workers observability logs (#7314) * fix observability.logs.enabled optional logic * run lint fix * add changeset * Update .changeset/lemon-buckets-know.md Co-authored-by: Edmund Hung --------- Co-authored-by: Edmund Hung --- .changeset/lemon-buckets-know.md | 5 ++++ .../src/__tests__/configuration.test.ts | 23 +++++++++++++++++++ packages/wrangler/src/config/validation.ts | 2 +- 3 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 .changeset/lemon-buckets-know.md diff --git a/.changeset/lemon-buckets-know.md b/.changeset/lemon-buckets-know.md new file mode 100644 index 000000000000..8ab72f884b24 --- /dev/null +++ b/.changeset/lemon-buckets-know.md @@ -0,0 +1,5 @@ +--- +"wrangler": patch +--- + +Fix observability.logs.enabled validation diff --git a/packages/wrangler/src/__tests__/configuration.test.ts b/packages/wrangler/src/__tests__/configuration.test.ts index 35ece1dcf03a..0a56c2f9fda0 100644 --- a/packages/wrangler/src/__tests__/configuration.test.ts +++ b/packages/wrangler/src/__tests__/configuration.test.ts @@ -5537,6 +5537,29 @@ describe("normalizeAndValidateConfig()", () => { expect(diagnostics.hasErrors()).toBe(false); }); + + it("should not error on mixed observability config", () => { + const { diagnostics } = normalizeAndValidateConfig( + { + observability: { + enabled: true, + logs: { + invocation_logs: false, + }, + }, + } as unknown as RawConfig, + undefined, + { env: undefined } + ); + + expect(diagnostics.hasWarnings()).toBe(false); + expect(diagnostics.renderWarnings()).toMatchInlineSnapshot(` + "Processing wrangler configuration: + " + `); + + expect(diagnostics.hasErrors()).toBe(false); + }); it("should error on a sampling rate out of range", () => { const { diagnostics } = normalizeAndValidateConfig( { diff --git a/packages/wrangler/src/config/validation.ts b/packages/wrangler/src/config/validation.ts index 9bda9ae9fdae..1315019110df 100644 --- a/packages/wrangler/src/config/validation.ts +++ 
b/packages/wrangler/src/config/validation.ts @@ -3384,7 +3384,7 @@ const validateObservability: ValidatorFn = (diagnostics, field, value) => { */ if (typeof val.logs === "object") { isValid = - validateRequiredProperty( + validateOptionalProperty( diagnostics, field, "logs.enabled",
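The one-word change above is the substance of the final commit: `observability.logs.enabled` is now validated as an optional property, so a config that sets other `logs` fields without `enabled` no longer errors. Illustrative-only helpers showing the difference; Wrangler's real validators take a diagnostics collector and more arguments:

```ts
type RawLogsConfig = Record<string, unknown>;

// A "required" check fails whenever the key is absent.
function validateRequiredBoolean(obj: RawLogsConfig, key: string): boolean {
	return typeof obj[key] === "boolean";
}

// An "optional" check only validates the type when the key is present.
function validateOptionalBoolean(obj: RawLogsConfig, key: string): boolean {
	return obj[key] === undefined || typeof obj[key] === "boolean";
}

const logs = { invocation_logs: false }; // the shape exercised by the new test case above

console.log(validateRequiredBoolean(logs, "enabled")); // false: this is what used to raise an error
console.log(validateOptionalBoolean(logs, "enabled")); // true: a missing `enabled` is now accepted
```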