diff --git a/.changeset/nervous-pugs-lay.md b/.changeset/nervous-pugs-lay.md
new file mode 100644
index 000000000000..135907fd6241
--- /dev/null
+++ b/.changeset/nervous-pugs-lay.md
@@ -0,0 +1,9 @@
+---
+"wrangler": patch
+---
+
+feat: inline text-like files into the worker bundle
+
+We were adding text-like modules (i.e. `.txt`, `.html` and `.pem` files) as separate modules in the Worker definition, but this only really 'works' with the ES module Worker format. This commit changes that to inline the text-like files into the Worker bundle directly.
+
+We still have to do something similar with `.wasm` modules, but that requires a different fix, and we'll do so in a subsequent commit.
diff --git a/.changeset/red-pillows-grow.md b/.changeset/red-pillows-grow.md
new file mode 100644
index 000000000000..dd3b84e60078
--- /dev/null
+++ b/.changeset/red-pillows-grow.md
@@ -0,0 +1,7 @@
+---
+"wrangler": patch
+---
+
+feat: Sites support for local mode `wrangler dev`
+
+This adds support for Workers Sites in local mode when running `wrangler dev`. Further, it fixes a bug where we were sending the `__STATIC_CONTENT_MANIFEST` definition as a separate module even with service worker format, and a bug where we weren't uploading the namespace binding when other kv namespaces weren't present.
diff --git a/package-lock.json b/package-lock.json
index 769bd29cfa86..8585abf29de8 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -753,6 +753,14 @@
"node": ">=4"
}
},
+ "node_modules/@cloudflare/kv-asset-handler": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.2.0.tgz",
+ "integrity": "sha512-MVbXLbTcAotOPUj0pAMhVtJ+3/kFkwJqc5qNOleOZTv6QkZZABDMS21dSrSlVswEHwrpWC03e4fWytjqKvuE2A==",
+ "dependencies": {
+ "mime": "^3.0.0"
+ }
+ },
"node_modules/@cnakazawa/watch": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@cnakazawa/watch/-/watch-1.0.4.tgz",
@@ -5578,6 +5586,10 @@
"node": ">= 0.6"
}
},
+ "node_modules/example-sites-app": {
+ "resolved": "packages/example-sites-app",
+ "link": true
+ },
"node_modules/example-worker-app": {
"resolved": "packages/example-worker-app",
"link": true
@@ -11033,7 +11045,6 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
"integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==",
- "dev": true,
"bin": {
"mime": "cli.js"
},
@@ -15142,6 +15153,12 @@
"version": "1.0.0",
"license": "ISC"
},
+ "packages/example-sites-app": {
+ "version": "1.0.0",
+ "dependencies": {
+ "@cloudflare/kv-asset-handler": "~0.2.0"
+ }
+ },
"packages/example-worker-app": {
"version": "1.0.0",
"license": "ISC"
@@ -16036,6 +16053,14 @@
}
}
},
+ "@cloudflare/kv-asset-handler": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.2.0.tgz",
+ "integrity": "sha512-MVbXLbTcAotOPUj0pAMhVtJ+3/kFkwJqc5qNOleOZTv6QkZZABDMS21dSrSlVswEHwrpWC03e4fWytjqKvuE2A==",
+ "requires": {
+ "mime": "^3.0.0"
+ }
+ },
"@cnakazawa/watch": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@cnakazawa/watch/-/watch-1.0.4.tgz",
@@ -19603,6 +19628,12 @@
"integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=",
"dev": true
},
+ "example-sites-app": {
+ "version": "file:packages/example-sites-app",
+ "requires": {
+ "@cloudflare/kv-asset-handler": "~0.2.0"
+ }
+ },
"example-worker-app": {
"version": "file:packages/example-worker-app"
},
@@ -23633,8 +23664,7 @@
"mime": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
- "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==",
- "dev": true
+ "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A=="
},
"mime-db": {
"version": "1.51.0",
diff --git a/packages/example-sites-app/package.json b/packages/example-sites-app/package.json
new file mode 100644
index 000000000000..1b635fcbeb2b
--- /dev/null
+++ b/packages/example-sites-app/package.json
@@ -0,0 +1,8 @@
+{
+ "name": "example-sites-app",
+ "private": true,
+ "version": "1.0.0",
+ "dependencies": {
+ "@cloudflare/kv-asset-handler": "~0.2.0"
+ }
+}
diff --git a/packages/example-sites-app/public/404.html b/packages/example-sites-app/public/404.html
new file mode 100644
index 000000000000..cc263a84c4a2
--- /dev/null
+++ b/packages/example-sites-app/public/404.html
@@ -0,0 +1,50 @@
+
+
+
+
+
+
+
+
+
+
+
404 Not Found
+
Oh dang! We couldn't find that page.
+
+
+
+
diff --git a/packages/example-sites-app/public/favicon.ico b/packages/example-sites-app/public/favicon.ico
new file mode 100644
index 000000000000..cc6c23bd37bb
Binary files /dev/null and b/packages/example-sites-app/public/favicon.ico differ
diff --git a/packages/example-sites-app/public/img/200-wrangler-ferris.gif b/packages/example-sites-app/public/img/200-wrangler-ferris.gif
new file mode 100644
index 000000000000..8853751fac7b
Binary files /dev/null and b/packages/example-sites-app/public/img/200-wrangler-ferris.gif differ
diff --git a/packages/example-sites-app/public/img/404-wrangler-ferris.gif b/packages/example-sites-app/public/img/404-wrangler-ferris.gif
new file mode 100644
index 000000000000..0ac1479fcf6f
Binary files /dev/null and b/packages/example-sites-app/public/img/404-wrangler-ferris.gif differ
diff --git a/packages/example-sites-app/public/index.html b/packages/example-sites-app/public/index.html
new file mode 100644
index 000000000000..bc39d712bc09
--- /dev/null
+++ b/packages/example-sites-app/public/index.html
@@ -0,0 +1,50 @@
+
+
+
+
+
+
+
+
+
+
+
200 Success
+
Hello World! Welcome to your Workers Site.
+
+
+
+
diff --git a/packages/example-sites-app/src/index.js b/packages/example-sites-app/src/index.js
new file mode 100644
index 000000000000..f1f4dd27a5ab
--- /dev/null
+++ b/packages/example-sites-app/src/index.js
@@ -0,0 +1,66 @@
+import {
+ getAssetFromKV,
+ mapRequestToAsset,
+} from "@cloudflare/kv-asset-handler";
+
+/**
+ * The DEBUG flag will do two things that help during development:
+ * 1. we will skip caching on the edge, which makes it easier to
+ * debug.
+ * 2. we will return an error message on exception in your Response rather
+ * than the default 404.html page.
+ */
+const DEBUG = false;
+
+addEventListener("fetch", (event) => {
+ event.respondWith(handleEvent(event));
+});
+
+async function handleEvent(event) {
+ let options = {};
+
+ /**
+ * You can add custom logic to how we fetch your assets
+ * by configuring the function `mapRequestToAsset`
+ */
+ // options.mapRequestToAsset = handlePrefix(/^\/docs/)
+
+ try {
+ if (DEBUG) {
+ // customize caching
+ options.cacheControl = {
+ bypassCache: true,
+ };
+ }
+
+ const page = await getAssetFromKV(event, options);
+
+ // allow headers to be altered
+ const response = new Response(page.body, page);
+
+ response.headers.set("X-XSS-Protection", "1; mode=block");
+ response.headers.set("X-Content-Type-Options", "nosniff");
+ response.headers.set("X-Frame-Options", "DENY");
+ response.headers.set("Referrer-Policy", "unsafe-url");
+ response.headers.set("Feature-Policy", "none");
+
+ return response;
+ } catch (e) {
+ // if an error is thrown try to serve the asset at 404.html
+ if (!DEBUG) {
+ try {
+ let notFoundResponse = await getAssetFromKV(event, {
+ mapRequestToAsset: (req) =>
+ new Request(`${new URL(req.url).origin}/404.html`, req),
+ });
+
+ return new Response(notFoundResponse.body, {
+ ...notFoundResponse,
+ status: 404,
+ });
+ } catch (e) {}
+ }
+
+ return new Response(e.message || e.toString(), { status: 500 });
+ }
+}
diff --git a/packages/example-sites-app/wrangler.toml b/packages/example-sites-app/wrangler.toml
new file mode 100644
index 000000000000..f294bc81852e
--- /dev/null
+++ b/packages/example-sites-app/wrangler.toml
@@ -0,0 +1,2 @@
+name = "example-sites-app"
+site = { bucket = "./public" }
diff --git a/packages/wrangler/src/__tests__/kv.test.ts b/packages/wrangler/src/__tests__/kv.test.ts
index b9ee7e5fafc5..2aab087e2792 100644
--- a/packages/wrangler/src/__tests__/kv.test.ts
+++ b/packages/wrangler/src/__tests__/kv.test.ts
@@ -4,11 +4,11 @@ import {
setMockResponse,
setMockRawResponse,
unsetAllMocks,
- createFetchResult,
} from "./mock-cfetch";
import { runWrangler } from "./run-wrangler";
import { runInTempDir } from "./run-in-tmp";
import { mockConsoleMethods } from "./mock-console";
+import { mockKeyListRequest } from "./mock-kv";
describe("wrangler", () => {
runInTempDir();
@@ -1222,42 +1222,3 @@ function writeWranglerConfig() {
"utf-8"
);
}
-
-export function mockKeyListRequest(
- expectedNamespaceId: string,
- expectedKeys: string[],
- keysPerRequest = 1000,
- blankCursorValue: "" | undefined | null = undefined
-) {
- const requests = { count: 0 };
- // See https://api.cloudflare.com/#workers-kv-namespace-list-a-namespace-s-keys
- const expectedKeyObjects = expectedKeys.map((name) => ({
- name,
- expiration: 123456789,
- metadata: {},
- }));
- setMockRawResponse(
- "/accounts/:accountId/storage/kv/namespaces/:namespaceId/keys",
- "GET",
- ([_url, accountId, namespaceId], _init, query) => {
- requests.count++;
- expect(accountId).toEqual("some-account-id");
- expect(namespaceId).toEqual(expectedNamespaceId);
- if (expectedKeyObjects.length <= keysPerRequest) {
- return createFetchResult(expectedKeyObjects);
- } else {
- const start = parseInt(query.get("cursor") ?? "0") || 0;
- const end = start + keysPerRequest;
- const cursor = end < expectedKeyObjects.length ? end : blankCursorValue;
- return createFetchResult(
- expectedKeyObjects.slice(start, end),
- true,
- [],
- [],
- { cursor }
- );
- }
- }
- );
- return requests;
-}
diff --git a/packages/wrangler/src/__tests__/logout.test.ts b/packages/wrangler/src/__tests__/logout.test.ts
index e58ff907778a..db12cd27a99d 100644
--- a/packages/wrangler/src/__tests__/logout.test.ts
+++ b/packages/wrangler/src/__tests__/logout.test.ts
@@ -6,7 +6,7 @@ import { runWrangler } from "./run-wrangler";
import { runInTempDir } from "./run-in-tmp";
import { initialise } from "../user";
import { mockConsoleMethods } from "./mock-console";
-import { writeUserConfig } from "./whoami.test";
+import { writeUserConfig } from "./mock-user";
const ORIGINAL_CF_API_TOKEN = process.env.CF_API_TOKEN;
const ORIGINAL_CF_ACCOUNT_ID = process.env.CF_ACCOUNT_ID;
diff --git a/packages/wrangler/src/__tests__/mock-kv.ts b/packages/wrangler/src/__tests__/mock-kv.ts
new file mode 100644
index 000000000000..8c11122495ec
--- /dev/null
+++ b/packages/wrangler/src/__tests__/mock-kv.ts
@@ -0,0 +1,40 @@
+import { createFetchResult, setMockRawResponse } from "./mock-cfetch";
+
+export function mockKeyListRequest(
+ expectedNamespaceId: string,
+ expectedKeys: string[],
+ keysPerRequest = 1000,
+ blankCursorValue: "" | undefined | null = undefined
+) {
+ const requests = { count: 0 };
+ // See https://api.cloudflare.com/#workers-kv-namespace-list-a-namespace-s-keys
+ const expectedKeyObjects = expectedKeys.map((name) => ({
+ name,
+ expiration: 123456789,
+ metadata: {},
+ }));
+ setMockRawResponse(
+ "/accounts/:accountId/storage/kv/namespaces/:namespaceId/keys",
+ "GET",
+ ([_url, accountId, namespaceId], _init, query) => {
+ requests.count++;
+ expect(accountId).toEqual("some-account-id");
+ expect(namespaceId).toEqual(expectedNamespaceId);
+ if (expectedKeyObjects.length <= keysPerRequest) {
+ return createFetchResult(expectedKeyObjects);
+ } else {
+ const start = parseInt(query.get("cursor") ?? "0") || 0;
+ const end = start + keysPerRequest;
+ const cursor = end < expectedKeyObjects.length ? end : blankCursorValue;
+ return createFetchResult(
+ expectedKeyObjects.slice(start, end),
+ true,
+ [],
+ [],
+ { cursor }
+ );
+ }
+ }
+ );
+ return requests;
+}
diff --git a/packages/wrangler/src/__tests__/mock-user.ts b/packages/wrangler/src/__tests__/mock-user.ts
new file mode 100644
index 000000000000..f5b305d4c971
--- /dev/null
+++ b/packages/wrangler/src/__tests__/mock-user.ts
@@ -0,0 +1,27 @@
+import path from "path";
+import os from "os";
+import { mkdirSync, writeFileSync } from "node:fs";
+
+export function writeUserConfig(
+ oauth_token?: string,
+ refresh_token?: string,
+ expiration_time?: string
+) {
+ const lines: string[] = [];
+ if (oauth_token) {
+ lines.push(`oauth_token = "${oauth_token}"`);
+ }
+ if (refresh_token) {
+ lines.push(`refresh_token = "${refresh_token}"`);
+ }
+ if (expiration_time) {
+ lines.push(`expiration_time = "${expiration_time}"`);
+ }
+ const configPath = path.join(os.homedir(), ".wrangler/config");
+ mkdirSync(configPath, { recursive: true });
+ writeFileSync(
+ path.join(configPath, "default.toml"),
+ lines.join("\n"),
+ "utf-8"
+ );
+}
diff --git a/packages/wrangler/src/__tests__/publish.test.ts b/packages/wrangler/src/__tests__/publish.test.ts
index 29aabb85a828..3ee4713a0181 100644
--- a/packages/wrangler/src/__tests__/publish.test.ts
+++ b/packages/wrangler/src/__tests__/publish.test.ts
@@ -1,13 +1,14 @@
import * as fs from "node:fs";
import * as path from "node:path";
import type { KVNamespaceInfo } from "../kv";
-import { mockKeyListRequest } from "./kv.test";
+import { mockKeyListRequest } from "./mock-kv";
import { setMockResponse, unsetAllMocks } from "./mock-cfetch";
import { runInTempDir } from "./run-in-tmp";
import { runWrangler } from "./run-wrangler";
import { mockConsoleMethods } from "./mock-console";
import type { Config } from "../config";
import * as TOML from "@iarna/toml";
+import type { WorkerMetadata } from "../api/form_data";
describe("publish", () => {
runInTempDir();
@@ -18,10 +19,30 @@ describe("publish", () => {
});
describe("entry-points", () => {
- it("should be able to use `index` with no extension as the entry-point", async () => {
+ it("should be able to use `index` with no extension as the entry-point (esm)", async () => {
writeWranglerToml();
- writeEsmWorkerSource();
- mockUploadWorkerRequest();
+ writeWorkerSource();
+ mockUploadWorkerRequest({ expectedType: "esm" });
+ mockSubDomainRequest();
+
+ await runWrangler("publish ./index");
+
+ expect(stripTimings(std.out)).toMatchInlineSnapshot(`
+ "Uploaded
+ test-name
+ (TIMINGS)
+ Deployed
+ test-name
+ (TIMINGS)
+
+ test-name.test-sub-domain.workers.dev"
+ `);
+ expect(std.err).toMatchInlineSnapshot(`""`);
+ });
+ it("should be able to use `index` with no extension as the entry-point (sw)", async () => {
+ writeWranglerToml();
+ writeWorkerSource({ type: "sw" });
+ mockUploadWorkerRequest({ expectedType: "sw" });
mockSubDomainRequest();
await runWrangler("publish ./index");
@@ -41,7 +62,7 @@ describe("publish", () => {
it("should be able to use the `build.upload.main` config as the entry-point for ESM sources", async () => {
writeWranglerToml({ build: { upload: { main: "./index.js" } } });
- writeEsmWorkerSource();
+ writeWorkerSource();
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -60,10 +81,10 @@ describe("publish", () => {
expect(std.err).toMatchInlineSnapshot(`""`);
});
- it("should be able to transpile TypeScript", async () => {
+ it("should be able to transpile TypeScript (esm)", async () => {
writeWranglerToml();
- writeEsmWorkerSource({ format: "ts" });
- mockUploadWorkerRequest({ expectedBody: "var foo = 100;" });
+ writeWorkerSource({ format: "ts" });
+ mockUploadWorkerRequest({ expectedEntry: "var foo = 100;" });
mockSubDomainRequest();
await runWrangler("publish index.ts");
@@ -80,10 +101,87 @@ describe("publish", () => {
expect(std.err).toMatchInlineSnapshot(`""`);
});
- it("should be able to transpile entry-points in sub-directories", async () => {
+ it("should be able to transpile TypeScript (sw)", async () => {
writeWranglerToml();
- writeEsmWorkerSource({ basePath: "./src" });
- mockUploadWorkerRequest({ expectedBody: "var foo = 100;" });
+ writeWorkerSource({ format: "ts", type: "sw" });
+ mockUploadWorkerRequest({
+ expectedEntry: "var foo = 100;",
+ expectedType: "sw",
+ });
+ mockSubDomainRequest();
+ await runWrangler("publish index.ts");
+
+ expect(stripTimings(std.out)).toMatchInlineSnapshot(`
+ "Uploaded
+ test-name
+ (TIMINGS)
+ Deployed
+ test-name
+ (TIMINGS)
+
+ test-name.test-sub-domain.workers.dev"
+ `);
+ expect(std.err).toMatchInlineSnapshot(`""`);
+ });
+
+ it("should inline referenced text modules into the worker", async () => {
+ writeWranglerToml();
+ fs.writeFileSync(
+ "./index.js",
+ `
+import txt from './textfile.txt';
+export default{
+ fetch(){
+ return new Response(txt);
+ }
+}
+`
+ );
+ fs.writeFileSync("./textfile.txt", "Hello, World!");
+ mockUploadWorkerRequest({ expectedEntry: "Hello, World!" });
+ mockSubDomainRequest();
+ await runWrangler("publish index.js");
+ expect(stripTimings(std.out)).toMatchInlineSnapshot(`
+ "Uploaded
+ test-name
+ (TIMINGS)
+ Deployed
+ test-name
+ (TIMINGS)
+
+ test-name.test-sub-domain.workers.dev"
+ `);
+ expect(std.err).toMatchInlineSnapshot(`""`);
+ });
+
+ it("should be able to transpile entry-points in sub-directories (esm)", async () => {
+ writeWranglerToml();
+ writeWorkerSource({ basePath: "./src" });
+ mockUploadWorkerRequest({ expectedEntry: "var foo = 100;" });
+ mockSubDomainRequest();
+
+ await runWrangler("publish ./src/index.js");
+
+ expect(stripTimings(std.out)).toMatchInlineSnapshot(`
+ "Uploaded
+ test-name
+ (TIMINGS)
+ Deployed
+ test-name
+ (TIMINGS)
+
+ test-name.test-sub-domain.workers.dev"
+ `);
+ expect(std.err).toMatchInlineSnapshot(`""`);
+ });
+
+ it("should be able to transpile entry-points in sub-directories (sw)", async () => {
+ writeWranglerToml();
+ writeWorkerSource({ basePath: "./src", type: "sw" });
+ mockUploadWorkerRequest({
+ expectedEntry: "var foo = 100;",
+ expectedType: "sw",
+ });
mockSubDomainRequest();
await runWrangler("publish ./src/index.js");
@@ -108,7 +206,7 @@ describe("publish", () => {
"entry-point": "./index.js",
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
mockUploadWorkerRequest();
mockSubDomainRequest();
let error: Error | undefined;
@@ -151,7 +249,7 @@ describe("publish", () => {
bucket: "assets",
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets(assets);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -185,7 +283,7 @@ describe("publish", () => {
it("should error if there is no entry-point specified", async () => {
writeWranglerToml();
- writeEsmWorkerSource();
+ writeWorkerSource();
mockUploadWorkerRequest();
mockSubDomainRequest();
let error: Error | undefined;
@@ -224,7 +322,7 @@ describe("publish", () => {
bucket: "assets",
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets(assets);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -250,6 +348,112 @@ describe("publish", () => {
expect(std.err).toMatchInlineSnapshot(`""`);
});
+ it("when using a service worker type, it should inline an asset manifest, and bind to a namespace", async () => {
+ const assets = [
+ { filePath: "assets/file-1.txt", content: "Content of file-1" },
+ { filePath: "assets/file-2.txt", content: "Content of file-2" },
+ ];
+ const kvNamespace = {
+ title: "__test-name_sites_assets",
+ id: "__test-name_sites_assets-id",
+ };
+ writeWranglerToml({
+ build: { upload: { main: "./index.js" } },
+ site: {
+ bucket: "assets",
+ },
+ });
+ writeWorkerSource({ type: "sw" });
+ writeAssets(assets);
+ mockUploadWorkerRequest({
+ expectedType: "sw",
+ expectedEntry: `const __STATIC_CONTENT_MANIFEST = {"file-1.txt":"assets/file-1.2ca234f380.txt","file-2.txt":"assets/file-2.5938485188.txt"};`,
+ expectedBindings: [
+ {
+ name: "__STATIC_CONTENT",
+ namespace_id: "__test-name_sites_assets-id",
+ type: "kv_namespace",
+ },
+ ],
+ });
+ mockSubDomainRequest();
+ mockListKVNamespacesRequest(kvNamespace);
+ mockKeyListRequest(kvNamespace.id, []);
+ mockUploadAssetsToKVRequest(kvNamespace.id, assets);
+
+ await runWrangler("publish");
+
+ expect(stripTimings(std.out)).toMatchInlineSnapshot(`
+ "reading assets/file-1.txt...
+ uploading as assets/file-1.2ca234f380.txt...
+ reading assets/file-2.txt...
+ uploading as assets/file-2.5938485188.txt...
+ Uploaded
+ test-name
+ (TIMINGS)
+ Deployed
+ test-name
+ (TIMINGS)
+
+ test-name.test-sub-domain.workers.dev"
+ `);
+ expect(stripTimings(std.err)).toMatchInlineSnapshot(`""`);
+ });
+
+ it("when using a module worker type, it should add an asset manifest module, and bind to a namespace", async () => {
+ const assets = [
+ { filePath: "assets/file-1.txt", content: "Content of file-1" },
+ { filePath: "assets/file-2.txt", content: "Content of file-2" },
+ ];
+ const kvNamespace = {
+ title: "__test-name_sites_assets",
+ id: "__test-name_sites_assets-id",
+ };
+ writeWranglerToml({
+ build: { upload: { main: "./index.js" } },
+ site: {
+ bucket: "assets",
+ },
+ });
+ writeWorkerSource({ type: "esm" });
+ writeAssets(assets);
+ mockUploadWorkerRequest({
+ expectedBindings: [
+ {
+ name: "__STATIC_CONTENT",
+ namespace_id: "__test-name_sites_assets-id",
+ type: "kv_namespace",
+ },
+ ],
+ expectedModules: {
+ __STATIC_CONTENT_MANIFEST:
+ '{"file-1.txt":"assets/file-1.2ca234f380.txt","file-2.txt":"assets/file-2.5938485188.txt"}',
+ },
+ });
+ mockSubDomainRequest();
+ mockListKVNamespacesRequest(kvNamespace);
+ mockKeyListRequest(kvNamespace.id, []);
+ mockUploadAssetsToKVRequest(kvNamespace.id, assets);
+
+ await runWrangler("publish");
+
+ expect(stripTimings(std.out)).toMatchInlineSnapshot(`
+ "reading assets/file-1.txt...
+ uploading as assets/file-1.2ca234f380.txt...
+ reading assets/file-2.txt...
+ uploading as assets/file-2.5938485188.txt...
+ Uploaded
+ test-name
+ (TIMINGS)
+ Deployed
+ test-name
+ (TIMINGS)
+
+ test-name.test-sub-domain.workers.dev"
+ `);
+ expect(stripTimings(std.err)).toMatchInlineSnapshot(`""`);
+ });
+
it("should only upload files that are not already in the KV namespace", async () => {
const assets = [
{ filePath: "assets/file-1.txt", content: "Content of file-1" },
@@ -265,7 +469,7 @@ describe("publish", () => {
bucket: "assets",
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets(assets);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -311,7 +515,7 @@ describe("publish", () => {
bucket: "assets",
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets(assets);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -354,7 +558,7 @@ describe("publish", () => {
bucket: "assets",
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets(assets);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -398,7 +602,7 @@ describe("publish", () => {
include: ["file-1.txt"],
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets(assets);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -442,7 +646,7 @@ describe("publish", () => {
exclude: ["file-2.txt"],
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets(assets);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -486,7 +690,7 @@ describe("publish", () => {
include: ["file-2.txt"],
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets(assets);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -530,7 +734,7 @@ describe("publish", () => {
exclude: ["assets/file-1.txt"],
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets(assets);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -579,7 +783,7 @@ describe("publish", () => {
bucket: "assets",
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets(assets);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -629,7 +833,7 @@ describe("publish", () => {
bucket: "assets",
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets(assets);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -677,7 +881,7 @@ describe("publish", () => {
exclude: ["assets/file-1.txt"],
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets(assets);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -720,7 +924,7 @@ describe("publish", () => {
bucket: "assets",
},
});
- writeEsmWorkerSource();
+ writeWorkerSource();
writeAssets([longFilePathAsset]);
mockUploadWorkerRequest();
mockSubDomainRequest();
@@ -758,7 +962,7 @@ describe("publish", () => {
});
mockUploadWorkerRequest({
- expectedBody: "return new Response(123)",
+ expectedEntry: "return new Response(123)",
});
mockSubDomainRequest();
@@ -799,23 +1003,31 @@ function writeWranglerToml(config: Omit = {}) {
}
/** Write a mock Worker script to disk. */
-function writeEsmWorkerSource({
+function writeWorkerSource({
basePath = ".",
format = "js",
-}: { basePath?: string; format?: "js" | "ts" | "jsx" | "tsx" | "mjs" } = {}) {
+ type = "esm",
+}: {
+ basePath?: string;
+ format?: "js" | "ts" | "jsx" | "tsx" | "mjs";
+ type?: "esm" | "sw";
+} = {}) {
if (basePath !== ".") {
fs.mkdirSync(basePath, { recursive: true });
}
fs.writeFileSync(
`${basePath}/index.${format}`,
- [
- `import { foo } from "./another";`,
- `export default {`,
- ` async fetch(request) {`,
- ` return new Response('Hello' + foo);`,
- ` },`,
- `};`,
- ].join("\n")
+ type === "esm"
+ ? `import { foo } from "./another";
+ export default {
+ async fetch(request) {
+ return new Response('Hello' + foo);
+ },
+ };`
+ : `import { foo } from "./another";
+ addEventListener('fetch', event => {
+ event.respondWith(new Response('Hello' + foo));
+ })`
);
fs.writeFileSync(`${basePath}/another.${format}`, `export const foo = 100;`);
}
@@ -831,10 +1043,16 @@ function writeAssets(assets: { filePath: string; content: string }[]) {
/** Create a mock handler for the request to upload a worker script. */
function mockUploadWorkerRequest({
available_on_subdomain = true,
- expectedBody,
+ expectedEntry,
+ expectedType = "esm",
+ expectedBindings,
+ expectedModules = {},
}: {
available_on_subdomain?: boolean;
- expectedBody?: string;
+ expectedEntry?: string;
+ expectedType?: "esm" | "sw";
+ expectedBindings?: unknown;
+ expectedModules?: Record;
} = {}) {
setMockResponse(
"/accounts/:accountId/workers/scripts/:scriptName",
@@ -843,11 +1061,28 @@ function mockUploadWorkerRequest({
expect(accountId).toEqual("some-account-id");
expect(scriptName).toEqual("test-name");
expect(queryParams.get("available_on_subdomains")).toEqual("true");
- if (expectedBody !== undefined) {
- expect(
- await ((body as FormData).get("index.js") as File).text()
- ).toMatch(expectedBody);
+ const formBody = body as FormData;
+ if (expectedEntry !== undefined) {
+ expect(await (formBody.get("index.js") as File).text()).toMatch(
+ expectedEntry
+ );
}
+
+ const metadata = JSON.parse(
+ formBody.get("metadata") as string
+ ) as WorkerMetadata;
+ if (expectedType === "esm") {
+ expect(metadata.main_module).toEqual("index.js");
+ } else {
+ expect(metadata.body_part).toEqual("index.js");
+ }
+ if (expectedBindings !== undefined) {
+ expect(metadata.bindings).toEqual(expectedBindings);
+ }
+ for (const [name, content] of Object.entries(expectedModules)) {
+ expect(await (formBody.get(name) as File).text()).toMatch(content);
+ }
+
return { available_on_subdomain };
}
);
diff --git a/packages/wrangler/src/__tests__/whoami.test.tsx b/packages/wrangler/src/__tests__/whoami.test.tsx
index 1d54356c9e3f..e6c774aa0b4e 100644
--- a/packages/wrangler/src/__tests__/whoami.test.tsx
+++ b/packages/wrangler/src/__tests__/whoami.test.tsx
@@ -1,13 +1,11 @@
import React from "react";
-import os from "node:os";
-import path from "node:path";
import { render } from "ink-testing-library";
import type { UserInfo } from "../whoami";
import { getUserInfo, WhoAmI } from "../whoami";
import { runInTempDir } from "./run-in-tmp";
-import { mkdirSync, writeFileSync } from "node:fs";
import { setMockResponse } from "./mock-cfetch";
import { initialise } from "../user";
+import { writeUserConfig } from "./mock-user";
const ORIGINAL_CF_API_TOKEN = process.env.CF_API_TOKEN;
const ORIGINAL_CF_ACCOUNT_ID = process.env.CF_ACCOUNT_ID;
@@ -101,27 +99,3 @@ describe("WhoAmI component", () => {
expect(lastFrame()).toMatch(/Account Three .+ account-3/);
});
});
-
-export function writeUserConfig(
- oauth_token?: string,
- refresh_token?: string,
- expiration_time?: string
-) {
- const lines: string[] = [];
- if (oauth_token) {
- lines.push(`oauth_token = "${oauth_token}"`);
- }
- if (refresh_token) {
- lines.push(`refresh_token = "${refresh_token}"`);
- }
- if (expiration_time) {
- lines.push(`expiration_time = "${expiration_time}"`);
- }
- const configPath = path.join(os.homedir(), ".wrangler/config");
- mkdirSync(configPath, { recursive: true });
- writeFileSync(
- path.join(configPath, "default.toml"),
- lines.join("\n"),
- "utf-8"
- );
-}
diff --git a/packages/wrangler/src/api/form_data.ts b/packages/wrangler/src/api/form_data.ts
index 51eebb282db6..2efd13f2aafa 100644
--- a/packages/wrangler/src/api/form_data.ts
+++ b/packages/wrangler/src/api/form_data.ts
@@ -30,7 +30,11 @@ function toModule(module: CfModule, entryType: CfModuleType): Blob {
return new Blob([content], { type });
}
-interface WorkerMetadata {
+export interface WorkerMetadata {
+ /** The name of the entry point module. Only exists when the worker is in the ES module format */
+ main_module?: string;
+ /** The name of the entry point module. Only exists when the worker is in the Service Worker format */
+ body_part?: string;
compatibility_date?: string;
compatibility_flags?: string[];
usage_model?: "bundled" | "unbound";
diff --git a/packages/wrangler/src/dev.tsx b/packages/wrangler/src/dev.tsx
index 299e071a7bfb..a9f03d938164 100644
--- a/packages/wrangler/src/dev.tsx
+++ b/packages/wrangler/src/dev.tsx
@@ -100,7 +100,7 @@ function Dev(props: DevProps): JSX.Element {
bundle={bundle}
format={props.format}
bindings={props.bindings}
- site={props.assetPaths}
+ assetPaths={props.assetPaths}
public={props.public}
port={port}
enableLocalPersistence={props.enableLocalPersistence}
@@ -183,8 +183,8 @@ function Local(props: {
bundle: EsbuildBundle | undefined;
format: CfScriptFormat;
bindings: CfWorkerInit["bindings"];
+ assetPaths: undefined | AssetPaths;
public: undefined | string;
- site: undefined | AssetPaths;
port: number;
enableLocalPersistence: boolean;
}) {
@@ -193,6 +193,8 @@ function Local(props: {
bundle: props.bundle,
format: props.format,
bindings: props.bindings,
+ assetPaths: props.assetPaths,
+ public: props.public,
port: props.port,
enableLocalPersistence: props.enableLocalPersistence,
});
@@ -205,11 +207,13 @@ function useLocalWorker(props: {
bundle: EsbuildBundle | undefined;
format: CfScriptFormat;
bindings: CfWorkerInit["bindings"];
+ assetPaths: undefined | AssetPaths;
+ public: undefined | string;
port: number;
enableLocalPersistence: boolean;
}) {
// TODO: pass vars via command line
- const { bundle, format, bindings, port } = props;
+ const { bundle, format, bindings, port, assetPaths } = props;
const local = useRef>();
const removeSignalExitListener = useRef<() => void>();
const [inspectorUrl, setInspectorUrl] = useState();
@@ -228,6 +232,11 @@ function useLocalWorker(props: {
}
await waitForPortToBeAvailable(port, { retryPeriod: 200, timeout: 2000 });
+ if (props.public) {
+ throw new Error(
+ '⎔ A "public" folder is not yet supported in local mode.'
+ );
+ }
console.log("⎔ Starting a local server...");
// TODO: just use execa for this
@@ -245,6 +254,20 @@ function useLocalWorker(props: {
path.join(__dirname, "../miniflare-config-stubs/package.empty.json"),
"--port",
port.toString(),
+ ...(assetPaths
+ ? [
+ "--site",
+ assetPaths.baseDirectory,
+ ...assetPaths.includePatterns.map((pattern) => [
+ "--site-include",
+ pattern,
+ ]),
+ ...assetPaths.excludePatterns.map((pattern) => [
+ "--site-exclude",
+ pattern,
+ ]),
+ ].flatMap((x) => x)
+ : []),
...(props.enableLocalPersistence
? ["--kv-persist", "--cache-persist", "--do-persist"]
: []),
@@ -327,6 +350,8 @@ function useLocalWorker(props: {
bindings.kv_namespaces,
bindings.vars,
props.enableLocalPersistence,
+ assetPaths,
+ props.public,
]);
return { inspectorUrl };
}
@@ -354,7 +379,7 @@ function useTmpDir(): string | undefined {
return () => {
dir.cleanup().catch(() => {
// extremely unlikely,
- // but it's 2021 after all
+ // but it's 2022 after all
console.error("failed to cleanup tmp dir");
});
};
@@ -464,6 +489,9 @@ function useEsbuild(props: {
sourcemap: true,
loader: {
".js": "jsx",
+ ".html": "text",
+ ".pem": "text",
+ ".txt": "text",
},
...(jsxFactory && { jsxFactory }),
...(jsxFragment && { jsxFragment }),
@@ -588,16 +616,23 @@ function useWorker(props: {
true
); // TODO: cancellable?
- const content = await readFile(bundle.path, "utf-8");
+ const workerType = format || bundle.type === "esm" ? "esm" : "commonjs";
+ let content = await readFile(bundle.path, "utf-8");
+ if (workerType !== "esm" && assets.manifest) {
+ content = `const __STATIC_CONTENT_MANIFEST = ${JSON.stringify(
+ assets.manifest
+ )};\n${content}`;
+ }
+
const init: CfWorkerInit = {
name,
main: {
name: path.basename(bundle.path),
- type: format || bundle.type === "esm" ? "esm" : "commonjs",
+ type: workerType,
content,
},
modules: modules.concat(
- assets.manifest
+ assets.manifest && workerType === "esm"
? {
name: "__STATIC_CONTENT_MANIFEST",
content: JSON.stringify(assets.manifest),
diff --git a/packages/wrangler/src/module-collection.ts b/packages/wrangler/src/module-collection.ts
index 1ebfa8e3073b..fbce76e299e1 100644
--- a/packages/wrangler/src/module-collection.ts
+++ b/packages/wrangler/src/module-collection.ts
@@ -30,11 +30,10 @@ export default function makeModuleCollector(): {
build.onResolve(
// filter on "known" file types,
// we can expand this list later
- { filter: /.*\.(pem|txt|html|wasm)$/ },
+ { filter: /.*\.(wasm)$/ },
async (args: esbuild.OnResolveArgs) => {
// take the file and massage it to a
// transportable/manageable format
- const fileExt = path.extname(args.path);
const filePath = path.join(args.resolveDir, args.path);
const fileContent = await readFile(filePath);
const fileHash = crypto
@@ -47,7 +46,7 @@ export default function makeModuleCollector(): {
modules.push({
name: fileName,
content: fileContent,
- type: fileExt === ".wasm" ? "compiled-wasm" : "text",
+ type: "compiled-wasm",
});
return {
diff --git a/packages/wrangler/src/publish.ts b/packages/wrangler/src/publish.ts
index bddcc9203cc5..119bc1c0bfb3 100644
--- a/packages/wrangler/src/publish.ts
+++ b/packages/wrangler/src/publish.ts
@@ -148,6 +148,9 @@ export default async function publish(props: Props): Promise {
conditions: ["worker", "browser"],
loader: {
".js": "jsx",
+ ".html": "text",
+ ".pem": "text",
+ ".txt": "text",
},
plugins: [moduleCollector.plugin],
...(jsxFactory && { jsxFactory }),
@@ -192,7 +195,7 @@ export default async function publish(props: Props): Promise {
return;
}
- const content = await readFile(resolvedEntryPointPath, { encoding: "utf-8" });
+ let content = await readFile(resolvedEntryPointPath, { encoding: "utf-8" });
await destination.cleanup();
// if config.migrations
@@ -242,7 +245,7 @@ export default async function publish(props: Props): Promise {
);
const bindings: CfWorkerInit["bindings"] = {
- kv_namespaces: envRootObj.kv_namespaces?.concat(
+ kv_namespaces: (envRootObj.kv_namespaces || []).concat(
assets.namespace
? { binding: "__STATIC_CONTENT", id: assets.namespace }
: []
@@ -252,17 +255,24 @@ export default async function publish(props: Props): Promise {
services: envRootObj.experimental_services,
};
+ const workerType = bundle.type === "esm" ? "esm" : "commonjs";
+ if (workerType !== "esm" && assets.manifest) {
+ content = `const __STATIC_CONTENT_MANIFEST = ${JSON.stringify(
+ assets.manifest
+ )};\n${content}`;
+ }
+
const worker: CfWorkerInit = {
name: scriptName,
main: {
name: path.basename(resolvedEntryPointPath),
content: content,
- type: bundle.type === "esm" ? "esm" : "commonjs",
+ type: workerType,
},
bindings,
...(migrations && { migrations }),
modules: moduleCollector.modules.concat(
- assets.manifest
+ assets.manifest && workerType === "esm"
? {
name: "__STATIC_CONTENT_MANIFEST",
content: JSON.stringify(assets.manifest),