From 2453577c96704ca1d6934582796199a409d7b770 Mon Sep 17 00:00:00 2001 From: Pete Bacon Darwin Date: Wed, 19 Jan 2022 13:04:42 +0000 Subject: [PATCH] feat: add support for include and exclude when publishing site assets --- .changeset/fair-buses-fix.md | 5 + package-lock.json | 2 + packages/wrangler/package.json | 1 + packages/wrangler/src/__tests__/dev.test.tsx | 4 +- packages/wrangler/src/__tests__/kv.test.ts | 75 +-- .../wrangler/src/__tests__/publish.test.ts | 485 ++++++++++++++++-- packages/wrangler/src/dev.tsx | 24 +- packages/wrangler/src/index.tsx | 44 +- packages/wrangler/src/publish.ts | 39 +- packages/wrangler/src/sites.tsx | 85 ++- 10 files changed, 635 insertions(+), 129 deletions(-) create mode 100644 .changeset/fair-buses-fix.md diff --git a/.changeset/fair-buses-fix.md b/.changeset/fair-buses-fix.md new file mode 100644 index 000000000000..6e7d1f1c4d71 --- /dev/null +++ b/.changeset/fair-buses-fix.md @@ -0,0 +1,5 @@ +--- +"wrangler": patch +--- + +feat: add support for include and exclude when publishing site assets diff --git a/package-lock.json b/package-lock.json index a19b7b5c62f0..c2ca21fe54f4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15364,6 +15364,7 @@ "finalhandler": "^1.1.2", "find-up": "^6.2.0", "formdata-node": "^4.3.1", + "ignore": "^5.2.0", "ink": "^3.2.0", "ink-select-input": "^4.2.1", "ink-table": "^3.0.0", @@ -26862,6 +26863,7 @@ "find-up": "^6.2.0", "formdata-node": "^4.3.1", "fsevents": "~2.3.2", + "ignore": "*", "ink": "^3.2.0", "ink-select-input": "^4.2.1", "ink-table": "^3.0.0", diff --git a/packages/wrangler/package.json b/packages/wrangler/package.json index 1cf4001a9c18..95237cba2bc1 100644 --- a/packages/wrangler/package.json +++ b/packages/wrangler/package.json @@ -65,6 +65,7 @@ "finalhandler": "^1.1.2", "find-up": "^6.2.0", "formdata-node": "^4.3.1", + "ignore": "^5.2.0", "ink": "^3.2.0", "ink-select-input": "^4.2.1", "ink-table": "^3.0.0", diff --git a/packages/wrangler/src/__tests__/dev.test.tsx 
b/packages/wrangler/src/__tests__/dev.test.tsx index 8b43c80c2bf1..07451e94c198 100644 --- a/packages/wrangler/src/__tests__/dev.test.tsx +++ b/packages/wrangler/src/__tests__/dev.test.tsx @@ -39,7 +39,7 @@ function renderDev({ jsxFragment, bindings = {}, public: publicDir, - site, + assetPaths, compatibilityDate, compatibilityFlags, usageModel, @@ -56,7 +56,7 @@ function renderDev({ jsxFactory={jsxFactory} jsxFragment={jsxFragment} accountId={accountId} - site={site} + assetPaths={assetPaths} public={publicDir} compatibilityDate={compatibilityDate} compatibilityFlags={compatibilityFlags} diff --git a/packages/wrangler/src/__tests__/kv.test.ts b/packages/wrangler/src/__tests__/kv.test.ts index b52866204a92..fc2a0d177aca 100644 --- a/packages/wrangler/src/__tests__/kv.test.ts +++ b/packages/wrangler/src/__tests__/kv.test.ts @@ -681,43 +681,6 @@ describe("wrangler", () => { }); describe("list", () => { - function mockKeyListRequest( - expectedNamespaceId: string, - expectedKeys: string[], - keysPerRequest = 1000 - ) { - const requests = { count: 0 }; - // See https://api.cloudflare.com/#workers-kv-namespace-list-a-namespace-s-keys - const expectedKeyObjects = expectedKeys.map((name) => ({ - name, - expiration: 123456789, - metadata: {}, - })); - setMockRawResponse( - "/accounts/:accountId/storage/kv/namespaces/:namespaceId/keys", - ([_url, accountId, namespaceId], _init, query) => { - requests.count++; - expect(accountId).toEqual("some-account-id"); - expect(namespaceId).toEqual(expectedNamespaceId); - if (expectedKeyObjects.length <= keysPerRequest) { - return createFetchResult(expectedKeyObjects); - } else { - const start = parseInt(query.get("cursor") ?? "0") || 0; - const end = start + keysPerRequest; - const cursor = end < expectedKeyObjects.length ? 
end : undefined; - return createFetchResult( - expectedKeyObjects.slice(start, end), - true, - [], - [], - { cursor } - ); - } - } - ); - return requests; - } - it("should list the keys of a namespace specified by namespace-id", async () => { const keys = ["key-1", "key-2", "key-3"]; mockKeyListRequest("some-namespace-id", keys); @@ -1196,3 +1159,41 @@ function writeWranglerConfig() { "utf-8" ); } + +export function mockKeyListRequest( + expectedNamespaceId: string, + expectedKeys: string[], + keysPerRequest = 1000 +) { + const requests = { count: 0 }; + // See https://api.cloudflare.com/#workers-kv-namespace-list-a-namespace-s-keys + const expectedKeyObjects = expectedKeys.map((name) => ({ + name, + expiration: 123456789, + metadata: {}, + })); + setMockRawResponse( + "/accounts/:accountId/storage/kv/namespaces/:namespaceId/keys", + "GET", + ([_url, accountId, namespaceId], _init, query) => { + requests.count++; + expect(accountId).toEqual("some-account-id"); + expect(namespaceId).toEqual(expectedNamespaceId); + if (expectedKeyObjects.length <= keysPerRequest) { + return createFetchResult(expectedKeyObjects); + } else { + const start = parseInt(query.get("cursor") ?? "0") || 0; + const end = start + keysPerRequest; + const cursor = end < expectedKeyObjects.length ? 
end : undefined; + return createFetchResult( + expectedKeyObjects.slice(start, end), + true, + [], + [], + { cursor } + ); + } + } + ); + return requests; +} diff --git a/packages/wrangler/src/__tests__/publish.test.ts b/packages/wrangler/src/__tests__/publish.test.ts index b006264e48c1..cc3cc8a9cd35 100644 --- a/packages/wrangler/src/__tests__/publish.test.ts +++ b/packages/wrangler/src/__tests__/publish.test.ts @@ -1,64 +1,396 @@ import * as fs from "node:fs"; -import { setMockResponse } from "./mock-cfetch"; +import * as path from "node:path"; +import type { KVNamespaceInfo } from "../kv"; +import { mockKeyListRequest } from "./kv.test"; +import { setMockResponse, unsetAllMocks } from "./mock-cfetch"; import { runInTempDir } from "./run-in-tmp"; import { runWrangler } from "./run-wrangler"; describe("publish", () => { runInTempDir(); - it("should be able to use `index` with no extension as the entry-point", async () => { - writeWranglerToml(); - writeEsmWorkerSource(); - mockUploadWorkerRequest(); - mockSubDomainRequest(); - - const { stdout, stderr, error } = await runWrangler("publish ./index"); - - expect(stdout).toMatchInlineSnapshot(` - "Uploaded - test-name - (0.00 sec) - Deployed - test-name - (0.00 sec) - - test-name.test-sub-domain.workers.dev" - `); - expect(stderr).toMatchInlineSnapshot(`""`); - expect(error).toMatchInlineSnapshot(`undefined`); + afterEach(() => { + unsetAllMocks(); }); - it("should be able to use the `build.upload.main` config as the entry-point for ESM sources", async () => { - writeWranglerToml("./index.js"); - writeEsmWorkerSource(); - mockUploadWorkerRequest(); - mockSubDomainRequest(); - - const { stdout, stderr, error } = await runWrangler("publish"); - - expect(stdout).toMatchInlineSnapshot(` - "Uploaded - test-name - (0.00 sec) - Deployed - test-name - (0.00 sec) - - test-name.test-sub-domain.workers.dev" - `); - expect(stderr).toMatchInlineSnapshot(`""`); - expect(error).toMatchInlineSnapshot(`undefined`); + 
describe("entry-points", () => { + it("should be able to use `index` with no extension as the entry-point", async () => { + writeWranglerToml(); + writeEsmWorkerSource(); + mockUploadWorkerRequest(); + mockSubDomainRequest(); + + const { stdout, stderr, error } = await runWrangler("publish ./index"); + + expect(stripTimings(stdout)).toMatchInlineSnapshot(` + "Uploaded + test-name + (TIMINGS) + Deployed + test-name + (TIMINGS) + + test-name.test-sub-domain.workers.dev" + `); + expect(stderr).toMatchInlineSnapshot(`""`); + expect(error).toMatchInlineSnapshot(`undefined`); + }); + + it("should be able to use the `build.upload.main` config as the entry-point for ESM sources", async () => { + writeWranglerToml("./index.js"); + writeEsmWorkerSource(); + mockUploadWorkerRequest(); + mockSubDomainRequest(); + + const { stdout, stderr, error } = await runWrangler("publish"); + + expect(stripTimings(stdout)).toMatchInlineSnapshot(` + "Uploaded + test-name + (TIMINGS) + Deployed + test-name + (TIMINGS) + + test-name.test-sub-domain.workers.dev" + `); + expect(stderr).toMatchInlineSnapshot(`""`); + expect(error).toMatchInlineSnapshot(`undefined`); + }); + }); + + describe("asset upload", () => { + it("should upload all the files in the directory specified by `config.site.bucket`", async () => { + const assets = [ + { filePath: "file-1.txt", content: "Content of file-1" }, + { filePath: "file-2.txt", content: "Content of file-2" }, + ]; + const kvNamespace = { + title: "__test-name_sites_assets", + id: "__test-name_sites_assets-id", + }; + writeWranglerToml("./index.js", "./assets"); + writeEsmWorkerSource(); + writeAssets("./assets", assets); + mockUploadWorkerRequest(); + mockSubDomainRequest(); + mockListKVNamespacesRequest(kvNamespace); + mockKeyListRequest(kvNamespace.id, []); + mockUploadAssetsToKVRequest(kvNamespace.id, assets); + const { stdout, stderr, error } = await runWrangler("publish"); + + expect(stripTimings(stdout)).toMatchInlineSnapshot(` + "uploading 
assets/file-1.txt... + uploading assets/file-2.txt... + Uploaded + test-name + (TIMINGS) + Deployed + test-name + (TIMINGS) + + test-name.test-sub-domain.workers.dev" + `); + expect(stderr).toMatchInlineSnapshot(`""`); + expect(error).toMatchInlineSnapshot(`undefined`); + }); + + it("should only upload files that are not already in the KV namespace", async () => { + const assets = [ + { filePath: "file-1.txt", content: "Content of file-1" }, + { filePath: "file-2.txt", content: "Content of file-2" }, + ]; + const kvNamespace = { + title: "__test-name_sites_assets", + id: "__test-name_sites_assets-id", + }; + writeWranglerToml("./index.js", "./assets"); + writeEsmWorkerSource(); + writeAssets("./assets", assets); + mockUploadWorkerRequest(); + mockSubDomainRequest(); + mockListKVNamespacesRequest(kvNamespace); + // Put file-1 in the KV namespace + mockKeyListRequest(kvNamespace.id, [ + "file-1.c514defbb343fb04ad55183d8336ae0a5988616b.txt", + ]); + // Check we do not upload file-1 + mockUploadAssetsToKVRequest( + kvNamespace.id, + assets.filter((a) => a.filePath !== "file-1.txt") + ); + const { stdout, stderr, error } = await runWrangler("publish"); + + expect(stripTimings(stdout)).toMatchInlineSnapshot(` + "uploading assets/file-2.txt... 
+ Uploaded + test-name + (TIMINGS) + Deployed + test-name + (TIMINGS) + + test-name.test-sub-domain.workers.dev" + `); + expect(stderr).toMatchInlineSnapshot(`""`); + expect(error).toMatchInlineSnapshot(`undefined`); + }); + + it("should only upload files that match the `site-include` arg", async () => { + const assets = [ + { filePath: "file-1.txt", content: "Content of file-1" }, + { filePath: "file-2.txt", content: "Content of file-2" }, + ]; + const kvNamespace = { + title: "__test-name_sites_assets", + id: "__test-name_sites_assets-id", + }; + writeWranglerToml("./index.js", "./assets"); + writeEsmWorkerSource(); + writeAssets("./assets", assets); + mockUploadWorkerRequest(); + mockSubDomainRequest(); + mockListKVNamespacesRequest(kvNamespace); + mockKeyListRequest(kvNamespace.id, []); + // Check we only upload file-1 + mockUploadAssetsToKVRequest( + kvNamespace.id, + assets.filter((a) => a.filePath === "file-1.txt") + ); + const { stdout, stderr, error } = await runWrangler( + "publish --site-include file-1.txt" + ); + + expect(stripTimings(stdout)).toMatchInlineSnapshot(` + "uploading assets/file-1.txt... 
+ Uploaded + test-name + (TIMINGS) + Deployed + test-name + (TIMINGS) + + test-name.test-sub-domain.workers.dev" + `); + expect(stderr).toMatchInlineSnapshot(`""`); + expect(error).toMatchInlineSnapshot(`undefined`); + }); + + it("should not upload files that match the `site-exclude` arg", async () => { + const assets = [ + { filePath: "file-1.txt", content: "Content of file-1" }, + { filePath: "file-2.txt", content: "Content of file-2" }, + ]; + const kvNamespace = { + title: "__test-name_sites_assets", + id: "__test-name_sites_assets-id", + }; + writeWranglerToml("./index.js", "./assets"); + writeEsmWorkerSource(); + writeAssets("./assets", assets); + mockUploadWorkerRequest(); + mockSubDomainRequest(); + mockListKVNamespacesRequest(kvNamespace); + mockKeyListRequest(kvNamespace.id, []); + // Check we only upload file-1 + mockUploadAssetsToKVRequest( + kvNamespace.id, + assets.filter((a) => a.filePath === "file-1.txt") + ); + const { stdout, stderr, error } = await runWrangler( + "publish --site-exclude file-2.txt" + ); + + expect(stripTimings(stdout)).toMatchInlineSnapshot(` + "uploading assets/file-1.txt... 
+ Uploaded + test-name + (TIMINGS) + Deployed + test-name + (TIMINGS) + + test-name.test-sub-domain.workers.dev" + `); + expect(stderr).toMatchInlineSnapshot(`""`); + expect(error).toMatchInlineSnapshot(`undefined`); + }); + + it("should only upload files that match the `site.include` config", async () => { + const assets = [ + { filePath: "file-1.txt", content: "Content of file-1" }, + { filePath: "file-2.txt", content: "Content of file-2" }, + ]; + const kvNamespace = { + title: "__test-name_sites_assets", + id: "__test-name_sites_assets-id", + }; + writeWranglerToml("./index.js", "./assets", ["file-1.txt"]); + writeEsmWorkerSource(); + writeAssets("./assets", assets); + mockUploadWorkerRequest(); + mockSubDomainRequest(); + mockListKVNamespacesRequest(kvNamespace); + mockKeyListRequest(kvNamespace.id, []); + // Check we only upload file-1 + mockUploadAssetsToKVRequest( + kvNamespace.id, + assets.filter((a) => a.filePath === "file-1.txt") + ); + const { stdout, stderr, error } = await runWrangler("publish"); + + expect(stripTimings(stdout)).toMatchInlineSnapshot(` + "uploading assets/file-1.txt... 
+ Uploaded + test-name + (TIMINGS) + Deployed + test-name + (TIMINGS) + + test-name.test-sub-domain.workers.dev" + `); + expect(stderr).toMatchInlineSnapshot(`""`); + expect(error).toMatchInlineSnapshot(`undefined`); + }); + + it("should not upload files that match the `site.exclude` config", async () => { + const assets = [ + { filePath: "file-1.txt", content: "Content of file-1" }, + { filePath: "file-2.txt", content: "Content of file-2" }, + ]; + const kvNamespace = { + title: "__test-name_sites_assets", + id: "__test-name_sites_assets-id", + }; + writeWranglerToml("./index.js", "./assets", undefined, ["file-2.txt"]); + writeEsmWorkerSource(); + writeAssets("./assets", assets); + mockUploadWorkerRequest(); + mockSubDomainRequest(); + mockListKVNamespacesRequest(kvNamespace); + mockKeyListRequest(kvNamespace.id, []); + // Check we only upload file-1 + mockUploadAssetsToKVRequest( + kvNamespace.id, + assets.filter((a) => a.filePath === "file-1.txt") + ); + const { stdout, stderr, error } = await runWrangler("publish"); + + expect(stripTimings(stdout)).toMatchInlineSnapshot(` + "uploading assets/file-1.txt... 
+ Uploaded + test-name + (TIMINGS) + Deployed + test-name + (TIMINGS) + + test-name.test-sub-domain.workers.dev" + `); + expect(stderr).toMatchInlineSnapshot(`""`); + expect(error).toMatchInlineSnapshot(`undefined`); + }); + + it("should use `site-include` arg over `site.include` config", async () => { + const assets = [ + { filePath: "file-1.txt", content: "Content of file-1" }, + { filePath: "file-2.txt", content: "Content of file-2" }, + ]; + const kvNamespace = { + title: "__test-name_sites_assets", + id: "__test-name_sites_assets-id", + }; + writeWranglerToml("./index.js", "./assets", ["file-2.txt"]); + writeEsmWorkerSource(); + writeAssets("./assets", assets); + mockUploadWorkerRequest(); + mockSubDomainRequest(); + mockListKVNamespacesRequest(kvNamespace); + mockKeyListRequest(kvNamespace.id, []); + // Check we only upload file-1 + mockUploadAssetsToKVRequest( + kvNamespace.id, + assets.filter((a) => a.filePath === "file-1.txt") + ); + const { stdout, stderr, error } = await runWrangler( + "publish --site-include file-1.txt" + ); + + expect(stripTimings(stdout)).toMatchInlineSnapshot(` + "uploading assets/file-1.txt... 
+ Uploaded + test-name + (TIMINGS) + Deployed + test-name + (TIMINGS) + + test-name.test-sub-domain.workers.dev" + `); + expect(stderr).toMatchInlineSnapshot(`""`); + expect(error).toMatchInlineSnapshot(`undefined`); + }); + + it("should use `site-exclude` arg over `site.exclude` config", async () => { + const assets = [ + { filePath: "file-1.txt", content: "Content of file-1" }, + { filePath: "file-2.txt", content: "Content of file-2" }, + ]; + const kvNamespace = { + title: "__test-name_sites_assets", + id: "__test-name_sites_assets-id", + }; + writeWranglerToml("./index.js", "./assets", undefined, ["file-1.txt"]); + writeEsmWorkerSource(); + writeAssets("./assets", assets); + mockUploadWorkerRequest(); + mockSubDomainRequest(); + mockListKVNamespacesRequest(kvNamespace); + mockKeyListRequest(kvNamespace.id, []); + // Check we only upload file-1 + mockUploadAssetsToKVRequest( + kvNamespace.id, + assets.filter((a) => a.filePath === "file-1.txt") + ); + const { stdout, stderr, error } = await runWrangler( + "publish --site-exclude file-2.txt" + ); + + expect(stripTimings(stdout)).toMatchInlineSnapshot(` + "uploading assets/file-1.txt... + Uploaded + test-name + (TIMINGS) + Deployed + test-name + (TIMINGS) + + test-name.test-sub-domain.workers.dev" + `); + expect(stderr).toMatchInlineSnapshot(`""`); + expect(error).toMatchInlineSnapshot(`undefined`); + }); }); }); /** Write a mock wrangler.toml file to disk. */ -function writeWranglerToml(main?: string) { +function writeWranglerToml( + main?: string, + bucket?: string, + include?: string[], + exclude?: string[] +) { fs.writeFileSync( "./wrangler.toml", [ `compatibility_date = "2022-01-12"`, `name = "test-name"`, main !== undefined ? `[build.upload]\nmain = "${main}"` : "", + bucket || include || exclude ? "[site]" : "", + bucket !== undefined ? `bucket = "${bucket}"` : "", + include !== undefined ? `include = ${JSON.stringify(include)}` : "", + exclude !== undefined ? 
`exclude = ${JSON.stringify(exclude)}` : "", ].join("\n"), "utf-8" ); @@ -80,6 +412,18 @@ function writeEsmWorkerSource() { fs.writeFileSync("another.js", `export const foo = 100;`); } +/** Write mock assets to the file system so they can be uploaded. */ +function writeAssets( + assetDir: string, + assets: { filePath: string; content: string }[] +) { + for (const asset of assets) { + const filePath = path.join(assetDir, asset.filePath); + fs.mkdirSync(path.dirname(filePath), { recursive: true }); + fs.writeFileSync(filePath, asset.content); + } +} + /** Create a mock handler for the request to upload a worker script. */ function mockUploadWorkerRequest(available_on_subdomain = true) { setMockResponse( @@ -94,9 +438,64 @@ function mockUploadWorkerRequest(available_on_subdomain = true) { ); } -/** Create a mock handler the request for the account's subdomain. */ +/** Create a mock handler for the request to get the account's subdomain. */ function mockSubDomainRequest(subdomain = "test-sub-domain") { setMockResponse("/accounts/:accountId/workers/subdomain", () => { return { subdomain }; }); } + +/** Create a mock handler for the request to get a list of all KV namespaces. */ +function mockListKVNamespacesRequest(...namespaces: KVNamespaceInfo[]) { + setMockResponse( + "/accounts/:accountId/storage/kv/namespaces", + "GET", + ([_url, accountId]) => { + expect(accountId).toEqual("some-account-id"); + return namespaces; + } + ); +} + +/** Create a mock handler for the request that tries to do a bulk upload of assets to a KV namespace. 
*/ +function mockUploadAssetsToKVRequest( + expectedNamespaceId: string, + assets: { filePath: string; content: string }[] +) { + setMockResponse( + "/accounts/:accountId/storage/kv/namespaces/:namespaceId/bulk", + "PUT", + ([_url, accountId, namespaceId], { body }) => { + expect(accountId).toEqual("some-account-id"); + expect(namespaceId).toEqual(expectedNamespaceId); + const uploads = JSON.parse(body as string); + expect(assets.length).toEqual(uploads.length); + for (let i = 0; i < uploads.length; i++) { + const asset = assets[i]; + const upload = uploads[i]; + // The asset key consists of: + // - the basename of the filepath + // - some hash value + // - the extension + const keyMatcher = new RegExp( + "^" + + asset.filePath + .replace(/(\.[^.]+)$/, ".[a-z0-9]+$1") + .replace(/\./g, "\\.") + ); + expect(upload.key).toMatch(keyMatcher); + // The asset value is base64 encoded. + expect(upload.base64).toBe(true); + expect(Buffer.from(upload.value, "base64").toString()).toEqual( + asset.content + ); + } + return null; + } + ); +} + +/** Strip timing data out of the stdout, since this is not always deterministic. 
*/ +function stripTimings(stdout: string): string { + return stdout.replace(/\(\d+\.\d+ sec\)/g, "(TIMINGS)"); +} diff --git a/packages/wrangler/src/dev.tsx b/packages/wrangler/src/dev.tsx index d38c3a551790..cd5132dd0b9e 100644 --- a/packages/wrangler/src/dev.tsx +++ b/packages/wrangler/src/dev.tsx @@ -25,6 +25,7 @@ import type { CfWorkerInit } from "./api/worker"; import useInspector from "./inspect"; import makeModuleCollector from "./module-collection"; import { usePreviewServer } from "./proxy"; +import type { AssetPaths } from "./sites"; import { syncAssets } from "./sites"; import { getAPIToken } from "./user"; @@ -41,7 +42,7 @@ export type DevProps = { jsxFragment: undefined | string; bindings: CfWorkerInit["bindings"]; public: undefined | string; - site: undefined | string; + assetPaths: undefined | AssetPaths; compatibilityDate: undefined | string; compatibilityFlags: undefined | string[]; usageModel: undefined | "bundled" | "unbound"; @@ -98,7 +99,7 @@ function Dev(props: DevProps): JSX.Element { bundle={bundle} format={props.format} bindings={props.bindings} - site={props.site} + site={props.assetPaths} public={props.public} port={port} /> @@ -110,7 +111,7 @@ function Dev(props: DevProps): JSX.Element { accountId={props.accountId} apiToken={apiToken} bindings={props.bindings} - site={props.site} + assetPaths={props.assetPaths} public={props.public} port={port} compatibilityDate={props.compatibilityDate} @@ -136,7 +137,7 @@ function Remote(props: { bundle: EsbuildBundle | undefined; format: CfScriptFormat; public: undefined | string; - site: undefined | string; + assetPaths: undefined | AssetPaths; port: number; accountId: undefined | string; apiToken: undefined | string; @@ -155,7 +156,7 @@ function Remote(props: { accountId: props.accountId, apiToken: props.apiToken, bindings: props.bindings, - sitesFolder: props.site, + assetPaths: props.assetPaths, port: props.port, compatibilityDate: props.compatibilityDate, compatibilityFlags: 
props.compatibilityFlags, @@ -181,7 +182,7 @@ function Local(props: { format: CfScriptFormat; bindings: CfWorkerInit["bindings"]; public: undefined | string; - site: undefined | string; + site: undefined | AssetPaths; port: number; }) { const { inspectorUrl } = useLocalWorker({ @@ -522,7 +523,7 @@ function useWorker(props: { accountId: string; apiToken: string; bindings: CfWorkerInit["bindings"]; - sitesFolder: undefined | string; + assetPaths: undefined | AssetPaths; port: number; compatibilityDate: string | undefined; compatibilityFlags: string[] | undefined; @@ -536,7 +537,7 @@ function useWorker(props: { accountId, apiToken, bindings, - sitesFolder, + assetPaths, compatibilityDate, compatibilityFlags, usageModel, @@ -574,9 +575,8 @@ function useWorker(props: { const assets = await syncAssets( accountId, path.basename(bundle.path), - sitesFolder, - true, - undefined // TODO: env + assetPaths, + true ); // TODO: cancellable? const content = await readFile(bundle.path, "utf-8"); @@ -628,7 +628,7 @@ function useWorker(props: { accountId, apiToken, port, - sitesFolder, + assetPaths, compatibilityDate, compatibilityFlags, usageModel, diff --git a/packages/wrangler/src/index.tsx b/packages/wrangler/src/index.tsx index a6b60cdb81bf..4993311c5b4a 100644 --- a/packages/wrangler/src/index.tsx +++ b/packages/wrangler/src/index.tsx @@ -8,6 +8,7 @@ import { findUp } from "find-up"; import TOML from "@iarna/toml"; import { normaliseAndValidateEnvironmentsConfig } from "./config"; import type { Config } from "./config"; +import { getAssetPaths } from "./sites"; import { confirm, prompt } from "./dialogs"; import { version as wranglerVersion } from "../package.json"; import { @@ -444,6 +445,18 @@ export async function main(argv: string[]): Promise { describe: "Root folder of static assets for Workers Sites", type: "string", }) + .option("site-include", { + describe: + "Array of .gitignore-style patterns that match file or directory names from the sites directory. 
Only matched items will be uploaded.", + type: "string", + array: true, + }) + .option("site-exclude", { + describe: + "Array of .gitignore-style patterns that match file or directory names from the sites directory. Matched items will not be uploaded.", + type: "string", + array: true, + }) .option("upstream-protocol", { default: "https", describe: @@ -517,7 +530,12 @@ export async function main(argv: string[]): Promise { jsxFactory={args["jsx-factory"] || envRootObj?.jsx_factory} jsxFragment={args["jsx-fragment"] || envRootObj?.jsx_fragment} accountId={config.account_id} - site={args.site || config.site?.bucket} + assetPaths={getAssetPaths( + config, + args.site, + args.siteInclude, + args.siteExclude + )} port={args.port || config.dev?.port} public={args["experimental-public"]} compatibilityDate={ @@ -602,6 +620,18 @@ export async function main(argv: string[]): Promise { describe: "Root folder of static assets for Workers Sites", type: "string", }) + .option("site-include", { + describe: + "Array of .gitignore-style patterns that match file or directory names from the sites directory. Only matched items will be uploaded.", + type: "string", + array: true, + }) + .option("site-exclude", { + describe: + "Array of .gitignore-style patterns that match file or directory names from the sites directory. 
Matched items will not be uploaded.", + type: "string", + array: true, + }) .option("triggers", { describe: "cron schedules to attach", alias: ["schedule", "schedules"], @@ -671,6 +701,12 @@ export async function main(argv: string[]): Promise { // -- snip, end -- } + const assetPaths = getAssetPaths( + config, + args["experimental-public"] || args.site, + args.siteInclude, + args.siteExclude + ); await publish({ config: args.config as Config, name: args.name, @@ -684,8 +720,10 @@ export async function main(argv: string[]): Promise { jsxFactory: args["jsx-factory"], jsxFragment: args["jsx-fragment"], routes: args.routes, - public: args["experimental-public"], - site: args.site, + assetPaths, + format: undefined, // TODO: add args for this + legacyEnv: undefined, // TODO: get this from somewhere... config? + experimentalPublic: args["experimental-public"] !== undefined, }); } ); diff --git a/packages/wrangler/src/publish.ts b/packages/wrangler/src/publish.ts index 62ddbabee514..1bd497038b16 100644 --- a/packages/wrangler/src/publish.ts +++ b/packages/wrangler/src/publish.ts @@ -10,25 +10,26 @@ import { toFormData } from "./api/form_data"; import { fetchResult } from "./cfetch"; import type { Config } from "./config"; import makeModuleCollector from "./module-collection"; +import type { AssetPaths } from "./sites"; import { syncAssets } from "./sites"; type CfScriptFormat = undefined | "modules" | "service-worker"; type Props = { config: Config; - format?: CfScriptFormat; - script?: string; - name?: string; - env?: string; - compatibilityDate?: string; - compatibilityFlags?: string[]; - public?: string; - site?: string; - triggers?: (string | number)[]; - routes?: (string | number)[]; - legacyEnv?: boolean; + format: CfScriptFormat | undefined; + script: string | undefined; + name: string | undefined; + env: string | undefined; + compatibilityDate: string | undefined; + compatibilityFlags: string[] | undefined; + assetPaths: AssetPaths | undefined; + triggers: (string 
| number)[] | undefined; + routes: (string | number)[] | undefined; + legacyEnv: boolean | undefined; jsxFactory: undefined | string; jsxFragment: undefined | string; + experimentalPublic: boolean; }; function sleep(ms: number) { @@ -36,10 +37,10 @@ function sleep(ms: number) { } export default async function publish(props: Props): Promise { - if (props.public && props.format === "service-worker") { + if (props.experimentalPublic && props.format === "service-worker") { // TODO: check config too throw new Error( - "You cannot use the service worker format with a public directory." + "You cannot publish in the service worker format with a public directory." ); } // TODO: warn if git/hg has uncommitted changes @@ -106,7 +107,7 @@ export default async function publish(props: Props): Promise { const moduleCollector = makeModuleCollector(); const result = await esbuild.build({ - ...(props.public + ...(props.experimentalPublic ? { stdin: { contents: ( @@ -118,10 +119,10 @@ export default async function publish(props: Props): Promise { sourcefile: "static-asset-facade.js", resolveDir: path.dirname(file), }, + nodePaths: [path.join(__dirname, "../vendor")], } : { entryPoints: [file] }), bundle: true, - nodePaths: props.public ? 
[path.join(__dirname, "../vendor")] : undefined, outdir: destination.path, external: ["__STATIC_CONTENT_MANIFEST"], format: "esm", @@ -221,8 +222,12 @@ export default async function publish(props: Props): Promise { } } - const assetPath = props.public || props.site || props.config.site?.bucket; // TODO: allow both - const assets = await syncAssets(accountId, scriptName, assetPath, false); + const assets = await syncAssets( + accountId, + scriptName, + props.assetPaths, + false + ); const bindings: CfWorkerInit["bindings"] = { kv_namespaces: envRootObj.kv_namespaces?.concat( diff --git a/packages/wrangler/src/sites.tsx b/packages/wrangler/src/sites.tsx index 027fefba9cc3..47da578c946c 100644 --- a/packages/wrangler/src/sites.tsx +++ b/packages/wrangler/src/sites.tsx @@ -2,7 +2,9 @@ import crypto from "node:crypto"; import { createReadStream } from "node:fs"; import * as path from "node:path"; import { readdir, readFile } from "node:fs/promises"; +import ignore from "ignore"; import { fetchResult } from "./cfetch"; +import type { Config } from "./config"; import { listNamespaceKeys, listNamespaces, putBulkKeyValue } from "./kv"; async function* getFilesInFolder(dirPath: string): AsyncIterable { @@ -26,15 +28,15 @@ async function hashFileContent(filePath: string): Promise { }); } -async function hashFile(filePath: string): Promise<{ - filePath: string; +async function hashAsset(filePath: string): Promise<{ + assetKey: string; hash: string; }> { const extName = path.extname(filePath); const baseName = path.basename(filePath, extName); const hash = await hashFileContent(filePath); return { - filePath: `${baseName}.${hash}${extName || ""}`, + assetKey: `${baseName}.${hash}${extName || ""}`, hash, }; } @@ -76,7 +78,7 @@ async function createKVNamespaceIfNotAlreadyExisting( * * @param accountId the account to upload to. * @param scriptName the name of the worker whose assets we are uploading. 
- * @param dirPath the path to the directory of assets to upload, or undefined if there are no assets to upload. + * @param siteAssets an object describing what assets to upload, or undefined if there are no assets to upload. + * @param preview if true then upload to a "preview" KV namespace. + * @param _env (not implemented). + * @returns a promise for an object mapping the relative paths of the assets to the key of that @@ -85,14 +87,14 @@ async function createKVNamespaceIfNotAlreadyExisting( export async function syncAssets( accountId: string, scriptName: string, - dirPath: string | undefined, + siteAssets: AssetPaths | undefined, preview: boolean, _env?: string ): Promise<{ manifest: { [filePath: string]: string } | undefined; namespace: string | undefined; }> { - if (dirPath === undefined) { + if (siteAssets === undefined) { return { manifest: undefined, namespace: undefined }; } @@ -103,9 +105,8 @@ export async function syncAssets( ); // let's get all the keys in this namespace - const keys = new Set( - (await listNamespaceKeys(accountId, namespace)).map((x) => x.name) - ); + const result = await listNamespaceKeys(accountId, namespace); + const keys = new Set(result.map((x) => x.name)); const manifest = {}; const upload: { @@ -113,25 +114,79 @@ key: string; value: string; base64: boolean; }[] = []; + + const include = createPatternMatcher(siteAssets.includePatterns, false); + const exclude = createPatternMatcher(siteAssets.excludePatterns, true); + // TODO: this can be more efficient by parallelising - for await (const file of getFilesInFolder(dirPath)) { - // TODO: "exclude:" config - const { filePath } = await hashFile(file); + for await (const file of getFilesInFolder(siteAssets.baseDirectory)) { + const relativePath = path.relative(siteAssets.baseDirectory, file); + if (!include(relativePath)) { + continue; + } + if (exclude(relativePath)) { + continue; + } + const { assetKey } = await hashAsset(file); // now put each of the files into kv
- if (!keys.has(filePath)) { + if (!keys.has(assetKey)) { console.log(`uploading ${file}...`); const content = await readFile(file, "base64"); if (content.length > 25 * 1024 * 1024) { throw new Error(`File ${file} is too big, it should be under 25 mb.`); } upload.push({ - key: filePath, + key: assetKey, value: content, base64: true, }); } - manifest[path.relative(dirPath, file)] = filePath; + manifest[path.relative(siteAssets.baseDirectory, file)] = assetKey; } await putBulkKeyValue(accountId, namespace, JSON.stringify(upload)); return { manifest, namespace }; } + +function createPatternMatcher( + patterns: string[], + exclude: boolean +): (filePath: string) => boolean { + if (patterns.length === 0) { + return (_filePath) => !exclude; + } else { + const ignorer = ignore().add(patterns); + return (filePath) => ignorer.test(filePath).ignored; + } +} + +/** + * Information about the assets that should be uploaded + */ +export interface AssetPaths { + baseDirectory: string; + includePatterns: string[]; + excludePatterns: string[]; +} + +/** + * Get an object that describes what assets to upload, if any. + * + * Uses the args (passed from the command line) if available, + * falling back to those defined in the config. + * + * // TODO: Support for environments + */ +export function getAssetPaths( + config: Config, + baseDirectory = config.site?.bucket, + includePatterns = config.site?.include ?? [], + excludePatterns = config.site?.exclude ?? [] +): undefined | AssetPaths { + return baseDirectory + ? { + baseDirectory, + includePatterns, + excludePatterns, + } + : undefined; +}