diff --git a/.changeset/fluffy-files-brush.md b/.changeset/fluffy-files-brush.md new file mode 100644 index 0000000000..29221c524d --- /dev/null +++ b/.changeset/fluffy-files-brush.md @@ -0,0 +1,5 @@ +--- +"wrangler": minor +--- + +feature: Improved `d1 execute --file --remote` performance & added support for much larger SQL files within a single transaction. diff --git a/packages/wrangler/package.json b/packages/wrangler/package.json index d512a005f4..2f112cb62b 100644 --- a/packages/wrangler/package.json +++ b/packages/wrangler/package.json @@ -173,6 +173,7 @@ "jest": "^29.7.0", "jest-fetch-mock": "^3.0.3", "jest-websocket-mock": "^2.5.0", + "md5-file": "5.0.0", "mime": "^3.0.0", "minimatch": "^5.1.0", "msw": "^0.49.1", diff --git a/packages/wrangler/src/__tests__/d1/execute.test.ts b/packages/wrangler/src/__tests__/d1/execute.test.ts index 4ab325389c..757fdb6853 100644 --- a/packages/wrangler/src/__tests__/d1/execute.test.ts +++ b/packages/wrangler/src/__tests__/d1/execute.test.ts @@ -1,3 +1,5 @@ +import fs from "node:fs"; +import { join } from "path"; import { mockConsoleMethods } from "../helpers/mock-console"; import { useMockIsTTY } from "../helpers/mock-istty"; import { runInTempDir } from "../helpers/run-in-tmp"; @@ -87,4 +89,29 @@ describe("execute", () => { ) ); }); + + it("should reject a binary SQLite DB", async () => { + setIsTTY(false); + writeWranglerToml({ + d1_databases: [ + { binding: "DATABASE", database_name: "db", database_id: "xxxx" }, + ], + }); + const path = join(__dirname, "fixtures", "db.sqlite3"); + fs.copyFileSync(path, "db.sqlite3"); + + await expect( + runWrangler(`d1 execute db --file db.sqlite3 --local --json`) + ).rejects.toThrowError( + JSON.stringify( + { + error: { + text: "Provided file is a binary SQLite database file instead of an SQL text file. The execute command can only process SQL text files. 
Please export an SQL file from your SQLite database and try again.", + }, + }, + null, + 2 + ) + ); + }); }); diff --git a/packages/wrangler/src/__tests__/d1/fixtures/db.sqlite3 b/packages/wrangler/src/__tests__/d1/fixtures/db.sqlite3 new file mode 100644 index 0000000000..2f7292d090 Binary files /dev/null and b/packages/wrangler/src/__tests__/d1/fixtures/db.sqlite3 differ diff --git a/packages/wrangler/src/d1/constants.ts b/packages/wrangler/src/d1/constants.ts index 90a5459eeb..45e14b379f 100644 --- a/packages/wrangler/src/d1/constants.ts +++ b/packages/wrangler/src/d1/constants.ts @@ -1,5 +1,3 @@ export const DEFAULT_MIGRATION_PATH = "./migrations"; export const DEFAULT_MIGRATION_TABLE = "d1_migrations"; export const LOCATION_CHOICES = ["weur", "eeur", "apac", "oc", "wnam", "enam"]; -// Max number of statements to send in a single /execute call -export const DEFAULT_BATCH_SIZE = 10_000; diff --git a/packages/wrangler/src/d1/execute.tsx b/packages/wrangler/src/d1/execute.tsx index 4ca0793672..1dee76964b 100644 --- a/packages/wrangler/src/d1/execute.tsx +++ b/packages/wrangler/src/d1/execute.tsx @@ -1,10 +1,14 @@ +import { createReadStream, promises as fs } from "fs"; import assert from "node:assert"; import path from "node:path"; +import { spinnerWhile } from "@cloudflare/cli/interactive"; import chalk from "chalk"; import { Static, Text } from "ink"; import Table from "ink-table"; +import md5File from "md5-file"; import { Miniflare } from "miniflare"; import React from "react"; +import { fetch } from "undici"; import { printWranglerBanner } from "../"; import { fetchResult } from "../cfetch"; import { readConfig } from "../config"; @@ -12,11 +16,10 @@ import { getLocalPersistencePath } from "../dev/get-local-persistence-path"; import { confirm } from "../dialogs"; import { JsonFriendlyFatalError, UserError } from "../errors"; import { logger } from "../logger"; -import { readFileSync } from "../parse"; +import { APIError, readFileSync } from "../parse"; import { 
readableRelative } from "../paths"; import { requireAuth } from "../user"; import { renderToString } from "../utils/render"; -import { DEFAULT_BATCH_SIZE } from "./constants"; import * as options from "./options"; import splitSqlQuery from "./splitter"; import { getDatabaseByNameOrBinding, getDatabaseInfoFromConfig } from "./utils"; @@ -25,7 +28,12 @@ import type { CommonYargsArgv, StrictYargsOptionsToInterface, } from "../yargs-types"; -import type { Database } from "./types"; +import type { + Database, + ImportInitResponse, + ImportPollingResponse, + PollingFailure, +} from "./types"; import type { D1Result } from "@cloudflare/workers-types/experimental"; export type QueryResult = { @@ -81,7 +89,8 @@ export function Options(yargs: CommonYargsArgv) { .option("batch-size", { describe: "Number of queries to send in a single batch", type: "number", - default: DEFAULT_BATCH_SIZE, + deprecated: true, + hidden: true, }); } @@ -98,7 +107,6 @@ export const Handler = async (args: HandlerOptions): Promise => { command, json, preview, - batchSize, } = args; const existingLogLevel = logger.loggerLevel; if (json) { @@ -120,13 +128,12 @@ export const Handler = async (args: HandlerOptions): Promise => { remote, config, name: database, - shouldPrompt: isInteractive && !yes, + shouldPrompt: isInteractive && !yes && !json, persistTo, file, command, json, preview, - batchSize, }); // Early exit if prompt rejected @@ -177,6 +184,10 @@ export const Handler = async (args: HandlerOptions): Promise => { } }; +type ExecuteInput = + | { file: string; command: never } + | { file: never; command: string }; + export async function executeSql({ local, remote, @@ -188,7 +199,6 @@ export async function executeSql({ command, json, preview, - batchSize, }: { local: boolean | undefined; remote: boolean | undefined; @@ -200,7 +210,6 @@ export async function executeSql({ command: string | undefined; json: boolean | undefined; preview: boolean | undefined; - batchSize: number; }) { const 
existingLogLevel = logger.loggerLevel; if (json) { @@ -208,8 +217,12 @@ export async function executeSql({ logger.loggerLevel = "error"; } - const sql = file ? readFileSync(file) : command; - if (!sql) { + const input = file + ? ({ file } as ExecuteInput) + : command + ? ({ command } as ExecuteInput) + : null; + if (!input) { throw new UserError(`Error: must provide --command or --file.`); } if (local && remote) { @@ -223,31 +236,23 @@ export async function executeSql({ if (persistTo && !local) { throw new UserError(`Error: can't use --persist-to without --local`); } - logger.log(`šŸŒ€ Mapping SQL input into an array of statements`); - const queries = splitSqlQuery(sql); - - if (file && sql) { - if (queries[0].startsWith("SQLite format 3")) { - //TODO: update this error to recommend using `wrangler d1 restore` when it exists - throw new UserError( - "Provided file is a binary SQLite database file instead of an SQL text file.\nThe execute command can only process SQL text files.\nPlease export an SQL file from your SQLite database and try again." - ); - } + if (input.file) { + await checkForSQLiteBinary(input.file); } + const result = remote || preview ? await executeRemotely({ config, name, shouldPrompt, - batches: batchSplit(queries, batchSize), - json, + input, preview, }) : await executeLocally({ config, name, - queries, + input, persistTo, }); @@ -260,12 +265,12 @@ export async function executeSql({ async function executeLocally({ config, name, - queries, + input, persistTo, }: { config: Config; name: string; - queries: string[]; + input: ExecuteInput; persistTo: string | undefined; }) { const localDB = getDatabaseInfoFromConfig(config, name); @@ -296,6 +301,9 @@ async function executeLocally({ }); const db = await mf.getD1Database("DATABASE"); + const sql = input.file ? 
readFileSync(input.file) : input.command; + const queries = splitSqlQuery(sql); + let results: D1Result>[]; try { results = await db.batch(queries.map((query) => db.prepare(query))); @@ -328,32 +336,25 @@ async function executeRemotely({ config, name, shouldPrompt, - batches, - json, + input, preview, }: { config: Config; name: string; shouldPrompt: boolean | undefined; - batches: string[]; - json: boolean | undefined; + input: ExecuteInput; preview: boolean | undefined; }) { - const multiple_batches = batches.length > 1; - // in JSON mode, we don't want a prompt here - if (multiple_batches && !json) { - const warning = `āš ļø Too much SQL to send at once, this execution will be sent as ${batches.length} batches.`; + if (input.file) { + const warning = `āš ļø This process may take some time, during which your D1 database will be unavailable to serve queries.`; if (shouldPrompt) { - const ok = await confirm( - `${warning}\nā„¹ļø Each batch is sent individually and may leave your DB in an unexpected state if a later batch fails.\nāš ļø Make sure you have a recent backup. Ok to proceed?` - ); + const ok = await confirm(`${warning}\n Ok to proceed?`); if (!ok) { return null; } - logger.log(`šŸŒ€ Let's go`); } else { - logger.error(warning); + logger.warn(warning); } } @@ -363,42 +364,206 @@ async function executeRemotely({ accountId, name ); - if (preview && !db.previewDatabaseUuid) { - const error = new UserError( - "Please define a `preview_database_id` in your wrangler.toml to execute your queries against a preview database" - ); - logger.error(error.message); - throw error; + if (preview) { + if (!db.previewDatabaseUuid) { + throw new UserError( + "Please define a `preview_database_id` in your wrangler.toml to execute your queries against a preview database" + ); + } + db.uuid = db.previewDatabaseUuid; } - const dbUuid = preview ? 
db.previewDatabaseUuid : db.uuid; - logger.log(`šŸŒ€ Executing on remote database ${name} (${dbUuid}):`); + logger.log( + `šŸŒ€ Executing on ${ + db.previewDatabaseUuid ? "preview" : "remote" + } database ${name} (${db.uuid}):` + ); logger.log( "šŸŒ€ To execute on your local development database, remove the --remote flag from your wrangler command." ); - const results: QueryResult[] = []; - for (const sql of batches) { - if (multiple_batches) { - logger.log( - chalk.gray(` ${sql.slice(0, 70)}${sql.length > 70 ? "..." : ""}`) - ); + if (input.file) { + // TODO: do we need to update hashing code if we upload in parts? + const etag = await md5File(input.file); + + logger.log( + chalk.gray( + `Note: if the execution fails to complete, your DB will return to its original state and you can safely retry.` + ) + ); + + const initResponse = await spinnerWhile({ + promise: d1ApiPost< + ImportInitResponse | ImportPollingResponse | PollingFailure + >(accountId, db, "import", { action: "init", etag }), + startMessage: "Checking if file needs uploading", + }); + + // An init response usually returns a {filename, uploadUrl} pair, except if we've detected that file + // already exists and is valid, to save people reuploading. In which case `initResponse` has already + // kicked the import process off. + const uploadRequired = "uploadUrl" in initResponse; + if (!uploadRequired) { + logger.log(`šŸŒ€ File already uploaded. Processing.`); + } + const firstPollResponse = uploadRequired + ? // Upload the file to R2, then inform D1 to start processing it. The server delays before responding + // in case the file is quite small and can be processed without a second round-trip. + await uploadAndBeginIngestion( + accountId, + db, + input.file, + etag, + initResponse + ) + : initResponse; + + // If the file takes longer than the specified delay (~1s) to import, we'll need to continue polling + // until it's complete. If it's already finished, this call will early-exit. 
+ const finalResponse = await pollUntilComplete( + firstPollResponse, + accountId, + db + ); + + if (finalResponse.status !== "complete") { + throw new APIError({ text: `D1 reset before execute completed!` }); } const { + result: { numQueries, finalBookmark, meta }, + } = finalResponse; + logger.log( + `🚣 Executed ${numQueries} queries in ${(meta.duration / 1000).toFixed( + 2 + )} seconds (${meta.rows_read} rows read, ${ + meta.rows_written + } rows written)\n` + + chalk.gray(` Database is currently at bookmark ${finalBookmark}.`) + ); + + return [ + { + results: [ + { + "Total queries executed": numQueries, + "Rows read": meta.rows_read, + "Rows written": meta.rows_written, + "Database size (MB)": (meta.size_after / 1_000_000).toFixed(2), + }, + ], + success: true, + finalBookmark, + meta, + }, + ]; + } else { + const result = await d1ApiPost(accountId, db, "query", { + sql: input.command, + }); + logResult(result); + return result; + } +} + +async function uploadAndBeginIngestion( + accountId: string, + db: Database, + file: string, + etag: string, + initResponse: ImportInitResponse +) { + const { uploadUrl, filename } = initResponse; + + const { size } = await fs.stat(file); + + const uploadResponse = await spinnerWhile({ + promise: fetch(uploadUrl, { + method: "PUT", + headers: { + "Content-length": `${size}`, + }, + body: createReadStream(file), + duplex: "half", // required for NodeJS streams over .fetch ? + }), + startMessage: `šŸŒ€ Uploading ${file}`, + endMessage: `šŸŒ€ Uploading complete.`, + }); + + if (uploadResponse.status !== 200) { + throw new UserError( + `File could not be uploaded. Please retry.\nGot response: ${await uploadResponse.text()}` + ); + } + + const etagResponse = uploadResponse.headers.get("etag"); + if (!etagResponse) { + throw new UserError(`File did not upload successfully. 
Please retry.`); + } + if (etag !== etagResponse.replace(/^"|"$/g, "")) { + throw new UserError( + `File contents did not upload successfully. Please retry.` + ); + } + + return await d1ApiPost( + accountId, + db, + "import", + { action: "ingest", filename, etag } + ); +} + +async function pollUntilComplete( + response: ImportPollingResponse | PollingFailure, + accountId: string, + db: Database +): Promise { + if (!response.success) { + throw new Error(response.error); + } + + response.messages.forEach((line) => { + logger.log(`šŸŒ€ ${line}`); + }); + + if (response.status === "complete") { + return response; + } else if (response.status === "error") { + throw new APIError({ + text: response.errors?.join("\n"), + notes: response.messages.map((text) => ({ text })), + }); + } else { + const newResponse = await d1ApiPost( + accountId, + db, + "import", { - method: "POST", - headers: { - "Content-Type": "application/json", - ...(db.internal_env ? { "x-d1-internal-env": db.internal_env } : {}), - }, - body: JSON.stringify({ sql }), + action: "poll", + currentBookmark: response.at_bookmark, } ); - logResult(result); - results.push(...result); + return await pollUntilComplete(newResponse, accountId, db); } - return results; +} + +async function d1ApiPost( + accountId: string, + db: Database, + action: string, + body: unknown +) { + return await fetchResult( + `/accounts/${accountId}/d1/database/${db.uuid}/${action}`, + { + method: "POST", + headers: { + "Content-Type": "application/json", + ...(db.internal_env ? 
{ "x-d1-internal-env": db.internal_env } : {}), + }, + body: JSON.stringify(body), + } + ); } function logResult(r: QueryResult | QueryResult[]) { @@ -415,23 +580,19 @@ function logResult(r: QueryResult | QueryResult[]) { ); } -function batchSplit(queries: string[], batchSize: number) { - logger.log(`šŸŒ€ Parsing ${queries.length} statements`); - const num_batches = Math.ceil(queries.length / batchSize); - const batches: string[] = []; - for (let i = 0; i < num_batches; i++) { - batches.push(queries.slice(i * batchSize, (i + 1) * batchSize).join("; ")); - } - if (num_batches > 1) { - logger.log( - `šŸŒ€ We are sending ${num_batches} batch(es) to D1 (limited to ${batchSize} statements per batch. Use --batch-size to override.)` - ); - } - return batches; -} - function shorten(query: string | undefined, length: number) { return query && query.length > length ? query.slice(0, length) + "..." : query; } + +async function checkForSQLiteBinary(filename: string) { + const fd = await fs.open(filename, "r"); + const buffer = Buffer.alloc(15); + await fd.read(buffer, 0, 15); + if (buffer.toString("utf8") === "SQLite format 3") { + throw new UserError( + "Provided file is a binary SQLite database file instead of an SQL text file. The execute command can only process SQL text files. Please export an SQL file from your SQLite database and try again." 
+ ); + } +} diff --git a/packages/wrangler/src/d1/export.ts b/packages/wrangler/src/d1/export.ts index ac4475b305..ac3dc9f4d1 100644 --- a/packages/wrangler/src/d1/export.ts +++ b/packages/wrangler/src/d1/export.ts @@ -15,45 +15,59 @@ import type { CommonYargsArgv, StrictYargsOptionsToInterface, } from "../yargs-types"; -import type { Database } from "./types"; +import type { Database, ExportPollingResponse, PollingFailure } from "./types"; export function Options(yargs: CommonYargsArgv) { - return Name(yargs) - .option("local", { - type: "boolean", - describe: "Export from your local DB you use with wrangler dev", - conflicts: "remote", - }) - .option("remote", { - type: "boolean", - describe: "Export from your live D1", - conflicts: "local", - }) - .option("no-schema", { - type: "boolean", - describe: "Only output table contents, not the DB schema", - conflicts: "no-data", - }) - .option("no-data", { - type: "boolean", - describe: - "Only output table schema, not the contents of the DBs themselves", - conflicts: "no-schema", - }) - .option("table", { - type: "string", - describe: "Specify which tables to include in export", - }) - .option("output", { - type: "string", - describe: "Which .sql file to output to", - demandOption: true, - }); + return ( + Name(yargs) + .option("local", { + type: "boolean", + describe: "Export from your local DB you use with wrangler dev", + conflicts: "remote", + }) + .option("remote", { + type: "boolean", + describe: "Export from your live D1", + conflicts: "local", + }) + .option("no-schema", { + type: "boolean", + describe: "Only output table contents, not the DB schema", + conflicts: "no-data", + }) + .option("no-data", { + type: "boolean", + describe: + "Only output table schema, not the contents of the DBs themselves", + conflicts: "no-schema", + }) + // For --no-schema and --no-data to work, we need their positive versions + // to be defined. 
But keep them hidden as they default to true + .option("schema", { + type: "boolean", + hidden: true, + default: true, + }) + .option("data", { + type: "boolean", + hidden: true, + default: true, + }) + .option("table", { + type: "string", + describe: "Specify which tables to include in export", + }) + .option("output", { + type: "string", + describe: "Which .sql file to output to", + demandOption: true, + }) + ); } type HandlerOptions = StrictYargsOptionsToInterface; export const Handler = async (args: HandlerOptions): Promise => { - const { local, remote, name, output, noSchema, noData, table } = args; + const { local, remote, name, output, schema, data, table } = args; await printWranglerBanner(); const config = readConfig(args.config, args); @@ -66,6 +80,10 @@ export const Handler = async (args: HandlerOptions): Promise => { throw new UserError(`You must specify either --local or --remote`); } + if (!schema && !data) { + throw new UserError(`You cannot specify both --no-schema and --no-data`); + } + // Allow multiple --table x --table y flags or none const tables: string[] = table ? 
Array.isArray(table) @@ -78,35 +96,19 @@ export const Handler = async (args: HandlerOptions): Promise => { name, output, tables, - noSchema, - noData + !schema, + !data ); return result; }; -type PollingResponse = { - success: true; - type: "export"; - at_bookmark: string; - messages: string[]; - errors: string[]; -} & ( - | { - status: "active" | "error"; - } - | { - status: "complete"; - result: { filename: string; signedUrl: string }; - } -); - async function exportRemotely( config: Config, name: string, output: string, tables: string[], - noSchema?: boolean, - noData?: boolean + noSchema: boolean, + noData: boolean ) { const accountId = await requireAuth(config); const db: Database = await getDatabaseByNameOrBinding( @@ -150,17 +152,18 @@ async function pollExport( }, currentBookmark: string | undefined, num_parts_uploaded = 0 -): Promise { - const response = await fetchResult< - PollingResponse | { success: false; error: string } - >(`/accounts/${accountId}/d1/database/${db.uuid}/export`, { - method: "POST", - body: JSON.stringify({ - outputFormat: "polling", - dumpOptions, - currentBookmark, - }), - }); +): Promise { + const response = await fetchResult( + `/accounts/${accountId}/d1/database/${db.uuid}/export`, + { + method: "POST", + body: JSON.stringify({ + outputFormat: "polling", + dumpOptions, + currentBookmark, + }), + } + ); if (!response.success) { throw new Error(response.error); diff --git a/packages/wrangler/src/d1/migrations/apply.tsx b/packages/wrangler/src/d1/migrations/apply.tsx index bb98a89d96..95dc81275a 100644 --- a/packages/wrangler/src/d1/migrations/apply.tsx +++ b/packages/wrangler/src/d1/migrations/apply.tsx @@ -14,11 +14,7 @@ import { logger } from "../../logger"; import { requireAuth } from "../../user"; import { renderToString } from "../../utils/render"; import { createBackup } from "../backups"; -import { - DEFAULT_BATCH_SIZE, - DEFAULT_MIGRATION_PATH, - DEFAULT_MIGRATION_TABLE, -} from "../constants"; +import { 
DEFAULT_MIGRATION_PATH, DEFAULT_MIGRATION_TABLE } from "../constants"; import { executeSql } from "../execute"; import { getDatabaseInfoFromConfig, getDatabaseInfoFromId } from "../utils"; import { @@ -37,7 +33,7 @@ export function ApplyOptions(yargs: CommonYargsArgv) { return MigrationOptions(yargs).option("batch-size", { describe: "Number of queries to send in a single batch", type: "number", - default: DEFAULT_BATCH_SIZE, + deprecated: true, }); } @@ -51,7 +47,6 @@ export const ApplyHandler = withConfig( remote, persistTo, preview, - batchSize, }): Promise => { await printWranglerBanner(); const databaseInfo = getDatabaseInfoFromConfig(config, database); @@ -175,7 +170,6 @@ Your database may not be available to serve requests during the migration, conti file: undefined, json: undefined, preview, - batchSize, }); if (response === null) { diff --git a/packages/wrangler/src/d1/migrations/helpers.ts b/packages/wrangler/src/d1/migrations/helpers.ts index 2d98fe91d7..9f07a36fc8 100644 --- a/packages/wrangler/src/d1/migrations/helpers.ts +++ b/packages/wrangler/src/d1/migrations/helpers.ts @@ -5,7 +5,7 @@ import { UserError } from "../../errors"; import { CI } from "../../is-ci"; import isInteractive from "../../is-interactive"; import { logger } from "../../logger"; -import { DEFAULT_BATCH_SIZE, DEFAULT_MIGRATION_PATH } from "../constants"; +import { DEFAULT_MIGRATION_PATH } from "../constants"; import { executeSql } from "../execute"; import type { ConfigFields, DevConfig, Environment } from "../../config"; import type { QueryResult } from "../execute"; @@ -118,7 +118,6 @@ const listAppliedMigrations = async ({ file: undefined, json: true, preview, - batchSize: DEFAULT_BATCH_SIZE, }); if (!response || response[0].results.length === 0) { @@ -189,6 +188,5 @@ export const initMigrationsTable = async ({ file: undefined, json: true, preview, - batchSize: DEFAULT_BATCH_SIZE, }); }; diff --git a/packages/wrangler/src/d1/types.ts b/packages/wrangler/src/d1/types.ts index 
7d76d3c2a4..064328e02e 100644 --- a/packages/wrangler/src/d1/types.ts +++ b/packages/wrangler/src/d1/types.ts @@ -104,3 +104,56 @@ export interface D1QueriesGraphQLResponse { }; }; } + +export type ImportInitResponse = { + success: true; + filename: string; + uploadUrl: string; +}; +export type ImportPollingResponse = { + success: true; + type: "import"; + at_bookmark: string; + messages: string[]; + errors: string[]; +} & ( + | { + status: "active" | "error"; + } + | { + status: "complete"; + result: { + success: boolean; + finalBookmark: string; + numQueries: number; + meta: { + served_by: string; + duration: number; + changes: number; + last_row_id: number; + changed_db: boolean; + size_after: number; + rows_read: number; + rows_written: number; + }; + }; + } +); + +export type ExportPollingResponse = { + success: true; + type: "export"; + at_bookmark: string; + messages: string[]; + errors: string[]; +} & ( + | { + status: "active" | "error"; + } + | { + status: "complete"; + result: { filename: string; signedUrl: string }; + } +); + +export type PollingFailure = { success: false; error: string }; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7f2c90eefd..779cc7df98 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1745,6 +1745,9 @@ importers: jest-websocket-mock: specifier: ^2.5.0 version: 2.5.0 + md5-file: + specifier: 5.0.0 + version: 5.0.0 mime: specifier: ^3.0.0 version: 3.0.0 @@ -8673,6 +8676,11 @@ packages: resolution: {integrity: sha512-s2EMBOWtXFc8dgqvoAzKJXxNHibcdJMV0gwqKUaw9E2JBJuGUK7DrNKrA6g/i+v72TT16+6sVm5mS3thaMLQUw==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + md5-file@5.0.0: + resolution: {integrity: sha512-xbEFXCYVWrSx/gEKS1VPlg84h/4L20znVIulKw6kMfmBUAZNAnF00eczz9ICMl+/hjQGo5KSXRxbL/47X3rmMw==} + engines: {node: '>=10.13.0'} + hasBin: true + md5-hex@3.0.1: resolution: {integrity: sha512-BUiRtTtV39LIJwinWBjqVsU9xhdnz7/i889V859IBFpuqGAj6LuOvHv5XLbgZ2R7ptJoJaEcxkv88/h25T7Ciw==} engines: {node: '>=8'} @@ -20594,6 +20602,8 @@ 
snapshots: dependencies: escape-string-regexp: 5.0.0 + md5-file@5.0.0: {} + md5-hex@3.0.1: dependencies: blueimp-md5: 2.19.0