diff --git a/packages/opencode/src/cli/cmd/debug/agent.ts b/packages/opencode/src/cli/cmd/debug/agent.ts index fe300348597..13ff7021173 100644 --- a/packages/opencode/src/cli/cmd/debug/agent.ts +++ b/packages/opencode/src/cli/cmd/debug/agent.ts @@ -28,7 +28,7 @@ export const AgentCommand = cmd({ }) .option("params", { type: "string", - description: "Tool params as JSON or a JS object literal", + description: "Tool params as JSON object", }), async handler(args) { await bootstrap(process.cwd(), async () => { @@ -86,29 +86,20 @@ async function resolveTools(agent: Agent.Info, availableTools: Awaited { if (!input) return {} const trimmed = input.trim() if (trimmed.length === 0) return {} - const parsed = iife(() => { - try { - return JSON.parse(trimmed) - } catch (jsonError) { - try { - return new Function(`return (${trimmed})`)() - } catch (evalError) { - throw new Error( - `Failed to parse --params. Use JSON or a JS object literal. JSON error: ${jsonError}. Eval error: ${evalError}.`, - ) - } + try { + const parsed = JSON.parse(trimmed) + if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) { + throw new Error("Tool params must be a JSON object.") } - }) - - if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) { - throw new Error("Tool params must be an object.") + return parsed as Record + } catch (e) { + throw new Error(`Failed to parse --params as JSON: ${e instanceof Error ? 
e.message : String(e)}`) } - return parsed as Record } async function createToolContext(agent: Agent.Info) { diff --git a/packages/opencode/src/file/index.ts b/packages/opencode/src/file/index.ts index 32465015e9c..cee27b32f0b 100644 --- a/packages/opencode/src/file/index.ts +++ b/packages/opencode/src/file/index.ts @@ -16,6 +16,26 @@ import { Global } from "../global" export namespace File { const log = Log.create({ service: "file" }) + async function resolveRealPath(filepath: string): Promise { + try { + return await fs.promises.realpath(filepath) + } catch (err) { + // If realpath fails (for example, the file doesn't exist yet), resolve the + // parent directory with realpath and append the basename. This ensures that + // all existing path components are fully resolved (and symlinks detected), + // avoiding an insecure lexical-only fallback. + const parentDir = path.dirname(filepath) + try { + const realParent = await fs.promises.realpath(parentDir) + return path.join(realParent, path.basename(filepath)) + } catch { + // If the parent directory cannot be resolved safely, propagate the + // original error instead of falling back to an unchecked lexical path. + throw err + } + } + } + export const Info = z .object({ path: z.string(), @@ -429,15 +449,14 @@ export namespace File { const project = Instance.project const full = path.join(Instance.directory, file) - // TODO: Filesystem.contains is lexical only - symlinks inside the project can escape. - // TODO: On Windows, cross-drive paths bypass this check. Consider realpath canonicalization. 
- if (!Instance.containsPath(full)) { + const realFull = await resolveRealPath(full) + if (!Instance.containsPath(realFull)) { throw new Error(`Access denied: path escapes project directory`) } // Fast path: check extension before any filesystem operations if (isImageByExtension(file)) { - const bunFile = Bun.file(full) + const bunFile = Bun.file(realFull) if (await bunFile.exists()) { const buffer = await bunFile.arrayBuffer().catch(() => new ArrayBuffer(0)) const content = Buffer.from(buffer).toString("base64") @@ -451,7 +470,7 @@ export namespace File { return { type: "binary", content: "" } } - const bunFile = Bun.file(full) + const bunFile = Bun.file(realFull) if (!(await bunFile.exists())) { return { type: "text", content: "" } @@ -509,20 +528,19 @@ export namespace File { } const resolved = dir ? path.join(Instance.directory, dir) : Instance.directory - // TODO: Filesystem.contains is lexical only - symlinks inside the project can escape. - // TODO: On Windows, cross-drive paths bypass this check. Consider realpath canonicalization. - if (!Instance.containsPath(resolved)) { + const realResolved = await resolveRealPath(resolved) + if (!Instance.containsPath(realResolved)) { throw new Error(`Access denied: path escapes project directory`) } const nodes: Node[] = [] for (const entry of await fs.promises - .readdir(resolved, { + .readdir(realResolved, { withFileTypes: true, }) .catch(() => [])) { if (exclude.includes(entry.name)) continue - const fullPath = path.join(resolved, entry.name) + const fullPath = path.join(realResolved, entry.name) const relativePath = path.relative(Instance.directory, fullPath) const type = entry.isDirectory() ? 
"directory" : "file" nodes.push({ diff --git a/packages/opencode/src/global/index.ts b/packages/opencode/src/global/index.ts index 10b6125a6a9..f6020240bb6 100644 --- a/packages/opencode/src/global/index.ts +++ b/packages/opencode/src/global/index.ts @@ -50,6 +50,9 @@ if (version !== CACHE_VERSION) { }), ), ) - } catch (e) {} - await Bun.file(path.join(Global.Path.cache, "version")).write(CACHE_VERSION) + await Bun.file(path.join(Global.Path.cache, "version")).write(CACHE_VERSION) + } catch (e) { + // Log to stderr since Log module may not be initialized yet (circular dependency) + console.error("[global] failed to clear cache:", e instanceof Error ? e.message : String(e)) + } } diff --git a/packages/opencode/src/mcp/index.ts b/packages/opencode/src/mcp/index.ts index 29e958fe357..9d4e5d90eff 100644 --- a/packages/opencode/src/mcp/index.ts +++ b/packages/opencode/src/mcp/index.ts @@ -801,7 +801,7 @@ export namespace MCP { // Register the callback BEFORE opening the browser to avoid race condition // when the IdP has an active SSO session and redirects immediately - const callbackPromise = McpOAuthCallback.waitForCallback(oauthState) + const callbackPromise = McpOAuthCallback.waitForCallback(oauthState, mcpName) try { const subprocess = await open(authorizationUrl) @@ -812,11 +812,11 @@ export namespace MCP { await new Promise((resolve, reject) => { // Give the process a moment to fail if it's going to const timeout = setTimeout(() => resolve(), 500) - subprocess.on("error", (error) => { + subprocess.once("error", (error) => { clearTimeout(timeout) reject(error) }) - subprocess.on("exit", (code) => { + subprocess.once("exit", (code) => { if (code !== null && code !== 0) { clearTimeout(timeout) reject(new Error(`Browser open failed with exit code ${code}`)) diff --git a/packages/opencode/src/mcp/oauth-callback.ts b/packages/opencode/src/mcp/oauth-callback.ts index bb3b56f2e95..b1dd7cc82bf 100644 --- a/packages/opencode/src/mcp/oauth-callback.ts +++ 
b/packages/opencode/src/mcp/oauth-callback.ts @@ -48,6 +48,7 @@ interface PendingAuth { resolve: (code: string) => void reject: (error: Error) => void timeout: ReturnType + mcpName: string } export namespace McpOAuthCallback { @@ -136,7 +137,7 @@ export namespace McpOAuthCallback { log.info("oauth callback server started", { port: OAUTH_CALLBACK_PORT }) } - export function waitForCallback(oauthState: string): Promise { + export function waitForCallback(oauthState: string, mcpName: string): Promise { return new Promise((resolve, reject) => { const timeout = setTimeout(() => { if (pendingAuths.has(oauthState)) { @@ -145,16 +146,18 @@ export namespace McpOAuthCallback { } }, CALLBACK_TIMEOUT_MS) - pendingAuths.set(oauthState, { resolve, reject, timeout }) + pendingAuths.set(oauthState, { resolve, reject, timeout, mcpName }) }) } export function cancelPending(mcpName: string): void { - const pending = pendingAuths.get(mcpName) - if (pending) { - clearTimeout(pending.timeout) - pendingAuths.delete(mcpName) - pending.reject(new Error("Authorization cancelled")) + for (const [state, pending] of pendingAuths) { + if (pending.mcpName === mcpName) { + clearTimeout(pending.timeout) + pendingAuths.delete(state) + pending.reject(new Error("Authorization cancelled")) + return + } } } diff --git a/packages/opencode/src/patch/index.ts b/packages/opencode/src/patch/index.ts index 0efeff544f6..6b89f35f92e 100644 --- a/packages/opencode/src/patch/index.ts +++ b/packages/opencode/src/patch/index.ts @@ -514,57 +514,82 @@ export namespace Patch { return hasChanges ? diff : "" } + // Path validation helper to prevent path traversal attacks + function validatePatchPath(filePath: string, cwd: string): string { + // Resolve the file path against cwd. If filePath is absolute, path.resolve + // will return the normalized absolute path; if it's relative, it will be + // resolved under cwd. 
+ const resolved = path.resolve(cwd, filePath) + const normalizedCwd = path.resolve(cwd) + + // Ensure the resolved path is within the working directory. + // Accept the path when it starts with cwd + separator, or when it equals cwd exactly. + if (!resolved.startsWith(normalizedCwd + path.sep) && resolved !== normalizedCwd) { + throw new Error(`Path traversal detected: ${filePath} escapes working directory`) + } + + return resolved + } + // Apply hunks to filesystem - export async function applyHunksToFiles(hunks: Hunk[]): Promise { + export async function applyHunksToFiles(hunks: Hunk[], cwd?: string): Promise { if (hunks.length === 0) { throw new Error("No files were modified.") } + const workdir = cwd || process.cwd() const added: string[] = [] const modified: string[] = [] const deleted: string[] = [] for (const hunk of hunks) { switch (hunk.type) { - case "add": + case "add": { + const resolvedPath = validatePatchPath(hunk.path, workdir) // Create parent directories - const addDir = path.dirname(hunk.path) + const addDir = path.dirname(resolvedPath) if (addDir !== "." 
&& addDir !== "/") { await fs.mkdir(addDir, { recursive: true }) } - await fs.writeFile(hunk.path, hunk.contents, "utf-8") - added.push(hunk.path) - log.info(`Added file: ${hunk.path}`) + await fs.writeFile(resolvedPath, hunk.contents, "utf-8") + added.push(resolvedPath) + log.info(`Added file: ${resolvedPath}`) break + } - case "delete": - await fs.unlink(hunk.path) - deleted.push(hunk.path) - log.info(`Deleted file: ${hunk.path}`) + case "delete": { + const resolvedPath = validatePatchPath(hunk.path, workdir) + await fs.unlink(resolvedPath) + deleted.push(resolvedPath) + log.info(`Deleted file: ${resolvedPath}`) break + } - case "update": - const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks) + case "update": { + const resolvedPath = validatePatchPath(hunk.path, workdir) + const fileUpdate = deriveNewContentsFromChunks(resolvedPath, hunk.chunks) if (hunk.move_path) { + const resolvedMovePath = validatePatchPath(hunk.move_path, workdir) // Handle file move - const moveDir = path.dirname(hunk.move_path) + const moveDir = path.dirname(resolvedMovePath) if (moveDir !== "." 
&& moveDir !== "/") { await fs.mkdir(moveDir, { recursive: true }) } - await fs.writeFile(hunk.move_path, fileUpdate.content, "utf-8") - await fs.unlink(hunk.path) - modified.push(hunk.move_path) - log.info(`Moved file: ${hunk.path} -> ${hunk.move_path}`) + await fs.writeFile(resolvedMovePath, fileUpdate.content, "utf-8") + await fs.unlink(resolvedPath) + modified.push(resolvedMovePath) + log.info(`Moved file: ${resolvedPath} -> ${resolvedMovePath}`) } else { // Regular update - await fs.writeFile(hunk.path, fileUpdate.content, "utf-8") - modified.push(hunk.path) - log.info(`Updated file: ${hunk.path}`) + await fs.writeFile(resolvedPath, fileUpdate.content, "utf-8") + modified.push(resolvedPath) + log.info(`Updated file: ${resolvedPath}`) } break + } } } @@ -572,9 +597,9 @@ export namespace Patch { } // Main patch application function - export async function applyPatch(patchText: string): Promise { + export async function applyPatch(patchText: string, cwd?: string): Promise { const { hunks } = parsePatch(patchText) - return applyHunksToFiles(hunks) + return applyHunksToFiles(hunks, cwd) } // Async version of maybeParseApplyPatchVerified diff --git a/packages/opencode/src/session/processor.ts b/packages/opencode/src/session/processor.ts index 24b4a4f9fbc..cddb6cc8ee3 100644 --- a/packages/opencode/src/session/processor.ts +++ b/packages/opencode/src/session/processor.ts @@ -360,7 +360,14 @@ export namespace SessionProcessor { message: retry, next: Date.now() + delay, }) - await SessionRetry.sleep(delay, input.abort).catch(() => {}) + try { + await SessionRetry.sleep(delay, input.abort) + } catch { + // If sleep was aborted, check if we should stop + if (input.abort.aborted) { + break + } + } continue } input.assistantMessage.error = error diff --git a/packages/opencode/src/session/retry.ts b/packages/opencode/src/session/retry.ts index a71a6a38241..e643759078e 100644 --- a/packages/opencode/src/session/retry.ts +++ b/packages/opencode/src/session/retry.ts @@ -89,7 
+89,7 @@ export namespace SessionRetry { if (json.type === "error" && json.error?.code?.includes("rate_limit")) { return "Rate Limited" } - return JSON.stringify(json) + return undefined } catch { return undefined } diff --git a/packages/opencode/src/tool/webfetch.ts b/packages/opencode/src/tool/webfetch.ts index a4a54598c7a..07d78104487 100644 --- a/packages/opencode/src/tool/webfetch.ts +++ b/packages/opencode/src/tool/webfetch.ts @@ -7,6 +7,60 @@ const MAX_RESPONSE_SIZE = 5 * 1024 * 1024 // 5MB const DEFAULT_TIMEOUT = 30 * 1000 // 30 seconds const MAX_TIMEOUT = 120 * 1000 // 2 minutes +function isPrivateIP(hostname: string): boolean { + // Normalize hostname to lowercase + const h = hostname.toLowerCase() + + // Check for localhost variations + if (h === 'localhost' || h === '127.0.0.1' || h === '::1' || h === '[::1]') { + return true + } + + // Check for private IPv4 ranges + const ipv4Match = h.match(/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/) + if (ipv4Match) { + const [, a, b] = ipv4Match.map(Number) + // 10.0.0.0/8 + if (a === 10) return true + // 172.16.0.0/12 + if (a === 172 && b >= 16 && b <= 31) return true + // 192.168.0.0/16 + if (a === 192 && b === 168) return true + // 169.254.0.0/16 (link-local) + if (a === 169 && b === 254) return true + // 127.0.0.0/8 (loopback) + if (a === 127) return true + // 0.0.0.0/8 + if (a === 0) return true + } + + // Check for IPv6 addresses (must be valid IPv6 format, not just strings starting with fc/fd) + // IPv6 addresses in URLs are enclosed in brackets: [::1], [fe80::1], etc. 
+ const ipv6Match = h.match(/^\[([a-f0-9:]+)\]$/) || h.match(/^([a-f0-9:]+)$/) + if (ipv6Match) { + const ipv6 = ipv6Match[1] + // Check for IPv6 private/local ranges + // fe80::/10 - Link-local — NOTE(review): these prefix tests cover only fe80:/fe90:/fea0:/feb0: and miss e.g. fe81:-fe8f:; confirm full /10 coverage + if (ipv6.startsWith('fe80:') || ipv6.startsWith('fe90:') || + ipv6.startsWith('fea0:') || ipv6.startsWith('feb0:')) return true + // fc00::/7 - Unique local addresses (fc00:: to fdff::) + if (/^f[cd][0-9a-f]{2}:/.test(ipv6)) return true + // ::1 - Loopback + if (ipv6 === '::1' || ipv6 === '0:0:0:0:0:0:0:1') return true + // :: - Unspecified + if (ipv6 === '::' || ipv6 === '0:0:0:0:0:0:0:0') return true + // IPv4-mapped IPv6 (::ffff:192.168.x.x) + const mappedMatch = ipv6.match(/^::ffff:(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/i) + if (mappedMatch) { + const [, a, b] = mappedMatch.map(Number) + if (a === 10 || a === 127 || (a === 172 && b >= 16 && b <= 31) || + (a === 192 && b === 168) || (a === 169 && b === 254)) return true + } + } + + return false +} + export const WebFetchTool = Tool.define("webfetch", { description: DESCRIPTION, parameters: z.object({ @@ -23,6 +77,23 @@ export const WebFetchTool = Tool.define("webfetch", { throw new Error("URL must start with http:// or https://") } + // Check for private/internal IPs to prevent SSRF + try { + const url = new URL(params.url) + if (isPrivateIP(url.hostname)) { + throw new Error("Access to private/internal IP addresses is not allowed") + } + } catch (e) { + if (e instanceof Error && e.message.includes("private/internal")) { + throw e + } + throw new Error("Invalid URL format") + } + + // Note: this hostname check only blocks literal private IPs; it does NOT resolve DNS, so it + // cannot stop hostnames that resolve to internal addresses or DNS rebinding. For high-security + // environments, consider using a DNS-over-HTTPS resolver that pins results or a proxy that validates IPs. 
+ await ctx.ask({ permission: "webfetch", patterns: [params.url], diff --git a/packages/opencode/src/util/queue.ts b/packages/opencode/src/util/queue.ts index a1af53fe8f0..2964ff8b140 100644 --- a/packages/opencode/src/util/queue.ts +++ b/packages/opencode/src/util/queue.ts @@ -1,20 +1,55 @@ export class AsyncQueue implements AsyncIterable { private queue: T[] = [] - private resolvers: ((value: T) => void)[] = [] + private resolvers: ((value: T | PromiseLike) => void)[] = [] + private rejecters: ((reason: Error) => void)[] = [] + private closed = false push(item: T) { + if (this.closed) return const resolve = this.resolvers.shift() + this.rejecters.shift() // Remove corresponding rejecter if (resolve) resolve(item) else this.queue.push(item) } async next(): Promise { + if (this.closed) { + throw new Error("Queue is closed") + } if (this.queue.length > 0) return this.queue.shift()! - return new Promise((resolve) => this.resolvers.push(resolve)) + return new Promise((resolve, reject) => { + this.resolvers.push(resolve) + this.rejecters.push(reject) + }) + } + + close(): T[] { + this.closed = true + const error = new Error("Queue closed") + for (const reject of this.rejecters) { + reject(error) + } + this.resolvers = [] + this.rejecters = [] + // Return remaining items instead of dropping them + const remaining = this.queue + this.queue = [] + return remaining + } + + get isClosed() { + return this.closed } async *[Symbol.asyncIterator]() { - while (true) yield await this.next() + while (!this.closed) { + try { + yield await this.next() + } catch { + // Queue was closed during iteration + break + } + } } } diff --git a/packages/opencode/test/patch/patch.test.ts b/packages/opencode/test/patch/patch.test.ts index 020253bfe2d..34235a27a81 100644 --- a/packages/opencode/test/patch/patch.test.ts +++ b/packages/opencode/test/patch/patch.test.ts @@ -136,12 +136,12 @@ PATCH` describe("applyPatch", () => { test("should add a new file", async () => { const patchText = `*** 
Begin Patch -*** Add File: ${tempDir}/new-file.txt +*** Add File: new-file.txt +Hello World +This is a new file *** End Patch` - const result = await Patch.applyPatch(patchText) + const result = await Patch.applyPatch(patchText, tempDir) expect(result.added).toHaveLength(1) expect(result.modified).toHaveLength(0) expect(result.deleted).toHaveLength(0) @@ -155,10 +155,10 @@ PATCH` await fs.writeFile(filePath, "This file will be deleted") const patchText = `*** Begin Patch -*** Delete File: ${filePath} +*** Delete File: to-delete.txt *** End Patch` - const result = await Patch.applyPatch(patchText) + const result = await Patch.applyPatch(patchText, tempDir) expect(result.deleted).toHaveLength(1) expect(result.deleted[0]).toBe(filePath) @@ -174,7 +174,7 @@ PATCH` await fs.writeFile(filePath, "line 1\nline 2\nline 3\n") const patchText = `*** Begin Patch -*** Update File: ${filePath} +*** Update File: to-update.txt @@ line 1 -line 2 @@ -182,7 +182,7 @@ PATCH` line 3 *** End Patch` - const result = await Patch.applyPatch(patchText) + const result = await Patch.applyPatch(patchText, tempDir) expect(result.modified).toHaveLength(1) expect(result.modified[0]).toBe(filePath) @@ -196,14 +196,14 @@ PATCH` await fs.writeFile(oldPath, "old content\n") const patchText = `*** Begin Patch -*** Update File: ${oldPath} -*** Move to: ${newPath} +*** Update File: old-name.txt +*** Move to: new-name.txt @@ -old content +new content *** End Patch` - const result = await Patch.applyPatch(patchText) + const result = await Patch.applyPatch(patchText, tempDir) expect(result.modified).toHaveLength(1) expect(result.modified[0]).toBe(newPath) @@ -226,16 +226,16 @@ PATCH` await fs.writeFile(file2, "content 2") const patchText = `*** Begin Patch -*** Add File: ${file3} +*** Add File: file3.txt +new file content -*** Update File: ${file1} +*** Update File: file1.txt @@ -content 1 +updated content 1 -*** Delete File: ${file2} +*** Delete File: file2.txt *** End Patch` - const result = await 
Patch.applyPatch(patchText) + const result = await Patch.applyPatch(patchText, tempDir) expect(result.added).toHaveLength(1) expect(result.modified).toHaveLength(1) expect(result.deleted).toHaveLength(1) @@ -245,11 +245,11 @@ PATCH` const nestedPath = path.join(tempDir, "deep", "nested", "file.txt") const patchText = `*** Begin Patch -*** Add File: ${nestedPath} +*** Add File: deep/nested/file.txt +Deep nested content *** End Patch` - const result = await Patch.applyPatch(patchText) + const result = await Patch.applyPatch(patchText, tempDir) expect(result.added).toHaveLength(1) expect(result.added[0]).toBe(nestedPath) @@ -263,26 +263,22 @@ PATCH` describe("error handling", () => { test("should throw error when updating non-existent file", async () => { - const nonExistent = path.join(tempDir, "does-not-exist.txt") - const patchText = `*** Begin Patch -*** Update File: ${nonExistent} +*** Update File: does-not-exist.txt @@ -old line +new line *** End Patch` - await expect(Patch.applyPatch(patchText)).rejects.toThrow() + await expect(Patch.applyPatch(patchText, tempDir)).rejects.toThrow() }) test("should throw error when deleting non-existent file", async () => { - const nonExistent = path.join(tempDir, "does-not-exist.txt") - const patchText = `*** Begin Patch -*** Delete File: ${nonExistent} +*** Delete File: does-not-exist.txt *** End Patch` - await expect(Patch.applyPatch(patchText)).rejects.toThrow() + await expect(Patch.applyPatch(patchText, tempDir)).rejects.toThrow() }) }) @@ -292,12 +288,12 @@ PATCH` await fs.writeFile(emptyFile, "") const patchText = `*** Begin Patch -*** Update File: ${emptyFile} +*** Update File: empty.txt @@ +First line *** End Patch` - const result = await Patch.applyPatch(patchText) + const result = await Patch.applyPatch(patchText, tempDir) expect(result.modified).toHaveLength(1) const content = await fs.readFile(emptyFile, "utf-8") @@ -309,13 +305,13 @@ PATCH` await fs.writeFile(filePath, "no newline") const patchText = `*** Begin 
Patch -*** Update File: ${filePath} +*** Update File: no-newline.txt @@ -no newline +has newline now *** End Patch` - const result = await Patch.applyPatch(patchText) + const result = await Patch.applyPatch(patchText, tempDir) expect(result.modified).toHaveLength(1) const content = await fs.readFile(filePath, "utf-8") @@ -327,7 +323,7 @@ PATCH` await fs.writeFile(filePath, "line 1\nline 2\nline 3\nline 4\n") const patchText = `*** Begin Patch -*** Update File: ${filePath} +*** Update File: multi-chunk.txt @@ line 1 -line 2 @@ -338,7 +334,7 @@ PATCH` +LINE 4 *** End Patch` - const result = await Patch.applyPatch(patchText) + const result = await Patch.applyPatch(patchText, tempDir) expect(result.modified).toHaveLength(1) const content = await fs.readFile(filePath, "utf-8") diff --git a/packages/opencode/test/session/retry.test.ts b/packages/opencode/test/session/retry.test.ts index a483a015271..da74e264eb3 100644 --- a/packages/opencode/test/session/retry.test.ts +++ b/packages/opencode/test/session/retry.test.ts @@ -97,9 +97,9 @@ describe("session.retry.retryable", () => { expect(SessionRetry.retryable(error)).toBe("Provider is overloaded") }) - test("handles json messages without code", () => { + test("returns undefined for json messages without known retryable patterns", () => { const error = wrap(JSON.stringify({ error: { message: "no_kv_space" } })) - expect(SessionRetry.retryable(error)).toBe(`{"error":{"message":"no_kv_space"}}`) + expect(SessionRetry.retryable(error)).toBeUndefined() }) test("does not throw on numeric error codes", () => {