packages/opencode/src/config/config.ts (3 additions, 0 deletions)

@@ -1042,6 +1042,9 @@ export namespace Config {
         .object({
           auto: z.boolean().optional().describe("Enable automatic compaction when context is full (default: true)"),
           prune: z.boolean().optional().describe("Enable pruning of old tool outputs (default: true)"),
+          token_threshold: z.number().int().positive().optional().describe("Trigger compaction when total token count exceeds this absolute number"),
+          context_threshold: z.number().gt(0).lte(1).optional().describe("Trigger compaction when token usage exceeds this fraction of the model context window (e.g. 0.8 = 80%)"),
+          min_messages: z.number().int().positive().optional().describe("Minimum number of messages to wait before next compaction (default: 5)"),
         })
         .optional(),
       experimental: z
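With the schema above, the new options sit alongside the existing auto and prune flags under compaction. A minimal sketch of a value this schema would accept (the field names and types come from the zod schema in the diff; the concrete numbers are hypothetical):

// Hypothetical values; only the field names and types are taken from the schema above.
const compaction = {
  auto: true, // keep automatic compaction enabled
  prune: true, // keep pruning of old tool outputs
  token_threshold: 150_000, // compact once total tokens exceed 150k, regardless of window size
  context_threshold: 0.8, // or once 80% of the model's context window is used
  min_messages: 5, // wait at least 5 messages after a summary before compacting again
}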
packages/opencode/src/session/compaction.ts (7 additions, 0 deletions)

@@ -33,6 +33,13 @@ export namespace SessionCompaction {
     const context = input.model.limit.context
     if (context === 0) return false
     const count = input.tokens.input + input.tokens.cache.read + input.tokens.output
+
+    // Absolute token threshold
+    if (config.compaction?.token_threshold && count > config.compaction.token_threshold) return true
+
+    // Context percentage threshold
+    if (config.compaction?.context_threshold && count > context * config.compaction.context_threshold) return true
+
     const output = Math.min(input.model.limit.output, SessionPrompt.OUTPUT_TOKEN_MAX) || SessionPrompt.OUTPUT_TOKEN_MAX
     const usable = input.model.limit.input || context - output
     return count > usable
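Either new check alone is enough to trigger compaction, and both run before the existing usable-window calculation. A rough worked example of how the two thresholds interact (numbers are hypothetical, names simplified from the diff):

// Hypothetical numbers illustrating the two new checks in isOverflow.
const context = 200_000 // model context window
const count = 165_000 // input + cache.read + output tokens so far
const tokenThreshold = 180_000 // compaction.token_threshold
const contextThreshold = 0.8 // compaction.context_threshold

const overflow =
  count > tokenThreshold || // 165k > 180k, false
  count > context * contextThreshold // 165k > 160k, true, so compaction triggers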
packages/opencode/src/session/prompt.ts (14 additions, 6 deletions)

@@ -5,6 +5,7 @@ import z from "zod"
 import { Identifier } from "../id/id"
 import { MessageV2 } from "./message-v2"
 import { Log } from "../util/log"
+import { Config } from "../config/config"
 import { SessionRevert } from "./revert"
 import { Session } from "."
 import { Agent } from "../agent/agent"
@@ -269,6 +270,7 @@ export namespace SessionPrompt {
     let step = 0
     const session = await Session.get(sessionID)
     while (true) {
+      const config = await Config.get()
       SessionStatus.set(sessionID, { type: "busy" })
       log.info("loop", { step, sessionID })
       if (abort.aborted) break
@@ -496,9 +498,13 @@
       }

       // context overflow, needs compaction
+      const lastSummaryIndex = msgs.findLastIndex((m) => m.info.role === "assistant" && m.info.summary)
+      const messagesSinceSummary = lastSummaryIndex === -1 ? Infinity : msgs.length - 1 - lastSummaryIndex
+
       if (
         lastFinished &&
         lastFinished.summary !== true &&
+        messagesSinceSummary > (config.compaction?.min_messages ?? 5) &&
         (await SessionCompaction.isOverflow({ tokens: lastFinished.tokens, model }))
       ) {
         await SessionCompaction.create({
@@ -615,12 +621,14 @@
         })
         if (result === "stop") break
         if (result === "compact") {
-          await SessionCompaction.create({
-            sessionID,
-            agent: lastUser.agent,
-            model: lastUser.model,
-            auto: true,
-          })
+          if (messagesSinceSummary > (config.compaction?.min_messages ?? 5)) {
+            await SessionCompaction.create({
+              sessionID,
+              agent: lastUser.agent,
+              model: lastUser.model,
+              auto: true,
+            })
+          }
         }
         continue
       }
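The min_messages gate counts messages after the most recent assistant summary, so both compaction paths in the loop now share the same guard. A short sketch of the guard in isolation (msgs and config are assumed to match the surrounding code in the diff):

// msgs is the session's ordered message list; summaries are assistant messages with info.summary set.
const lastSummaryIndex = msgs.findLastIndex((m) => m.info.role === "assistant" && m.info.summary)
// No prior summary yields Infinity, so the guard never blocks the first compaction.
const messagesSinceSummary = lastSummaryIndex === -1 ? Infinity : msgs.length - 1 - lastSummaryIndex
const allowCompaction = messagesSinceSummary > (config.compaction?.min_messages ?? 5)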