Merged
49 changes: 26 additions & 23 deletions crates/goose-server/src/routes/reply.rs
@@ -133,6 +133,7 @@ pub enum MessageEvent {
},
Finish {
reason: String,
token_state: TokenState,
},
ModelChange {
model: String,
@@ -149,6 +150,27 @@ pub enum MessageEvent {
Ping,
}

async fn get_token_state(session_id: &str) -> TokenState {
SessionManager::get_session(session_id, false)
.await
.map(|session| TokenState {
input_tokens: session.input_tokens.unwrap_or(0),
output_tokens: session.output_tokens.unwrap_or(0),
total_tokens: session.total_tokens.unwrap_or(0),
accumulated_input_tokens: session.accumulated_input_tokens.unwrap_or(0),
accumulated_output_tokens: session.accumulated_output_tokens.unwrap_or(0),
accumulated_total_tokens: session.accumulated_total_tokens.unwrap_or(0),
})
.inspect_err(|e| {
tracing::warn!(
"Failed to fetch session token state for {}: {}",
session_id,
e
);
})
.unwrap_or_default()
}

async fn stream_event(
event: MessageEvent,
tx: &mpsc::Sender<String>,
@@ -321,29 +343,7 @@ pub async fn reply(

all_messages.push(message.clone());

let token_state = match SessionManager::get_session(&session_id, false).await {
Ok(session) => {
TokenState {
input_tokens: session.input_tokens.unwrap_or(0),
output_tokens: session.output_tokens.unwrap_or(0),
total_tokens: session.total_tokens.unwrap_or(0),
accumulated_input_tokens: session.accumulated_input_tokens.unwrap_or(0),
accumulated_output_tokens: session.accumulated_output_tokens.unwrap_or(0),
accumulated_total_tokens: session.accumulated_total_tokens.unwrap_or(0),
}
},
Err(e) => {
tracing::warn!("Failed to fetch session for token state: {}", e);
TokenState {
input_tokens: 0,
output_tokens: 0,
total_tokens: 0,
accumulated_input_tokens: 0,
accumulated_output_tokens: 0,
accumulated_total_tokens: 0,
}
}
};
let token_state = get_token_state(&session_id).await;

stream_event(MessageEvent::Message { message, token_state }, &tx, &cancel_token).await;
}
@@ -437,9 +437,12 @@ pub async fn reply(
);
}

let final_token_state = get_token_state(&session_id).await;

let _ = stream_event(
MessageEvent::Finish {
reason: "stop".to_string(),
token_state: final_token_state,
},
&task_tx,
&cancel_token,
2 changes: 1 addition & 1 deletion crates/goose/src/conversation/message.rs
@@ -711,7 +711,7 @@ impl Message {
}
}

#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct TokenState {
pub input_tokens: i32,
4 changes: 4 additions & 0 deletions ui/desktop/openapi.json
@@ -3321,12 +3321,16 @@
"type": "object",
"required": [
"reason",
"token_state",
"type"
],
"properties": {
"reason": {
"type": "string"
},
"token_state": {
"$ref": "#/components/schemas/TokenState"
},
"type": {
"type": "string",
"enum": [
1 change: 1 addition & 0 deletions ui/desktop/src/api/types.gen.ts
@@ -374,6 +374,7 @@ export type MessageEvent = {
type: 'Error';
} | {
reason: string;
token_state: TokenState;
type: 'Finish';
} | {
mode: string;
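Putting the schema and generated-type changes together, the Finish SSE event now carries a full token snapshot. Below is a rough sketch of the resulting object shape: the outer token_state key matches the generated MessageEvent type, while the camelCase inner fields follow from TokenState's #[serde(rename_all = "camelCase")] attribute. The numeric values, the relative import path, and the assumption that TokenState is exported from types.gen.ts are illustrative, not taken from the PR.

```typescript
// Illustrative only: values are made up, and the import path depends on where
// this snippet lives relative to ui/desktop/src/api/types.gen.ts.
import type { MessageEvent, TokenState } from '../api/types.gen';

const tokenState: TokenState = {
  inputTokens: 1200,
  outputTokens: 350,
  totalTokens: 1550,
  accumulatedInputTokens: 4800,
  accumulatedOutputTokens: 900,
  accumulatedTotalTokens: 5700,
};

// The Finish variant now requires token_state, matching the openapi.json change.
const finishEvent: MessageEvent = {
  type: 'Finish',
  reason: 'stop',
  token_state: tokenState,
};
```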
6 changes: 3 additions & 3 deletions ui/desktop/src/components/BaseChat.tsx
@@ -443,12 +443,12 @@ function BaseChatContent({
commandHistory={commandHistory}
initialValue={input || ''}
setView={setView}
totalTokens={tokenState?.totalTokens ?? sessionTokenCount}
totalTokens={tokenState?.totalTokens || sessionTokenCount}
Copilot AI Nov 6, 2025

Using logical OR (||) instead of nullish coalescing (??) means that a valid token count of 0 will be treated as falsy and incorrectly fall back to sessionTokenCount. Use ?? to fall back only on null/undefined values.

Suggested change
totalTokens={tokenState?.totalTokens || sessionTokenCount}
totalTokens={tokenState?.totalTokens ?? sessionTokenCount}

accumulatedInputTokens={
tokenState?.accumulatedInputTokens ?? sessionInputTokens ?? localInputTokens
tokenState?.accumulatedInputTokens || sessionInputTokens || localInputTokens
Copilot AI Nov 6, 2025

Using logical OR (||) instead of nullish coalescing (??) means that a valid token count of 0 will be treated as falsy and incorrectly fall through to the next fallback. Use ?? to fall back only on null/undefined values.

}
accumulatedOutputTokens={
tokenState?.accumulatedOutputTokens ?? sessionOutputTokens ?? localOutputTokens
tokenState?.accumulatedOutputTokens || sessionOutputTokens || localOutputTokens
Copilot AI Nov 6, 2025

Using logical OR (||) instead of nullish coalescing (??) means that a valid token count of 0 will be treated as falsy and incorrectly fall through to the next fallback. Use ?? to fall back only on null/undefined values.

Collaborator

What are these three sources of token counts and do we need them all? tokenState, sessionOutputTokens, localOutputTokens

Collaborator

fwiw this is all gone in basecamp2

Collaborator Author

I'm not convinced sessionOutputTokens is working right; I think that's the reason we were seeing this issue. The stream seems like a reasonable place to do this (although I suspect that might be used for session load because the stream code path is not hit there).

Also really don't know why we have token estimates in the client.

Will take a deeper pass and try to clean shop here.

Collaborator

> fwiw this is all gone in basecamp2

👍

Collaborator

If it's all going away soon, I wouldn't bother tracking it all down if this gets it working

}
Comment on lines +446 to 452
Copilot AI Nov 6, 2025

Changing from nullish coalescing (??) to logical OR (||) alters the behavior for falsy values. The nullish coalescing operator only falls back for null or undefined, while || also falls back for 0, empty string, false, etc. Since these are token counts that could legitimately be 0, using || means a value of 0 would incorrectly fall through to the next fallback. The original ?? operator should be retained to preserve correct behavior when token counts are zero.

droppedFiles={droppedFiles}
onFilesProcessed={() => setDroppedFiles([])} // Clear dropped files after processing
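The review comments above all flag the same hazard with the ?? to || change in BaseChat.tsx. A minimal stand-alone illustration (hypothetical values, not code from the PR):

```typescript
// With a legitimate zero token count, || discards it and uses the fallback,
// while ?? only falls back for null/undefined.
const sessionTokenCount = 9999; // hypothetical fallback value
const tokenState = { totalTokens: 0 }; // a real count of zero

const withOr = tokenState.totalTokens || sessionTokenCount; // 9999: zero is lost
const withNullish = tokenState.totalTokens ?? sessionTokenCount; // 0: zero is kept

console.log(withOr, withNullish);
```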
1 change: 0 additions & 1 deletion ui/desktop/src/hooks/useChatEngine.ts
@@ -213,7 +213,6 @@ export const useChatEngine = ({

// Update token counts when session changes from the message stream
useEffect(() => {
console.log('Session received:', session);
if (session) {
setSessionTokenCount(session.total_tokens || 0);
setSessionInputTokens(session.accumulated_input_tokens || 0);
4 changes: 3 additions & 1 deletion ui/desktop/src/hooks/useMessageStream.ts
@@ -37,7 +37,7 @@ export interface NotificationEvent {
type MessageEvent =
| { type: 'Message'; message: Message; token_state: TokenState }
| { type: 'Error'; error: string }
| { type: 'Finish'; reason: string }
| { type: 'Finish'; reason: string; token_state: TokenState }
| { type: 'ModelChange'; model: string; mode: string }
| { type: 'UpdateConversation'; conversation: Conversation }
| NotificationEvent;
@@ -368,6 +368,8 @@ export function useMessageStream({
}

case 'Finish': {
setTokenState(parsedEvent.token_state);

if (onFinish && currentMessages.length > 0) {
const lastMessage = currentMessages[currentMessages.length - 1];
onFinish(lastMessage, parsedEvent.reason);
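Since Message and Finish events now both carry a token snapshot, a consumer can treat the latest snapshot as authoritative instead of estimating counts client-side. A simplified sketch of that pattern follows; the local types and plain handler are stand-ins for the generated types and the useMessageStream internals, not the hook's actual implementation.

```typescript
import { useState } from 'react';

// Assumed camelCase field names, mirroring TokenState's serde rename attribute.
type TokenState = {
  inputTokens: number;
  outputTokens: number;
  totalTokens: number;
  accumulatedInputTokens: number;
  accumulatedOutputTokens: number;
  accumulatedTotalTokens: number;
};

// Reduced stand-in for the MessageEvent union (Message's payload omitted).
type StreamEvent =
  | { type: 'Message'; token_state: TokenState }
  | { type: 'Finish'; reason: string; token_state: TokenState }
  | { type: 'Error'; error: string };

function useTokenStateFromStream() {
  const [tokenState, setTokenState] = useState<TokenState | undefined>(undefined);

  const handleEvent = (event: StreamEvent) => {
    switch (event.type) {
      case 'Message':
      case 'Finish':
        // Both event kinds carry a full snapshot, so the latest one wins;
        // Finish guarantees the final counts arrive even after the last Message.
        setTokenState(event.token_state);
        break;
      case 'Error':
        // Leave the last known counts in place on error.
        break;
    }
  };

  return { tokenState, handleEvent };
}
```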