Merged
Changes from all commits
27 commits
5464c14  That's a lot again (Feb 11, 2026)
cfa19c3  Resolve merge conflicts - remove complete_with_model methods (Feb 16, 2026)
072b93c  Merge origin/main - resolve conflicts with streaming consolidation (Feb 16, 2026)
f340ff6  Remove supports_streaming() - all providers now use stream() (Feb 16, 2026)
a401e3a  Removed dead code (Feb 16, 2026)
70cabc8  Merge remote-tracking branch 'origin/main' into everything-is-streaming (Feb 16, 2026)
7a24ae1  What copilot said (Feb 16, 2026)
b17897b  fmt (Feb 16, 2026)
832d5ac  Two small things (Feb 16, 2026)
e3fb061  Make it testable (Feb 16, 2026)
413c688  fmt (Feb 16, 2026)
3fb9851  Keep it supported for custom providers of OpenAI type (Feb 17, 2026)
3ef7d1c  Fmt (Feb 17, 2026)
a8bf5ed  Merge main into everything-is-streaming (Feb 17, 2026)
297d816  Merge main into everything-is-streaming (round 2) (Feb 17, 2026)
0944940  Merge main into everything-is-streaming (round 3) (Feb 17, 2026)
47e3fe8  Merge remote-tracking branch 'origin/main' into everything-is-streaming (Feb 17, 2026)
c37c0c2  Fmt (Feb 17, 2026)
f07f90a  Fix provider tests to use new complete() API (Feb 17, 2026)
1a07224  Merge remote-tracking branch 'origin/main' into everything-is-streaming (Feb 17, 2026)
5db4558  Fmt. Again (Feb 17, 2026)
49ebaa7  Remove temporary migration docs from tracking (Feb 17, 2026)
5a830c5  Remove .envrc from tracking (Feb 17, 2026)
99389d8  Merge branch 'main' into everything-is-streaming (Feb 17, 2026)
3b51648  Fix tests after merge: update mock providers to streaming architecture (Feb 17, 2026)
75fb172  Fmt (Feb 17, 2026)
66b05c3  Fix session_id_propagation tests for streaming architecture (Feb 17, 2026)
2 changes: 0 additions & 2 deletions .envrc

This file was deleted.

12 changes: 3 additions & 9 deletions crates/goose-acp/src/server.rs
@@ -1289,20 +1289,14 @@ print(\"hello, world\")
         "mock"
     }

-    async fn complete_with_model(
+    async fn stream(
         &self,
-        _session_id: Option<&str>,
         _model_config: &goose::model::ModelConfig,
+        _session_id: &str,
         _system: &str,
         _messages: &[goose::conversation::message::Message],
         _tools: &[rmcp::model::Tool],
-    ) -> Result<
-        (
-            goose::conversation::message::Message,
-            goose::providers::base::ProviderUsage,
-        ),
-        ProviderError,
-    > {
+    ) -> Result<goose::providers::base::MessageStream, ProviderError> {
         unimplemented!()
     }
8 changes: 4 additions & 4 deletions crates/goose-cli/src/commands/configure.rs
@@ -1673,11 +1673,11 @@ pub async fn handle_openrouter_auth() -> anyhow::Result<()> {

     match create("openrouter", model_config, Vec::new()).await {
         Ok(provider) => {
-            let model_config = provider.get_model_config();
+            let provider_model_config = provider.get_model_config();
             let test_result = provider
-                .complete_with_model(
-                    None,
-                    &model_config,
+                .complete(
+                    &provider_model_config,
+                    "",
                     "You are goose, an AI assistant.",
                     &[Message::user().with_text("Say 'Configuration test successful!'")],
                     &[],
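Every call site in this PR follows the same migration: complete_with_model(None | Some(session_id), &model_config, ...) becomes complete(&model_config, session_id, ...), with the session id now a plain &str and "" standing in for "no session". A sketch of the call shape these diffs imply (the Provider trait definition itself is not part of this view, so the types are inferred, not confirmed):

```rust
// Inferred from the call sites in this PR; not the trait's literal definition.
let model_config = provider.get_model_config();
let (message, usage) = provider
    .complete(
        &model_config, // model config is now an explicit first argument
        "",            // session id: plain &str, "" when there is no session
        "You are goose, an AI assistant.", // system prompt
        &messages,     // conversation so far
        &[],           // no tools for this probe call
    )
    .await?;
```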
4 changes: 4 additions & 0 deletions crates/goose-cli/src/session/mod.rs
@@ -211,8 +211,10 @@ pub async fn classify_planner_response(
     let prompt = format!("The text below is the output from an AI model which can either provide a plan or list of clarifying questions. Based on the text below, decide if the output is a \"plan\" or \"clarifying questions\".\n---\n{message_text}");

     let message = Message::user().with_text(&prompt);
+    let model_config = provider.get_model_config();
     let (result, _usage) = provider
         .complete(
+            &model_config,
             session_id,
             "Reply only with the classification label: \"plan\" or \"clarifying questions\"",
             &[message],
@@ -840,8 +842,10 @@ impl CliSession {
     ) -> Result<(), anyhow::Error> {
         let plan_prompt = self.agent.get_plan_prompt(&self.session_id).await?;
         output::show_thinking();
+        let model_config = reasoner.get_model_config();
         let (plan_response, _usage) = reasoner
             .complete(
+                &model_config,
                 &self.session_id,
                 &plan_prompt,
                 plan_messages.messages(),
11 changes: 4 additions & 7 deletions crates/goose/examples/databricks_oauth.rs
@@ -8,21 +8,18 @@ use goose::providers::databricks::DATABRICKS_DEFAULT_MODEL;
 async fn main() -> Result<()> {
     dotenv().ok();

-    // Clear any token to force OAuth
-    std::env::remove_var("DATABRICKS_TOKEN");
-
     // Create the provider
     let provider =
         create_with_named_model("databricks", DATABRICKS_DEFAULT_MODEL, Vec::new()).await?;

     // Create a simple message
     let message = Message::user().with_text("Tell me a short joke about programming.");

-    // Get a response
+    let model_config = provider.get_model_config();
     let (response, usage) = provider
-        .complete_with_model(
-            None,
-            &provider.get_model_config(),
+        .complete(
+            &model_config,
+            "",
             "You are a helpful assistant.",
             &[message],
             &[],
7 changes: 4 additions & 3 deletions crates/goose/examples/image_tool.rs
@@ -62,10 +62,11 @@
             },
         }
     });
+    let model_config = provider.get_model_config();
     let (response, usage) = provider
-        .complete_with_model(
-            None,
-            &provider.get_model_config(),
+        .complete(
+            &model_config,
+            "",
             "You are a helpful assistant. Please describe any text you see in the image.",
             &messages,
             &[Tool::new("view_image", "View an image", input_schema)],
17 changes: 16 additions & 1 deletion crates/goose/src/agents/agent.rs
@@ -1777,6 +1777,15 @@ impl Agent {
         );

         tracing::info!("Calling provider to generate recipe content");
+        let model_config = {
+            let provider_guard = self.provider.lock().await;
+            let provider = provider_guard.as_ref().ok_or_else(|| {
+                let error = anyhow!("Provider not available during recipe creation");
+                tracing::error!("{}", error);
+                error
+            })?;
+            provider.get_model_config()
+        };
         let (result, _usage) = self
             .provider
             .lock()
@@ -1787,7 +1796,13 @@ impl Agent {
                 tracing::error!("{}", error);
                 error
             })?
-            .complete(session_id, &system_prompt, messages.messages(), &tools)
+            .complete(
+                &model_config,
+                session_id,
+                &system_prompt,
+                messages.messages(),
+                &tools,
+            )
             .await
             .map_err(|e| {
                 tracing::error!("Provider completion failed during recipe creation: {}", e);
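The scoped block added above fetches the ModelConfig and then drops the lock guard on self.provider before the provider is locked a second time for the complete() call. An async Mutex of the kind used here (lock().await) is not reentrant, so holding the first guard across the second lock().await would deadlock the task. Condensed, with the diff's error handling elided:

```rust
// Sketch only: names follow the diff, error handling is simplified.
let model_config = {
    let guard = self.provider.lock().await; // first lock
    guard.as_ref().expect("provider set").get_model_config()
}; // guard dropped here, so the second lock().await below cannot deadlock
```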
7 changes: 4 additions & 3 deletions crates/goose/src/agents/mcp_client.rs
@@ -242,10 +242,11 @@ impl ClientHandler for GooseClient {
             .as_deref()
             .unwrap_or("You are a general-purpose AI agent called goose");

+        let model_config = provider.get_model_config();
         let (response, usage) = provider
-            .complete_with_model(
-                session_id.as_deref(),
-                &provider.get_model_config(),
+            .complete(
+                &model_config,
+                session_id.as_deref().unwrap_or(""),
                 system_prompt,
                 &provider_ready_messages,
                 &[],
Comment on lines +245 to 252
Copilot AI (Feb 16, 2026):

Switching to complete() (which now collects from streaming) can yield a Message made of multiple partial Text chunks; the later code in this function still only returns response.content.first(), which will truncate output—build the MCP reply from the full accumulated text instead.
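A minimal sketch of the fix the comment asks for, assuming goose's MessageContent exposes a Text variant carrying a text field (the accessor names are assumptions, not confirmed by this diff):

```rust
// Concatenate every text chunk in the collected Message instead of
// returning only response.content.first().
let full_text: String = response
    .content
    .iter()
    .filter_map(|content| match content {
        MessageContent::Text(text) => Some(text.text.as_str()),
        _ => None,
    })
    .collect();
```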
16 changes: 2 additions & 14 deletions crates/goose/src/agents/platform_extensions/apps.rs
@@ -294,13 +294,7 @@ impl AppsManagerClient {
         model_config.max_tokens = Some(16384);

         let (response, _usage) = provider
-            .complete_with_model(
-                Some(session_id),
-                &model_config,
-                &system_prompt,
-                &messages,
-                &tools,
-            )
+            .complete(&model_config, session_id, &system_prompt, &messages, &tools)
             .await
             .map_err(|e| format!("LLM call failed: {}", e))?;

@@ -334,13 +328,7 @@ impl AppsManagerClient {
         model_config.max_tokens = Some(16384);

         let (response, _usage) = provider
-            .complete_with_model(
-                Some(session_id),
-                &model_config,
-                &system_prompt,
-                &messages,
-                &tools,
-            )
+            .complete(&model_config, session_id, &system_prompt, &messages, &tools)
             .await
             .map_err(|e| format!("LLM call failed: {}", e))?;
58 changes: 21 additions & 37 deletions crates/goose/src/agents/reply_parts.rs
@@ -11,7 +11,9 @@ use super::super::agents::Agent;
 use crate::agents::platform_extensions::code_execution;
 use crate::conversation::message::{Message, MessageContent, ToolRequest};
 use crate::conversation::Conversation;
-use crate::providers::base::{stream_from_single_message, MessageStream, Provider, ProviderUsage};
+#[cfg(test)]
+use crate::providers::base::stream_from_single_message;
+use crate::providers::base::{MessageStream, Provider, ProviderUsage};
 use crate::providers::errors::ProviderError;
 use crate::providers::toolshim::{
     augment_message_with_tool_calls, convert_tool_messages_to_text,
@@ -229,35 +231,18 @@ impl Agent {

         // Capture errors during stream creation and return them as part of the stream
         // so they can be handled by the existing error handling logic in the agent
-        let stream_result = if provider.supports_streaming() {
-            debug!("WAITING_LLM_STREAM_START");
-            let result = provider
-                .stream(
-                    session_id,
-                    system_prompt.as_str(),
-                    messages_for_provider.messages(),
-                    &tools,
-                )
-                .await;
-            debug!("WAITING_LLM_STREAM_END");
-            result
-        } else {
-            debug!("WAITING_LLM_START");
-            let complete_result = provider
-                .complete(
-                    session_id,
-                    system_prompt.as_str(),
-                    messages_for_provider.messages(),
-                    &tools,
-                )
-                .await;
-            debug!("WAITING_LLM_END");
-
-            match complete_result {
-                Ok((message, usage)) => Ok(stream_from_single_message(message, usage)),
-                Err(e) => Err(e),
-            }
-        };
+        let model_config = provider.get_model_config();
+        debug!("WAITING_LLM_STREAM_START");
+        let stream_result = provider
+            .stream(
+                &model_config,
+                session_id,
+                system_prompt.as_str(),
+                messages_for_provider.messages(),
+                &tools,
+            )
+            .await;
+        debug!("WAITING_LLM_STREAM_END");

         // If there was an error creating the stream, return a stream that yields that error
         let mut stream = match stream_result {
@@ -462,18 +447,17 @@ mod tests {
             self.model_config.clone()
         }

-        async fn complete_with_model(
+        async fn stream(
             &self,
-            _session_id: Option<&str>,
             _model_config: &ModelConfig,
+            _session_id: &str,
             _system: &str,
             _messages: &[Message],
             _tools: &[Tool],
-        ) -> anyhow::Result<(Message, ProviderUsage), ProviderError> {
-            Ok((
-                Message::assistant().with_text("ok"),
-                ProviderUsage::new("mock".to_string(), Usage::default()),
-            ))
+        ) -> Result<MessageStream, ProviderError> {
+            let message = Message::assistant().with_text("ok");
+            let usage = ProviderUsage::new("mock".to_string(), Usage::default());
+            Ok(stream_from_single_message(message, usage))
         }
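The test mock above now routes through stream_from_single_message, which adapts a one-shot (Message, ProviderUsage) pair into the MessageStream the trait returns. Its body is not shown in this PR; a plausible sketch, assuming MessageStream is a boxed Stream whose items pair an optional Message with optional usage:

```rust
use futures::stream;

// Hypothetical reconstruction: yield the fully formed message and its
// usage as a single stream item, then end the stream.
fn stream_from_single_message(message: Message, usage: ProviderUsage) -> MessageStream {
    Box::pin(stream::once(async move { Ok((Some(message), Some(usage))) }))
}
```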
15 changes: 8 additions & 7 deletions crates/goose/src/context_mgmt/mod.rs
@@ -2,6 +2,8 @@ use crate::conversation::message::{ActionRequiredData, MessageMetadata};
 use crate::conversation::message::{Message, MessageContent};
 use crate::conversation::{merge_consecutive_messages, Conversation};
 use crate::prompt_template::render_template;
+#[cfg(test)]
+use crate::providers::base::{stream_from_single_message, MessageStream};
 use crate::providers::base::{Provider, ProviderUsage};
 use crate::providers::errors::ProviderError;
 use crate::{config::Config, token_counter::create_token_counter};
@@ -568,14 +570,14 @@ mod tests {
             "mock"
         }

-        async fn complete_with_model(
+        async fn stream(
             &self,
-            _session_id: Option<&str>,
             _model_config: &ModelConfig,
+            _session_id: &str,
             _system: &str,
             messages: &[Message],
             _tools: &[Tool],
-        ) -> Result<(Message, ProviderUsage), ProviderError> {
+        ) -> Result<MessageStream, ProviderError> {
             // If max_tool_responses is set, fail if we have too many
             if let Some(max) = self.max_tool_responses {
                 let tool_response_count = messages
@@ -595,10 +597,9 @@ mod tests {
                 }
             }

-            Ok((
-                self.message.clone(),
-                ProviderUsage::new("mock-model".to_string(), Usage::default()),
-            ))
+            let message = self.message.clone();
+            let usage = ProviderUsage::new("mock-model".to_string(), Usage::default());
+            Ok(stream_from_single_message(message, usage))
         }

         fn get_model_config(&self) -> ModelConfig {
2 changes: 2 additions & 0 deletions crates/goose/src/permission/permission_judge.rs
@@ -144,8 +144,10 @@ pub async fn detect_read_only_tools(
     let system_prompt = render_template("permission_judge.md", &context)
         .unwrap_or_else(|_| "You are a good analyst and can detect operations whether they have read-only operations.".to_string());

+    let model_config = provider.get_model_config();
     let res = provider
         .complete(
+            &model_config,
             session_id,
             &system_prompt,
             check_messages.messages(),