diff --git a/crates/goose/src/providers/anthropic.rs b/crates/goose/src/providers/anthropic.rs index 6a3c930460ce..58b4c299b75f 100644 --- a/crates/goose/src/providers/anthropic.rs +++ b/crates/goose/src/providers/anthropic.rs @@ -73,7 +73,7 @@ impl AnthropicProvider { }) } - async fn post(&self, headers: HeaderMap, payload: Value) -> Result { + async fn post(&self, headers: HeaderMap, payload: &Value) -> Result { let base_url = url::Url::parse(&self.host) .map_err(|e| ProviderError::RequestFailed(format!("Invalid base URL: {e}")))?; let url = base_url.join("v1/messages").map_err(|e| { @@ -84,7 +84,7 @@ impl AnthropicProvider { .client .post(url) .headers(headers) - .json(&payload) + .json(payload) .send() .await?; @@ -198,10 +198,10 @@ impl Provider for AnthropicProvider { } // Make request - let response = self.post(headers, payload.clone()).await?; + let response = self.post(headers, &payload).await?; // Parse response - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = get_usage(&response)?; tracing::debug!("🔍 Anthropic non-streaming parsed usage: input_tokens={:?}, output_tokens={:?}, total_tokens={:?}", usage.input_tokens, usage.output_tokens, usage.total_tokens); diff --git a/crates/goose/src/providers/azure.rs b/crates/goose/src/providers/azure.rs index 46c1f0062d88..1ffdf4ed5419 100644 --- a/crates/goose/src/providers/azure.rs +++ b/crates/goose/src/providers/azure.rs @@ -87,7 +87,7 @@ impl AzureProvider { }) } - async fn post(&self, payload: Value) -> Result { + async fn post(&self, payload: &Value) -> Result { let mut base_url = url::Url::parse(&self.endpoint) .map_err(|e| ProviderError::RequestFailed(format!("Invalid base URL: {e}")))?; @@ -143,7 +143,7 @@ impl AzureProvider { } } - let response_result = request_builder.json(&payload).send().await; + let response_result = request_builder.json(payload).send().await; match response_result { Ok(response) => match 
handle_response_openai_compat(response).await { @@ -249,9 +249,9 @@ impl Provider for AzureProvider { tools: &[Tool], ) -> Result<(Message, ProviderUsage), ProviderError> { let payload = create_request(&self.model, system, messages, tools, &ImageFormat::OpenAi)?; - let response = self.post(payload.clone()).await?; + let response = self.post(&payload).await?; - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = response.get("usage").map(get_usage).unwrap_or_else(|| { tracing::debug!("Failed to get usage data"); Usage::default() diff --git a/crates/goose/src/providers/databricks.rs b/crates/goose/src/providers/databricks.rs index 99768c1972ae..77adf5569968 100644 --- a/crates/goose/src/providers/databricks.rs +++ b/crates/goose/src/providers/databricks.rs @@ -273,7 +273,7 @@ impl DatabricksProvider { } } - async fn post(&self, payload: Value) -> Result { + async fn post(&self, payload: &Value) -> Result { // Check if this is an embedding request by looking at the payload structure let is_embedding = payload.get("input").is_some() && payload.get("messages").is_none(); let path = if is_embedding { @@ -284,7 +284,7 @@ impl DatabricksProvider { format!("serving-endpoints/{}/invocations", self.model.model_name) }; - match self.post_with_retry(path.as_str(), &payload).await { + match self.post_with_retry(path.as_str(), payload).await { Ok(res) => res.json().await.map_err(|_| { ProviderError::RequestFailed("Response body is not valid JSON".to_string()) }), @@ -451,10 +451,10 @@ impl Provider for DatabricksProvider { .expect("payload should have model key") .remove("model"); - let response = self.post(payload.clone()).await?; + let response = self.post(&payload).await?; // Parse response - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = response.get("usage").map(get_usage).unwrap_or_else(|| { tracing::debug!("Failed to get usage data"); 
Usage::default() @@ -619,7 +619,7 @@ impl EmbeddingCapable for DatabricksProvider { "input": texts, }); - let response = self.post(request).await?; + let response = self.post(&request).await?; let embeddings = response["data"] .as_array() diff --git a/crates/goose/src/providers/formats/anthropic.rs b/crates/goose/src/providers/formats/anthropic.rs index 3001a08d0387..48d6ac0a5832 100644 --- a/crates/goose/src/providers/formats/anthropic.rs +++ b/crates/goose/src/providers/formats/anthropic.rs @@ -207,7 +207,7 @@ pub fn format_system(system: &str) -> Value { } /// Convert Anthropic's API response to internal Message format -pub fn response_to_message(response: Value) -> Result { +pub fn response_to_message(response: &Value) -> Result { let content_blocks = response .get(CONTENT_FIELD) .and_then(|c| c.as_array()) @@ -690,7 +690,7 @@ mod tests { } }); - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = get_usage(&response)?; if let MessageContent::Text(text) = &message.content[0] { @@ -731,7 +731,7 @@ mod tests { } }); - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = get_usage(&response)?; if let MessageContent::ToolRequest(tool_request) = &message.content[0] { @@ -781,7 +781,7 @@ mod tests { } }); - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = get_usage(&response)?; assert_eq!(message.content.len(), 3); diff --git a/crates/goose/src/providers/formats/databricks.rs b/crates/goose/src/providers/formats/databricks.rs index 10a59cc1b6a0..91992eaa98dd 100644 --- a/crates/goose/src/providers/formats/databricks.rs +++ b/crates/goose/src/providers/formats/databricks.rs @@ -268,8 +268,8 @@ pub fn format_tools(tools: &[Tool]) -> anyhow::Result> { } /// Convert Databricks' API response to internal Message format -pub fn response_to_message(response: Value) -> anyhow::Result { - 
let original = response["choices"][0]["message"].clone(); +pub fn response_to_message(response: &Value) -> anyhow::Result { + let original = &response["choices"][0]["message"]; let mut content = Vec::new(); // Handle array-based content @@ -737,7 +737,7 @@ mod tests { // Get the ID from the tool request to use in the response let tool_id = if let MessageContent::ToolRequest(request) = &messages[2].content[0] { - request.id.clone() + &request.id } else { panic!("should be tool request"); }; @@ -770,7 +770,7 @@ mod tests { // Get the ID from the tool request to use in the response let tool_id = if let MessageContent::ToolRequest(request) = &messages[0].content[0] { - request.id.clone() + &request.id } else { panic!("should be tool request"); }; @@ -891,7 +891,7 @@ mod tests { } }); - let message = response_to_message(response)?; + let message = response_to_message(&response)?; assert_eq!(message.content.len(), 1); if let MessageContent::Text(text) = &message.content[0] { assert_eq!(text.text, "Hello from John Cena!"); @@ -906,7 +906,7 @@ mod tests { #[test] fn test_response_to_message_valid_toolrequest() -> anyhow::Result<()> { let response: Value = serde_json::from_str(OPENAI_TOOL_USE_RESPONSE)?; - let message = response_to_message(response)?; + let message = response_to_message(&response)?; assert_eq!(message.content.len(), 1); if let MessageContent::ToolRequest(request) = &message.content[0] { @@ -926,7 +926,7 @@ mod tests { response["choices"][0]["message"]["tool_calls"][0]["function"]["name"] = json!("invalid fn"); - let message = response_to_message(response)?; + let message = response_to_message(&response)?; if let MessageContent::ToolRequest(request) = &message.content[0] { match &request.tool_call { @@ -948,7 +948,7 @@ mod tests { response["choices"][0]["message"]["tool_calls"][0]["function"]["arguments"] = json!("invalid json {"); - let message = response_to_message(response)?; + let message = response_to_message(&response)?; if let 
MessageContent::ToolRequest(request) = &message.content[0] { match &request.tool_call { @@ -970,7 +970,7 @@ mod tests { response["choices"][0]["message"]["tool_calls"][0]["function"]["arguments"] = serde_json::Value::String("".to_string()); - let message = response_to_message(response)?; + let message = response_to_message(&response)?; if let MessageContent::ToolRequest(request) = &message.content[0] { let tool_call = request.tool_call.as_ref().unwrap(); @@ -1107,7 +1107,7 @@ mod tests { }] }); - let message = response_to_message(response)?; + let message = response_to_message(&response)?; assert_eq!(message.content.len(), 2); if let MessageContent::Thinking(thinking) = &message.content[0] { @@ -1154,7 +1154,7 @@ mod tests { }] }); - let message = response_to_message(response)?; + let message = response_to_message(&response)?; assert_eq!(message.content.len(), 2); if let MessageContent::RedactedThinking(redacted) = &message.content[0] { diff --git a/crates/goose/src/providers/formats/gcpvertexai.rs b/crates/goose/src/providers/formats/gcpvertexai.rs index 8b2b4f36f25d..ab3399651db2 100644 --- a/crates/goose/src/providers/formats/gcpvertexai.rs +++ b/crates/goose/src/providers/formats/gcpvertexai.rs @@ -332,7 +332,7 @@ pub fn create_request( /// * `Result` - Converted message pub fn response_to_message(response: Value, request_context: RequestContext) -> Result { match request_context.provider() { - ModelProvider::Anthropic => anthropic::response_to_message(response), + ModelProvider::Anthropic => anthropic::response_to_message(&response), ModelProvider::Google => google::response_to_message(response), } } diff --git a/crates/goose/src/providers/formats/openai.rs b/crates/goose/src/providers/formats/openai.rs index d6b62933cd96..83d7ac29980f 100644 --- a/crates/goose/src/providers/formats/openai.rs +++ b/crates/goose/src/providers/formats/openai.rs @@ -268,8 +268,8 @@ pub fn format_tools(tools: &[Tool]) -> anyhow::Result> { } /// Convert OpenAI's API response to 
internal Message format -pub fn response_to_message(response: Value) -> anyhow::Result { - let original = response["choices"][0]["message"].clone(); +pub fn response_to_message(response: &Value) -> anyhow::Result { + let original = &response["choices"][0]["message"]; let mut content = Vec::new(); if let Some(text) = original.get("content") { @@ -910,7 +910,7 @@ mod tests { } }); - let message = response_to_message(response)?; + let message = response_to_message(&response)?; assert_eq!(message.content.len(), 1); if let MessageContent::Text(text) = &message.content[0] { assert_eq!(text.text, "Hello from John Cena!"); @@ -925,7 +925,7 @@ mod tests { #[test] fn test_response_to_message_valid_toolrequest() -> anyhow::Result<()> { let response: Value = serde_json::from_str(OPENAI_TOOL_USE_RESPONSE)?; - let message = response_to_message(response)?; + let message = response_to_message(&response)?; assert_eq!(message.content.len(), 1); if let MessageContent::ToolRequest(request) = &message.content[0] { @@ -945,7 +945,7 @@ mod tests { response["choices"][0]["message"]["tool_calls"][0]["function"]["name"] = json!("invalid fn"); - let message = response_to_message(response)?; + let message = response_to_message(&response)?; if let MessageContent::ToolRequest(request) = &message.content[0] { match &request.tool_call { @@ -967,7 +967,7 @@ mod tests { response["choices"][0]["message"]["tool_calls"][0]["function"]["arguments"] = json!("invalid json {"); - let message = response_to_message(response)?; + let message = response_to_message(&response)?; if let MessageContent::ToolRequest(request) = &message.content[0] { match &request.tool_call { @@ -989,7 +989,7 @@ mod tests { response["choices"][0]["message"]["tool_calls"][0]["function"]["arguments"] = serde_json::Value::String("".to_string()); - let message = response_to_message(response)?; + let message = response_to_message(&response)?; if let MessageContent::ToolRequest(request) = &message.content[0] { let tool_call = 
request.tool_call.as_ref().unwrap(); diff --git a/crates/goose/src/providers/formats/snowflake.rs b/crates/goose/src/providers/formats/snowflake.rs index d29e8448cc64..7592d93c2ab8 100644 --- a/crates/goose/src/providers/formats/snowflake.rs +++ b/crates/goose/src/providers/formats/snowflake.rs @@ -198,7 +198,7 @@ pub fn parse_streaming_response(sse_data: &str) -> Result { } /// Convert Snowflake's API response to internal Message format -pub fn response_to_message(response: Value) -> Result { +pub fn response_to_message(response: &Value) -> Result { let mut message = Message::assistant(); let content_list = response.get("content_list").and_then(|cl| cl.as_array()); @@ -380,7 +380,7 @@ mod tests { } }); - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = get_usage(&response)?; if let MessageContent::Text(text) = &message.content[0] { @@ -417,7 +417,7 @@ mod tests { } }); - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = get_usage(&response)?; if let MessageContent::ToolRequest(tool_request) = &message.content[0] { @@ -625,7 +625,7 @@ data: {"id":"a9537c2c-2017-4906-9817-2456168d89fa","model":"claude-3-5-sonnet"," } }); - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; // Should have both text and tool request content assert_eq!(message.content.len(), 2); diff --git a/crates/goose/src/providers/gcpvertexai.rs b/crates/goose/src/providers/gcpvertexai.rs index 9f92dc569bc8..82463fa2848b 100644 --- a/crates/goose/src/providers/gcpvertexai.rs +++ b/crates/goose/src/providers/gcpvertexai.rs @@ -281,14 +281,14 @@ impl GcpVertexAIProvider { ) -> Result { // Create host URL for the specified location let host_url = if self.location == location {             self.host.clone() } else { // NOTE(review): `&self.host` / `&self.host.replace(...)` in the if/else arms borrowed a temporary dropped at the end of the arm (E0716); keeping the owned String here so the patch compiles             self.host.replace(&self.location, location)
}; let base_url =             Url::parse(&host_url).map_err(|e| GcpVertexAIError::InvalidUrl(e.to_string()))?; // Determine endpoint based on provider type let endpoint = match provider { @@ -470,10 +470,14 @@ impl GcpVertexAIProvider { /// # Arguments /// * `payload` - The request payload to send /// * `context` - Request context containing model information -    async fn post(&self, payload: Value, context: &RequestContext) -> Result { +    async fn post( +        &self, +        payload: &Value, +        context: &RequestContext, +    ) -> Result { // Try with user-specified location first let result = self -            .post_with_location(&payload, context, &self.location) +            .post_with_location(payload, context, &self.location) .await; // If location is already the known location for the model or request succeeded, return result @@ -492,7 +496,7 @@ impl GcpVertexAIProvider { "Trying known location {known_location} for {model_name} instead of {configured_location}: {msg}" ); -        self.post_with_location(&payload, context, &known_location) +            self.post_with_location(payload, context, &known_location) .await } // For any other error, return the original result @@ -609,7 +613,7 @@ impl Provider for GcpVertexAIProvider { let (request, context) = create_request(&self.model, system, messages, tools)?; // Send request and process response -        let response = self.post(request.clone(), &context).await?; +        let response = self.post(&request, &context).await?; let usage = get_usage(&response, &context)?; emit_debug_trace(&self.model, &request, &response, &usage); diff --git a/crates/goose/src/providers/githubcopilot.rs b/crates/goose/src/providers/githubcopilot.rs index ef7a9fbecc43..245b14801a6d 100644 --- a/crates/goose/src/providers/githubcopilot.rs +++ b/crates/goose/src/providers/githubcopilot.rs @@ -137,7 +137,7 @@ impl GithubCopilotProvider { }) } -    async fn post(&self, mut payload: Value) -> 
Result { + async fn post(&self, payload: &mut Value) -> Result { use crate::providers::utils_universal_openai_stream::{OAIStreamChunk, OAIStreamCollector}; use futures::StreamExt; // Detect gpt-4.1 and stream @@ -159,7 +159,7 @@ impl GithubCopilotProvider { .post(url) .headers(self.get_github_headers()) .header("Authorization", format!("Bearer {}", token)) - .json(&payload) + .json(payload) .send() .await?; if stream_only_model { @@ -408,13 +408,14 @@ impl Provider for GithubCopilotProvider { messages: &[Message], tools: &[Tool], ) -> Result<(Message, ProviderUsage), ProviderError> { - let payload = create_request(&self.model, system, messages, tools, &ImageFormat::OpenAi)?; + let mut payload = + create_request(&self.model, system, messages, tools, &ImageFormat::OpenAi)?; // Make request - let response = self.post(payload.clone()).await?; + let response = self.post(&mut payload).await?; // Parse response - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = response.get("usage").map(get_usage).unwrap_or_else(|| { tracing::debug!("Failed to get usage data"); Usage::default() diff --git a/crates/goose/src/providers/google.rs b/crates/goose/src/providers/google.rs index dbe9b331d160..2e807bc684ed 100644 --- a/crates/goose/src/providers/google.rs +++ b/crates/goose/src/providers/google.rs @@ -86,7 +86,7 @@ impl GoogleProvider { }) } - async fn post(&self, payload: Value) -> Result { + async fn post(&self, payload: &Value) -> Result { let base_url = Url::parse(&self.host) .map_err(|e| ProviderError::RequestFailed(format!("Invalid base URL: {e}")))?; @@ -178,7 +178,7 @@ impl Provider for GoogleProvider { let payload = create_request(&self.model, system, messages, tools)?; // Make request - let response = self.post(payload.clone()).await?; + let response = self.post(&payload).await?; // Parse response let message = response_to_message(unescape_json_values(&response))?; diff --git 
a/crates/goose/src/providers/groq.rs b/crates/goose/src/providers/groq.rs index 9c8c5af9fecf..c508d290df33 100644 --- a/crates/goose/src/providers/groq.rs +++ b/crates/goose/src/providers/groq.rs @@ -54,7 +54,7 @@ impl GroqProvider { }) } - async fn post(&self, payload: Value) -> anyhow::Result { + async fn post(&self, payload: &Value) -> anyhow::Result { let base_url = Url::parse(&self.host) .map_err(|e| ProviderError::RequestFailed(format!("Invalid base URL: {e}")))?; let url = base_url.join("openai/v1/chat/completions").map_err(|e| { @@ -65,7 +65,7 @@ impl GroqProvider { .client .post(url) .header("Authorization", format!("Bearer {}", self.api_key)) - .json(&payload) + .json(payload) .send() .await?; @@ -136,9 +136,9 @@ impl Provider for GroqProvider { &super::utils::ImageFormat::OpenAi, )?; - let response = self.post(payload.clone()).await?; + let response = self.post(&payload).await?; - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = response.get("usage").map(get_usage).unwrap_or_else(|| { tracing::debug!("Failed to get usage data"); Usage::default() diff --git a/crates/goose/src/providers/litellm.rs b/crates/goose/src/providers/litellm.rs index 591337bac6e7..028731a4a2b6 100644 --- a/crates/goose/src/providers/litellm.rs +++ b/crates/goose/src/providers/litellm.rs @@ -128,7 +128,7 @@ impl LiteLLMProvider { Ok(models) } - async fn post(&self, payload: Value) -> Result { + async fn post(&self, payload: &Value) -> Result { let base_url = Url::parse(&self.host) .map_err(|e| ProviderError::RequestFailed(format!("Invalid base URL: {e}")))?; let url = base_url.join(&self.base_path).map_err(|e| { @@ -142,7 +142,7 @@ impl LiteLLMProvider { let request = self.add_headers(request); - let response = request.json(&payload).send().await?; + let response = request.json(payload).send().await?; handle_response_openai_compat(response).await } @@ -196,9 +196,9 @@ impl Provider for LiteLLMProvider { payload = 
update_request_for_cache_control(&payload); } -        let response = self.post(payload.clone()).await?; +        let response = self.post(&payload).await?; -        let message = super::formats::openai::response_to_message(response.clone())?; +        let message = super::formats::openai::response_to_message(&response)?; let usage = super::formats::openai::get_usage(&response); let model = get_model(&response); emit_debug_trace(&self.model, &payload, &response, &usage); diff --git a/crates/goose/src/providers/ollama.rs b/crates/goose/src/providers/ollama.rs index bd18d593adcc..1503f9e79097 100644 --- a/crates/goose/src/providers/ollama.rs +++ b/crates/goose/src/providers/ollama.rs @@ -56,12 +56,12 @@ impl OllamaProvider { fn get_base_url(&self) -> Result { // OLLAMA_HOST is sometimes just the 'host' or 'host:port' without a scheme // NOTE(review): reverted the `&self.host` / `&format!(...)` variant here — borrowing a temporary in an if/else arm is E0716 and does not compile; keep the owned String let base = if self.host.starts_with("http://") || self.host.starts_with("https://") {             self.host.clone() } else {             format!("http://{}", self.host) };         let mut base_url = Url::parse(&base) .map_err(|e| ProviderError::RequestFailed(format!("Invalid base URL: {e}")))?; // Set the default port if missing @@ -80,7 +80,7 @@ impl OllamaProvider { Ok(base_url) } -    async fn post(&self, payload: Value) -> Result { +    async fn post(&self, payload: &Value) -> Result { // TODO: remove this later when the UI handles provider config refresh let base_url = self.get_base_url()?; @@ -88,7 +88,7 @@ impl OllamaProvider { ProviderError::RequestFailed(format!("Failed to construct endpoint URL: {e}")) })?; -        let response = self.client.post(url).json(&payload).send().await?; +        let response = self.client.post(url).json(payload).send().await?; handle_response_openai_compat(response).await } @@ -138,8 +138,8 @@ impl Provider for OllamaProvider { filtered_tools, &super::utils::ImageFormat::OpenAi, )?; -        let response = self.post(payload.clone()).await?; -        let message = 
response_to_message(response.clone())?; + let response = self.post(&payload).await?; + let message = response_to_message(&response)?; let usage = response.get("usage").map(get_usage).unwrap_or_else(|| { tracing::debug!("Failed to get usage data"); diff --git a/crates/goose/src/providers/openai.rs b/crates/goose/src/providers/openai.rs index 767a8f47913a..4280ffd266cf 100644 --- a/crates/goose/src/providers/openai.rs +++ b/crates/goose/src/providers/openai.rs @@ -113,7 +113,7 @@ impl OpenAiProvider { request } - async fn post(&self, payload: Value) -> Result { + async fn post(&self, payload: &Value) -> Result { let base_url = url::Url::parse(&self.host) .map_err(|e| ProviderError::RequestFailed(format!("Invalid base URL: {e}")))?; let url = base_url.join(&self.base_path).map_err(|e| { @@ -178,10 +178,10 @@ impl Provider for OpenAiProvider { let payload = create_request(&self.model, system, messages, tools, &ImageFormat::OpenAi)?; // Make request - let response = handle_response_openai_compat(self.post(payload.clone()).await?).await?; + let response = handle_response_openai_compat(self.post(&payload).await?).await?; // Parse response - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = response.get("usage").map(get_usage).unwrap_or_else(|| { tracing::debug!("Failed to get usage data"); Usage::default() @@ -258,7 +258,7 @@ impl Provider for OpenAiProvider { "include_usage": true, }); - let response = handle_status_openai_compat(self.post(payload.clone()).await?).await?; + let response = handle_status_openai_compat(self.post(&payload).await?).await?; let stream = response.bytes_stream().map_err(io::Error::other); diff --git a/crates/goose/src/providers/openrouter.rs b/crates/goose/src/providers/openrouter.rs index 782e5adf9210..21fd1e5e3b3d 100644 --- a/crates/goose/src/providers/openrouter.rs +++ b/crates/goose/src/providers/openrouter.rs @@ -65,7 +65,7 @@ impl OpenRouterProvider { }) } - async fn 
post(&self, payload: Value) -> Result { + async fn post(&self, payload: &Value) -> Result { let base_url = Url::parse(&self.host) .map_err(|e| ProviderError::RequestFailed(format!("Invalid base URL: {e}")))?; let url = base_url.join("api/v1/chat/completions").map_err(|e| { @@ -79,12 +79,12 @@ impl OpenRouterProvider { .header("Authorization", format!("Bearer {}", self.api_key)) .header("HTTP-Referer", "https://block.github.io/goose") .header("X-Title", "Goose") - .json(&payload) + .json(payload) .send() .await?; // Handle Google-compatible model responses differently - if is_google_model(&payload) { + if is_google_model(payload) { return handle_response_google_compat(response).await; } @@ -259,10 +259,10 @@ impl Provider for OpenRouterProvider { let payload = create_request_based_on_model(self, system, messages, tools)?; // Make request - let response = self.post(payload.clone()).await?; + let response = self.post(&payload).await?; // Parse response - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = response.get("usage").map(get_usage).unwrap_or_else(|| { tracing::debug!("Failed to get usage data"); Usage::default() diff --git a/crates/goose/src/providers/snowflake.rs b/crates/goose/src/providers/snowflake.rs index 54309ec4e54a..0ab6be58588b 100644 --- a/crates/goose/src/providers/snowflake.rs +++ b/crates/goose/src/providers/snowflake.rs @@ -108,7 +108,7 @@ impl SnowflakeProvider { } } - async fn post(&self, payload: Value) -> Result { + async fn post(&self, payload: &Value) -> Result { let base_url_str = if !self.host.starts_with("https://") && !self.host.starts_with("http://") { format!("https://{}", self.host) @@ -318,7 +318,7 @@ impl SnowflakeProvider { .unwrap_or_else(|| "Invalid credentials".to_string()); Err(ProviderError::Authentication(format!( - "Authentication failed. Please check your SNOWFLAKE_TOKEN and SNOWFLAKE_HOST configuration. Error: {}", + "Authentication failed. 
Please check your SNOWFLAKE_TOKEN and SNOWFLAKE_HOST configuration. Error: {}", error_msg ))) } @@ -426,10 +426,10 @@ impl Provider for SnowflakeProvider { ) -> Result<(Message, ProviderUsage), ProviderError> { let payload = create_request(&self.model, system, messages, tools)?; -        let response = self.post(payload.clone()).await?; +        let response = self.post(&payload).await?; // Parse response -        let message = response_to_message(response.clone())?; +        let message = response_to_message(&response)?; let usage = get_usage(&response)?; let model = get_model(&response); super::utils::emit_debug_trace(&self.model, &payload, &response, &usage); diff --git a/crates/goose/src/providers/toolshim.rs b/crates/goose/src/providers/toolshim.rs index f07f655667ac..e1bec2239f40 100644 --- a/crates/goose/src/providers/toolshim.rs +++ b/crates/goose/src/providers/toolshim.rs @@ -89,12 +89,12 @@ impl OllamaInterpreter { // Format the URL correctly with http:// prefix if needed // NOTE(review): reverted the `&host` / `&format!(...)` variant — `&format!(...)` in an if/else arm borrows a temporary dropped at the arm's end (E0716); keep the owned String let base = if host.starts_with("http://") || host.starts_with("https://") {             host.clone() } else {             format!("http://{}", host) };         let mut base_url = url::Url::parse(&base) .map_err(|e| ProviderError::RequestFailed(format!("Invalid base URL: {e}")))?; // Set the default port if missing diff --git a/crates/goose/src/providers/xai.rs b/crates/goose/src/providers/xai.rs index cdaebdc0b1b8..9904531a3e01 100644 --- a/crates/goose/src/providers/xai.rs +++ b/crates/goose/src/providers/xai.rs @@ -72,7 +72,7 @@ impl XaiProvider { }) } -    async fn post(&self, payload: Value) -> anyhow::Result { +    async fn post(&self, payload: &Value) -> anyhow::Result { // Ensure the host ends with a slash for proper URL joining let host = if self.host.ends_with('/') { self.host.clone() } else { @@ -163,9 +163,9 @@ impl Provider for XaiProvider { &super::utils::ImageFormat::OpenAi, )?; -        let response = 
self.post(&payload).await?; - let message = response_to_message(response.clone())?; + let message = response_to_message(&response)?; let usage = response.get("usage").map(get_usage).unwrap_or_else(|| { tracing::debug!("Failed to get usage data"); Usage::default()