crates/goose/src/providers/base.rs (5 changes: 1 addition & 4 deletions)

```diff
@@ -386,18 +386,15 @@ pub trait Provider: Send + Sync {
         RetryConfig::default()
     }

     /// Optional hook to fetch supported models.
     async fn fetch_supported_models(&self) -> Result<Option<Vec<String>>, ProviderError> {
         Ok(None)
     }

     /// Check if this provider supports embeddings
     fn supports_embeddings(&self) -> bool {
         false
     }

     /// Check if this provider supports cache control
-    fn supports_cache_control(&self) -> bool {
+    async fn supports_cache_control(&self) -> bool {
         false
     }
```
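The core change: `supports_cache_control` becomes an `async` trait method with a `false` default, so implementors that need I/O to answer the question (like LiteLLM's model lookup below) can await inside it. A minimal sketch of the new shape, assuming the trait uses the `async_trait` macro (the diff does not show the trait's attributes, so that is a guess); `NullProvider` is a hypothetical implementor, not from the PR:

```rust
use async_trait::async_trait;

#[async_trait]
trait Provider: Send + Sync {
    /// Check if this provider supports cache control.
    /// Defaults to false; providers that can actually answer
    /// (possibly by calling a model-listing API) override it.
    async fn supports_cache_control(&self) -> bool {
        false
    }
}

struct NullProvider;

#[async_trait]
impl Provider for NullProvider {} // inherits the `false` default

#[tokio::main]
async fn main() {
    let p = NullProvider;
    assert!(!p.supports_cache_control().await);
}
```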
crates/goose/src/providers/litellm.rs (8 changes: 3 additions & 5 deletions)

```diff
@@ -174,7 +174,7 @@ impl Provider for LiteLLMProvider {
             &ImageFormat::OpenAi,
         )?;

-        if self.supports_cache_control() {
+        if self.supports_cache_control().await {
            payload = update_request_for_cache_control(&payload);
        }

@@ -197,10 +197,8 @@ impl Provider for LiteLLMProvider {
         true
     }

-    fn supports_cache_control(&self) -> bool {
-        if let Ok(models) = tokio::task::block_in_place(|| {
-            tokio::runtime::Handle::current().block_on(self.fetch_models())
-        }) {
+    async fn supports_cache_control(&self) -> bool {
+        if let Ok(models) = self.fetch_models().await {
             if let Some(model_info) = models.iter().find(|m| m.name == self.model.model_name) {
                 return model_info.supports_cache_control.unwrap_or(false);
             }
```
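This is the heart of the PR. The old implementation bridged sync-to-async with `tokio::task::block_in_place` plus `Handle::block_on`, which panics on a current-thread runtime and ties up a worker thread otherwise; making the method async lets callers simply await. A standalone sketch of the before/after pattern, with a stub `fetch_models` standing in for the provider's real method:

```rust
// Stub standing in for the provider's real model-listing call.
async fn fetch_models() -> Result<Vec<String>, std::io::Error> {
    Ok(vec!["claude-3-5-sonnet".to_string()]) // placeholder data
}

// Before: a sync fn that blocks on async work. `block_in_place`
// panics on a current-thread runtime, and even on the multi-thread
// runtime it stalls a worker while the future runs.
fn supports_cache_control_blocking() -> bool {
    tokio::task::block_in_place(|| {
        tokio::runtime::Handle::current().block_on(fetch_models())
    })
    .map(|models| !models.is_empty())
    .unwrap_or(false)
}

// After: the method is async, so the caller awaits it and the
// runtime stays free to schedule other tasks in the meantime.
async fn supports_cache_control_async() -> bool {
    fetch_models()
        .await
        .map(|models| !models.is_empty())
        .unwrap_or(false)
}

#[tokio::main]
async fn main() {
    // Multi-thread runtime: the blocking variant happens to work here,
    // but it would panic under a current-thread runtime.
    println!("blocking: {}", supports_cache_control_blocking());
    println!("async:    {}", supports_cache_control_async().await);
}
```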
crates/goose/src/providers/openai.rs (1 change: 1 addition & 0 deletions)

```diff
@@ -231,6 +231,7 @@ impl Provider for OpenAiProvider {
             tracing::debug!("Failed to get usage data");
             Usage::default()
         });
+
         let model = get_model(&json_response);
         log.write(&json_response, Some(&usage))?;
         Ok((message, ProviderUsage::new(model, usage)))
```
crates/goose/src/providers/openrouter.rs (9 changes: 4 additions & 5 deletions)

```diff
@@ -193,7 +193,7 @@ fn update_request_for_anthropic(original_payload: &Value) -> Value {
     payload
 }

-fn create_request_based_on_model(
+async fn create_request_based_on_model(
     provider: &OpenRouterProvider,
     system: &str,
     messages: &[Message],
@@ -207,7 +207,7 @@ fn create_request_based_on_model(
         &super::utils::ImageFormat::OpenAi,
     )?;

-    if provider.supports_cache_control() {
+    if provider.supports_cache_control().await {
         payload = update_request_for_anthropic(&payload);
     }

@@ -257,8 +257,7 @@ impl Provider for OpenRouterProvider {
         messages: &[Message],
         tools: &[Tool],
     ) -> Result<(Message, ProviderUsage), ProviderError> {
-        // Create the base payload
-        let payload = create_request_based_on_model(self, system, messages, tools)?;
+        let payload = create_request_based_on_model(self, system, messages, tools).await?;
         let mut log = RequestLog::start(model_config, &payload)?;

         // Make request
@@ -357,7 +356,7 @@ impl Provider for OpenRouterProvider {
         Ok(Some(models))
     }

-    fn supports_cache_control(&self) -> bool {
+    async fn supports_cache_control(&self) -> bool {
         self.model
             .model_name
             .starts_with(OPENROUTER_MODEL_PREFIX_ANTHROPIC)
```
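Unlike LiteLLM, OpenRouter's `supports_cache_control` does no I/O at all: it is async only to satisfy the new trait signature, and decides by model-name prefix. A tiny sketch of that gating idea; the prefix value `"anthropic/"` is an assumption here, since the real `OPENROUTER_MODEL_PREFIX_ANTHROPIC` constant is defined elsewhere in the module and not shown in this diff:

```rust
// Assumed value; the real constant lives in the openrouter module.
const OPENROUTER_MODEL_PREFIX_ANTHROPIC: &str = "anthropic/";

async fn supports_cache_control(model_name: &str) -> bool {
    // No await needed: async here only matches the trait signature.
    model_name.starts_with(OPENROUTER_MODEL_PREFIX_ANTHROPIC)
}

#[tokio::main]
async fn main() {
    assert!(supports_cache_control("anthropic/claude-3.5-sonnet").await);
    assert!(!supports_cache_control("openai/gpt-4o").await);
}
```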
crates/goose/src/providers/tetrate.rs (1 change: 0 additions & 1 deletion)

```diff
@@ -165,7 +165,6 @@ impl Provider for TetrateProvider {
         messages: &[Message],
         tools: &[Tool],
     ) -> Result<(Message, ProviderUsage), ProviderError> {
-        // Create the base payload using the provided model_config
         let payload = create_request(
             model_config,
             system,
```