Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 8 additions & 6 deletions crates/goose/src/providers/formats/databricks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -515,11 +515,13 @@ pub fn create_request(
let model_name = model_config.model_name.to_string();
let is_o1 = model_name.starts_with("o1") || model_name.starts_with("goose-o1");
let is_o3 = model_name.starts_with("o3") || model_name.starts_with("goose-o3");
let is_gpt_5 = model_name.starts_with("gpt-5") || model_name.starts_with("goose-gpt-5");
let is_openai_reasoning_model = is_o1 || is_o3 || is_gpt_5;
let is_claude_sonnet =
model_name.contains("claude-3-7-sonnet") || model_name.contains("claude-4-sonnet"); // can be goose- or databricks-

// Only extract reasoning effort for O1/O3 models
let (model_name, reasoning_effort) = if is_o1 || is_o3 {
let (model_name, reasoning_effort) = if is_openai_reasoning_model {
let parts: Vec<&str> = model_config.model_name.split('-').collect();
let last_part = parts.last().unwrap();

Expand All @@ -539,7 +541,7 @@ pub fn create_request(
};

let system_message = DatabricksMessage {
role: if is_o1 || is_o3 {
role: if is_openai_reasoning_model {
"developer"
} else {
"system"
Expand Down Expand Up @@ -613,8 +615,8 @@ pub fn create_request(
.unwrap()
.insert("temperature".to_string(), json!(2));
} else {
// o1, o3 models currently don't support temperature
if !is_o1 && !is_o3 {
// open ai reasoning models currently don't support temperature
if !is_openai_reasoning_model {
if let Some(temp) = model_config.temperature {
payload
.as_object_mut()
Expand All @@ -623,9 +625,9 @@ pub fn create_request(
}
}

// o1 models use max_completion_tokens instead of max_tokens
// open ai reasoning models use max_completion_tokens instead of max_tokens
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Do we need this at all? In fact, shouldn't we just check whether `max_tokens` is present, and if not, check whether `max_completion_tokens` is present instead?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Per the OpenAI documentation linked here, `max_tokens` is deprecated in favor of `max_completion_tokens`.

We may have a separate PR to use max_completion_tokens and test it.

if let Some(tokens) = model_config.max_tokens {
let key = if is_o1 || is_o3 {
let key = if is_openai_reasoning_model {
"max_completion_tokens"
} else {
"max_tokens"
Expand Down
Loading