Claude 3.7 is out. We had some hardcoded stuff #6197
Changes from all commits: 78a8c55, 8f78b15, 805a062, 474755b, d505ad6, 88a7112, bf25242, 26c2627
```diff
@@ -394,9 +394,18 @@ pub fn create_request(
         return Err(anyhow!("No valid messages to send to Anthropic API"));
     }
 
-    // https://docs.anthropic.com/en/docs/about-claude/models/all-models#model-comparison-table
-    // Claude 3.7 supports max output tokens up to 8192
-    let max_tokens = model_config.max_tokens.unwrap_or(8192);
+    // https://platform.claude.com/docs/en/about-claude/models/overview
+    // 64k output tokens works for most claude models, but not old opus:
+    let max_tokens = model_config.max_tokens.unwrap_or_else(|| {
+        let name = &model_config.model_name;
+        if name.contains("claude-3-haiku") {
+            4096
+        } else if name.contains("claude-opus-4-0") || name.contains("claude-opus-4-1") {
+            32000
+        } else {
+            64000
+        }
+    });
     let mut payload = json!({
         "model": model_config.model_name,
         "messages": anthropic_messages,
```
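For readers skimming the diff, the new fallback reads roughly as below when pulled out into a free function. `default_max_tokens` is a hypothetical helper used only for illustration; in the PR the same branching lives inline in the `unwrap_or_else` closure on `model_config.max_tokens`.

```rust
/// Illustration only: the per-model default from the diff, extracted into a
/// standalone function so the branches are easy to eyeball. In the PR this
/// logic runs inline via `model_config.max_tokens.unwrap_or_else(...)`.
fn default_max_tokens(model_name: &str) -> u32 {
    if model_name.contains("claude-3-haiku") {
        4096 // old Claude 3 Haiku caps output at 4k tokens
    } else if model_name.contains("claude-opus-4-0") || model_name.contains("claude-opus-4-1") {
        32000 // early Opus 4 models cap at 32k
    } else {
        64000 // 64k works for the rest of the current lineup
    }
}

fn main() {
    assert_eq!(default_max_tokens("claude-3-haiku-20240307"), 4096);
    assert_eq!(default_max_tokens("claude-opus-4-1"), 32000);
    assert_eq!(default_max_tokens("claude-sonnet-4-5"), 64000);
}
```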
```diff
@@ -421,18 +430,15 @@ pub fn create_request(
 
     // Add temperature if specified and not using extended thinking model
     if let Some(temp) = model_config.temperature {
-        // Claude 3.7 models with thinking enabled don't support temperature
-        if !model_config.model_name.starts_with("claude-3-7-sonnet-") {
-            payload
-                .as_object_mut()
-                .unwrap()
-                .insert("temperature".to_string(), json!(temp));
-        }
+        payload
+            .as_object_mut()
+            .unwrap()
+            .insert("temperature".to_string(), json!(temp));
     }
 
-    // Add thinking parameters for claude-3-7-sonnet model
+    // Add thinking parameters for all models when CLAUDE_THINKING_ENABLED is set
```
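The thinking block itself sits below this hunk and is not shown. For context, extended thinking on the Anthropic Messages API is requested with a `thinking` object carrying a token budget; the sketch below shows the general shape of such a gate. Only the CLAUDE_THINKING_ENABLED variable comes from this diff; the helper name and the budget plumbing are assumptions, not this repository's code.

```rust
use serde_json::{json, Value};

/// Rough sketch, not this repo's code: gate an Anthropic `thinking` block on
/// the CLAUDE_THINKING_ENABLED environment variable. The Messages API enables
/// extended thinking via {"type": "enabled", "budget_tokens": N}.
fn maybe_enable_thinking(payload: &mut Value, budget_tokens: u32) {
    if std::env::var("CLAUDE_THINKING_ENABLED").is_ok() {
        payload.as_object_mut().unwrap().insert(
            "thinking".to_string(),
            json!({ "type": "enabled", "budget_tokens": budget_tokens }),
        );
    }
}
```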
Copilot AI commented on Jan 6, 2026:
This comment still mentions "claude-3-7-sonnet model" but the thinking parameters are now applied to all models when CLAUDE_THINKING_ENABLED is set. Update the comment to reflect that thinking is no longer model-specific.
Suggested change:

```diff
-    // Add thinking parameters for claude-3-7-sonnet model
+    // Add thinking parameters when CLAUDE_THINKING_ENABLED is set (applies to all models)
```
Copilot AI commented:
Temperature is now always added regardless of model when specified in model_config. The previous code excluded temperature for claude-3-7-sonnet models with thinking enabled. If users can still specify claude-3-7-sonnet models (even though they're removed from the known models list), this could cause API errors. Consider either: (1) explicitly blocking claude-3-7 model names, or (2) keeping the temperature exclusion for backward compatibility.
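As a sketch of what option (2) might look like, the snippet below restores the old guard using the field names from the diff. `apply_temperature` is a hypothetical helper introduced only for illustration, not code from this PR.

```rust
use serde_json::{json, Value};

/// Sketch of option (2): keep skipping the temperature field for
/// claude-3-7-sonnet models (which reject it when extended thinking is on),
/// and set it for every other model. Illustration only.
fn apply_temperature(payload: &mut Value, model_name: &str, temperature: Option<f32>) {
    if let Some(temp) = temperature {
        // Backward-compatibility guard mirroring the code this PR removed.
        if !model_name.starts_with("claude-3-7-sonnet-") {
            payload
                .as_object_mut()
                .expect("request payload is a JSON object")
                .insert("temperature".to_string(), json!(temp));
        }
    }
}
```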