diff --git a/.github/workflows/update-contributors.yml b/.github/workflows/update-contributors.yml index c3c9327607b6..5709bdc10a09 100644 --- a/.github/workflows/update-contributors.yml +++ b/.github/workflows/update-contributors.yml @@ -1,46 +1,67 @@ -name: Update Contributors +name: Update Contributors # Refresh contrib.rocks image cache on: - push: - branches: - - main workflow_dispatch: +permissions: + contents: write + pull-requests: write + jobs: - update-contributors: + refresh-contrib-cache: runs-on: ubuntu-latest - permissions: - contents: write # Needed for pushing changes. - pull-requests: write # Needed for creating PRs. steps: - - name: Checkout code + - name: Checkout uses: actions/checkout@v4 - - name: Setup Node.js and pnpm - uses: ./.github/actions/setup-node-pnpm - - name: Disable Husky + + - name: Bump cacheBust in all README files run: | - echo "HUSKY=0" >> $GITHUB_ENV - git config --global core.hooksPath /dev/null - - name: Update contributors and format + set -euo pipefail + TS="$(date +%s)" + # Target only the root README.md and localized READMEs under locales/*/README.md + mapfile -t FILES < <(git ls-files README.md 'locales/*/README.md' || true) + + if [ "${#FILES[@]}" -eq 0 ]; then + echo "No target README files found." >&2 + exit 1 + fi + + UPDATED=0 + for f in "${FILES[@]}"; do + if grep -q 'cacheBust=' "$f"; then + # Use portable sed in GNU environment of ubuntu-latest + sed -i -E "s/cacheBust=[0-9]+/cacheBust=${TS}/g" "$f" + echo "Updated cacheBust in $f" + UPDATED=1 + else + echo "Warning: cacheBust parameter not found in $f" >&2 + fi + done + + if [ "$UPDATED" -eq 0 ]; then + echo "No files were updated. Ensure READMEs embed contrib.rocks with cacheBust param." 
>&2 + exit 1 + fi + + - name: Detect changes + id: changes run: | - pnpm update-contributors - npx prettier --write README.md locales/*/README.md - if git diff --quiet; then echo "changes=false" >> $GITHUB_OUTPUT; else echo "changes=true" >> $GITHUB_OUTPUT; fi - id: check-changes - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + if git diff --quiet; then + echo "changed=false" >> $GITHUB_OUTPUT + else + echo "changed=true" >> $GITHUB_OUTPUT + fi + - name: Create Pull Request - if: steps.check-changes.outputs.changes == 'true' + if: steps.changes.outputs.changed == 'true' uses: peter-evans/create-pull-request@v7 with: token: ${{ secrets.GITHUB_TOKEN }} - commit-message: "docs: update contributors list [skip ci]" + commit-message: "docs: refresh contrib.rocks image cache [skip ci]" committer: "github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>" - branch: update-contributors + branch: refresh-contrib-cache delete-branch: true - title: "Update contributors list" + title: "Refresh contrib.rocks image cache (all READMEs)" body: | - Automated update of contributors list and related files - - This PR was created automatically by a GitHub Action workflow and includes all changed files. + Automated refresh of the contrib.rocks image cache by bumping the cacheBust parameter in README.md and locales/*/README.md. 
base: main diff --git a/apps/web-roo-code/src/components/homepage/testimonials.tsx b/apps/web-roo-code/src/components/homepage/testimonials.tsx index b3d4856c0c6d..faa1d2130e51 100644 --- a/apps/web-roo-code/src/components/homepage/testimonials.tsx +++ b/apps/web-roo-code/src/components/homepage/testimonials.tsx @@ -4,45 +4,114 @@ import { useRef, useCallback, useEffect } from "react" import { motion } from "framer-motion" import useEmblaCarousel from "embla-carousel-react" import AutoPlay from "embla-carousel-autoplay" -import { ChevronLeft, ChevronRight } from "lucide-react" +import { ChevronLeft, ChevronRight, Star } from "lucide-react" export interface Testimonial { - id: number name: string role: string - company: string - image?: string + origin: string quote: string + image?: string + stars?: number } export const testimonials: Testimonial[] = [ { - id: 1, name: "Luca", role: "Reviewer", - company: "VS Code Marketplace", + origin: "VS Code Marketplace", quote: "Costrict is an absolute game-changer! 🚀 It makes coding faster, easier, and more intuitive with its smart AI-powered suggestions, real-time debugging, and automation features. The seamless integration with VS Code is a huge plus, and the constant updates ensure it keeps getting better", + stars: 5, }, { - id: 2, name: "Taro Woollett-Chiba", role: "AI Product Lead", - company: "Vendidit", + origin: "Vendidit", quote: "Easily the best AI code editor. Costrict has the best features and capabilities, along with the best development team. I swear, they're the fastest to support new models and implement useful functionality whenever users mention it... simply amazing.", }, { - id: 3, name: "Can Nuri", role: "Reviewer", - company: "VS Code Marketplace", + origin: "VS Code Marketplace", quote: "Costrict is one of the most inspiring projects I have seen for a long time. 
It shapes the way I think and deal with software development.", + stars: 5, }, { - id: 4, name: "Michael", role: "Reviewer", - company: "VS Code Marketplace", + origin: "VS Code Marketplace", quote: "I switched from Windsurf to Costrict in January and honestly, it's been a huge upgrade. Windsurf kept making mistakes and being dumb when I ask it for things. Costrict just gets it. Projects that used to take a full day now wrap up before lunch. ", + stars: 5, + }, + { + name: "Darien Hardin", + role: "Reviewer", + origin: "VS Code Marketplace", + quote: "By far the best coding tool I have used. Looking forward to where this goes in the future. Also, their Discord is an excellent resource with many knowledgeable users sharing their discoveries.", + stars: 5, + }, + { + name: "Wiliam Azzam", + role: "Reviewer", + origin: "VS Code Marketplace", + quote: "I've tried Cursor, Windsurf, Cline, Trae and others, and although using RooCode with OpenRouter is more expensive, it is also far more effective. Its agents and initial setup, and learning how to use Code/Architect/Orchestrator, help a great deal in developing quality projects.", + stars: 5, + }, + { + name: "Matěj Zapletal", + role: "Reviewer", + origin: "VS Code Marketplace", + quote: "Definitely the best AI coding agent extension.", + stars: 5, + }, + { + name: "Ali Davachi", + role: "Reviewer", + origin: "VS Code Marketplace", + quote: "We tried the rest, now we are using the best. The alternatives are more restrictive. I didn't use competitors for a reason. This team is killing it.", + stars: 5, + }, + { + name: "Ryan Booth", + role: "Reviewer", + origin: "VS Code Marketplace", + quote: "I work inside Costrict about 60+ hours a week and usually Costrict is building something at all hours of the day. 
An amazing tool by an amazing team!", + stars: 5, + }, + { + name: "Matthew Martin", + role: "Reviewer", + origin: "VS Code Marketplace", + quote: "i spent a fortune trying to dial in various tools to get them to work the way i want, and then i found roocode. customizable for your flavors on your terms. this is what i always wanted.", + stars: 5, + }, + { + name: "Edwin Jacques", + role: "Reviewer", + origin: "VS Code Marketplace", + quote: "The BEST. Super fast, no-nonsense, UI that makes sense, many API provider choices, responsive, helpful developer community.", + stars: 5, + }, + { + name: "Sean McCann", + role: "Reviewer", + origin: "VS Code Marketplace", + quote: "Costrict is impressively capable while staying refreshingly simple. It integrates seamlessly into VS Code and handles everything from generating code to refactoring with accuracy and speed. It feels like a natural part of the workflow—no clutter, just results. Extra points for the flexibility of the different agents and the ability to customize them to fit the job.", + stars: 5, + }, + { + name: "Colin Tate", + role: "Reviewer", + origin: "VS Code Marketplace", + quote: "Absolutely amazing extension. I had tried Cursor previously, and this just beats it hands down. I've used it for several large projects now, and it is now my go-to for creating things that would normally take weeks or months. Highly recommended.", + stars: 5, + }, + { + name: "Michael Scott", + role: "Reviewer", + origin: "VS Code Marketplace", + quote: "I've used all the IDEs and all the assistants - Costrict is hands down the best of them. It's also one of the few that lets you bring your own API keys - no subscriptions required, just pay as you need/go! 
Fantastic team and support as well!", + stars: 5, }, ] @@ -58,8 +127,8 @@ export function Testimonials() { [ AutoPlay({ playOnInit: true, - delay: 4000, - stopOnInteraction: true, + delay: 3_500, + stopOnInteraction: false, stopOnMouseEnter: true, stopOnFocusIn: true, }), @@ -122,17 +191,17 @@ export function Testimonials() {
-
+

- AI-forward developers are using Roo Code + Developers really shipping with AI are using Costrict

- Join more than 800k people revolutionizing their workflow worldwide + Join more than 1M people revolutionizing their workflow worldwide

@@ -148,27 +217,27 @@ export function Testimonials() { {/* Next Button */} {/* Gradient Overlays */} -
-
+
+
{/* Embla Carousel Container */}
{testimonials.map((testimonial) => (
+ key={testimonial.name} + className="relative min-w-0 flex-[0_0_85%] px-2 md:flex-[0_0_70%] md:px-4 lg:flex-[0_0_30%]">
-
+
-

+

{testimonial.quote}

@@ -178,7 +247,15 @@ export function Testimonials() { {testimonial.name}

- {testimonial.role} at {testimonial.company} + {testimonial.role} at {testimonial.origin} + {testimonial.stars && ( + + {" "} + {Array.from({ length: testimonial.stars }, (_, i) => ( + + ))} + + )}

diff --git a/packages/types/src/providers/anthropic.ts b/packages/types/src/providers/anthropic.ts index 2cb38537a4d6..759ebdea5558 100644 --- a/packages/types/src/providers/anthropic.ts +++ b/packages/types/src/providers/anthropic.ts @@ -6,6 +6,28 @@ export type AnthropicModelId = keyof typeof anthropicModels export const anthropicDefaultModelId: AnthropicModelId = "claude-sonnet-4-20250514" export const anthropicModels = { + "claude-sonnet-4-5": { + maxTokens: 64_000, // Overridden to 8k if `enableReasoningEffort` is false. + contextWindow: 200_000, // Default 200K, extendable to 1M with beta flag 'context-1m-2025-08-07' + supportsImages: true, + supportsComputerUse: true, + supportsPromptCache: true, + inputPrice: 3.0, // $3 per million input tokens (≤200K context) + outputPrice: 15.0, // $15 per million output tokens (≤200K context) + cacheWritesPrice: 3.75, // $3.75 per million tokens + cacheReadsPrice: 0.3, // $0.30 per million tokens + supportsReasoningBudget: true, + // Tiered pricing for extended context (requires beta flag 'context-1m-2025-08-07') + tiers: [ + { + contextWindow: 1_000_000, // 1M tokens with beta flag + inputPrice: 6.0, // $6 per million input tokens (>200K context) + outputPrice: 22.5, // $22.50 per million output tokens (>200K context) + cacheWritesPrice: 7.5, // $7.50 per million tokens (>200K context) + cacheReadsPrice: 0.6, // $0.60 per million tokens (>200K context) + }, + ], + }, "claude-sonnet-4-20250514": { maxTokens: 64_000, // Overridden to 8k if `enableReasoningEffort` is false. 
contextWindow: 200_000, // Default 200K, extendable to 1M with beta flag 'context-1m-2025-08-07' diff --git a/packages/types/src/providers/bedrock.ts b/packages/types/src/providers/bedrock.ts index 67215e77968b..6b8045911879 100644 --- a/packages/types/src/providers/bedrock.ts +++ b/packages/types/src/providers/bedrock.ts @@ -13,6 +13,21 @@ export const bedrockDefaultPromptRouterModelId: BedrockModelId = "anthropic.clau // of the default prompt routers AWS enabled for GA of the promot router // feature. export const bedrockModels = { + "anthropic.claude-sonnet-4-5-20250929-v1:0": { + maxTokens: 8192, + contextWindow: 200_000, + supportsImages: true, + supportsComputerUse: true, + supportsPromptCache: true, + supportsReasoningBudget: true, + inputPrice: 3.0, + outputPrice: 15.0, + cacheWritesPrice: 3.75, + cacheReadsPrice: 0.3, + minTokensPerCachePoint: 1024, + maxCachePoints: 4, + cachableFields: ["system", "messages", "tools"], + }, "amazon.nova-pro-v1:0": { maxTokens: 5000, contextWindow: 300_000, @@ -443,3 +458,7 @@ export const BEDROCK_REGIONS = [ ].sort((a, b) => a.value.localeCompare(b.value)) export const BEDROCK_CLAUDE_SONNET_4_MODEL_ID = "anthropic.claude-sonnet-4-20250514-v1:0" +export const BEDROCK_1M_CONTEXT_MODEL_IDS = [ + "anthropic.claude-sonnet-4-20250514-v1:0", + "anthropic.claude-sonnet-4-5-20250929-v1:0", +] as const diff --git a/packages/types/src/providers/claude-code.ts b/packages/types/src/providers/claude-code.ts index d9b658319a49..a00066d87f5f 100644 --- a/packages/types/src/providers/claude-code.ts +++ b/packages/types/src/providers/claude-code.ts @@ -40,6 +40,14 @@ export function getClaudeCodeModelId(baseModelId: ClaudeCodeModelId, useVertex = } export const claudeCodeModels = { + "claude-sonnet-4-5": { + ...anthropicModels["claude-sonnet-4-5"], + supportsImages: false, + supportsPromptCache: true, // Claude Code does report cache tokens + supportsReasoningEffort: false, + supportsReasoningBudget: false, + requiredReasoningBudget: 
false, + }, "claude-sonnet-4-20250514": { ...anthropicModels["claude-sonnet-4-20250514"], supportsImages: false, diff --git a/packages/types/src/providers/lite-llm.ts b/packages/types/src/providers/lite-llm.ts index fdfef95bc62d..715b279c36b5 100644 --- a/packages/types/src/providers/lite-llm.ts +++ b/packages/types/src/providers/lite-llm.ts @@ -30,6 +30,7 @@ export const LITELLM_COMPUTER_USE_MODELS = new Set([ "vertex_ai/claude-opus-4-1@20250805", "vertex_ai/claude-opus-4@20250514", "vertex_ai/claude-sonnet-4@20250514", + "vertex_ai/claude-sonnet-4-5@20250929", "openrouter/anthropic/claude-3.5-sonnet", "openrouter/anthropic/claude-3.5-sonnet:beta", "openrouter/anthropic/claude-3.7-sonnet", diff --git a/packages/types/src/providers/openrouter.ts b/packages/types/src/providers/openrouter.ts index 51d096130bd1..dde6730e52e0 100644 --- a/packages/types/src/providers/openrouter.ts +++ b/packages/types/src/providers/openrouter.ts @@ -38,6 +38,7 @@ export const OPEN_ROUTER_PROMPT_CACHING_MODELS = new Set([ "anthropic/claude-3.7-sonnet:beta", "anthropic/claude-3.7-sonnet:thinking", "anthropic/claude-sonnet-4", + "anthropic/claude-sonnet-4.5", "anthropic/claude-opus-4", "anthropic/claude-opus-4.1", "google/gemini-2.5-flash-preview", @@ -59,6 +60,7 @@ export const OPEN_ROUTER_COMPUTER_USE_MODELS = new Set([ "anthropic/claude-3.7-sonnet:beta", "anthropic/claude-3.7-sonnet:thinking", "anthropic/claude-sonnet-4", + "anthropic/claude-sonnet-4.5", "anthropic/claude-opus-4", "anthropic/claude-opus-4.1", ]) @@ -81,6 +83,7 @@ export const OPEN_ROUTER_REASONING_BUDGET_MODELS = new Set([ "anthropic/claude-opus-4", "anthropic/claude-opus-4.1", "anthropic/claude-sonnet-4", + "anthropic/claude-sonnet-4.5", "google/gemini-2.5-pro-preview", "google/gemini-2.5-pro", "google/gemini-2.5-flash-preview-05-20", diff --git a/packages/types/src/providers/vertex.ts b/packages/types/src/providers/vertex.ts index 8010fccf8e2f..f03d0b58052e 100644 --- a/packages/types/src/providers/vertex.ts +++ 
b/packages/types/src/providers/vertex.ts @@ -3,7 +3,7 @@ import type { ModelInfo } from "../model.js" // https://cloud.google.com/vertex-ai/generative-ai/docs/partner-models/use-claude export type VertexModelId = keyof typeof vertexModels -export const vertexDefaultModelId: VertexModelId = "claude-sonnet-4@20250514" +export const vertexDefaultModelId: VertexModelId = "claude-sonnet-4-5@20250929" export const vertexModels = { "gemini-2.5-flash-preview-05-20:thinking": { @@ -175,6 +175,18 @@ export const vertexModels = { cacheReadsPrice: 0.3, supportsReasoningBudget: true, }, + "claude-sonnet-4-5@20250929": { + maxTokens: 8192, + contextWindow: 200_000, + supportsImages: true, + supportsComputerUse: true, + supportsPromptCache: true, + inputPrice: 3.0, + outputPrice: 15.0, + cacheWritesPrice: 3.75, + cacheReadsPrice: 0.3, + supportsReasoningBudget: true, + }, "claude-opus-4-1@20250805": { maxTokens: 8192, contextWindow: 200_000, diff --git a/releases/3.28.10-release.png b/releases/3.28.10-release.png new file mode 100644 index 000000000000..97f6cd429f44 Binary files /dev/null and b/releases/3.28.10-release.png differ diff --git a/src/api/providers/__tests__/anthropic.spec.ts b/src/api/providers/__tests__/anthropic.spec.ts index b1d0a2f6b35b..b05e50125b80 100644 --- a/src/api/providers/__tests__/anthropic.spec.ts +++ b/src/api/providers/__tests__/anthropic.spec.ts @@ -264,5 +264,29 @@ describe("AnthropicHandler", () => { expect(result.reasoningBudget).toBeUndefined() expect(result.temperature).toBe(0) }) + + it("should handle Claude 4.5 Sonnet model correctly", () => { + const handler = new AnthropicHandler({ + apiKey: "test-api-key", + apiModelId: "claude-sonnet-4-5", + }) + const model = handler.getModel() + expect(model.id).toBe("claude-sonnet-4-5") + expect(model.info.maxTokens).toBe(64000) + expect(model.info.contextWindow).toBe(200000) + expect(model.info.supportsReasoningBudget).toBe(true) + }) + + it("should enable 1M context for Claude 4.5 Sonnet when beta 
flag is set", () => { + const handler = new AnthropicHandler({ + apiKey: "test-api-key", + apiModelId: "claude-sonnet-4-5", + anthropicBeta1MContext: true, + }) + const model = handler.getModel() + expect(model.info.contextWindow).toBe(1000000) + expect(model.info.inputPrice).toBe(6.0) + expect(model.info.outputPrice).toBe(22.5) + }) }) }) diff --git a/src/api/providers/__tests__/bedrock-reasoning.spec.ts b/src/api/providers/__tests__/bedrock-reasoning.spec.ts index f8d9beb0eb7e..abf73ff8e974 100644 --- a/src/api/providers/__tests__/bedrock-reasoning.spec.ts +++ b/src/api/providers/__tests__/bedrock-reasoning.spec.ts @@ -182,7 +182,7 @@ describe("AwsBedrockHandler - Extended Thinking", () => { ) }) - it("should include topP when thinking is disabled", async () => { + it("should not include topP when thinking is disabled (global removal)", async () => { handler = new AwsBedrockHandler({ apiProvider: "bedrock", apiModelId: "anthropic.claude-3-7-sonnet-20250219-v1:0", @@ -216,10 +216,10 @@ describe("AwsBedrockHandler - Extended Thinking", () => { chunks.push(chunk) } - // Verify that topP IS present when thinking is disabled + // Verify that topP is NOT present for any model (removed globally) expect(mockSend).toHaveBeenCalledTimes(1) expect(capturedPayload).toBeDefined() - expect(capturedPayload.inferenceConfig).toHaveProperty("topP", 0.1) + expect(capturedPayload.inferenceConfig).not.toHaveProperty("topP") // Verify that additionalModelRequestFields is not present or empty expect(capturedPayload.additionalModelRequestFields).toBeUndefined() diff --git a/src/api/providers/anthropic.ts b/src/api/providers/anthropic.ts index cb48492b6067..3fb60c0e4fd1 100644 --- a/src/api/providers/anthropic.ts +++ b/src/api/providers/anthropic.ts @@ -45,12 +45,16 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa const cacheControl: CacheControlEphemeral = { type: "ephemeral" } let { id: modelId, betas = [], maxTokens, temperature, reasoning: thinking 
} = this.getModel() - // Add 1M context beta flag if enabled for Claude Sonnet 4 - if (modelId === "claude-sonnet-4-20250514" && this.options.anthropicBeta1MContext) { + // Add 1M context beta flag if enabled for Claude Sonnet 4 and 4.5 + if ( + (modelId === "claude-sonnet-4-20250514" || modelId === "claude-sonnet-4-5") && + this.options.anthropicBeta1MContext + ) { betas.push("context-1m-2025-08-07") } switch (modelId) { + case "claude-sonnet-4-5": case "claude-sonnet-4-20250514": case "claude-opus-4-1-20250805": case "claude-opus-4-20250514": @@ -110,6 +114,7 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa // Then check for models that support prompt caching switch (modelId) { + case "claude-sonnet-4-5": case "claude-sonnet-4-20250514": case "claude-opus-4-1-20250805": case "claude-opus-4-20250514": @@ -243,8 +248,8 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa let id = modelId && modelId in anthropicModels ? (modelId as AnthropicModelId) : anthropicDefaultModelId let info: ModelInfo = anthropicModels[id] - // If 1M context beta is enabled for Claude Sonnet 4, update the model info - if (id === "claude-sonnet-4-20250514" && this.options.anthropicBeta1MContext) { + // If 1M context beta is enabled for Claude Sonnet 4 or 4.5, update the model info + if ((id === "claude-sonnet-4-20250514" || id === "claude-sonnet-4-5") && this.options.anthropicBeta1MContext) { // Use the tier pricing for 1M context const tier = info.tiers?.[0] if (tier) { diff --git a/src/api/providers/bedrock.ts b/src/api/providers/bedrock.ts index be1e04f79db8..66432196aa88 100644 --- a/src/api/providers/bedrock.ts +++ b/src/api/providers/bedrock.ts @@ -22,6 +22,7 @@ import { BEDROCK_DEFAULT_CONTEXT, AWS_INFERENCE_PROFILE_MAPPING, BEDROCK_CLAUDE_SONNET_4_MODEL_ID, + BEDROCK_1M_CONTEXT_MODEL_IDS, } from "@roo-code/types" import { ApiStream } from "../transform/stream" @@ -44,7 +45,6 @@ import type { SingleCompletionHandler, 
ApiHandlerCreateMessageMetadata } from ". interface BedrockInferenceConfig { maxTokens: number temperature?: number - topP?: number } // Define interface for Bedrock additional model request fields @@ -374,14 +374,11 @@ export class AwsBedrockHandler extends BaseProvider implements SingleCompletionH temperature: modelConfig.temperature ?? (this.options.modelTemperature as number), } - if (!thinkingEnabled) { - inferenceConfig.topP = 0.1 - } - // Check if 1M context is enabled for Claude Sonnet 4 // Use parseBaseModelId to handle cross-region inference prefixes const baseModelId = this.parseBaseModelId(modelConfig.id) - const is1MContextEnabled = baseModelId === BEDROCK_CLAUDE_SONNET_4_MODEL_ID && this.options.awsBedrock1MContext + const is1MContextEnabled = + BEDROCK_1M_CONTEXT_MODEL_IDS.includes(baseModelId as any) && this.options.awsBedrock1MContext // Add anthropic_beta for 1M context to additionalModelRequestFields if (is1MContextEnabled) { @@ -647,7 +644,6 @@ export class AwsBedrockHandler extends BaseProvider implements SingleCompletionH const inferenceConfig: BedrockInferenceConfig = { maxTokens: modelConfig.maxTokens || (modelConfig.info.maxTokens as number), temperature: modelConfig.temperature ?? (this.options.modelTemperature as number), - ...(thinkingEnabled ? 
{} : { topP: 0.1 }), // Only set topP when thinking is NOT enabled } // For completePrompt, use a unique conversation ID based on the prompt @@ -976,10 +972,10 @@ export class AwsBedrockHandler extends BaseProvider implements SingleCompletionH } } - // Check if 1M context is enabled for Claude Sonnet 4 + // Check if 1M context is enabled for Claude Sonnet 4 / 4.5 // Use parseBaseModelId to handle cross-region inference prefixes const baseModelId = this.parseBaseModelId(modelConfig.id) - if (baseModelId === BEDROCK_CLAUDE_SONNET_4_MODEL_ID && this.options.awsBedrock1MContext) { + if (BEDROCK_1M_CONTEXT_MODEL_IDS.includes(baseModelId as any) && this.options.awsBedrock1MContext) { // Update context window to 1M tokens when 1M context beta is enabled modelConfig.info = { ...modelConfig.info, diff --git a/src/api/providers/fetchers/__tests__/openrouter.spec.ts b/src/api/providers/fetchers/__tests__/openrouter.spec.ts index adac95f728c6..667e0c7fb8b6 100644 --- a/src/api/providers/fetchers/__tests__/openrouter.spec.ts +++ b/src/api/providers/fetchers/__tests__/openrouter.spec.ts @@ -35,6 +35,7 @@ describe.skip("OpenRouter API", () => { "google/gemini-2.5-flash", // OpenRouter doesn't report this as supporting prompt caching "google/gemini-2.5-flash-lite-preview-06-17", // OpenRouter doesn't report this as supporting prompt caching "anthropic/claude-opus-4.1", // Not yet available in OpenRouter API + "anthropic/claude-sonnet-4.5", // Not yet available in OpenRouter API ]) const ourCachingModels = Array.from(OPEN_ROUTER_PROMPT_CACHING_MODELS).filter( @@ -53,6 +54,7 @@ describe.skip("OpenRouter API", () => { const excludedComputerUseModels = new Set([ "anthropic/claude-opus-4.1", // Not yet available in OpenRouter API + "anthropic/claude-sonnet-4.5", // Not yet available in OpenRouter API ]) const expectedComputerUseModels = Array.from(OPEN_ROUTER_COMPUTER_USE_MODELS) @@ -135,6 +137,7 @@ describe.skip("OpenRouter API", () => { "google/gemini-2.5-flash-lite-preview-06-17", 
"google/gemini-2.5-pro", "anthropic/claude-opus-4.1", // Not yet available in OpenRouter API + "anthropic/claude-sonnet-4.5", // Not yet available in OpenRouter API ]) const expectedReasoningBudgetModels = Array.from(OPEN_ROUTER_REASONING_BUDGET_MODELS) diff --git a/webview-ui/src/components/settings/providers/Anthropic.tsx b/webview-ui/src/components/settings/providers/Anthropic.tsx index ede2b902086c..feef788d49ea 100644 --- a/webview-ui/src/components/settings/providers/Anthropic.tsx +++ b/webview-ui/src/components/settings/providers/Anthropic.tsx @@ -22,7 +22,8 @@ export const Anthropic = ({ apiConfiguration, setApiConfigurationField }: Anthro const [anthropicBaseUrlSelected, setAnthropicBaseUrlSelected] = useState(!!apiConfiguration?.anthropicBaseUrl) // Check if the current model supports 1M context beta - const supports1MContextBeta = selectedModel?.id === "claude-sonnet-4-20250514" + const supports1MContextBeta = + selectedModel?.id === "claude-sonnet-4-20250514" || selectedModel?.id === "claude-sonnet-4-5" const handleInputChange = useCallback( ( diff --git a/webview-ui/src/components/settings/providers/Bedrock.tsx b/webview-ui/src/components/settings/providers/Bedrock.tsx index 6c871e570a4e..1b3143fa0831 100644 --- a/webview-ui/src/components/settings/providers/Bedrock.tsx +++ b/webview-ui/src/components/settings/providers/Bedrock.tsx @@ -2,12 +2,7 @@ import { useCallback, useState, useEffect } from "react" import { Checkbox } from "vscrui" import { VSCodeTextField } from "@vscode/webview-ui-toolkit/react" -import { - type ProviderSettings, - type ModelInfo, - BEDROCK_REGIONS, - BEDROCK_CLAUDE_SONNET_4_MODEL_ID, -} from "@roo-code/types" +import { type ProviderSettings, type ModelInfo, BEDROCK_REGIONS, BEDROCK_1M_CONTEXT_MODEL_IDS } from "@roo-code/types" import { useAppTranslation } from "@src/i18n/TranslationContext" import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue, StandardTooltip } from "@src/components/ui" @@ -24,8 +19,9 @@ 
export const Bedrock = ({ apiConfiguration, setApiConfigurationField, selectedMo const { t } = useAppTranslation() const [awsEndpointSelected, setAwsEndpointSelected] = useState(!!apiConfiguration?.awsBedrockEndpointEnabled) - // Check if the selected model supports 1M context (Claude Sonnet 4) - const supports1MContextBeta = apiConfiguration?.apiModelId === BEDROCK_CLAUDE_SONNET_4_MODEL_ID + // Check if the selected model supports 1M context (Claude Sonnet 4 / 4.5) + const supports1MContextBeta = + !!apiConfiguration?.apiModelId && BEDROCK_1M_CONTEXT_MODEL_IDS.includes(apiConfiguration.apiModelId as any) // Update the endpoint enabled state when the configuration changes useEffect(() => { diff --git a/webview-ui/src/components/ui/hooks/useSelectedModel.ts b/webview-ui/src/components/ui/hooks/useSelectedModel.ts index a79875be720b..f4610c6e612f 100644 --- a/webview-ui/src/components/ui/hooks/useSelectedModel.ts +++ b/webview-ui/src/components/ui/hooks/useSelectedModel.ts @@ -59,7 +59,7 @@ import { qwenCodeDefaultModelId, qwenCodeModels, vercelAiGatewayDefaultModelId, - BEDROCK_CLAUDE_SONNET_4_MODEL_ID, + BEDROCK_1M_CONTEXT_MODEL_IDS, deepInfraDefaultModelId, } from "@roo-code/types" @@ -231,8 +231,8 @@ function getSelectedModel({ } } - // Apply 1M context for Claude Sonnet 4 when enabled - if (id === BEDROCK_CLAUDE_SONNET_4_MODEL_ID && apiConfiguration.awsBedrock1MContext && baseInfo) { + // Apply 1M context for Claude Sonnet 4 / 4.5 when enabled + if (BEDROCK_1M_CONTEXT_MODEL_IDS.includes(id as any) && apiConfiguration.awsBedrock1MContext && baseInfo) { // Create a new ModelInfo object with updated context window const info: ModelInfo = { ...baseInfo, diff --git a/webview-ui/src/hooks/__tests__/useZgsmUserInfo.spec.ts b/webview-ui/src/hooks/__tests__/useZgsmUserInfo.spec.ts index 5dab03cc57fa..a0d3d0f286b7 100644 --- a/webview-ui/src/hooks/__tests__/useZgsmUserInfo.spec.ts +++ b/webview-ui/src/hooks/__tests__/useZgsmUserInfo.spec.ts @@ -88,7 +88,7 @@ 
describe("useZgsmUserInfo", () => { expect(result.current.userInfo).toEqual({ id: "user123", name: "testuser", - picture: undefined, + picture: "https://example.com/avatar.png", email: "test@example.com", phone: "1234567890", organizationName: "Test Org",