From acac0bf692821e7ba1a56b7d9c0b294066d7347c Mon Sep 17 00:00:00 2001
From: Benson Wong
Date: Thu, 1 Jan 2026 12:46:44 -0800
Subject: [PATCH] proxy: add /v1/responses and /v1/audio/voices endpoints

---
 README.md             | 2 ++
 proxy/proxymanager.go | 2 ++
 2 files changed, 4 insertions(+)

diff --git a/README.md b/README.md
index 2a0b5c3e..22add2bd 100644
--- a/README.md
+++ b/README.md
@@ -18,9 +18,11 @@ Built in Go for performance and simplicity, llama-swap has zero dependencies and
 - ✅ OpenAI API supported endpoints:
   - `v1/completions`
   - `v1/chat/completions`
+  - `v1/responses`
   - `v1/embeddings`
   - `v1/audio/speech` ([#36](https://github.com/mostlygeek/llama-swap/issues/36))
   - `v1/audio/transcriptions` ([docs](https://github.com/mostlygeek/llama-swap/issues/41#issuecomment-2722637867))
+  - `v1/audio/voices`
   - `v1/images/generations`
   - `v1/images/edits`
 - ✅ Anthropic API supported endpoints:
diff --git a/proxy/proxymanager.go b/proxy/proxymanager.go
index 37c17091..f94ba408 100644
--- a/proxy/proxymanager.go
+++ b/proxy/proxymanager.go
@@ -299,6 +299,8 @@ func (pm *ProxyManager) setupGinEngine() {
 
+	pm.ginEngine.POST("/v1/responses", pm.apiKeyAuth(), pm.proxyInferenceHandler)
 	// Support audio/speech endpoint
 	pm.ginEngine.POST("/v1/audio/speech", pm.apiKeyAuth(), pm.proxyInferenceHandler)
+	pm.ginEngine.POST("/v1/audio/voices", pm.apiKeyAuth(), pm.proxyInferenceHandler)
 	pm.ginEngine.POST("/v1/audio/transcriptions", pm.apiKeyAuth(), pm.proxyOAIPostFormHandler)
 	pm.ginEngine.POST("/v1/images/generations", pm.apiKeyAuth(), pm.proxyInferenceHandler)
 	pm.ginEngine.POST("/v1/images/edits", pm.apiKeyAuth(), pm.proxyOAIPostFormHandler)