diff --git a/README.md b/README.md
index 2a0b5c3e..22add2bd 100644
--- a/README.md
+++ b/README.md
@@ -18,9 +18,11 @@ Built in Go for performance and simplicity, llama-swap has zero dependencies and
 - ✅ OpenAI API supported endpoints:
   - `v1/completions`
   - `v1/chat/completions`
+  - `v1/responses`
   - `v1/embeddings`
   - `v1/audio/speech` ([#36](https://github.com/mostlygeek/llama-swap/issues/36))
   - `v1/audio/transcriptions` ([docs](https://github.com/mostlygeek/llama-swap/issues/41#issuecomment-2722637867))
+  - `v1/audio/voices`
   - `v1/images/generations`
   - `v1/images/edits`
 - ✅ Anthropic API supported endpoints:
diff --git a/proxy/proxymanager.go b/proxy/proxymanager.go
index 37c17091..f94ba408 100644
--- a/proxy/proxymanager.go
+++ b/proxy/proxymanager.go
@@ -299,6 +299,7 @@ func (pm *ProxyManager) setupGinEngine() {
 
 	// Support audio/speech endpoint
 	pm.ginEngine.POST("/v1/audio/speech", pm.apiKeyAuth(), pm.proxyInferenceHandler)
+	pm.ginEngine.POST("/v1/audio/voices", pm.apiKeyAuth(), pm.proxyInferenceHandler)
 	pm.ginEngine.POST("/v1/audio/transcriptions", pm.apiKeyAuth(), pm.proxyOAIPostFormHandler)
 	pm.ginEngine.POST("/v1/images/generations", pm.apiKeyAuth(), pm.proxyInferenceHandler)
 	pm.ginEngine.POST("/v1/images/edits", pm.apiKeyAuth(), pm.proxyOAIPostFormHandler)