249 changes: 222 additions & 27 deletions core/bifrost.go

Large diffs are not rendered by default.

55 changes: 55 additions & 0 deletions core/custom_provider_context_test.go
@@ -0,0 +1,55 @@
package bifrost

import (
	"context"
	"testing"

	schemas "github.com/maximhq/bifrost/core/schemas"
)

func TestSetProviderContextMetadata_MarksCustomProvider(t *testing.T) {
	ctx := schemas.NewBifrostContext(context.Background(), schemas.NoDeadline)
	config := &schemas.ProviderConfig{
		CustomProviderConfig: &schemas.CustomProviderConfig{
			CustomProviderKey:    "lmstudio",
			BaseProviderType:     schemas.OpenAI,
			SupportsResponsesAPI: schemas.Ptr(false),
		},
	}

	setProviderContextMetadata(ctx, config)

	isCustomProvider, ok := ctx.Value(schemas.BifrostContextKeyIsCustomProvider).(bool)
	if !ok || !isCustomProvider {
		t.Fatalf("expected custom provider flag to be true, got %v", ctx.Value(schemas.BifrostContextKeyIsCustomProvider))
	}
	metadata, ok := schemas.GetCustomProviderContextMetadata(ctx)
	if !ok || metadata == nil {
		t.Fatal("expected custom provider metadata to be stored in context")
	}
	if metadata.ProviderKey != "lmstudio" {
		t.Fatalf("expected custom provider key lmstudio, got %s", metadata.ProviderKey)
	}
	if metadata.BaseProviderType != schemas.OpenAI {
		t.Fatalf("expected base provider type openai, got %s", metadata.BaseProviderType)
	}
	if metadata.SupportsResponsesAPI == nil || *metadata.SupportsResponsesAPI {
		t.Fatalf("expected supports_responses_api=false metadata, got %+v", metadata.SupportsResponsesAPI)
	}
}

func TestSetProviderContextMetadata_ClearsCustomProviderValues(t *testing.T) {
	ctx := schemas.NewBifrostContext(context.Background(), schemas.NoDeadline)
	ctx.SetValue(schemas.BifrostContextKeyIsCustomProvider, true)
	ctx.SetValue(schemas.BifrostContextKeyCustomProviderMetadata, &schemas.CustomProviderContextMetadata{ProviderKey: "lmstudio"})

	setProviderContextMetadata(ctx, &schemas.ProviderConfig{})

	isCustomProvider, ok := ctx.Value(schemas.BifrostContextKeyIsCustomProvider).(bool)
	if !ok || isCustomProvider {
		t.Fatalf("expected custom provider flag to be false, got %v", ctx.Value(schemas.BifrostContextKeyIsCustomProvider))
	}
	if metadata := ctx.Value(schemas.BifrostContextKeyCustomProviderMetadata); metadata != nil {
		t.Fatalf("expected custom provider metadata to be cleared, got %+v", metadata)
	}
}
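
For reference, the two tests above pin down the contract of setProviderContextMetadata, whose implementation lives in the core/bifrost.go diff that is not rendered here. The sketch below is illustrative only: it reuses the context keys, schema types, and field names that appear in the tests, assumes the same package and imports as the test file, and is not the actual helper from core/bifrost.go.

// Illustrative sketch of the contract exercised by the tests above; the real
// helper is defined in core/bifrost.go and may differ in detail.
func setProviderContextMetadataSketch(ctx *schemas.BifrostContext, config *schemas.ProviderConfig) {
	if config != nil && config.CustomProviderConfig != nil {
		// Custom provider: mark the context and store the metadata.
		ctx.SetValue(schemas.BifrostContextKeyIsCustomProvider, true)
		ctx.SetValue(schemas.BifrostContextKeyCustomProviderMetadata, &schemas.CustomProviderContextMetadata{
			ProviderKey:          config.CustomProviderConfig.CustomProviderKey,
			BaseProviderType:     config.CustomProviderConfig.BaseProviderType,
			SupportsResponsesAPI: config.CustomProviderConfig.SupportsResponsesAPI,
		})
		return
	}
	// Standard provider: clear any stale custom-provider values.
	ctx.SetValue(schemas.BifrostContextKeyIsCustomProvider, false)
	ctx.SetValue(schemas.BifrostContextKeyCustomProviderMetadata, nil)
}
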
23 changes: 17 additions & 6 deletions core/providers/openai/openai.go
@@ -93,6 +93,8 @@ func (provider *OpenAIProvider) buildRequestURL(ctx *schemas.BifrostContext, def
	return provider.networkConfig.BaseURL + path
}

+
+
func (provider *OpenAIProvider) ListModels(ctx *schemas.BifrostContext, keys []schemas.Key, request *schemas.BifrostListModelsRequest) (*schemas.BifrostListModelsResponse, *schemas.BifrostError) {
	if err := providerUtils.CheckOperationAllowed(schemas.OpenAI, provider.customProviderConfig, schemas.ListModelsRequest); err != nil {
		return nil, err
@@ -730,8 +732,10 @@ func HandleOpenAITextCompletionStreaming(
// Returns a BifrostResponse containing the completion results or an error if the request fails.
func (provider *OpenAIProvider) ChatCompletion(ctx *schemas.BifrostContext, key schemas.Key, request *schemas.BifrostChatRequest) (*schemas.BifrostChatResponse, *schemas.BifrostError) {
	// Check if chat completion is allowed for this provider
-	if err := providerUtils.CheckOperationAllowed(schemas.OpenAI, provider.customProviderConfig, schemas.ChatCompletionRequest); err != nil {
-		return nil, err
+	if !schemas.ShouldSkipOperationCheck(ctx) {
+		if err := providerUtils.CheckOperationAllowed(schemas.OpenAI, provider.customProviderConfig, schemas.ChatCompletionRequest); err != nil {
+			return nil, err
+		}
	}

	if provider.disableStore {
@@ -895,8 +899,10 @@ func HandleOpenAIChatCompletionRequest(
// Returns a channel for streaming responses and any error that occurred.
func (provider *OpenAIProvider) ChatCompletionStream(ctx *schemas.BifrostContext, postHookRunner schemas.PostHookRunner, postHookSpanFinalizer func(context.Context), key schemas.Key, request *schemas.BifrostChatRequest) (chan *schemas.BifrostStreamChunk, *schemas.BifrostError) {
	// Check if chat completion stream is allowed for this provider
-	if err := providerUtils.CheckOperationAllowed(schemas.OpenAI, provider.customProviderConfig, schemas.ChatCompletionStreamRequest); err != nil {
-		return nil, err
+	if !schemas.ShouldSkipOperationCheck(ctx) {
+		if err := providerUtils.CheckOperationAllowed(schemas.OpenAI, provider.customProviderConfig, schemas.ChatCompletionStreamRequest); err != nil {
+			return nil, err
+		}
	}
	var authHeader map[string]string
	if key.Value.GetValue() != "" {
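
Both ChatCompletion and ChatCompletionStream now gate the per-operation allowance check behind schemas.ShouldSkipOperationCheck(ctx). That helper's implementation is not shown in this diff; the sketch below, under a hypothetical name, is one plausible reading assuming it simply consults the custom-provider flag set by setProviderContextMetadata.

// Hypothetical sketch only; the real ShouldSkipOperationCheck lives in the
// schemas package and may consult a different key or additional state.
func shouldSkipOperationCheckSketch(ctx *schemas.BifrostContext) bool {
	// Assumption: custom providers are validated centrally in core/bifrost.go,
	// so the provider-level check can be skipped when this flag is set.
	skip, ok := ctx.Value(schemas.BifrostContextKeyIsCustomProvider).(bool)
	return ok && skip
}
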
@@ -1352,7 +1358,7 @@ func (provider *OpenAIProvider) Responses(ctx *schemas.BifrostContext, key schem
		request.Params.Store = schemas.Ptr(false)
	}

-	return HandleOpenAIResponsesRequest(
+	response, err := HandleOpenAIResponsesRequest(
		ctx,
		provider.client,
		provider.buildRequestURL(ctx, "/v1/responses", schemas.ResponsesRequest),
@@ -1366,6 +1372,8 @@ func (provider *OpenAIProvider) Responses(ctx *schemas.BifrostContext, key schem
		nil,
		provider.logger,
	)
+
+	return response, err
}

// HandleOpenAIResponsesRequest handles a responses request to OpenAI's API.
@@ -1507,6 +1515,7 @@ func (provider *OpenAIProvider) ResponsesStream(ctx *schemas.BifrostContext, pos
	if err := providerUtils.CheckOperationAllowed(schemas.OpenAI, provider.customProviderConfig, schemas.ResponsesStreamRequest); err != nil {
		return nil, err
	}
+
	var authHeader map[string]string
	if key.Value.GetValue() != "" {
		authHeader = map[string]string{"Authorization": "Bearer " + key.Value.GetValue()}
@@ -1519,7 +1528,7 @@ func (provider *OpenAIProvider) ResponsesStream(ctx *schemas.BifrostContext, pos
	}

	// Use shared streaming logic
-	return HandleOpenAIResponsesStreaming(
+	streamChan, err := HandleOpenAIResponsesStreaming(
		ctx,
		provider.streamingClient,
		provider.buildRequestURL(ctx, "/v1/responses", schemas.ResponsesStreamRequest),
@@ -1537,6 +1546,8 @@ func (provider *OpenAIProvider) ResponsesStream(ctx *schemas.BifrostContext, pos
		provider.logger,
		postHookSpanFinalizer,
	)
+
+	return streamChan, err
}

// HandleOpenAIResponsesStreaming handles streaming for OpenAI-compatible APIs.