Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 36 additions & 0 deletions chat_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,42 @@ func TestChatCompletions(t *testing.T) {
checks.NoError(t, err, "CreateChatCompletion error")
}

// TestAzureChatCompletions verifies that a chat completion request succeeds
// against a test server configured with the default Azure deployment routing.
func TestAzureChatCompletions(t *testing.T) {
	client, server, teardown := setupAzureTestServer()
	defer teardown()
	// Azure routes requests under /openai/deployments/<deployment>; register
	// a wildcard so any deployment name reaches the completion handler.
	server.RegisterHandler("/openai/deployments/*", handleChatCompletionEndpoint)

	req := ChatCompletionRequest{
		MaxTokens: 5,
		Model:     GPT3Dot5Turbo,
		Messages: []ChatCompletionMessage{
			{Role: ChatMessageRoleUser, Content: "Hello!"},
		},
	}
	_, err := client.CreateChatCompletion(context.Background(), req)
	checks.NoError(t, err, "CreateAzureChatCompletion error")
}

// TestAzureChatCompletionsWithCustomDeploymentName verifies that a chat
// completion request succeeds when the client maps model names to custom
// Azure deployment names.
func TestAzureChatCompletionsWithCustomDeploymentName(t *testing.T) {
	client, server, teardown := setupAzureTestServerWithCustomDeploymentName()
	defer teardown()
	// Wildcard route: the custom-mapped deployment name is part of the path.
	server.RegisterHandler("/openai/deployments/*", handleChatCompletionEndpoint)

	req := ChatCompletionRequest{
		MaxTokens: 5,
		Model:     GPT3Dot5Turbo,
		Messages: []ChatCompletionMessage{
			{Role: ChatMessageRoleUser, Content: "Hello!"},
		},
	}
	_, err := client.CreateChatCompletion(context.Background(), req)
	checks.NoError(t, err, "CreateAzureChatCompletionWithCustomDeploymentName error")
}

// handleChatCompletionEndpoint Handles the ChatGPT completion endpoint by the test server.
func handleChatCompletionEndpoint(w http.ResponseWriter, r *http.Request) {
var err error
Expand Down
14 changes: 9 additions & 5 deletions internal/test/server.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import (
"log"
"net/http"
"net/http/httptest"
"regexp"
)

const testAPI = "this-is-my-secure-token-do-not-steal!!"
Expand Down Expand Up @@ -36,11 +37,14 @@ func (ts *ServerTest) OpenAITestServer() *httptest.Server {
return
}

handlerCall, ok := ts.handlers[r.URL.Path]
if !ok {
http.Error(w, "the resource path doesn't exist", http.StatusNotFound)
return
// Handle /path/* routes.
for route, handler := range ts.handlers {
pattern, _ := regexp.Compile(route)
if pattern.MatchString(r.URL.Path) {
handler(w, r)
return
}
}
handlerCall(w, r)
http.Error(w, "the resource path doesn't exist", http.StatusNotFound)
}))
}
22 changes: 22 additions & 0 deletions openai_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -26,3 +26,25 @@ func setupAzureTestServer() (client *Client, server *test.ServerTest, teardown f
client = NewClientWithConfig(config)
return
}

// setupAzureTestServerWithCustomDeploymentName starts a local test server and
// returns a client configured for Azure with a custom model-to-deployment
// mapping. The caller must invoke teardown to stop the server.
func setupAzureTestServerWithCustomDeploymentName() (client *Client, server *test.ServerTest, teardown func()) {
	server = test.NewTestServer()
	ts := server.OpenAITestServer()
	ts.Start()
	teardown = ts.Close
	config := DefaultAzureConfig(test.GetTestToken(), "https://dummylab.openai.azure.com/")
	config.BaseURL = ts.URL
	config.AzureModelMapperFunc = func(model string) string {
		azureModelMapping := map[string]string{
			"gpt-3.5-turbo":      "custom-gpt-3.5-turbo",
			"gpt-3.5-turbo-0301": "custom-gpt-3.5-turbo-03-01",
			"gpt-4":              "custom-gpt-4",
			"gpt-4-0314":         "custom-gpt-4-03-14",
			"gpt-4-32k":          "custom-gpt-4-32k",
			"gpt-4-32k-0314":     "custom-gpt-4-32k-03-14",
		}
		// Fall back to the original model name for unmapped models; the
		// previous bare map index returned "" on a miss, which would have
		// produced a malformed deployment URL.
		if deployment, ok := azureModelMapping[model]; ok {
			return deployment
		}
		return model
	}
	client = NewClientWithConfig(config)
	return
}