
Commit b095938

extract and split integration tests (#389)
1 parent e49d771 commit b095938

File tree

5 files changed (+357 −353 lines)


api_integration_test.go (+136 lines, new file)
@@ -0,0 +1,136 @@
package openai_test

import (
	"context"
	"errors"
	"io"
	"os"
	"testing"

	. "github.com/sashabaranov/go-openai"
	"github.com/sashabaranov/go-openai/internal/test/checks"
)

func TestAPI(t *testing.T) {
	apiToken := os.Getenv("OPENAI_TOKEN")
	if apiToken == "" {
		t.Skip("Skipping testing against production OpenAI API. Set OPENAI_TOKEN environment variable to enable it.")
	}

	var err error
	c := NewClient(apiToken)
	ctx := context.Background()
	_, err = c.ListEngines(ctx)
	checks.NoError(t, err, "ListEngines error")

	_, err = c.GetEngine(ctx, "davinci")
	checks.NoError(t, err, "GetEngine error")

	fileRes, err := c.ListFiles(ctx)
	checks.NoError(t, err, "ListFiles error")

	if len(fileRes.Files) > 0 {
		_, err = c.GetFile(ctx, fileRes.Files[0].ID)
		checks.NoError(t, err, "GetFile error")
	} // else skip

	embeddingReq := EmbeddingRequest{
		Input: []string{
			"The food was delicious and the waiter",
			"Other examples of embedding request",
		},
		Model: AdaSearchQuery,
	}
	_, err = c.CreateEmbeddings(ctx, embeddingReq)
	checks.NoError(t, err, "Embedding error")

	_, err = c.CreateChatCompletion(
		ctx,
		ChatCompletionRequest{
			Model: GPT3Dot5Turbo,
			Messages: []ChatCompletionMessage{
				{
					Role:    ChatMessageRoleUser,
					Content: "Hello!",
				},
			},
		},
	)

	checks.NoError(t, err, "CreateChatCompletion (without name) returned error")

	_, err = c.CreateChatCompletion(
		ctx,
		ChatCompletionRequest{
			Model: GPT3Dot5Turbo,
			Messages: []ChatCompletionMessage{
				{
					Role:    ChatMessageRoleUser,
					Name:    "John_Doe",
					Content: "Hello!",
				},
			},
		},
	)
	checks.NoError(t, err, "CreateChatCompletion (with name) returned error")

	stream, err := c.CreateCompletionStream(ctx, CompletionRequest{
		Prompt:    "Ex falso quodlibet",
		Model:     GPT3Ada,
		MaxTokens: 5,
		Stream:    true,
	})
	checks.NoError(t, err, "CreateCompletionStream returned error")
	defer stream.Close()

	counter := 0
	for {
		_, err = stream.Recv()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			t.Errorf("Stream error: %v", err)
		} else {
			counter++
		}
	}
	if counter == 0 {
		t.Error("Stream did not return any responses")
	}
}

func TestAPIError(t *testing.T) {
	apiToken := os.Getenv("OPENAI_TOKEN")
	if apiToken == "" {
		t.Skip("Skipping testing against production OpenAI API. Set OPENAI_TOKEN environment variable to enable it.")
	}

	var err error
	c := NewClient(apiToken + "_invalid")
	ctx := context.Background()
	_, err = c.ListEngines(ctx)
	checks.HasError(t, err, "ListEngines should fail with an invalid key")

	var apiErr *APIError
	if !errors.As(err, &apiErr) {
		t.Fatalf("Error is not an APIError: %+v", err)
	}

	if apiErr.HTTPStatusCode != 401 {
		t.Fatalf("Unexpected API error status code: %d", apiErr.HTTPStatusCode)
	}

	switch v := apiErr.Code.(type) {
	case string:
		if v != "invalid_api_key" {
			t.Fatalf("Unexpected API error code: %s", v)
		}
	default:
		t.Fatalf("Unexpected API error code type: %T", v)
	}

	if apiErr.Error() == "" {
		t.Fatal("Empty error message occurred")
	}
}
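
Aside (not part of the diff): the errors.As pattern exercised by TestAPIError is the same one a caller of the library can use to inspect failures. Below is a minimal sketch of that pattern; the classifyErr helper, the "invalid-token" string, and the main wrapper are illustrative only and do not appear in this commit.

package main

import (
	"context"
	"errors"
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

// classifyErr is a hypothetical helper that unwraps the library's *APIError,
// mirroring the errors.As / HTTPStatusCode / Code checks in TestAPIError.
func classifyErr(err error) string {
	var apiErr *openai.APIError
	if errors.As(err, &apiErr) {
		return fmt.Sprintf("API error: status=%d code=%v", apiErr.HTTPStatusCode, apiErr.Code)
	}
	return fmt.Sprintf("non-API error: %v", err)
}

func main() {
	// An intentionally bad token, analogous to apiToken + "_invalid" in the test.
	c := openai.NewClient("invalid-token")
	if _, err := c.ListEngines(context.Background()); err != nil {
		// With a bad key the test expects a 401 with code "invalid_api_key".
		fmt.Println(classifyErr(err))
	}
}

As in the tests above, such a program only reaches the production API when a real (or deliberately invalid) token is supplied; the integration tests themselves skip entirely unless OPENAI_TOKEN is set.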

0 commit comments