
Commit ae7427a

raulraja and Montagon authored
update openai client to 3.4.1 (#388)
Co-authored-by: José Carlos Montañez <[email protected]>
1 parent de40a7b commit ae7427a

2 files changed: +10 −14 lines changed


gradle/libs.versions.toml (+2 −2)

@@ -33,7 +33,7 @@ pdfbox = "3.0.0"
 mysql = "8.0.33"
 semverGradle = "0.5.0-rc.5"
 scala = "3.3.0"
-openai-client-version = "3.3.2"
+openai-client-version = "3.4.1"
 gpt4all-java = "1.1.5"
 ai-djl = "0.23.0"
 jackson = "2.15.2"
@@ -144,4 +144,4 @@ semver-gradle = { id="com.javiersc.semver", version.ref="semverGradle" }
 suspend-transform-plugin = { id="love.forte.plugin.suspend-transform", version.ref="suspend-transform" }
 resources = { id="com.goncalossilva.resources", version.ref="resources-kmp" }
 detekt = { id="io.gitlab.arturbosch.detekt", version.ref="detekt"}
-node-gradle = { id = "com.github.node-gradle.node", version.ref = "node-gradle" }
+node-gradle = { id = "com.github.node-gradle.node", version.ref = "node-gradle" }
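
For orientation, the bumped `openai-client-version` entry reaches the build through a library alias defined elsewhere in the catalog. A minimal sketch, assuming an alias along the lines of openai-client = { module = "com.aallam.openai:openai-client", version.ref = "openai-client-version" } (the alias name and module coordinates are assumptions, not part of this diff):

// build.gradle.kts (sketch; the libs.openai.client accessor follows from the assumed alias above)
dependencies {
  // After this commit the catalog resolves this dependency to 3.4.1 instead of 3.3.2.
  implementation(libs.openai.client)
}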

openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/OpenAIClient.kt (+8 −12)

@@ -1,6 +1,7 @@
 package com.xebia.functional.xef.conversation.llm.openai

 import com.aallam.openai.api.BetaOpenAI
+import com.aallam.openai.api.LegacyOpenAI
 import com.aallam.openai.api.chat.*
 import com.aallam.openai.api.chat.ChatChunk as OpenAIChatChunk
 import com.aallam.openai.api.chat.ChatCompletionChunk as OpenAIChatCompletionChunk
@@ -58,9 +59,10 @@ class OpenAIModel(
       headers = mapOf("Authorization" to " Bearer $openAI.token")
     )

+  @OptIn(LegacyOpenAI::class)
   override suspend fun createCompletion(request: CompletionRequest): CompletionResult {
     fun completionChoice(it: OpenAIChoice): CompletionChoice =
-      CompletionChoice(it.text, it.index, null, it.finishReason)
+      CompletionChoice(it.text, it.index, null, it.finishReason.value)

     val response = client.completion(toCompletionRequest(request))
     return CompletionResult(
@@ -73,7 +75,6 @@ class OpenAIModel(
     )
   }

-  @OptIn(BetaOpenAI::class)
   override suspend fun createChatCompletion(
     request: ChatCompletionRequest
   ): ChatCompletionResponse {
@@ -86,8 +87,8 @@ class OpenAIModel(

     fun toChoice(choice: ChatChoice): Choice =
       Choice(
-        message = choice.message?.let { chatMessage(it) },
-        finishReason = choice.finishReason,
+        message = chatMessage(choice.message),
+        finishReason = choice.finishReason.value,
         index = choice.index,
       )

@@ -102,7 +103,6 @@ class OpenAIModel(
     )
   }

-  @OptIn(BetaOpenAI::class)
   override suspend fun createChatCompletions(
     request: ChatCompletionRequest
   ): Flow<ChatCompletionChunk> {
@@ -114,7 +114,7 @@ class OpenAIModel(
     )

     fun chatChunk(chunk: OpenAIChatChunk): ChatChunk =
-      ChatChunk(chunk.index, chunk.delta?.let { chatDelta(it) }, chunk.finishReason)
+      ChatChunk(chunk.index, chatDelta(chunk.delta), chunk.finishReason?.value)

     fun chatCompletionChunk(response: OpenAIChatCompletionChunk): ChatCompletionChunk =
       ChatCompletionChunk(
@@ -128,7 +128,6 @@ class OpenAIModel(
     return client.chatCompletions(toChatCompletionRequest(request)).map { chatCompletionChunk(it) }
   }

-  @OptIn(BetaOpenAI::class)
   override suspend fun createChatCompletionWithFunctions(
     request: ChatCompletionRequest
   ): ChatCompletionResponseWithFunctions {
@@ -169,8 +168,8 @@ class OpenAIModel(

     fun choiceWithFunctions(choice: ChatChoice): ChoiceWithFunctions =
       ChoiceWithFunctions(
-        message = choice.message?.let { fromOpenAI(it) },
-        finishReason = choice.finishReason,
+        message = fromOpenAI(choice.message),
+        finishReason = choice.finishReason.value,
         index = choice.index,
       )

@@ -243,7 +242,6 @@ class OpenAIModel(
       totalTokens = usage?.totalTokens,
     )

-  @OptIn(BetaOpenAI::class)
   private fun toRole(it: ChatRole?) =
     when (it) {
       ChatRole.User -> Role.USER
@@ -253,15 +251,13 @@ class OpenAIModel(
       else -> Role.ASSISTANT
     }

-  @OptIn(BetaOpenAI::class)
   private fun fromRole(it: Role) =
     when (it) {
       Role.USER -> ChatRole.User
       Role.ASSISTANT -> ChatRole.Assistant
       Role.SYSTEM -> ChatRole.System
     }

-  @OptIn(BetaOpenAI::class)
   private fun toChatCompletionRequest(request: ChatCompletionRequest): OpenAIChatCompletionRequest =
     chatCompletionRequest {
       model = ModelId(request.model)
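
Taken together, the Kotlin hunks track the openai-kotlin 3.3.2 → 3.4.1 API changes: the chat endpoints no longer require @OptIn(BetaOpenAI::class), the plain completions endpoint is now gated behind LegacyOpenAI, choice.message and chunk.delta are no longer nullable, and finishReason is a wrapper type whose string is read through .value. A minimal sketch of the new finish-reason shapes, using only the accessors visible in the diff above (the helper names are illustrative, and the String return types are an assumption based on how the values are passed to xef's Choice/ChatChunk):

// Sketch under the assumptions stated above; not part of the commit.
import com.aallam.openai.api.chat.ChatChoice
import com.aallam.openai.api.chat.ChatChunk

// Non-streaming choices: finishReason is non-null in 3.4.1, so .value can be read directly.
fun finishReasonOf(choice: ChatChoice): String = choice.finishReason.value

// Streaming chunks: finishReason is typically only set on the final chunk, hence the safe call.
fun finishReasonOf(chunk: ChatChunk): String? = chunk.finishReason?.value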

0 commit comments

Comments
 (0)