Skip to content

Commit

Permalink
feat(chat): add stream options (#363)
Browse files Browse the repository at this point in the history
  • Loading branch information
aallam authored Jun 23, 2024
1 parent e133f28 commit 0ee50e9
Show file tree
Hide file tree
Showing 4 changed files with 96 additions and 0 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
# Unreleased

### Added
- **chat**: add stream options (#363)

# 3.8.1
> Published 28 Jun 2024
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -203,4 +203,40 @@ class TestChatCompletions : TestOpenAI() {

assertTrue(job.isCancelled, "Job should be cancelled")
}

@Test
fun streamOptions() = test {
val request = chatCompletionRequest {
model = ModelId("gpt-3.5-turbo")
messages {
message {
role = ChatRole.System
content = "You are a helpful assistant.!"
}
message {
role = ChatRole.User
content = "Who won the world series in 2020?"
}
message {
role = ChatRole.Assistant
content = "The Los Angeles Dodgers won the World Series in 2020."
}
message {
role = ChatRole.User
content = "Where was it played?"
}
}
streamOptions = streamOptions {
includeUsage = true
}
}

val results = mutableListOf<ChatCompletionChunk>()
openAI.chatCompletions(request).onEach { results += it }.launchIn(this).join()

assertNotNull(results.last().usage)
assertNotNull(results.last().usage?.promptTokens)
assertNotNull(results.last().usage?.completionTokens)
assertNotNull(results.last().usage?.totalTokens)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -163,6 +163,11 @@ public class ChatCompletionRequest(
* A unique identifier representing the Multi LORA reserved instance.
*/
@SerialName("instance_id") public val instanceId: String? = null,

/**
* Options for streaming response. Only used when in streaming mode.
*/
@SerialName("stream_options") public val streamOptions: StreamOptions? = null
)

/**
Expand Down Expand Up @@ -338,6 +343,11 @@ public class ChatCompletionRequestBuilder {
functions = FunctionsBuilder().apply(block).functions
}

/**
* Options for streaming response. Only used when in streaming mode.
*/
public var streamOptions: StreamOptions? = null

/**
* Builder of [ChatCompletionRequest] instances.
*/
Expand All @@ -361,6 +371,7 @@ public class ChatCompletionRequestBuilder {
logprobs = logprobs,
topLogprobs = topLogprobs,
instanceId = instanceId,
streamOptions = streamOptions,
)
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
package com.aallam.openai.api.chat

import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable

/**
 * Options for streaming response. Only used when `stream` mode is enabled on the request;
 * serialized as the `stream_options` field of the chat completion request.
 */
@Serializable
public data class StreamOptions(
    /**
     * If set, an additional chunk will be streamed before the data: `[DONE]` message.
     * The usage field on this chunk shows the token usage statistics for the entire request, and the choices field will
     * always be an empty array. All other chunks will also include a usage field, but with a null value.
     */
    @SerialName("include_usage") public val includeUsage: Boolean? = null,
)

/**
 * Builds a [StreamOptions] instance using the [StreamOptionsBuilder] DSL.
 *
 * @param block configuration applied to a fresh [StreamOptionsBuilder].
 */
public fun streamOptions(block: StreamOptionsBuilder.() -> Unit): StreamOptions =
    StreamOptionsBuilder().apply(block).build()

/**
 * Mutable DSL builder producing immutable [StreamOptions] values.
 */
public class StreamOptionsBuilder {

    /**
     * If set, an additional chunk will be streamed before the data: `[DONE]` message.
     * The usage field on this chunk shows the token usage statistics for the entire request, and the choices field will
     * always be an empty array. All other chunks will also include a usage field, but with a null value.
     */
    public var includeUsage: Boolean? = null

    /**
     * Creates a [StreamOptions] from the current builder state.
     */
    public fun build(): StreamOptions = StreamOptions(includeUsage = includeUsage)
}

0 comments on commit 0ee50e9

Please sign in to comment.