From ac668b7877df295a38c2806ffb4cdf076b6389d7 Mon Sep 17 00:00:00 2001 From: Serg Krivoblotsky Date: Tue, 18 Apr 2023 16:43:28 +0300 Subject: [PATCH 01/15] Add streaming session and ability to use streaming --- Sources/OpenAI/OpenAI.swift | 28 ++++++- Sources/OpenAI/Private/StreamingSession.swift | 73 +++++++++++++++++++ .../OpenAI/Private/URLSessionProtocol.swift | 5 ++ Sources/OpenAI/Public/Models/ChatResult.swift | 15 +++- Tests/OpenAITests/Mocks/URLSessionMock.swift | 4 + 5 files changed, 120 insertions(+), 5 deletions(-) create mode 100644 Sources/OpenAI/Private/StreamingSession.swift diff --git a/Sources/OpenAI/OpenAI.swift b/Sources/OpenAI/OpenAI.swift index 4a89f3b1..0f1424f9 100644 --- a/Sources/OpenAI/OpenAI.swift +++ b/Sources/OpenAI/OpenAI.swift @@ -35,6 +35,7 @@ final public class OpenAI: OpenAIProtocol { } private let session: URLSessionProtocol + private var streamingSessions: [NSObject] = [] public let configuration: Configuration @@ -64,7 +65,11 @@ final public class OpenAI: OpenAIProtocol { } public func chats(query: ChatQuery, completion: @escaping (Result) -> Void) { - performRequest(request: JSONRequest(body: query, url: buildURL(path: .chats)), completion: completion) + if query.stream == true { + performSteamingRequest(request: JSONRequest(body: query, url: buildURL(path: .chats)), completion: completion) + } else { + performRequest(request: JSONRequest(body: query, url: buildURL(path: .chats)), completion: completion) + } } public func edits(query: EditsQuery, completion: @escaping (Result) -> Void) { @@ -127,7 +132,26 @@ extension OpenAI { task.resume() } catch { completion(.failure(error)) - return + } + } + + func performSteamingRequest(request: any URLRequestBuildable, completion: @escaping (Result) -> Void) { + do { + let request = try request.build(token: configuration.token, organizationIdentifier: configuration.organizationIdentifier, timeoutInterval: configuration.timeoutInterval) + let session = StreamingSession(urlRequest: 
request) + session.onReceiveContent = {_, object in + completion(.success(object)) + } + session.onProcessingError = {_, error in + completion(.failure(error)) + } + session.onComplete = { [weak self] object, error in + self?.streamingSessions.removeAll(where: { $0 == object }) + } + session.perform() + streamingSessions.append(session) + } catch { + completion(.failure(error)) } } } diff --git a/Sources/OpenAI/Private/StreamingSession.swift b/Sources/OpenAI/Private/StreamingSession.swift new file mode 100644 index 00000000..a2371ea7 --- /dev/null +++ b/Sources/OpenAI/Private/StreamingSession.swift @@ -0,0 +1,73 @@ +// +// File 2.swift +// +// +// Created by Sergii Kryvoblotskyi on 18/04/2023. +// + +import Foundation + +final class StreamingSession: NSObject, Identifiable, URLSessionDelegate, URLSessionDataDelegate { + + enum StreamingError: Error { + case unknownContent + case emptyContent + } + + var onReceiveContent: ((StreamingSession, ResultType) -> Void)? + var onProcessingError: ((StreamingSession, Error) -> Void)? + var onComplete: ((StreamingSession, Error?) -> Void)? + + private let streamingCompletionMarker = "[DONE]" + private let urlRequest: URLRequest + private lazy var urlSession: URLSession = { + let session = URLSession(configuration: .default, delegate: self, delegateQueue: nil) + return session + }() + + init(urlRequest: URLRequest) { + self.urlRequest = urlRequest + } + + func perform() { + self.urlSession + .dataTask(with: self.urlRequest) + .resume() + } + + func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) 
{ + onComplete?(self, error) + } + + func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) { + guard let stringContent = String(data: data, encoding: .utf8) else { + onProcessingError?(self, StreamingError.unknownContent) + return + } + let jsonObjects = stringContent + .components(separatedBy: "data:") + .filter { $0.isEmpty == false } + .map { $0.trimmingCharacters(in: .whitespacesAndNewlines) } + + guard jsonObjects.isEmpty == false, jsonObjects.first != streamingCompletionMarker else { + onProcessingError?(self, StreamingError.emptyContent) + return + } + jsonObjects.forEach { jsonContent in + guard jsonContent != streamingCompletionMarker else { + return + } + guard let jsonData = jsonContent.data(using: .utf8) else { + onProcessingError?(self, StreamingError.unknownContent) + return + } + do { + let decoder = JSONDecoder() + let object = try decoder.decode(ResultType.self, from: jsonData) + onReceiveContent?(self, object) + } catch { + onProcessingError?(self, error) + } + } + } +} diff --git a/Sources/OpenAI/Private/URLSessionProtocol.swift b/Sources/OpenAI/Private/URLSessionProtocol.swift index 145ea9d6..1aa936c4 100644 --- a/Sources/OpenAI/Private/URLSessionProtocol.swift +++ b/Sources/OpenAI/Private/URLSessionProtocol.swift @@ -13,10 +13,15 @@ import FoundationNetworking protocol URLSessionProtocol { func dataTask(with request: URLRequest, completionHandler: @escaping @Sendable (Data?, URLResponse?, Error?) -> Void) -> URLSessionDataTaskProtocol + func dataTask(with request: URLRequest) -> URLSessionDataTaskProtocol } extension URLSession: URLSessionProtocol { + func dataTask(with request: URLRequest) -> URLSessionDataTaskProtocol { + dataTask(with: request) as URLSessionDataTask + } + func dataTask(with request: URLRequest, completionHandler: @escaping @Sendable (Data?, URLResponse?, Error?) 
-> Void) -> URLSessionDataTaskProtocol { dataTask(with: request, completionHandler: completionHandler) as URLSessionDataTask } diff --git a/Sources/OpenAI/Public/Models/ChatResult.swift b/Sources/OpenAI/Public/Models/ChatResult.swift index ead8cb19..2aa6d263 100644 --- a/Sources/OpenAI/Public/Models/ChatResult.swift +++ b/Sources/OpenAI/Public/Models/ChatResult.swift @@ -7,16 +7,25 @@ import Foundation +//"delta":{"content":" read"}, + public struct ChatResult: Codable, Equatable { public struct Choice: Codable, Equatable { + public struct Delta: Codable, Equatable { + public let content: String? + public let role: Chat.Role? + } + public let index: Int - public let message: Chat - public let finishReason: String + public let message: Chat? + public let delta: Delta? + public let finishReason: String? enum CodingKeys: String, CodingKey { case index case message + case delta case finishReason = "finish_reason" } } @@ -38,7 +47,7 @@ public struct ChatResult: Codable, Equatable { public let created: TimeInterval public let model: Model public let choices: [Choice] - public let usage: Usage + public let usage: Usage? 
enum CodingKeys: String, CodingKey { case id diff --git a/Tests/OpenAITests/Mocks/URLSessionMock.swift b/Tests/OpenAITests/Mocks/URLSessionMock.swift index 23e39f2f..b3f5a228 100644 --- a/Tests/OpenAITests/Mocks/URLSessionMock.swift +++ b/Tests/OpenAITests/Mocks/URLSessionMock.swift @@ -19,4 +19,8 @@ class URLSessionMock: URLSessionProtocol { dataTask.completion = completionHandler return dataTask } + + func dataTask(with request: URLRequest) -> URLSessionDataTaskProtocol { + dataTask + } } From 37a5e73e425816e882eca6abf53a2e42a1cec5fe Mon Sep 17 00:00:00 2001 From: Serg Krivoblotsky Date: Tue, 18 Apr 2023 16:45:42 +0300 Subject: [PATCH 02/15] Fix tests --- Tests/OpenAITests/OpenAITests.swift | 6 +++--- Tests/OpenAITests/OpenAITestsCombine.swift | 6 +++--- Tests/OpenAITests/OpenAITestsDecoder.swift | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Tests/OpenAITests/OpenAITests.swift b/Tests/OpenAITests/OpenAITests.swift index 84d55555..e775269b 100644 --- a/Tests/OpenAITests/OpenAITests.swift +++ b/Tests/OpenAITests/OpenAITests.swift @@ -68,9 +68,9 @@ class OpenAITests: XCTestCase { .init(role: .user, content: "Who wrote Harry Potter?") ]) let chatResult = ChatResult(id: "id-12312", object: "foo", created: 100, model: .gpt3_5Turbo, choices: [ - .init(index: 0, message: .init(role: .system, content: "bar"), finishReason: "baz"), - .init(index: 0, message: .init(role: .user, content: "bar1"), finishReason: "baz1"), - .init(index: 0, message: .init(role: .assistant, content: "bar2"), finishReason: "baz2") + .init(index: 0, message: .init(role: .system, content: "bar"), delta: nil, finishReason: "baz"), + .init(index: 0, message: .init(role: .user, content: "bar1"), delta: nil, finishReason: "baz1"), + .init(index: 0, message: .init(role: .assistant, content: "bar2"), delta: nil, finishReason: "baz2") ], usage: .init(promptTokens: 100, completionTokens: 200, totalTokens: 300)) try self.stub(result: chatResult) diff --git 
a/Tests/OpenAITests/OpenAITestsCombine.swift b/Tests/OpenAITests/OpenAITestsCombine.swift index ca55da1d..0827bcf4 100644 --- a/Tests/OpenAITests/OpenAITestsCombine.swift +++ b/Tests/OpenAITests/OpenAITestsCombine.swift @@ -42,9 +42,9 @@ final class OpenAITestsCombine: XCTestCase { .init(role: .user, content: "Who wrote Harry Potter?") ]) let chatResult = ChatResult(id: "id-12312", object: "foo", created: 100, model: .gpt3_5Turbo, choices: [ - .init(index: 0, message: .init(role: .system, content: "bar"), finishReason: "baz"), - .init(index: 0, message: .init(role: .user, content: "bar1"), finishReason: "baz1"), - .init(index: 0, message: .init(role: .assistant, content: "bar2"), finishReason: "baz2") + .init(index: 0, message: .init(role: .system, content: "bar"), delta: nil, finishReason: "baz"), + .init(index: 0, message: .init(role: .user, content: "bar1"), delta: nil, finishReason: "baz1"), + .init(index: 0, message: .init(role: .assistant, content: "bar2"), delta: nil, finishReason: "baz2") ], usage: .init(promptTokens: 100, completionTokens: 200, totalTokens: 300)) try self.stub(result: chatResult) let result = try awaitPublisher(openAI.chats(query: query)) diff --git a/Tests/OpenAITests/OpenAITestsDecoder.swift b/Tests/OpenAITests/OpenAITestsDecoder.swift index 2b272b29..6b548a2d 100644 --- a/Tests/OpenAITests/OpenAITestsDecoder.swift +++ b/Tests/OpenAITests/OpenAITestsDecoder.swift @@ -98,7 +98,7 @@ class OpenAITestsDecoder: XCTestCase { """ let expectedValue = ChatResult(id: "chatcmpl-123", object: "chat.completion", created: 1677652288, model: .gpt4, choices: [ - .init(index: 0, message: Chat(role: .assistant, content: "Hello, world!"), finishReason: "stop") + .init(index: 0, message: Chat(role: .assistant, content: "Hello, world!"), delta: nil, finishReason: "stop") ], usage: .init(promptTokens: 9, completionTokens: 12, totalTokens: 21)) try decode(data, expectedValue) } From f9b5cde52f79ae84703401acfd0db930a8dfd612 Mon Sep 17 00:00:00 2001 From: Serg 
Krivoblotsky Date: Tue, 18 Apr 2023 16:53:30 +0300 Subject: [PATCH 03/15] Fix linux build --- Sources/OpenAI/Private/StreamingSession.swift | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Sources/OpenAI/Private/StreamingSession.swift b/Sources/OpenAI/Private/StreamingSession.swift index a2371ea7..1c2ca33a 100644 --- a/Sources/OpenAI/Private/StreamingSession.swift +++ b/Sources/OpenAI/Private/StreamingSession.swift @@ -6,6 +6,9 @@ // import Foundation +#if canImport(FoundationNetworking) +import FoundationNetworking +#endif final class StreamingSession: NSObject, Identifiable, URLSessionDelegate, URLSessionDataDelegate { From 1d03919887ec37b3c99a913a31944ee94a7ea6b9 Mon Sep 17 00:00:00 2001 From: Serg Krivoblotsky Date: Tue, 18 Apr 2023 16:57:28 +0300 Subject: [PATCH 04/15] remove unused code --- Sources/OpenAI/Public/Models/ChatResult.swift | 2 -- 1 file changed, 2 deletions(-) diff --git a/Sources/OpenAI/Public/Models/ChatResult.swift b/Sources/OpenAI/Public/Models/ChatResult.swift index 2aa6d263..e806511f 100644 --- a/Sources/OpenAI/Public/Models/ChatResult.swift +++ b/Sources/OpenAI/Public/Models/ChatResult.swift @@ -7,8 +7,6 @@ import Foundation -//"delta":{"content":" read"}, - public struct ChatResult: Codable, Equatable { public struct Choice: Codable, Equatable { From ffa609f2472ac6ebe8dd6d7cc6b35c4153209917 Mon Sep 17 00:00:00 2001 From: Serg Krivoblotsky Date: Tue, 18 Apr 2023 17:17:56 +0300 Subject: [PATCH 05/15] Add AsyncThrowingStream --- .../OpenAI/Public/Protocols/OpenAIProtocol+Async.swift | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift index 3e73563b..0dcb0148 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift @@ -72,6 +72,14 @@ public extension OpenAIProtocol { } } + func chatsStream( + query: ChatQuery + ) -> AsyncThrowingStream 
{ + return AsyncThrowingStream { continuation in + return chats(query: query) { continuation.yield(with: $0) } + } + } + func edits( query: EditsQuery ) async throws -> EditsResult { From 0c16d1ebe8da3c0ac25ab69284f851296d8e07c9 Mon Sep 17 00:00:00 2001 From: Serg Krivoblotsky Date: Tue, 18 Apr 2023 17:31:33 +0300 Subject: [PATCH 06/15] Fix filename --- Sources/OpenAI/Private/StreamingSession.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Sources/OpenAI/Private/StreamingSession.swift b/Sources/OpenAI/Private/StreamingSession.swift index 1c2ca33a..5fc69158 100644 --- a/Sources/OpenAI/Private/StreamingSession.swift +++ b/Sources/OpenAI/Private/StreamingSession.swift @@ -1,5 +1,5 @@ // -// File 2.swift +// StreamingSession.swift // // // Created by Sergii Kryvoblotskyi on 18/04/2023. From 04cf5ba5d2cf46cef38592a0139d91db0fff6cdd Mon Sep 17 00:00:00 2001 From: Sihao Lu Date: Wed, 19 Apr 2023 02:39:05 -0700 Subject: [PATCH 07/15] Adopt streaming in Demo app --- Demo/DemoChat/Sources/ChatStore.swift | 87 +++++++++++++------ .../Sources/UI/ConversationView.swift | 5 +- Demo/DemoChat/Sources/UI/DetailView.swift | 56 +++++++++++- Sources/OpenAI/Public/Models/ChatQuery.swift | 1 + Sources/OpenAI/Public/Models/ChatResult.swift | 3 + 5 files changed, 122 insertions(+), 30 deletions(-) diff --git a/Demo/DemoChat/Sources/ChatStore.swift b/Demo/DemoChat/Sources/ChatStore.swift index 59c74a77..470a23ce 100644 --- a/Demo/DemoChat/Sources/ChatStore.swift +++ b/Demo/DemoChat/Sources/ChatStore.swift @@ -53,17 +53,27 @@ public final class ChatStore: ObservableObject { } @MainActor - func sendMessage(_ message: Message, conversationId: Conversation.ID) async { + func sendMessage( + _ message: Message, + conversationId: Conversation.ID, + model: Model + ) async { guard let conversationIndex = conversations.firstIndex(where: { $0.id == conversationId }) else { return } conversations[conversationIndex].messages.append(message) - await completeChat(conversationId: 
conversationId) + await completeChat( + conversationId: conversationId, + model: model + ) } @MainActor - func completeChat(conversationId: Conversation.ID) async { + func completeChat( + conversationId: Conversation.ID, + model: Model + ) async { guard let conversation = conversations.first(where: { $0.id == conversationId }) else { return } @@ -71,35 +81,62 @@ public final class ChatStore: ObservableObject { conversationErrors[conversationId] = nil do { - let response = try await openAIClient.chats( + guard let conversationIndex = conversations.firstIndex(where: { $0.id == conversationId }) else { + return + } + + let chatsStream = openAIClient.chatsStream( query: ChatQuery( - model: .gpt3_5Turbo, + model: model, messages: conversation.messages.map { message in Chat(role: message.role, content: message.content) - } + }, + stream: true ) ) - - guard let conversationIndex = conversations.firstIndex(where: { $0.id == conversationId }) else { - return - } - - let existingMessages = conversations[conversationIndex].messages - - for completionMessage in response.choices.map(\.message) { - let message = Message( - id: response.id, - role: completionMessage.role, - content: completionMessage.content, - createdAt: Date(timeIntervalSince1970: TimeInterval(response.created)) - ) - - if existingMessages.contains(message) { - continue + + for try await partialChatResult in chatsStream { + for choice in partialChatResult.choices { + let existingMessages = conversations[conversationIndex].messages + + let message: Message + if let delta = choice.delta { + message = Message( + id: partialChatResult.id, + role: delta.role ?? .assistant, + content: delta.content ?? 
"", + createdAt: Date(timeIntervalSince1970: TimeInterval(partialChatResult.created)) + ) + if let existingMessageIndex = existingMessages.firstIndex(where: { $0.id == partialChatResult.id }) { + // Meld into previous message + let previousMessage = existingMessages[existingMessageIndex] + let combinedMessage = Message( + id: message.id, // id stays the same for different deltas + role: message.role, + content: previousMessage.content + message.content, + createdAt: message.createdAt + ) + conversations[conversationIndex].messages[existingMessageIndex] = combinedMessage + } else { + conversations[conversationIndex].messages.append(message) + } + } else { + let choiceMessage = choice.message! + + message = Message( + id: partialChatResult.id, + role: choiceMessage.role, + content: choiceMessage.content, + createdAt: Date(timeIntervalSince1970: TimeInterval(partialChatResult.created)) + ) + + if existingMessages.contains(message) { + continue + } + conversations[conversationIndex].messages.append(message) + } } - conversations[conversationIndex].messages.append(message) } - } catch { conversationErrors[conversationId] = error } diff --git a/Demo/DemoChat/Sources/UI/ConversationView.swift b/Demo/DemoChat/Sources/UI/ConversationView.swift index c4e17e40..1b872c21 100644 --- a/Demo/DemoChat/Sources/UI/ConversationView.swift +++ b/Demo/DemoChat/Sources/UI/ConversationView.swift @@ -46,7 +46,7 @@ public struct ChatView: View { DetailView( conversation: conversation, error: store.conversationErrors[conversation.id], - sendMessage: { message in + sendMessage: { message, selectedModel in Task { await store.sendMessage( Message( @@ -55,7 +55,8 @@ public struct ChatView: View { content: message, createdAt: dateProvider() ), - conversationId: conversation.id + conversationId: conversation.id, + model: selectedModel ) } } diff --git a/Demo/DemoChat/Sources/UI/DetailView.swift b/Demo/DemoChat/Sources/UI/DetailView.swift index a762b76d..cd8529ac 100644 --- 
a/Demo/DemoChat/Sources/UI/DetailView.swift +++ b/Demo/DemoChat/Sources/UI/DetailView.swift @@ -10,15 +10,20 @@ import UIKit #elseif os(macOS) import AppKit #endif +import OpenAI import SwiftUI struct DetailView: View { @State var inputText: String = "" @FocusState private var isFocused: Bool + @State private var showsModelSelectionSheet = false + @State private var selectedChatModel: Model = .gpt3_5Turbo + + private let availableChatModels: [Model] = [.gpt3_5Turbo, .gpt4] let conversation: Conversation let error: Error? - let sendMessage: (String) -> Void + let sendMessage: (String, Model) -> Void private var fillColor: Color { #if os(iOS) @@ -61,6 +66,51 @@ struct DetailView: View { inputBar(scrollViewProxy: scrollViewProxy) } .navigationTitle("Chat") + .safeAreaInset(edge: .top) { + HStack { + Text( + "Model: \(selectedChatModel)" + ) + .font(.caption) + .foregroundColor(.secondary) + Spacer() + } + .padding(.horizontal, 16) + .padding(.vertical, 8) + } + .toolbar { + ToolbarItem(placement: .navigationBarTrailing) { + Button(action: { + showsModelSelectionSheet.toggle() + }) { + Image(systemName: "cpu") + } + } + } + .confirmationDialog( + "Select model", + isPresented: $showsModelSelectionSheet, + titleVisibility: .visible, + actions: { + ForEach(availableChatModels, id: \.self) { model in + Button { + selectedChatModel = model + } label: { + Text(model) + } + } + + Button("Cancel", role: .cancel) { + showsModelSelectionSheet = false + } + }, + message: { + Text( + "View https://platform.openai.com/docs/models/overview for details" + ) + .font(.caption) + } + ) } } } @@ -133,7 +183,7 @@ struct DetailView: View { private func tapSendMessage( scrollViewProxy: ScrollViewProxy ) { - sendMessage(inputText) + sendMessage(inputText, selectedChatModel) inputText = "" // if let lastMessage = conversation.messages.last { @@ -206,7 +256,7 @@ struct DetailView_Previews: PreviewProvider { ] ), error: nil, - sendMessage: { _ in } + sendMessage: { _, _ in } ) } } diff --git 
a/Sources/OpenAI/Public/Models/ChatQuery.swift b/Sources/OpenAI/Public/Models/ChatQuery.swift index 90ff4217..d4bd7719 100644 --- a/Sources/OpenAI/Public/Models/ChatQuery.swift +++ b/Sources/OpenAI/Public/Models/ChatQuery.swift @@ -35,6 +35,7 @@ public struct ChatQuery: Codable { /// How many chat completion choices to generate for each input message. public let n: Int? /// If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only `server-sent events` as they become available, with the stream terminated by a data: [DONE] message. + /// If you want to perform the query in a streaming fashion, set this to `true` and use `OpenAI.chatsStream(query:)` method. public let stream: Bool? /// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. public let stop: [String]? diff --git a/Sources/OpenAI/Public/Models/ChatResult.swift b/Sources/OpenAI/Public/Models/ChatResult.swift index e806511f..7695ea6c 100644 --- a/Sources/OpenAI/Public/Models/ChatResult.swift +++ b/Sources/OpenAI/Public/Models/ChatResult.swift @@ -16,8 +16,11 @@ public struct ChatResult: Codable, Equatable { } public let index: Int + /// Exists only if it is a complete message. public let message: Chat? + /// Exists only if it is a partial message obtained through a streaming query. public let delta: Delta? + /// Exists only if it is a complete message. public let finishReason: String? 
enum CodingKeys: String, CodingKey { From 79fb479e57b6890015950f2abddc0f17e075660c Mon Sep 17 00:00:00 2001 From: Serg Krivoblotsky Date: Mon, 15 May 2023 13:58:38 +0300 Subject: [PATCH 08/15] Add dedicated results for streaming content --- Sources/OpenAI/OpenAI.swift | 23 +++++---- Sources/OpenAI/Private/StreamingSession.swift | 2 - Sources/OpenAI/Public/Models/ChatQuery.swift | 9 ++-- .../Public/Models/ChatStreamResult.swift | 50 +++++++++++++++++++ .../Public/Models/CompletionsQuery.swift | 5 +- .../Public/Models/CompletionsResult.swift | 9 +++- .../Public/Models/StreamableQuery.swift | 24 +++++++++ .../Protocols/OpenAIProtocol+Async.swift | 20 +++++++- .../Protocols/OpenAIProtocol+Combine.swift | 28 +++++++++++ .../Public/Protocols/OpenAIProtocol.swift | 36 +++++++++++++ 10 files changed, 186 insertions(+), 20 deletions(-) create mode 100644 Sources/OpenAI/Public/Models/ChatStreamResult.swift create mode 100644 Sources/OpenAI/Public/Models/StreamableQuery.swift diff --git a/Sources/OpenAI/OpenAI.swift b/Sources/OpenAI/OpenAI.swift index 0f1424f9..deeffcd0 100644 --- a/Sources/OpenAI/OpenAI.swift +++ b/Sources/OpenAI/OpenAI.swift @@ -56,6 +56,10 @@ final public class OpenAI: OpenAIProtocol { performRequest(request: JSONRequest(body: query, url: buildURL(path: .completions)), completion: completion) } + public func completionsStream(query: CompletionsQuery, onResult: @escaping (Result) -> Void, completion: ((Error?) -> Void)?) 
{ + performSteamingRequest(request: JSONRequest(body: query.makeStreamable(), url: buildURL(path: .completions)), onResult: onResult, completion: completion) + } + public func images(query: ImagesQuery, completion: @escaping (Result) -> Void) { performRequest(request: JSONRequest(body: query, url: buildURL(path: .images)), completion: completion) } @@ -65,11 +69,11 @@ final public class OpenAI: OpenAIProtocol { } public func chats(query: ChatQuery, completion: @escaping (Result) -> Void) { - if query.stream == true { - performSteamingRequest(request: JSONRequest(body: query, url: buildURL(path: .chats)), completion: completion) - } else { - performRequest(request: JSONRequest(body: query, url: buildURL(path: .chats)), completion: completion) - } + performRequest(request: JSONRequest(body: query, url: buildURL(path: .chats)), completion: completion) + } + + public func chatsStream(query: ChatQuery, onResult: @escaping (Result) -> Void, completion: ((Error?) -> Void)?) { + performSteamingRequest(request: JSONRequest(body: query.makeStreamable(), url: buildURL(path: .chats)), onResult: onResult, completion: completion) } public func edits(query: EditsQuery, completion: @escaping (Result) -> Void) { @@ -135,23 +139,24 @@ extension OpenAI { } } - func performSteamingRequest(request: any URLRequestBuildable, completion: @escaping (Result) -> Void) { + func performSteamingRequest(request: any URLRequestBuildable, onResult: @escaping (Result) -> Void, completion: ((Error?) -> Void)?) 
{ do { let request = try request.build(token: configuration.token, organizationIdentifier: configuration.organizationIdentifier, timeoutInterval: configuration.timeoutInterval) let session = StreamingSession(urlRequest: request) session.onReceiveContent = {_, object in - completion(.success(object)) + onResult(.success(object)) } session.onProcessingError = {_, error in - completion(.failure(error)) + onResult(.failure(error)) } session.onComplete = { [weak self] object, error in self?.streamingSessions.removeAll(where: { $0 == object }) + completion?(error) } session.perform() streamingSessions.append(session) } catch { - completion(.failure(error)) + completion?(error) } } } diff --git a/Sources/OpenAI/Private/StreamingSession.swift b/Sources/OpenAI/Private/StreamingSession.swift index 5fc69158..f85c0347 100644 --- a/Sources/OpenAI/Private/StreamingSession.swift +++ b/Sources/OpenAI/Private/StreamingSession.swift @@ -51,9 +51,7 @@ final class StreamingSession: NSObject, Identifiable, URLSe .components(separatedBy: "data:") .filter { $0.isEmpty == false } .map { $0.trimmingCharacters(in: .whitespacesAndNewlines) } - guard jsonObjects.isEmpty == false, jsonObjects.first != streamingCompletionMarker else { - onProcessingError?(self, StreamingError.emptyContent) return } jsonObjects.forEach { jsonContent in diff --git a/Sources/OpenAI/Public/Models/ChatQuery.swift b/Sources/OpenAI/Public/Models/ChatQuery.swift index 90ff4217..d7d4d8fc 100644 --- a/Sources/OpenAI/Public/Models/ChatQuery.swift +++ b/Sources/OpenAI/Public/Models/ChatQuery.swift @@ -23,7 +23,7 @@ public struct Chat: Codable, Equatable { } } -public struct ChatQuery: Codable { +public struct ChatQuery: Codable, Streamable { /// ID of the model to use. Currently, only gpt-3.5-turbo and gpt-3.5-turbo-0301 are supported. public let model: Model /// The messages to generate chat completions for @@ -34,8 +34,6 @@ public struct ChatQuery: Codable { public let topP: Double? 
/// How many chat completion choices to generate for each input message. public let n: Int? - /// If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only `server-sent events` as they become available, with the stream terminated by a data: [DONE] message. - public let stream: Bool? /// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. public let stop: [String]? /// The maximum number of tokens to generate in the completion. @@ -49,6 +47,8 @@ public struct ChatQuery: Codable { /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. public let user: String? + var stream: Bool = false + enum CodingKeys: String, CodingKey { case model case messages @@ -64,13 +64,12 @@ public struct ChatQuery: Codable { case user } - public init(model: Model, messages: [Chat], temperature: Double? = nil, topP: Double? = nil, n: Int? = nil, stream: Bool? = nil, stop: [String]? = nil, maxTokens: Int? = nil, presencePenalty: Double? = nil, frequencyPenalty: Double? = nil, logitBias: [String : Int]? = nil, user: String? = nil) { + public init(model: Model, messages: [Chat], temperature: Double? = nil, topP: Double? = nil, n: Int? = nil, stop: [String]? = nil, maxTokens: Int? = nil, presencePenalty: Double? = nil, frequencyPenalty: Double? = nil, logitBias: [String : Int]? = nil, user: String? 
= nil) { self.model = model self.messages = messages self.temperature = temperature self.topP = topP self.n = n - self.stream = stream self.stop = stop self.maxTokens = maxTokens self.presencePenalty = presencePenalty diff --git a/Sources/OpenAI/Public/Models/ChatStreamResult.swift b/Sources/OpenAI/Public/Models/ChatStreamResult.swift new file mode 100644 index 00000000..1475cc71 --- /dev/null +++ b/Sources/OpenAI/Public/Models/ChatStreamResult.swift @@ -0,0 +1,50 @@ +// +// ChatStreamResult.swift +// +// +// Created by Sergii Kryvoblotskyi on 15/05/2023. +// + +import Foundation + +public struct ChatStreamResult: Codable, Equatable { + + public struct Choice: Codable, Equatable { + public struct Delta: Codable, Equatable { + public let content: String? + public let role: Chat.Role? + } + + public let index: Int + public let delta: Delta + public let finishReason: String? + + enum CodingKeys: String, CodingKey { + case index + case delta + case finishReason = "finish_reason" + } + } + + public let id: String + public let object: String + public let created: TimeInterval + public let model: Model + public let choices: [Choice] + + enum CodingKeys: String, CodingKey { + case id + case object + case created + case model + case choices + } + + init(id: String, object: String, created: TimeInterval, model: Model, choices: [Choice]) { + self.id = id + self.object = object + self.created = created + self.model = model + self.choices = choices + } +} diff --git a/Sources/OpenAI/Public/Models/CompletionsQuery.swift b/Sources/OpenAI/Public/Models/CompletionsQuery.swift index 76cd4eb3..783b9a4e 100644 --- a/Sources/OpenAI/Public/Models/CompletionsQuery.swift +++ b/Sources/OpenAI/Public/Models/CompletionsQuery.swift @@ -7,7 +7,7 @@ import Foundation -public struct CompletionsQuery: Codable { +public struct CompletionsQuery: Codable, Streamable { /// ID of the model to use. 
public let model: Model /// The prompt(s) to generate completions for, encoded as a string, array of strings, array of tokens, or array of token arrays. @@ -27,9 +27,12 @@ public struct CompletionsQuery: Codable { /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. public let user: String? + var stream: Bool = true + enum CodingKeys: String, CodingKey { case model case prompt + case stream case temperature case maxTokens = "max_tokens" case topP = "top_p" diff --git a/Sources/OpenAI/Public/Models/CompletionsResult.swift b/Sources/OpenAI/Public/Models/CompletionsResult.swift index 3f4f7a87..07720f85 100644 --- a/Sources/OpenAI/Public/Models/CompletionsResult.swift +++ b/Sources/OpenAI/Public/Models/CompletionsResult.swift @@ -24,6 +24,13 @@ public struct CompletionsResult: Codable, Equatable { public struct Choice: Codable, Equatable { public let text: String public let index: Int + public let finishReason: String? + + enum CodingKeys: String, CodingKey { + case text + case index + case finishReason = "finish_reason" + } } public let id: String @@ -31,5 +38,5 @@ public struct CompletionsResult: Codable, Equatable { public let created: TimeInterval public let model: Model public let choices: [Choice] - public let usage: Usage + public let usage: Usage? } diff --git a/Sources/OpenAI/Public/Models/StreamableQuery.swift b/Sources/OpenAI/Public/Models/StreamableQuery.swift new file mode 100644 index 00000000..1210432f --- /dev/null +++ b/Sources/OpenAI/Public/Models/StreamableQuery.swift @@ -0,0 +1,24 @@ +// +// File.swift +// +// +// Created by Sergii Kryvoblotskyi on 15/05/2023. 
+// + +import Foundation + +protocol Streamable { + + var stream: Bool { get set } + func makeStreamable() -> Self +} + +extension Streamable { + + func makeStreamable() -> Self { + var copy = self + copy.stream = true + return copy + } +} + diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift index 0dcb0148..c44ab397 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift @@ -26,6 +26,18 @@ public extension OpenAIProtocol { } } } + + func completionsStream( + query: CompletionsQuery + ) -> AsyncThrowingStream { + return AsyncThrowingStream { continuation in + return completionsStream(query: query) { result in + continuation.yield(with: result) + } completion: { error in + continuation.finish(throwing: error) + } + } + } func images( query: ImagesQuery @@ -74,9 +86,13 @@ public extension OpenAIProtocol { func chatsStream( query: ChatQuery - ) -> AsyncThrowingStream { + ) -> AsyncThrowingStream { return AsyncThrowingStream { continuation in - return chats(query: query) { continuation.yield(with: $0) } + return chatsStream(query: query) { result in + continuation.yield(with: result) + } completion: { error in + continuation.finish(throwing: error) + } } } diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift index 1686d981..a20d9946 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift @@ -21,6 +21,20 @@ public extension OpenAIProtocol { } .eraseToAnyPublisher() } + + func completionsStream(query: CompletionsQuery) -> AnyPublisher, Error> { + let progress = PassthroughSubject, Error>() + completionsStream(query: query) { result in + progress.send(result) + } completion: { error in + if let error { + progress.send(completion: .failure(error)) 
+ } else { + progress.send(completion: .finished) + } + } + return progress.eraseToAnyPublisher() + } func images(query: ImagesQuery) -> AnyPublisher { Future { @@ -43,6 +57,20 @@ public extension OpenAIProtocol { .eraseToAnyPublisher() } + func chatsStream(query: ChatQuery) -> AnyPublisher, Error> { + let progress = PassthroughSubject, Error>() + chatsStream(query: query) { result in + progress.send(result) + } completion: { error in + if let error { + progress.send(completion: .failure(error)) + } else { + progress.send(completion: .finished) + } + } + return progress.eraseToAnyPublisher() + } + func edits(query: EditsQuery) -> AnyPublisher { Future { edits(query: query, completion: $0) diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift index ec3215d0..5020594c 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift @@ -25,6 +25,24 @@ public protocol OpenAIProtocol { - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result`, will contain either the `CompletionsResult` object with the generated completions, or an error if the request failed. **/ func completions(query: CompletionsQuery, completion: @escaping (Result) -> Void) + + /** + This function sends a completions query to the OpenAI API and retrieves generated completions in response. The Completions API enables you to build applications using OpenAI's language models, like the powerful GPT-3. The result is returned by chunks. + + Example: + ``` + let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?") + openAI.completions(query: query) { result in + //Handle result here + } + ``` + + - Parameters: + - query: A `CompletionsQuery` object containing the input parameters for the API request. This includes the prompt, model, temperature, max tokens, and other settings. 
+ - onResult: A closure which receives the result when the API request finishes. The closure's parameter, `Result`, will contain either the `CompletionsResult` object with the generated completions, or an error if the request failed. + - completion: A closure that is being called when all chunks are delivered or unrecoverable error occurred + **/ + func completionsStream(query: CompletionsQuery, onResult: @escaping (Result) -> Void, completion: ((Error?) -> Void)?) /** This function sends an images query to the OpenAI API and retrieves generated images in response. The Images Generation API enables you to create various images or graphics using OpenAI's powerful deep learning models. @@ -77,6 +95,24 @@ public protocol OpenAIProtocol { **/ func chats(query: ChatQuery, completion: @escaping (Result) -> Void) + /** + This function sends a chat query to the OpenAI API and retrieves chat stream conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI's powerful natural language models, like GPT-3. The result is returned by chunks. + + Example: + ``` + let query = ChatQuery(model: .gpt3_5Turbo, messages: [.init(role: "user", content: "who are you")]) + openAI.chats(query: query) { result in + //Handle response here + } + ``` + + - Parameters: + - query: A `ChatQuery` object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings. + - onResult: A closure which receives the result when the API request finishes. The closure's parameter, `Result`, will contain either the `ChatStreamResult` object with the model's response to the conversation, or an error if the request failed. + - completion: A closure that is being called when all chunks are delivered or unrecoverable error occurred + **/ + func chatsStream(query: ChatQuery, onResult: @escaping (Result) -> Void, completion: ((Error?) -> Void)?) 
+ /** This function sends an edits query to the OpenAI API and retrieves an edited version of the prompt based on the instruction given. From 8a8aafd4f0651da6a5cadc179570398a7dba2ec0 Mon Sep 17 00:00:00 2001 From: Serg Krivoblotsky Date: Mon, 15 May 2023 14:07:37 +0300 Subject: [PATCH 09/15] Fix demo project --- Demo/DemoChat/Sources/ChatStore.swift | 53 +++++++++------------------ 1 file changed, 17 insertions(+), 36 deletions(-) diff --git a/Demo/DemoChat/Sources/ChatStore.swift b/Demo/DemoChat/Sources/ChatStore.swift index 470a23ce..6dd8c0ba 100644 --- a/Demo/DemoChat/Sources/ChatStore.swift +++ b/Demo/DemoChat/Sources/ChatStore.swift @@ -85,54 +85,35 @@ public final class ChatStore: ObservableObject { return } - let chatsStream = openAIClient.chatsStream( + let chatsStream: AsyncThrowingStream = openAIClient.chatsStream( query: ChatQuery( model: model, messages: conversation.messages.map { message in Chat(role: message.role, content: message.content) - }, - stream: true + } ) ) for try await partialChatResult in chatsStream { for choice in partialChatResult.choices { let existingMessages = conversations[conversationIndex].messages - - let message: Message - if let delta = choice.delta { - message = Message( - id: partialChatResult.id, - role: delta.role ?? .assistant, - content: delta.content ?? "", - createdAt: Date(timeIntervalSince1970: TimeInterval(partialChatResult.created)) + let message = Message( + id: partialChatResult.id, + role: choice.delta.role ?? .assistant, + content: choice.delta.content ?? 
"", + createdAt: Date(timeIntervalSince1970: TimeInterval(partialChatResult.created)) + ) + if let existingMessageIndex = existingMessages.firstIndex(where: { $0.id == partialChatResult.id }) { + // Meld into previous message + let previousMessage = existingMessages[existingMessageIndex] + let combinedMessage = Message( + id: message.id, // id stays the same for different deltas + role: message.role, + content: previousMessage.content + message.content, + createdAt: message.createdAt ) - if let existingMessageIndex = existingMessages.firstIndex(where: { $0.id == partialChatResult.id }) { - // Meld into previous message - let previousMessage = existingMessages[existingMessageIndex] - let combinedMessage = Message( - id: message.id, // id stays the same for different deltas - role: message.role, - content: previousMessage.content + message.content, - createdAt: message.createdAt - ) - conversations[conversationIndex].messages[existingMessageIndex] = combinedMessage - } else { - conversations[conversationIndex].messages.append(message) - } + conversations[conversationIndex].messages[existingMessageIndex] = combinedMessage } else { - let choiceMessage = choice.message! 
- - message = Message( - id: partialChatResult.id, - role: choiceMessage.role, - content: choiceMessage.content, - createdAt: Date(timeIntervalSince1970: TimeInterval(partialChatResult.created)) - ) - - if existingMessages.contains(message) { - continue - } conversations[conversationIndex].messages.append(message) } } From e612b6bfc20dc7740d7ea14e91e4fe1272cf4391 Mon Sep 17 00:00:00 2001 From: Serg Krivoblotsky Date: Mon, 15 May 2023 14:12:09 +0300 Subject: [PATCH 10/15] Fix demo project --- Demo/DemoChat/Sources/UI/ModerationChatView.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Demo/DemoChat/Sources/UI/ModerationChatView.swift b/Demo/DemoChat/Sources/UI/ModerationChatView.swift index 0bb5e542..41658845 100644 --- a/Demo/DemoChat/Sources/UI/ModerationChatView.swift +++ b/Demo/DemoChat/Sources/UI/ModerationChatView.swift @@ -21,7 +21,7 @@ public struct ModerationChatView: View { DetailView( conversation: store.moderationConversation, error: store.moderationConversationError, - sendMessage: { message in + sendMessage: { message, _ in Task { await store.sendModerationMessage( Message( From db71d795ad4c6bb0a8df8f41360ecc8b1921bf6f Mon Sep 17 00:00:00 2001 From: Serg Krivoblotsky Date: Mon, 15 May 2023 14:20:29 +0300 Subject: [PATCH 11/15] Update documentation --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index 23dfc1fe..236c9fbb 100644 --- a/README.md +++ b/README.md @@ -175,8 +175,6 @@ Using the OpenAI Chat API, you can build your own applications with `gpt-3.5-tur public let topP: Double? /// How many chat completion choices to generate for each input message. public let n: Int? - /// If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only `server-sent events` as they become available, with the stream terminated by a data: [DONE] message. - public let stream: Bool? /// Up to 4 sequences where the API will stop generating further tokens. 
The returned text will not contain the stop sequence. public let stop: [String]? /// The maximum number of tokens to generate in the completion. From c82c55e8848ad1b879348a7b62d20740a359bf2c Mon Sep 17 00:00:00 2001 From: Sergii Kryvoblotskyi Date: Mon, 15 May 2023 14:30:04 +0300 Subject: [PATCH 12/15] Update README.md Update documentation on streaming --- README.md | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/README.md b/README.md index 236c9fbb..e9dadf91 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,7 @@ This repository contains Swift community-maintained implementation over [OpenAI] - [Usage](#usage) - [Initialization](#initialization) - [Completions](#completions) + - [Completions Streaming](#streaming) - [Chats](#chats) - [Images](#images) - [Audio](#audio) @@ -146,6 +147,43 @@ let result = try await openAI.completions(query: query) - index : 0 ``` +#### Completions Streaming + +Completions streaming is available by using `completionsStream` function. Token will be sent one-by one. + +**Closures** +```swift +openAI.completionsStream(query: query) { partialResult in + switch partialResult { + case .success(let result): + print(result.choices) + case .failure(let error): + //Handle chunk error here + } +} completion: { error in + //Handle streaming error here +} +``` + +**Combine** + +```swift +openAI + .completionsStream(query: .init(model: .textDavinci_003, prompt: "What is 42?")) + .sink { completion in + //Handle completion result here + } receiveValue: { result in + //Handle chunk here + }.store(in: &cancellables) +``` + +**Structured concurrency** +```swift +for try await result in openAI.completionsStream(query: .init(model: .textDavinci_003, prompt: "what is 42")) { + //Handle result here +} +``` + Review [Completions Documentation](https://platform.openai.com/docs/api-reference/completions) for more info. 
### Chats From 025852470ae8e469e2308daa3af265b7aba91d18 Mon Sep 17 00:00:00 2001 From: Sergii Kryvoblotskyi Date: Mon, 15 May 2023 14:35:27 +0300 Subject: [PATCH 13/15] Update README.md Update documentation --- README.md | 46 ++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 42 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index e9dadf91..d130c5fe 100644 --- a/README.md +++ b/README.md @@ -16,8 +16,9 @@ This repository contains Swift community-maintained implementation over [OpenAI] - [Usage](#usage) - [Initialization](#initialization) - [Completions](#completions) - - [Completions Streaming](#streaming) + - [Completions Streaming](#completions-streaming) - [Chats](#chats) + - [Chats Streaming](#chats-streaming) - [Images](#images) - [Audio](#audio) - [Audio Transcriptions](#audio-transcriptions) @@ -149,7 +150,7 @@ let result = try await openAI.completions(query: query) #### Completions Streaming -Completions streaming is available by using `completionsStream` function. Token will be sent one-by one. +Completions streaming is available by using `completionsStream` function. Tokens will be sent one-by-one. **Closures** ```swift @@ -169,7 +170,7 @@ openAI.completionsStream(query: query) { partialResult in ```swift openAI - .completionsStream(query: .init(model: .textDavinci_003, prompt: "What is 42?")) + .completionsStream(query: query) .sink { completion in //Handle completion result here } receiveValue: { result in @@ -179,7 +180,7 @@ openAI **Structured concurrency** ```swift -for try await result in openAI.completionsStream(query: .init(model: .textDavinci_003, prompt: "what is 42")) { +for try await result in openAI.completionsStream(query: query) { //Handle result here } ``` @@ -280,6 +281,43 @@ let result = try await openAI.chats(query: query) - total_tokens : 49 ``` +#### Chats Streaming + +Chats streaming is available by using `chatStream` function. Tokens will be sent one-by-one. 
+ +**Closures** +```swift +openAI.chatsStream(query: query) { partialResult in + switch partialResult { + case .success(let result): + print(result.choices) + case .failure(let error): + //Handle chunk error here + } +} completion: { error in + //Handle streaming error here +} +``` + +**Combine** + +```swift +openAI + .chatsStream(query: query) + .sink { completion in + //Handle completion result here + } receiveValue: { result in + //Handle chunk here + }.store(in: &cancellables) +``` + +**Structured concurrency** +```swift +for try await result in openAI.chatsStream(query: query) { + //Handle result here +} +``` + Review [Chat Documentation](https://platform.openai.com/docs/guides/chat) for more info. ### Images From bb9e8f9d8e3e27d867550a95f167bbf552584a4e Mon Sep 17 00:00:00 2001 From: Serg Krivoblotsky Date: Mon, 15 May 2023 14:52:09 +0300 Subject: [PATCH 14/15] Remove old artifact --- Sources/OpenAI/Public/Models/ChatResult.swift | 5 +---- Tests/OpenAITests/OpenAITests.swift | 6 +++--- Tests/OpenAITests/OpenAITestsCombine.swift | 6 +++--- Tests/OpenAITests/OpenAITestsDecoder.swift | 2 +- 4 files changed, 8 insertions(+), 11 deletions(-) diff --git a/Sources/OpenAI/Public/Models/ChatResult.swift b/Sources/OpenAI/Public/Models/ChatResult.swift index 7695ea6c..ce1a749e 100644 --- a/Sources/OpenAI/Public/Models/ChatResult.swift +++ b/Sources/OpenAI/Public/Models/ChatResult.swift @@ -17,16 +17,13 @@ public struct ChatResult: Codable, Equatable { public let index: Int /// Exists only if it is a complete message. - public let message: Chat? - /// Exists only if it is a partial message obtained throught a streaming query. - public let delta: Delta? + public let message: Chat /// Exists only if it is a complete message. public let finishReason: String? 
enum CodingKeys: String, CodingKey { case index case message - case delta case finishReason = "finish_reason" } } diff --git a/Tests/OpenAITests/OpenAITests.swift b/Tests/OpenAITests/OpenAITests.swift index 2c57f6d3..96ea7931 100644 --- a/Tests/OpenAITests/OpenAITests.swift +++ b/Tests/OpenAITests/OpenAITests.swift @@ -68,9 +68,9 @@ class OpenAITests: XCTestCase { .init(role: .user, content: "Who wrote Harry Potter?") ]) let chatResult = ChatResult(id: "id-12312", object: "foo", created: 100, model: .gpt3_5Turbo, choices: [ - .init(index: 0, message: .init(role: .system, content: "bar"), delta: nil, finishReason: "baz"), - .init(index: 0, message: .init(role: .user, content: "bar1"), delta: nil, finishReason: "baz1"), - .init(index: 0, message: .init(role: .assistant, content: "bar2"), delta: nil, finishReason: "baz2") + .init(index: 0, message: .init(role: .system, content: "bar"), finishReason: "baz"), + .init(index: 0, message: .init(role: .user, content: "bar1"), finishReason: "baz1"), + .init(index: 0, message: .init(role: .assistant, content: "bar2"), finishReason: "baz2") ], usage: .init(promptTokens: 100, completionTokens: 200, totalTokens: 300)) try self.stub(result: chatResult) diff --git a/Tests/OpenAITests/OpenAITestsCombine.swift b/Tests/OpenAITests/OpenAITestsCombine.swift index 09225952..e2b58458 100644 --- a/Tests/OpenAITests/OpenAITestsCombine.swift +++ b/Tests/OpenAITests/OpenAITestsCombine.swift @@ -42,9 +42,9 @@ final class OpenAITestsCombine: XCTestCase { .init(role: .user, content: "Who wrote Harry Potter?") ]) let chatResult = ChatResult(id: "id-12312", object: "foo", created: 100, model: .gpt3_5Turbo, choices: [ - .init(index: 0, message: .init(role: .system, content: "bar"), delta: nil, finishReason: "baz"), - .init(index: 0, message: .init(role: .user, content: "bar1"), delta: nil, finishReason: "baz1"), - .init(index: 0, message: .init(role: .assistant, content: "bar2"), delta: nil, finishReason: "baz2") + .init(index: 0, message: 
.init(role: .system, content: "bar"), finishReason: "baz"), + .init(index: 0, message: .init(role: .user, content: "bar1"), finishReason: "baz1"), + .init(index: 0, message: .init(role: .assistant, content: "bar2"), finishReason: "baz2") ], usage: .init(promptTokens: 100, completionTokens: 200, totalTokens: 300)) try self.stub(result: chatResult) let result = try awaitPublisher(openAI.chats(query: query)) diff --git a/Tests/OpenAITests/OpenAITestsDecoder.swift b/Tests/OpenAITests/OpenAITestsDecoder.swift index 1e72d0ac..f2111532 100644 --- a/Tests/OpenAITests/OpenAITestsDecoder.swift +++ b/Tests/OpenAITests/OpenAITestsDecoder.swift @@ -98,7 +98,7 @@ class OpenAITestsDecoder: XCTestCase { """ let expectedValue = ChatResult(id: "chatcmpl-123", object: "chat.completion", created: 1677652288, model: .gpt4, choices: [ - .init(index: 0, message: Chat(role: .assistant, content: "Hello, world!"), delta: nil, finishReason: "stop") + .init(index: 0, message: Chat(role: .assistant, content: "Hello, world!"), finishReason: "stop") ], usage: .init(promptTokens: 9, completionTokens: 12, totalTokens: 21)) try decode(data, expectedValue) } From 6bb14561d422c49d42b974113f86ca960d77b4ce Mon Sep 17 00:00:00 2001 From: Serg Krivoblotsky Date: Mon, 15 May 2023 14:53:06 +0300 Subject: [PATCH 15/15] Remove more artifacts --- Sources/OpenAI/Public/Models/ChatResult.swift | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/Sources/OpenAI/Public/Models/ChatResult.swift b/Sources/OpenAI/Public/Models/ChatResult.swift index ce1a749e..f1a80a0c 100644 --- a/Sources/OpenAI/Public/Models/ChatResult.swift +++ b/Sources/OpenAI/Public/Models/ChatResult.swift @@ -10,11 +10,7 @@ import Foundation public struct ChatResult: Codable, Equatable { public struct Choice: Codable, Equatable { - public struct Delta: Codable, Equatable { - public let content: String? - public let role: Chat.Role? - } - + public let index: Int /// Exists only if it is a complete message. 
public let message: Chat