From 9b72e812cb1577b0a98a3c3249a0e2c5d0cc433d Mon Sep 17 00:00:00 2001 From: Chris Dillard Date: Fri, 17 Nov 2023 07:29:23 -0700 Subject: [PATCH 1/9] feat: Assistants API --- Demo/App/APIProvidedView.swift | 9 ++ Demo/App/ContentView.swift | 21 ++- Demo/Demo.xcodeproj/project.pbxproj | 8 +- Demo/DemoChat/Sources/AssistantStore.swift | 94 +++++++++++ Demo/DemoChat/Sources/ChatStore.swift | 153 +++++++++++++++--- Demo/DemoChat/Sources/Models/Assistant.swift | 32 ++++ .../Sources/Models/Conversation.swift | 11 +- .../UI/AssistantModalContentView.swift | 85 ++++++++++ .../Sources/UI/AssistantsListView.swift | 28 ++++ Demo/DemoChat/Sources/UI/AssistantsView.swift | 114 +++++++++++++ Demo/DemoChat/Sources/UI/ChatView.swift | 151 ++++++++++++----- Demo/DemoChat/Sources/UI/DetailView.swift | 68 +++++--- Demo/DemoChat/Sources/UI/DocumentPicker.swift | 42 +++++ Demo/DemoChat/Sources/UI/ListView.swift | 26 ++- .../Sources/UI/ModerationChatView.swift | 4 +- Sources/OpenAI/OpenAI.swift | 70 +++++++- Sources/OpenAI/Private/JSONRequest.swift | 3 + .../Public/Models/AssistantsQuery.swift | 56 +++++++ .../Public/Models/AssistantsResult.swift | 30 ++++ Sources/OpenAI/Public/Models/FilesQuery.swift | 42 +++++ .../OpenAI/Public/Models/FilesResult.swift | 14 ++ .../Public/Models/RunRetrieveQuery.swift | 15 ++ .../Public/Models/RunRetrieveResult.swift | 13 ++ Sources/OpenAI/Public/Models/RunsQuery.swift | 22 +++ Sources/OpenAI/Public/Models/RunsResult.swift | 13 ++ .../Public/Models/ThreadAddMessageQuery.swift | 24 +++ .../Models/ThreadAddMessagesResult.swift | 13 ++ .../Public/Models/ThreadsMessagesResult.swift | 53 ++++++ .../OpenAI/Public/Models/ThreadsQuery.swift | 20 +++ .../OpenAI/Public/Models/ThreadsResult.swift | 13 ++ .../Protocols/OpenAIProtocol+Async.swift | 111 +++++++++++++ .../Protocols/OpenAIProtocol+Combine.swift | 38 +++++ .../Public/Protocols/OpenAIProtocol.swift | 25 +++ Tests/OpenAITests/OpenAITests.swift | 106 +++++++++++- Tests/OpenAITests/OpenAITestsCombine.swift | 51 ++++++ 35 files changed, 1480 insertions(+), 98 deletions(-) create mode 100644 Demo/DemoChat/Sources/AssistantStore.swift create mode 100644 Demo/DemoChat/Sources/Models/Assistant.swift create mode 100644 Demo/DemoChat/Sources/UI/AssistantModalContentView.swift create mode 100644 Demo/DemoChat/Sources/UI/AssistantsListView.swift create mode 100644 Demo/DemoChat/Sources/UI/AssistantsView.swift create mode 100644 Demo/DemoChat/Sources/UI/DocumentPicker.swift create mode 100644 Sources/OpenAI/Public/Models/AssistantsQuery.swift create mode 100644 Sources/OpenAI/Public/Models/AssistantsResult.swift create mode 100644 Sources/OpenAI/Public/Models/FilesQuery.swift create mode 100644 Sources/OpenAI/Public/Models/FilesResult.swift create mode 100644 Sources/OpenAI/Public/Models/RunRetrieveQuery.swift create mode 100644 Sources/OpenAI/Public/Models/RunRetrieveResult.swift create mode 100644 Sources/OpenAI/Public/Models/RunsQuery.swift create mode 100644 Sources/OpenAI/Public/Models/RunsResult.swift create mode 100644 Sources/OpenAI/Public/Models/ThreadAddMessageQuery.swift create mode 100644 Sources/OpenAI/Public/Models/ThreadAddMessagesResult.swift create mode 100644 Sources/OpenAI/Public/Models/ThreadsMessagesResult.swift create mode 100644 Sources/OpenAI/Public/Models/ThreadsQuery.swift create mode 100644 Sources/OpenAI/Public/Models/ThreadsResult.swift diff --git a/Demo/App/APIProvidedView.swift b/Demo/App/APIProvidedView.swift index 7dc61b5d..dd52cc2b 100644 --- a/Demo/App/APIProvidedView.swift +++ 
b/Demo/App/APIProvidedView.swift @@ -13,7 +13,9 @@ struct APIProvidedView: View { @Binding var apiKey: String @StateObject var chatStore: ChatStore @StateObject var imageStore: ImageStore + @StateObject var assistantStore: AssistantStore @StateObject var miscStore: MiscStore + @State var isShowingAPIConfigModal: Bool = true @Environment(\.idProviderValue) var idProvider @@ -35,6 +37,12 @@ struct APIProvidedView: View { openAIClient: OpenAI(apiToken: apiKey.wrappedValue) ) ) + self._assistantStore = StateObject( + wrappedValue: AssistantStore( + openAIClient: OpenAI(apiToken: apiKey.wrappedValue), + idProvider: idProvider + ) + ) self._miscStore = StateObject( wrappedValue: MiscStore( openAIClient: OpenAI(apiToken: apiKey.wrappedValue) @@ -46,6 +54,7 @@ struct APIProvidedView: View { ContentView( chatStore: chatStore, imageStore: imageStore, + assistantStore: assistantStore, miscStore: miscStore ) .onChange(of: apiKey) { newApiKey in diff --git a/Demo/App/ContentView.swift b/Demo/App/ContentView.swift index 2826e6bc..2029d6fb 100644 --- a/Demo/App/ContentView.swift +++ b/Demo/App/ContentView.swift @@ -12,20 +12,32 @@ import SwiftUI struct ContentView: View { @ObservedObject var chatStore: ChatStore @ObservedObject var imageStore: ImageStore + @ObservedObject var assistantStore: AssistantStore @ObservedObject var miscStore: MiscStore + @State private var selectedTab = 0 @Environment(\.idProviderValue) var idProvider var body: some View { TabView(selection: $selectedTab) { ChatView( - store: chatStore + store: chatStore, + assistantStore: assistantStore ) .tabItem { Label("Chats", systemImage: "message") } .tag(0) + AssistantsView( + store: chatStore, + assistantStore: assistantStore + ) + .tabItem { + Label("Assistants", systemImage: "eyeglasses") + } + .tag(1) + TranscribeView( ) .tabItem { @@ -52,13 +64,6 @@ struct ContentView: View { } } -struct ChatsView: View { - var body: some View { - Text("Chats") - .font(.largeTitle) - } -} - struct TranscribeView: View { var body: some View { Text("Transcribe: TBD") diff --git a/Demo/Demo.xcodeproj/project.pbxproj b/Demo/Demo.xcodeproj/project.pbxproj index 60e31dba..e622c48c 100644 --- a/Demo/Demo.xcodeproj/project.pbxproj +++ b/Demo/Demo.xcodeproj/project.pbxproj @@ -232,6 +232,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; MTL_FAST_MATH = YES; ONLY_ACTIVE_ARCH = YES; @@ -284,6 +285,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; MTL_ENABLE_DEBUG_INFO = NO; MTL_FAST_MATH = YES; SWIFT_COMPILATION_MODE = wholemodule; @@ -299,6 +301,7 @@ CODE_SIGN_ENTITLEMENTS = App/Demo.entitlements; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = 72WEN2C47N; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES; @@ -311,7 +314,7 @@ "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault; INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - IPHONEOS_DEPLOYMENT_TARGET = 16.4; + 
IPHONEOS_DEPLOYMENT_TARGET = 16.0; LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; MACOSX_DEPLOYMENT_TARGET = 13.3; @@ -336,6 +339,7 @@ CODE_SIGN_ENTITLEMENTS = App/Demo.entitlements; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = 72WEN2C47N; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES; @@ -348,7 +352,7 @@ "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault; INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - IPHONEOS_DEPLOYMENT_TARGET = 16.4; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; MACOSX_DEPLOYMENT_TARGET = 13.3; diff --git a/Demo/DemoChat/Sources/AssistantStore.swift b/Demo/DemoChat/Sources/AssistantStore.swift new file mode 100644 index 00000000..e41a55b3 --- /dev/null +++ b/Demo/DemoChat/Sources/AssistantStore.swift @@ -0,0 +1,94 @@ +// +// ChatStore.swift +// DemoChat +// +// Created by Sihao Lu on 3/25/23. +// + +import Foundation +import Combine +import OpenAI + +public final class AssistantStore: ObservableObject { + public var openAIClient: OpenAIProtocol + let idProvider: () -> String + @Published var selectedAssistantId: String? + + @Published var availableAssistants: [Assistant] = [] + + public init( + openAIClient: OpenAIProtocol, + idProvider: @escaping () -> String + ) { + self.openAIClient = openAIClient + self.idProvider = idProvider + } + + // MARK: Models + + @MainActor + func createAssistant(name: String, description: String, instructions: String, codeInterpreter: Bool, retrievel: Bool, fileIds: [String]? = nil) async -> String? { + do { + var tools = [Tool]() + if codeInterpreter { + tools.append(Tool(toolType: "code_interpreter")) + } + if retrievel { + tools.append(Tool(toolType: "retrieval")) + } + + // TODO: Replace with actual gpt-4-1106-preview model. + let query = AssistantsQuery(model: Model("gpt-4-1106-preview"), name: name, description: description, instructions: instructions, tools:tools, fileIds: fileIds) + let response = try await openAIClient.assistants(query: query, method: "POST") + + // Returns assistantId + return response.id + + } catch { + // TODO: Better error handling + print(error.localizedDescription) + } + return nil + } + + @MainActor + func getAssistants(limit: Int) async -> [Assistant] { + do { + let response = try await openAIClient.assistants(query: nil, method: "GET") + + var assistants = [Assistant]() + for result in response.data ?? 
[] { + let codeInterpreter = response.tools?.filter { $0.toolType == "code_interpreter" }.first != nil + let retrieval = response.tools?.filter { $0.toolType == "retrieval" }.first != nil + + assistants.append(Assistant(id: result.id, name: result.name, description: result.description, instructions: result.instructions, codeInterpreter: codeInterpreter, retrieval: retrieval)) + } + availableAssistants = assistants + return assistants + + } catch { + // TODO: Better error handling + print(error.localizedDescription) + } + return [] + } + + func selectAssistant(_ assistantId: String?) { + selectedAssistantId = assistantId + } + + @MainActor + func uploadFile(url: URL) async -> String? { + do { + let fileData = try Data(contentsOf: url) + + // TODO: Support all the same types as openAI (not just pdf). + let result = try await openAIClient.files(query: FilesQuery(purpose: "assistants", file: fileData, fileName: url.lastPathComponent, contentType: "application/pdf")) + return result.id + } + catch { + print("error = \(error)") + return nil + } + } +} diff --git a/Demo/DemoChat/Sources/ChatStore.swift b/Demo/DemoChat/Sources/ChatStore.swift index 51ee6b11..19a67a7b 100644 --- a/Demo/DemoChat/Sources/ChatStore.swift +++ b/Demo/DemoChat/Sources/ChatStore.swift @@ -8,6 +8,7 @@ import Foundation import Combine import OpenAI +import SwiftUI public final class ChatStore: ObservableObject { public var openAIClient: OpenAIProtocol @@ -17,6 +18,15 @@ public final class ChatStore: ObservableObject { @Published var conversationErrors: [Conversation.ID: Error] = [:] @Published var selectedConversationID: Conversation.ID? + // Used for assistants API state. + private var timer: Timer? + private var timeInterval: TimeInterval = 1.0 + private var currentRunId: String? + private var currentThreadId: String? + private var currentConversationId: String? + + @Published var isSendingMessage = false + var selectedConversation: Conversation? { selectedConversationID.flatMap { id in conversations.first { $0.id == id } @@ -39,19 +49,19 @@ public final class ChatStore: ObservableObject { } // MARK: - Events - func createConversation() { - let conversation = Conversation(id: idProvider(), messages: []) + func createConversation(type: ConversationType = .normal, assistantId: String? = nil) { + let conversation = Conversation(id: idProvider(), messages: [], type: type, assistantId: assistantId) conversations.append(conversation) } - + func selectConversation(_ conversationId: Conversation.ID?) { selectedConversationID = conversationId } - + func deleteConversation(_ conversationId: Conversation.ID) { conversations.removeAll(where: { $0.id == conversationId }) } - + @MainActor func sendMessage( _ message: Message, @@ -61,14 +71,59 @@ public final class ChatStore: ObservableObject { guard let conversationIndex = conversations.firstIndex(where: { $0.id == conversationId }) else { return } - conversations[conversationIndex].messages.append(message) - await completeChat( - conversationId: conversationId, - model: model - ) + switch conversations[conversationIndex].type { + case .normal: + conversations[conversationIndex].messages.append(message) + + await completeChat( + conversationId: conversationId, + model: model + ) + // For assistant case we send chats to thread and then poll, polling will receive sent chat + new assistant messages. + case .assistant: + + // First message in an assistant thread. 
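+                // Creating the thread here seeds it with this first message; a run is then started for the
+                // selected assistant and polled (startPolling -> timerFired) until it reports "completed",
+                // at which point any new thread messages are appended to the conversation.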
+ if conversations[conversationIndex].messages.count == 0 { + do { + let threadsQuery = ThreadsQuery(messages: [Chat(role: message.role, content: message.content)]) + let threadsResult = try await openAIClient.threads(query: threadsQuery) + + guard let currentAssistantId = conversations[conversationIndex].assistantId else { return print("No assistant selected.")} + + let runsQuery = RunsQuery(assistantId: currentAssistantId) + let runsResult = try await openAIClient.runs(threadId: threadsResult.id, query: runsQuery) + + // check in on the run every time the poller gets hit. + startPolling(conversationId: conversationId, runId: runsResult.id, threadId: threadsResult.id) + } + catch { + print("error: \(error) creating thread w/ message") + } + } + // Subsequent messages on the assistant thread. + else { + do { + guard let currentThreadId else { return print("No thread to add message to.")} + + let _ = try await openAIClient.threadsAddMessage(threadId: currentThreadId, + query: ThreadAddMessageQuery(role: message.role.rawValue, content: message.content)) + + guard let currentAssistantId = conversations[conversationIndex].assistantId else { return print("No assistant selected.")} + + let runsQuery = RunsQuery(assistantId: currentAssistantId) + let runsResult = try await openAIClient.runs(threadId: currentThreadId, query: runsQuery) + + // check in on the run every time the poller gets hit. + startPolling(conversationId: conversationId, runId: runsResult.id, threadId: currentThreadId) + } + catch { + print("error: \(error) adding to thread w/ message") + } + } + } } - + @MainActor func completeChat( conversationId: Conversation.ID, @@ -77,7 +132,7 @@ public final class ChatStore: ObservableObject { guard let conversation = conversations.first(where: { $0.id == conversationId }) else { return } - + conversationErrors[conversationId] = nil do { @@ -89,16 +144,16 @@ public final class ChatStore: ObservableObject { name: "getWeatherData", description: "Get the current weather in a given location", parameters: .init( - type: .object, - properties: [ - "location": .init(type: .string, description: "The city and state, e.g. San Francisco, CA") - ], - required: ["location"] + type: .object, + properties: [ + "location": .init(type: .string, description: "The city and state, e.g. San Francisco, CA") + ], + required: ["location"] ) ) let functions = [weatherFunction] - + let chatsStream: AsyncThrowingStream = openAIClient.chatsStream( query: ChatQuery( model: model, @@ -117,10 +172,10 @@ public final class ChatStore: ObservableObject { // Function calls are also streamed, so we need to accumulate. if let functionCallDelta = choice.delta.functionCall { if let nameDelta = functionCallDelta.name { - functionCallName += nameDelta + functionCallName += nameDelta } if let argumentsDelta = functionCallDelta.arguments { - functionCallArguments += argumentsDelta + functionCallArguments += argumentsDelta } } var messageText = choice.delta.content ?? 
"" @@ -153,4 +208,62 @@ public final class ChatStore: ObservableObject { conversationErrors[conversationId] = error } } + + // Polling + func startPolling(conversationId: Conversation.ID, runId: String, threadId: String) { + currentRunId = runId + currentThreadId = threadId + currentConversationId = conversationId + isSendingMessage = true + timer = Timer.scheduledTimer(withTimeInterval: timeInterval, repeats: true) { [weak self] _ in + DispatchQueue.main.async { + self?.timerFired() + } + } + } + + func stopPolling() { + isSendingMessage = false + timer?.invalidate() + timer = nil + } + + private func timerFired() { + guard let conversationIndex = conversations.firstIndex(where: { $0.id == currentConversationId }) else { + return + } + + Task { + let result = try await openAIClient.runRetrieve(threadId: currentThreadId ?? "", runId: currentRunId ?? "") + + switch result.status { + // Get threadsMesages. + case "completed": + stopPolling() + + var before: String? + if let lastMessageId = self.conversations[conversationIndex].messages.last?.id { + before = lastMessageId + } + + let result = try await openAIClient.threadsMessages(threadId: currentThreadId ?? "", before: before) + + DispatchQueue.main.async { + for item in result.data.reversed() { + let role = item.role + for innerItem in item.content { + let message = Message(id: item.id, role: Chat.Role(rawValue: role) ?? .user, content: innerItem.text.value, createdAt: Date()) + self.conversations[conversationIndex].messages.append(message) + } + } + } + break + case "failed": + stopPolling() + break + default: + break + } + } + } } diff --git a/Demo/DemoChat/Sources/Models/Assistant.swift b/Demo/DemoChat/Sources/Models/Assistant.swift new file mode 100644 index 00000000..d41dfeb0 --- /dev/null +++ b/Demo/DemoChat/Sources/Models/Assistant.swift @@ -0,0 +1,32 @@ +// +// Conversation.swift +// DemoChat +// +// Created by Sihao Lu on 3/25/23. +// + +import Foundation + +struct Assistant: Hashable { + init(id: String, name: String, description: String? = nil, instructions: String? = nil, codeInterpreter: Bool, retrieval: Bool) { + self.id = id + self.name = name + self.description = description + self.instructions = instructions + self.codeInterpreter = codeInterpreter + self.retrieval = retrieval + } + + typealias ID = String + + let id: String + let name: String + let description: String? + let instructions: String? + + var codeInterpreter: Bool + var retrieval: Bool +} + + +extension Assistant: Equatable, Identifiable {} diff --git a/Demo/DemoChat/Sources/Models/Conversation.swift b/Demo/DemoChat/Sources/Models/Conversation.swift index 7d6f82b8..b1c3ab71 100644 --- a/Demo/DemoChat/Sources/Models/Conversation.swift +++ b/Demo/DemoChat/Sources/Models/Conversation.swift @@ -8,15 +8,24 @@ import Foundation struct Conversation { - init(id: String, messages: [Message] = []) { + init(id: String, messages: [Message] = [], type: ConversationType = .normal, assistantId: String? = nil) { self.id = id self.messages = messages + self.type = type + self.assistantId = assistantId } typealias ID = String let id: String var messages: [Message] + var type: ConversationType + var assistantId: String? 
+} + +enum ConversationType { + case normal + case assistant } extension Conversation: Equatable, Identifiable {} diff --git a/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift b/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift new file mode 100644 index 00000000..681559e9 --- /dev/null +++ b/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift @@ -0,0 +1,85 @@ +// +// AssistantModalContentView.swift +// +// +// Created by Chris Dillard on 11/9/23. +// + +import SwiftUI + +struct AssistantModalContentView: View { + @Binding var name: String + @Binding var description: String + @Binding var customInstructions: String + + @Binding var codeInterpreter: Bool + @Binding var retrieval: Bool + + var modify: Bool + + @Environment(\.dismiss) var dismiss + + @Binding var isPickerPresented: Bool + @Binding var selectedFileURL: URL? + + + var onCommit: () -> Void + var onFileUpload: () -> Void + + + var body: some View { + NavigationView { + Form { + Section(header: Text("Name")) { + TextField("Name", text: $name) + } + Section(header: Text("Description")) { + TextEditor(text: $description) + .frame(minHeight: 50) + } + Section(header: Text("Custom Instructions")) { + TextEditor(text: $customInstructions) + .frame(minHeight: 100) + } + + Toggle(isOn: $codeInterpreter, label: { + Text("Code interpreter") + }) + + Toggle(isOn: $retrieval, label: { + Text("Retrieval") + }) + if let selectedFileURL { + HStack { + Text("File: \(selectedFileURL.lastPathComponent)") + + Button("Remove") { + self.selectedFileURL = nil + } + } + } + else { + Button("Upload File") { + isPickerPresented = true + } + .sheet(isPresented: $isPickerPresented) { + DocumentPicker { url in + selectedFileURL = url + onFileUpload() + } + } + } + } + .navigationTitle("Enter Assistant Details") + .navigationBarItems( + leading: Button("Cancel") { + dismiss() + }, + trailing: Button("OK") { + onCommit() + dismiss() + } + ) + } + } +} diff --git a/Demo/DemoChat/Sources/UI/AssistantsListView.swift b/Demo/DemoChat/Sources/UI/AssistantsListView.swift new file mode 100644 index 00000000..143e9411 --- /dev/null +++ b/Demo/DemoChat/Sources/UI/AssistantsListView.swift @@ -0,0 +1,28 @@ +// +// ListView.swift +// DemoChat +// +// Created by Sihao Lu on 3/25/23. +// + +import SwiftUI + +struct AssistantsListView: View { + @Binding var conversations: [Assistant] + @Binding var selectedAssistantId: String? + + var body: some View { + List( + $conversations, + editActions: [.delete], + selection: $selectedAssistantId + ) { $conversation in + Text( + conversation.name + ) + .lineLimit(2) + + } + .navigationTitle("Assistants") + } +} diff --git a/Demo/DemoChat/Sources/UI/AssistantsView.swift b/Demo/DemoChat/Sources/UI/AssistantsView.swift new file mode 100644 index 00000000..c216ab2f --- /dev/null +++ b/Demo/DemoChat/Sources/UI/AssistantsView.swift @@ -0,0 +1,114 @@ +// +// ChatView.swift +// DemoChat +// +// Created by Sihao Lu on 3/25/23. +// + +import Combine +import SwiftUI + +public struct AssistantsView: View { + @ObservedObject var store: ChatStore + @ObservedObject var assistantStore: AssistantStore + + @Environment(\.dateProviderValue) var dateProvider + @Environment(\.idProviderValue) var idProvider + + // state to select file + @State private var isPickerPresented: Bool = false + @State private var fileURL: URL? + @State private var uploadedFileId: String? 
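+    // If a file was uploaded, its id is passed as file_ids when the assistant is created in handleOKTap().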
+ + // state to modify assistant + @State private var name: String = "" + @State private var description: String = "" + @State private var customInstructions: String = "" + + @State private var codeInterpreter: Bool = false + @State private var retrieval: Bool = false + + public init(store: ChatStore, assistantStore: AssistantStore) { + self.store = store + self.assistantStore = assistantStore + } + + public var body: some View { + ZStack { + NavigationSplitView { + AssistantsListView( + conversations: $assistantStore.availableAssistants, selectedAssistantId: Binding( + get: { + assistantStore.selectedAssistantId + + }, set: { newId in + assistantStore.selectAssistant(newId) + + let selectedAssistant = assistantStore.availableAssistants.filter { $0.id == assistantStore.selectedAssistantId }.first + + name = selectedAssistant?.name ?? "" + description = selectedAssistant?.description ?? "" + customInstructions = selectedAssistant?.instructions ?? "" + codeInterpreter = selectedAssistant?.codeInterpreter ?? false + retrieval = selectedAssistant?.retrieval ?? false + + + }) + ) + .toolbar { + ToolbarItem( + placement: .primaryAction + ) { + Menu { + Button("Get Assistants") { + Task { + let _ = await assistantStore.getAssistants(limit: 20) + } + } + } label: { + Image(systemName: "plus") + } + + .buttonStyle(.borderedProminent) + } + } + } detail: { + // TODO: Allow modifying Assistant. + if let selectedAssistantId = assistantStore.selectedAssistantId { + + + AssistantModalContentView(name: $name, description: $description, customInstructions: $customInstructions, + codeInterpreter: $codeInterpreter, retrieval: $retrieval, modify: true, isPickerPresented: $isPickerPresented, selectedFileURL: $fileURL) { + Task { + await handleOKTap() + } + } onFileUpload: { + Task { + guard let fileURL else { return } + + uploadedFileId = await assistantStore.uploadFile(url: fileURL) + } + } + } + } + } + } + + func handleOKTap() async { + + // When OK is tapped that means we should save the modified assistant and start a new thread. + var fileIds = [String]() + if let fileId = uploadedFileId { + fileIds.append(fileId) + } + + let asstId = await assistantStore.createAssistant(name: name, description: description, instructions: customInstructions, codeInterpreter: codeInterpreter, retrievel: retrieval, fileIds: fileIds.isEmpty ? nil : fileIds) + + guard let asstId else { + print("failed to create Assistant.") + return + } + + store.createConversation(type: .assistant, assistantId: asstId) + } +} diff --git a/Demo/DemoChat/Sources/UI/ChatView.swift b/Demo/DemoChat/Sources/UI/ChatView.swift index 1b872c21..eac440c9 100644 --- a/Demo/DemoChat/Sources/UI/ChatView.swift +++ b/Demo/DemoChat/Sources/UI/ChatView.swift @@ -10,58 +10,129 @@ import SwiftUI public struct ChatView: View { @ObservedObject var store: ChatStore - + @ObservedObject var assistantStore: AssistantStore + @Environment(\.dateProviderValue) var dateProvider @Environment(\.idProviderValue) var idProvider - public init(store: ChatStore) { + + @State private var isModalPresented = false + @State private var name: String = "" + @State private var description: String = "" + @State private var customInstructions: String = "" + + @State private var codeInterpreter: Bool = false + @State private var retrieval: Bool = false + + + @State private var isPickerPresented: Bool = false + @State private var fileURL: URL? + @State private var uploadedFileId: String? 
+ + + public init(store: ChatStore, assistantStore: AssistantStore) { self.store = store + self.assistantStore = assistantStore } - + public var body: some View { - NavigationSplitView { - ListView( - conversations: $store.conversations, - selectedConversationId: Binding( - get: { - store.selectedConversationID - }, set: { newId in - store.selectConversation(newId) - }) - ) - .toolbar { - ToolbarItem( - placement: .primaryAction - ) { - Button(action: { - store.createConversation() - }) { - Image(systemName: "plus") + ZStack { + NavigationSplitView { + ListView( + conversations: $store.conversations, + selectedConversationId: Binding( + get: { + store.selectedConversationID + }, set: { newId in + store.selectConversation(newId) + }) + ) + .toolbar { + ToolbarItem( + placement: .primaryAction + ) { + + Menu { + Button("Create Chat") { + store.createConversation() + + } + Button("Create Assistant") { + isModalPresented = true + + } + } label: { + Image(systemName: "plus") + } + + .buttonStyle(.borderedProminent) } - .buttonStyle(.borderedProminent) + } + } detail: { + if let conversation = store.selectedConversation { + DetailView( + availableAssistants: assistantStore.availableAssistants, conversation: conversation, + error: store.conversationErrors[conversation.id], + sendMessage: { message, selectedModel in + Task { + await store.sendMessage( + Message( + id: idProvider(), + role: .user, + content: message, + createdAt: dateProvider() + ), + conversationId: conversation.id, + model: selectedModel + ) + } + }, isSendingMessage: $store.isSendingMessage + ) } } - } detail: { - if let conversation = store.selectedConversation { - DetailView( - conversation: conversation, - error: store.conversationErrors[conversation.id], - sendMessage: { message, selectedModel in - Task { - await store.sendMessage( - Message( - id: idProvider(), - role: .user, - content: message, - createdAt: dateProvider() - ), - conversationId: conversation.id, - model: selectedModel - ) + .sheet(isPresented: $isModalPresented) { + AssistantModalContentView(name: $name, description: $description, customInstructions: $customInstructions, + codeInterpreter: $codeInterpreter, retrieval: $retrieval, modify: false, isPickerPresented: $isPickerPresented, selectedFileURL: $fileURL) { + Task { + await handleOKTap() + } + } onFileUpload: { + Task { + guard let fileURL else { return } + + uploadedFileId = await assistantStore.uploadFile(url: fileURL) + if uploadedFileId == nil { + print("Failed to upload") } } - ) + } } } } + func handleOKTap() async { + + // Reset state for Assistant creator. + name = "" + description = "" + customInstructions = "" + + codeInterpreter = false + retrieval = false + fileURL = nil + uploadedFileId = nil + + var fileIds = [String]() + if let fileId = uploadedFileId { + fileIds.append(fileId) + } + + let asstId = await assistantStore.createAssistant(name: name, description: description, instructions: customInstructions, codeInterpreter: codeInterpreter, retrievel: retrieval, fileIds: fileIds.isEmpty ? 
nil : fileIds) + + guard let asstId else { + print("failed to create Assistant.") + return + } + + store.createConversation(type: .assistant, assistantId: asstId) + } } diff --git a/Demo/DemoChat/Sources/UI/DetailView.swift b/Demo/DemoChat/Sources/UI/DetailView.swift index 9e2a07e9..21f15c81 100644 --- a/Demo/DemoChat/Sources/UI/DetailView.swift +++ b/Demo/DemoChat/Sources/UI/DetailView.swift @@ -18,6 +18,7 @@ struct DetailView: View { @FocusState private var isFocused: Bool @State private var showsModelSelectionSheet = false @State private var selectedChatModel: Model = .gpt4_0613 + var availableAssistants: [Assistant] private let availableChatModels: [Model] = [.gpt3_5Turbo0613, .gpt4_0613] @@ -25,6 +26,8 @@ struct DetailView: View { let error: Error? let sendMessage: (String, Model) -> Void + @Binding var isSendingMessage: Bool + private var fillColor: Color { #if os(iOS) return Color(uiColor: UIColor.systemBackground) @@ -65,11 +68,12 @@ struct DetailView: View { inputBar(scrollViewProxy: scrollViewProxy) } - .navigationTitle("Chat") + .navigationTitle(conversation.type == .assistant ? "Assistant: \(currentAssistantName())" : "Chat") .safeAreaInset(edge: .top) { HStack { + // TODO: Replace with actual gpt-4-1106-preview model. Text( - "Model: \(selectedChatModel)" + "Model: \(conversation.type == .assistant ? "gpt-4-1106-preview" : selectedChatModel)" ) .font(.caption) .foregroundColor(.secondary) @@ -79,11 +83,28 @@ struct DetailView: View { .padding(.vertical, 8) } .toolbar { - ToolbarItem(placement: .navigationBarTrailing) { - Button(action: { - showsModelSelectionSheet.toggle() - }) { - Image(systemName: "cpu") + if conversation.type == .assistant { + ToolbarItem(placement: .navigationBarTrailing) { + + Menu { + ForEach(availableAssistants, id: \.self) { item in + Button(item.name) { + print("Select assistant") + //selectedItem = item + } + } + } label: { + Image(systemName: "eyeglasses") + } + } + } + if conversation.type == .normal { + ToolbarItem(placement: .navigationBarTrailing) { + Button(action: { + showsModelSelectionSheet.toggle() + }) { + Image(systemName: "cpu") + } } } } @@ -165,18 +186,24 @@ struct DetailView: View { } .padding(.leading) - Button(action: { - withAnimation { - tapSendMessage(scrollViewProxy: scrollViewProxy) + if isSendingMessage { + ProgressView() + .progressViewStyle(CircularProgressViewStyle()) + .padding(.trailing) + } else { + Button(action: { + withAnimation { + tapSendMessage(scrollViewProxy: scrollViewProxy) + } + }) { + Image(systemName: "paperplane") + .resizable() + .aspectRatio(contentMode: .fit) + .frame(width: 24, height: 24) + .padding(.trailing) } - }) { - Image(systemName: "paperplane") - .resizable() - .aspectRatio(contentMode: .fit) - .frame(width: 24, height: 24) - .padding(.trailing) + .disabled(inputText.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty) } - .disabled(inputText.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty) } .padding(.bottom) } @@ -196,6 +223,10 @@ struct DetailView: View { // scrollViewProxy.scrollTo(lastMessage.id, anchor: .bottom) // } } + + func currentAssistantName() -> String { + availableAssistants.filter { conversation.assistantId == $0.id }.first?.name ?? 
"" + } } struct ChatBubble: View { @@ -261,6 +292,7 @@ struct ChatBubble: View { struct DetailView_Previews: PreviewProvider { static var previews: some View { DetailView( + availableAssistants: [], conversation: Conversation( id: "1", messages: [ @@ -277,7 +309,7 @@ struct DetailView_Previews: PreviewProvider { ] ), error: nil, - sendMessage: { _, _ in } + sendMessage: { _, _ in }, isSendingMessage: Binding.constant(false) ) } } diff --git a/Demo/DemoChat/Sources/UI/DocumentPicker.swift b/Demo/DemoChat/Sources/UI/DocumentPicker.swift new file mode 100644 index 00000000..770ea0c3 --- /dev/null +++ b/Demo/DemoChat/Sources/UI/DocumentPicker.swift @@ -0,0 +1,42 @@ +// +// DocumentPicker.swift +// +// +// Created by Chris Dillard on 11/10/23. +// + +import SwiftUI +import UniformTypeIdentifiers + +struct DocumentPicker: UIViewControllerRepresentable { + var callback: (URL) -> Void + + func makeUIViewController(context: Context) -> UIDocumentPickerViewController { + // TODO: Support all the same file types as openAI. + let supportedTypes: [UTType] = [UTType.pdf] + let pickerViewController = UIDocumentPickerViewController(forOpeningContentTypes: supportedTypes, asCopy: true) + pickerViewController.allowsMultipleSelection = false + pickerViewController.shouldShowFileExtensions = true + pickerViewController.delegate = context.coordinator + return pickerViewController + } + + func updateUIViewController(_ uiViewController: UIDocumentPickerViewController, context: Context) {} + + func makeCoordinator() -> Coordinator { + return Coordinator(self) + } + + class Coordinator: NSObject, UIDocumentPickerDelegate { + var parent: DocumentPicker + + init(_ parent: DocumentPicker) { + self.parent = parent + } + + func documentPicker(_ controller: UIDocumentPickerViewController, didPickDocumentsAt urls: [URL]) { + guard let url = urls.first else { return } + parent.callback(url) + } + } +} diff --git a/Demo/DemoChat/Sources/UI/ListView.swift b/Demo/DemoChat/Sources/UI/ListView.swift index bfbdfc56..d8be5585 100644 --- a/Demo/DemoChat/Sources/UI/ListView.swift +++ b/Demo/DemoChat/Sources/UI/ListView.swift @@ -17,10 +17,28 @@ struct ListView: View { editActions: [.delete], selection: $selectedConversationId ) { $conversation in - Text( - conversation.messages.last?.content ?? 
"New Conversation" - ) - .lineLimit(2) + if let convoContent = conversation.messages.last?.content { + Text( + convoContent + ) + .lineLimit(2) + } + else { + if conversation.type == .assistant { + Text( + "New Assistant" + ) + .lineLimit(2) + } + else { + Text( + "New Conversation" + ) + .lineLimit(2) + } + } + + } .navigationTitle("Conversations") } diff --git a/Demo/DemoChat/Sources/UI/ModerationChatView.swift b/Demo/DemoChat/Sources/UI/ModerationChatView.swift index 41658845..ec66425e 100644 --- a/Demo/DemoChat/Sources/UI/ModerationChatView.swift +++ b/Demo/DemoChat/Sources/UI/ModerationChatView.swift @@ -19,7 +19,7 @@ public struct ModerationChatView: View { public var body: some View { DetailView( - conversation: store.moderationConversation, + availableAssistants: [], conversation: store.moderationConversation, error: store.moderationConversationError, sendMessage: { message, _ in Task { @@ -32,7 +32,7 @@ public struct ModerationChatView: View { ) ) } - } + }, isSendingMessage: Binding.constant(false) ) } } diff --git a/Sources/OpenAI/OpenAI.swift b/Sources/OpenAI/OpenAI.swift index 720f0f23..b3963ff9 100644 --- a/Sources/OpenAI/OpenAI.swift +++ b/Sources/OpenAI/OpenAI.swift @@ -55,7 +55,38 @@ final public class OpenAI: OpenAIProtocol { public convenience init(configuration: Configuration, session: URLSession = URLSession.shared) { self.init(configuration: configuration, session: session as URLSessionProtocol) } - + + // UPDATES FROM 11-06-23 + public func threadsAddMessage(threadId: String, query: ThreadAddMessageQuery, completion: @escaping (Result) -> Void) { + performRequest(request: JSONRequest(body: query, url: buildRunsURL(path: .threadsMessages, threadId: threadId)), completion: completion) + } + + public func threadsMessages(threadId: String, before: String?, completion: @escaping (Result) -> Void) { + performRequest(request: JSONRequest(body: nil, url: buildRunsURL(path: .threadsMessages, threadId: threadId, before: before), method: "GET"), completion: completion) + } + + public func runRetrieve(threadId: String, runId: String, completion: @escaping (Result) -> Void) { + performRequest(request: JSONRequest(body: nil, url: buildRunRetrieveURL(path: .runRetrieve, threadId: threadId, runId: runId), method: "GET"), completion: completion) + } + + public func runs(threadId: String, query: RunsQuery, completion: @escaping (Result) -> Void) { + performRequest(request: JSONRequest(body: query, url: buildRunsURL(path: .runs, threadId: threadId)), completion: completion) + } + + public func threads(query: ThreadsQuery, completion: @escaping (Result) -> Void) { + performRequest(request: JSONRequest(body: query, url: buildURL(path: .threads)), completion: completion) + } + + public func assistants(query: AssistantsQuery?, method: String, completion: @escaping (Result) -> Void) { + performRequest(request: JSONRequest(body: query, url: buildURL(path: .assistants), method: method), completion: completion) + } + + public func files(query: FilesQuery, completion: @escaping (Result) -> Void) { + performRequest(request: MultipartFormDataRequest(body: query, url: buildURL(path: .files)), completion: completion) + } + + // END UPDATES FROM 11-06-23 + public func completions(query: CompletionsQuery, completion: @escaping (Result) -> Void) { performRequest(request: JSONRequest(body: query, url: buildURL(path: .completions)), completion: completion) } @@ -130,6 +161,9 @@ extension OpenAI { var apiError: Error? 
= nil do { + + let errorText = String(data: data, encoding: .utf8) + let decoded = try JSONDecoder().decode(ResultType.self, from: data) completion(.success(decoded)) } catch { @@ -159,6 +193,7 @@ extension OpenAI { onResult(.success(object)) } session.onProcessingError = {_, error in + print("OpenAI API error = \(error.localizedDescription)") onResult(.failure(error)) } session.onComplete = { [weak self] object, error in @@ -182,11 +217,42 @@ extension OpenAI { components.path = path return components.url! } + + func buildRunsURL(path: String, threadId: String, before: String? = nil) -> URL { + var components = URLComponents() + components.scheme = "https" + components.host = configuration.host + components.path = path.replacingOccurrences(of: "THREAD_ID", with: threadId) + if let before { + components.queryItems = [URLQueryItem(name: "before", value: before)] + } + return components.url! + } + + func buildRunRetrieveURL(path: String, threadId: String, runId: String) -> URL { + var components = URLComponents() + components.scheme = "https" + components.host = configuration.host + components.path = path.replacingOccurrences(of: "THREAD_ID", with: threadId) + .replacingOccurrences(of: "RUN_ID", with: runId) + return components.url! + } } typealias APIPath = String extension APIPath { - + // 1106 + static let assistants = "/v1/assistants" + // TODO: Implement Assistant Modify + static let assistantsModify = "/v1/assistants/ASST_ID" + + static let threads = "/v1/threads" + static let runs = "/v1/threads/THREAD_ID/runs" + static let runRetrieve = "/v1/threads/THREAD_ID/runs/RUN_ID" + static let threadsMessages = "/v1/threads/THREAD_ID/messages" + static let files = "/v1/files" + // 1106 end + static let completions = "/v1/completions" static let embeddings = "/v1/embeddings" static let chats = "/v1/chat/completions" diff --git a/Sources/OpenAI/Private/JSONRequest.swift b/Sources/OpenAI/Private/JSONRequest.swift index 526f95c9..afacfc0f 100644 --- a/Sources/OpenAI/Private/JSONRequest.swift +++ b/Sources/OpenAI/Private/JSONRequest.swift @@ -29,6 +29,9 @@ extension JSONRequest: URLRequestBuildable { var request = URLRequest(url: url, timeoutInterval: timeoutInterval) request.setValue("application/json", forHTTPHeaderField: "Content-Type") request.setValue("Bearer \(token)", forHTTPHeaderField: "Authorization") + // TODO: ONLY PASS IF ASSISTANTS API + request.setValue("assistants=v1", forHTTPHeaderField: "OpenAI-Beta") + if let organizationIdentifier { request.setValue(organizationIdentifier, forHTTPHeaderField: "OpenAI-Organization") } diff --git a/Sources/OpenAI/Public/Models/AssistantsQuery.swift b/Sources/OpenAI/Public/Models/AssistantsQuery.swift new file mode 100644 index 00000000..64c22cfe --- /dev/null +++ b/Sources/OpenAI/Public/Models/AssistantsQuery.swift @@ -0,0 +1,56 @@ +// +// AssistantsQuery.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct AssistantsQuery: Codable { + + public let model: Model + + public let name: String + + public let description: String + + public let instructions: String + + public let tools: [Tool]? + + public let fileIds: [String]? + + enum CodingKeys: String, CodingKey { + case model + case name + case description + case instructions + case tools + case fileIds = "file_ids" + } + + public init(model: Model, name: String, description: String, instructions: String, tools: [Tool], fileIds: [String]? 
= nil) { + self.model = model + self.name = name + + self.description = description + self.instructions = instructions + + self.tools = tools + self.fileIds = fileIds + } +} + +public struct Tool: Codable, Equatable { + public let toolType: String + + enum CodingKeys: String, CodingKey { + case toolType = "type" + } + + public init(toolType: String) { + self.toolType = toolType + } + +} diff --git a/Sources/OpenAI/Public/Models/AssistantsResult.swift b/Sources/OpenAI/Public/Models/AssistantsResult.swift new file mode 100644 index 00000000..58644e74 --- /dev/null +++ b/Sources/OpenAI/Public/Models/AssistantsResult.swift @@ -0,0 +1,30 @@ +// +// AssistantsResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct AssistantsResult: Codable, Equatable { + + public let id: String? + + public let data: [AssistantContent]? + public let tools: [Tool]? + + enum CodingKeys: String, CodingKey { + case data + case id + case tools + } + + public struct AssistantContent: Codable, Equatable { + + public let id: String + public let name: String + public let description: String? + public let instructions: String? + } +} diff --git a/Sources/OpenAI/Public/Models/FilesQuery.swift b/Sources/OpenAI/Public/Models/FilesQuery.swift new file mode 100644 index 00000000..20ccfeb1 --- /dev/null +++ b/Sources/OpenAI/Public/Models/FilesQuery.swift @@ -0,0 +1,42 @@ +// +// AssistantsQuery.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct FilesQuery: Codable { + + public let purpose: String + + public let file: Data + public let fileName: String + + public let contentType: String + + enum CodingKeys: String, CodingKey { + case purpose + case file + case fileName + case contentType + } + + public init(purpose: String, file: Data, fileName: String, contentType: String) { + self.purpose = purpose + self.file = file + self.fileName = fileName + self.contentType = contentType + } +} + +extension FilesQuery: MultipartFormDataBodyEncodable { + func encode(boundary: String) -> Data { + let bodyBuilder = MultipartFormDataBodyBuilder(boundary: boundary, entries: [ + .string(paramName: "purpose", value: purpose), + .file(paramName: "file", fileName: fileName, fileData: file, contentType: contentType), + ]) + return bodyBuilder.build() + } +} diff --git a/Sources/OpenAI/Public/Models/FilesResult.swift b/Sources/OpenAI/Public/Models/FilesResult.swift new file mode 100644 index 00000000..3063c0fe --- /dev/null +++ b/Sources/OpenAI/Public/Models/FilesResult.swift @@ -0,0 +1,14 @@ +// +// FilesResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct FilesResult: Codable, Equatable { + + public let id: String + +} diff --git a/Sources/OpenAI/Public/Models/RunRetrieveQuery.swift b/Sources/OpenAI/Public/Models/RunRetrieveQuery.swift new file mode 100644 index 00000000..eef7e5d0 --- /dev/null +++ b/Sources/OpenAI/Public/Models/RunRetrieveQuery.swift @@ -0,0 +1,15 @@ +// +// RunRetrieveQuery.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct RunRetrieveQuery: Equatable, Codable { + + public init() { + + } +} diff --git a/Sources/OpenAI/Public/Models/RunRetrieveResult.swift b/Sources/OpenAI/Public/Models/RunRetrieveResult.swift new file mode 100644 index 00000000..5e377b9a --- /dev/null +++ b/Sources/OpenAI/Public/Models/RunRetrieveResult.swift @@ -0,0 +1,13 @@ +// +// RunsResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. 
+// + +import Foundation + +public struct RunRetreiveResult: Codable, Equatable { + + public let status: String +} diff --git a/Sources/OpenAI/Public/Models/RunsQuery.swift b/Sources/OpenAI/Public/Models/RunsQuery.swift new file mode 100644 index 00000000..b9ba2ad6 --- /dev/null +++ b/Sources/OpenAI/Public/Models/RunsQuery.swift @@ -0,0 +1,22 @@ +// +// AssistantsQuery.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct RunsQuery: Codable { + + public let assistantId: String + + enum CodingKeys: String, CodingKey { + case assistantId = "assistant_id" + } + + public init(assistantId: String) { + + self.assistantId = assistantId + } +} diff --git a/Sources/OpenAI/Public/Models/RunsResult.swift b/Sources/OpenAI/Public/Models/RunsResult.swift new file mode 100644 index 00000000..858f15f5 --- /dev/null +++ b/Sources/OpenAI/Public/Models/RunsResult.swift @@ -0,0 +1,13 @@ +// +// RunsResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct RunsResult: Codable, Equatable { + + public let id: String +} diff --git a/Sources/OpenAI/Public/Models/ThreadAddMessageQuery.swift b/Sources/OpenAI/Public/Models/ThreadAddMessageQuery.swift new file mode 100644 index 00000000..153851e2 --- /dev/null +++ b/Sources/OpenAI/Public/Models/ThreadAddMessageQuery.swift @@ -0,0 +1,24 @@ +// +// ThreadAddMessageQuery.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct ThreadAddMessageQuery: Equatable, Codable { + public let role: String + public let content: String + + enum CodingKeys: String, CodingKey { + case role + case content + + } + + public init(role: String, content: String) { + self.role = role + self.content = content + } +} diff --git a/Sources/OpenAI/Public/Models/ThreadAddMessagesResult.swift b/Sources/OpenAI/Public/Models/ThreadAddMessagesResult.swift new file mode 100644 index 00000000..f39736ef --- /dev/null +++ b/Sources/OpenAI/Public/Models/ThreadAddMessagesResult.swift @@ -0,0 +1,13 @@ +// +// ThreadsMessagesResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct ThreadAddMessageResult: Codable, Equatable { + public let id: String + +} diff --git a/Sources/OpenAI/Public/Models/ThreadsMessagesResult.swift b/Sources/OpenAI/Public/Models/ThreadsMessagesResult.swift new file mode 100644 index 00000000..c8c58c5d --- /dev/null +++ b/Sources/OpenAI/Public/Models/ThreadsMessagesResult.swift @@ -0,0 +1,53 @@ +// +// ThreadsMessagesResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. 
+// + +import Foundation + +public struct ThreadsMessagesResult: Codable, Equatable { + + public struct ThreadsMessage: Codable, Equatable { + + public struct ThreadsMessageContent: Codable, Equatable { + + public struct ThreadsMessageContentText: Codable, Equatable { + + public let value: String + + enum CodingKeys: String, CodingKey { + case value + } + } + + public let type: String + public let text: ThreadsMessageContentText + + enum CodingKeys: String, CodingKey { + case type + case text + } + } + public let id: String + + public let role: String + + public let content: [ThreadsMessageContent] + + enum CodingKeys: String, CodingKey { + case id + case content + case role + } + } + + + public let data: [ThreadsMessage] + + enum CodingKeys: String, CodingKey { + case data + } + +} diff --git a/Sources/OpenAI/Public/Models/ThreadsQuery.swift b/Sources/OpenAI/Public/Models/ThreadsQuery.swift new file mode 100644 index 00000000..1f27849f --- /dev/null +++ b/Sources/OpenAI/Public/Models/ThreadsQuery.swift @@ -0,0 +1,20 @@ +// +// ThreadsQuery.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct ThreadsQuery: Equatable, Codable { + public let messages: [Chat] + + enum CodingKeys: String, CodingKey { + case messages + } + + public init(messages: [Chat]) { + self.messages = messages + } +} diff --git a/Sources/OpenAI/Public/Models/ThreadsResult.swift b/Sources/OpenAI/Public/Models/ThreadsResult.swift new file mode 100644 index 00000000..def6031c --- /dev/null +++ b/Sources/OpenAI/Public/Models/ThreadsResult.swift @@ -0,0 +1,13 @@ +// +// AssistantsResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct ThreadsResult: Codable, Equatable { + + public let id: String +} diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift index 909704ca..2f28282f 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift @@ -213,4 +213,115 @@ public extension OpenAIProtocol { } } } + + // 1106 + func assistants( + query: AssistantsQuery?, + method: String + ) async throws -> AssistantsResult { + try await withCheckedThrowingContinuation { continuation in + assistants(query: query, method: method) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func threads( + query: ThreadsQuery + ) async throws -> ThreadsResult { + try await withCheckedThrowingContinuation { continuation in + threads(query: query) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func runs( + threadId: String, + query: RunsQuery + ) async throws -> RunsResult { + try await withCheckedThrowingContinuation { continuation in + runs(threadId: threadId, query: query) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func runRetrieve( + threadId: String, + runId: String + ) async throws -> RunRetreiveResult { + try await withCheckedThrowingContinuation { continuation in + runRetrieve(threadId: threadId, runId: runId) { result in + switch 
result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func threadsMessages( + threadId: String, + before: String? + ) async throws -> ThreadsMessagesResult { + try await withCheckedThrowingContinuation { continuation in + threadsMessages(threadId: threadId, before: before) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func threadsAddMessage( + threadId: String, + query: ThreadAddMessageQuery + ) async throws -> ThreadAddMessageResult { + try await withCheckedThrowingContinuation { continuation in + threadsAddMessage(threadId: threadId, query: query) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + func files( + query: FilesQuery + ) async throws -> FilesResult { + try await withCheckedThrowingContinuation { continuation in + files(query: query) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + // 1106 end } diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift index da8b7dfb..45add1c7 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift @@ -126,6 +126,44 @@ public extension OpenAIProtocol { } .eraseToAnyPublisher() } + + // 1106 + func assistants(query: AssistantsQuery?, method: String) -> AnyPublisher { + Future { + assistants(query: query, method: method, completion: $0) + } + .eraseToAnyPublisher() + } + + func threads(query: ThreadsQuery) -> AnyPublisher { + Future { + threads(query: query, completion: $0) + } + .eraseToAnyPublisher() + } + + func runs(threadId: String, query: RunsQuery) -> AnyPublisher { + Future { + runs(threadId: threadId, query: query, completion: $0) + } + .eraseToAnyPublisher() + } + + func runRetrieve(threadId: String, runId: String) -> AnyPublisher { + Future { + + runRetrieve(threadId: threadId, runId: runId, completion: $0) + } + .eraseToAnyPublisher() + } + + func threadsMessages(threadId: String, before: String?) -> AnyPublisher { + Future { + threadsMessages(threadId: threadId, before: before, completion: $0) + } + .eraseToAnyPublisher() + } + // 1106 end } #endif diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift index 6519e8fe..faed65b6 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift @@ -230,4 +230,29 @@ public protocol OpenAIProtocol { Returns a `Result` of type `AudioTranslationResult` if successful, or an `Error` if an error occurs. 
**/ func audioTranslations(query: AudioTranslationQuery, completion: @escaping (Result) -> Void) + + //1106 + + // TODO: Assistant Docs + func assistants(query: AssistantsQuery?, method: String, completion: @escaping (Result) -> Void) + + // TODO: Threads Docs + func threads(query: ThreadsQuery, completion: @escaping (Result) -> Void) + + // TODO: Runs Docs + func runs(threadId: String, query: RunsQuery, completion: @escaping (Result) -> Void) + + // TODO: Runs Retrieve Docs + func runRetrieve(threadId: String, runId: String, completion: @escaping (Result) -> Void) + + // TODO: Threads Messages Docs + func threadsMessages(threadId: String, before: String?, completion: @escaping (Result) -> Void) + + // TODO: Threads Add Message Docs + func threadsAddMessage(threadId: String, query: ThreadAddMessageQuery, completion: @escaping (Result) -> Void) + + // TODO: Files Docs + func files(query: FilesQuery, completion: @escaping (Result) -> Void) + + // 1106 end } diff --git a/Tests/OpenAITests/OpenAITests.swift b/Tests/OpenAITests/OpenAITests.swift index f195317e..8097af58 100644 --- a/Tests/OpenAITests/OpenAITests.swift +++ b/Tests/OpenAITests/OpenAITests.swift @@ -33,7 +33,7 @@ class OpenAITests: XCTestCase { let result = try await openAI.completions(query: query) XCTAssertEqual(result, expectedResult) } - + func testCompletionsAPIError() async throws { let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?", temperature: 0, maxTokens: 100, topP: 1, frequencyPenalty: 0, presencePenalty: 0, stop: ["\\n"]) let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") @@ -348,6 +348,110 @@ class OpenAITests: XCTestCase { let completionsURL = openAI.buildURL(path: .completions) XCTAssertEqual(completionsURL, URL(string: "https://my.host.com/v1/completions")) } + + // 1106 + func testAssistantQuery() async throws { + let query = AssistantsQuery(model: Model("gpt-4-1106-preview"), name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: []) + let expectedResult = AssistantsResult(id: "asst_1234", data: [AssistantsResult.AssistantContent(id: "asst_9876", name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.")], tools: []) + try self.stub(result: expectedResult) + + let result = try await openAI.assistants(query: query, method: "POST") + XCTAssertEqual(result, expectedResult) + } + + func testAssistantQueryError() async throws { + let query = AssistantsQuery(model: Model("gpt-4-1106-preview"), name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: []) + + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.assistants(query: query, method: "POST") } + XCTAssertEqual(inError, apiError) + } + + func testThreadsQuery() async throws { + let query = ThreadsQuery(messages: [Chat(role: .user, content: "Hello, What is AI?")]) + let expectedResult = ThreadsResult(id: "thread_1234") + try self.stub(result: expectedResult) + + let result = try await openAI.threads(query: query) + XCTAssertEqual(result, expectedResult) + } + + func testThreadsQueryError() async throws { + let query = ThreadsQuery(messages: [Chat(role: .user, content: "Hello, What is AI?")]) + + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let 
apiError: APIError = try await XCTExpectError { try await openAI.threads(query: query) } + XCTAssertEqual(inError, apiError) + } + + func testRunsQuery() async throws { + let query = RunsQuery(assistantId: "asst_7654321") + let expectedResult = RunsResult(id: "run_1234") + try self.stub(result: expectedResult) + + let result = try await openAI.runs(threadId: "thread_1234", query: query) + XCTAssertEqual(result, expectedResult) + } + + func testRunsQueryError() async throws { + let query = RunsQuery(assistantId: "asst_7654321") + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.runs(threadId: "thread_1234", query: query) } + XCTAssertEqual(inError, apiError) + } + + func testRunRetrieveQuery() async throws { + let expectedResult = RunRetreiveResult(status: "in_progress") + try self.stub(result: expectedResult) + + let result = try await openAI.runRetrieve(threadId: "thread_1234", runId: "run_1234") + XCTAssertEqual(result, expectedResult) + } + + func testRunRetrieveQueryError() async throws { + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.runRetrieve(threadId: "thread_1234", runId: "run_1234") } + XCTAssertEqual(inError, apiError) + } + + func testThreadsMessageQuery() async throws { + let expectedResult = ThreadsMessagesResult(data: [ThreadsMessagesResult.ThreadsMessage(id: "thread_1234", role: Chat.Role.user.rawValue, content: [ThreadsMessagesResult.ThreadsMessage.ThreadsMessageContent(type: "text", text: ThreadsMessagesResult.ThreadsMessage.ThreadsMessageContent.ThreadsMessageContentText(value: "Hello, What is AI?"))])]) + try self.stub(result: expectedResult) + + let result = try await openAI.threadsMessages(threadId: "thread_1234", before: nil) + XCTAssertEqual(result, expectedResult) + } + + func testThreadsMessageQueryError() async throws { + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.threadsMessages(threadId: "thread_1234", before: nil) } + XCTAssertEqual(inError, apiError) + } + + func testCustomRunsURLBuilt() { + let configuration = OpenAI.Configuration(token: "foo", organizationIdentifier: "bar", host: "my.host.com", timeoutInterval: 14) + let openAI = OpenAI(configuration: configuration, session: self.urlSession) + let completionsURL = openAI.buildRunsURL(path: .runs, threadId: "thread_4321") + XCTAssertEqual(completionsURL, URL(string: "https://my.host.com/v1/threads/thread_4321/runs")) + } + + func testCustomRunsRetrieveURLBuilt() { + let configuration = OpenAI.Configuration(token: "foo", organizationIdentifier: "bar", host: "my.host.com", timeoutInterval: 14) + let openAI = OpenAI(configuration: configuration, session: self.urlSession) + let completionsURL = openAI.buildRunRetrieveURL(path: .runRetrieve, threadId: "thread_4321", runId: "run_1234") + XCTAssertEqual(completionsURL, URL(string: "https://my.host.com/v1/threads/thread_4321/runs/run_1234")) + } + // 1106 end } @available(tvOS 13.0, *) diff --git a/Tests/OpenAITests/OpenAITestsCombine.swift b/Tests/OpenAITests/OpenAITestsCombine.swift index e2b58458..66aea66b 100644 --- a/Tests/OpenAITests/OpenAITestsCombine.swift +++ b/Tests/OpenAITests/OpenAITestsCombine.swift @@ -123,6 +123,57 @@ final class OpenAITestsCombine: 
XCTestCase { let result = try awaitPublisher(openAI.audioTranslations(query: query)) XCTAssertEqual(result, transcriptionResult) } + + // 1106 + func testAssistantQuery() throws { + let query = AssistantsQuery(model: Model("gpt-4-1106-preview"), name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: []) + let expectedResult = AssistantsResult(id: "asst_1234", data: [AssistantsResult.AssistantContent(id: "asst_9876", name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.")], tools: []) + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.assistants(query: query, method: "POST")) + XCTAssertEqual(result, expectedResult) + } + + func testThreadsQuery() throws { + let query = ThreadsQuery(messages: [Chat(role: .user, content: "Hello, What is AI?")]) + let expectedResult = ThreadsResult(id: "thread_1234") + + try self.stub(result: expectedResult) + let result = try awaitPublisher(openAI.threads(query: query)) + + XCTAssertEqual(result, expectedResult) + } + + func testRunsQuery() throws { + let query = RunsQuery(assistantId: "asst_7654321") + let expectedResult = RunsResult(id: "run_1234") + + try self.stub(result: expectedResult) + let result = try awaitPublisher(openAI.runs(threadId: "thread_1234", query: query)) + + XCTAssertEqual(result, expectedResult) + } + + func testRunRetrieveQuery() throws { + let expectedResult = RunRetreiveResult(status: "in_progress") + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.runRetrieve(threadId: "thread_1234", runId: "run_1234")) + + XCTAssertEqual(result, expectedResult) + } + + func testThreadsMessageQuery() throws { + let expectedResult = ThreadsMessagesResult(data: [ThreadsMessagesResult.ThreadsMessage(id: "thread_1234", role: Chat.Role.user.rawValue, content: [ThreadsMessagesResult.ThreadsMessage.ThreadsMessageContent(type: "text", text: ThreadsMessagesResult.ThreadsMessage.ThreadsMessageContent.ThreadsMessageContentText(value: "Hello, What is AI?"))])]) + try self.stub(result: expectedResult) + + let result = try awaitPublisher(openAI.threadsMessages(threadId: "thread_1234", before: nil)) + + XCTAssertEqual(result, expectedResult) + } + // 1106 end + + } @available(tvOS 13.0, *) From fd73a97bd3b943882491dfaac32fa64b957a31e3 Mon Sep 17 00:00:00 2001 From: Chris Dillard Date: Fri, 17 Nov 2023 07:49:03 -0700 Subject: [PATCH 2/9] Rem accidentally committed Dev team --- Demo/Demo.xcodeproj/project.pbxproj | 2 -- 1 file changed, 2 deletions(-) diff --git a/Demo/Demo.xcodeproj/project.pbxproj b/Demo/Demo.xcodeproj/project.pbxproj index e622c48c..d4ea0b1c 100644 --- a/Demo/Demo.xcodeproj/project.pbxproj +++ b/Demo/Demo.xcodeproj/project.pbxproj @@ -301,7 +301,6 @@ CODE_SIGN_ENTITLEMENTS = App/Demo.entitlements; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = 72WEN2C47N; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES; @@ -339,7 +338,6 @@ CODE_SIGN_ENTITLEMENTS = App/Demo.entitlements; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = 72WEN2C47N; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES; From b222606e43627c8766b021f6848a32bee3c1c489 Mon Sep 17 00:00:00 2001 From: Chris Dillard Date: Fri, 17 Nov 2023 07:51:46 -0700 Subject: [PATCH 3/9] Demoapp syntax fix for 
ImageCreationView --- Demo/DemoChat/Sources/UI/Images/ImageCreationView.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Demo/DemoChat/Sources/UI/Images/ImageCreationView.swift b/Demo/DemoChat/Sources/UI/Images/ImageCreationView.swift index ef6628ff..ca452b95 100644 --- a/Demo/DemoChat/Sources/UI/Images/ImageCreationView.swift +++ b/Demo/DemoChat/Sources/UI/Images/ImageCreationView.swift @@ -58,11 +58,11 @@ public struct ImageCreationView: View { Section("Images") { ForEach($store.images, id: \.self) { image in let urlString = image.wrappedValue.url - if let imageURL = URL(string: urlString), UIApplication.shared.canOpenURL(imageURL) { + if let imageURL = URL(string: urlString ?? ""), UIApplication.shared.canOpenURL(imageURL) { LinkPreview(previewURL: imageURL) .aspectRatio(contentMode: .fit) } else { - Text(urlString) + Text(urlString ?? "") .foregroundStyle(.secondary) } } From 2c96a1609dd4bd8177279618711253e33ed2f449 Mon Sep 17 00:00:00 2001 From: Chris Dillard Date: Sat, 18 Nov 2023 00:08:45 -0700 Subject: [PATCH 4/9] assistant paging, modify, fix --- Demo/App/APIProvidedView.swift | 1 + Demo/DemoChat/Sources/AssistantStore.swift | 58 +++++--- Demo/DemoChat/Sources/Models/Assistant.swift | 5 +- .../UI/AssistantModalContentView.swift | 4 +- .../Sources/UI/AssistantsListView.swift | 28 +++- Demo/DemoChat/Sources/UI/AssistantsView.swift | 42 ++++-- Demo/DemoChat/Sources/UI/ChatView.swift | 24 ++-- Demo/DemoChat/Sources/UI/DetailView.swift | 3 +- .../Sources/UI/Images/ImageCreationView.swift | 6 +- Sources/OpenAI/OpenAI.swift | 25 +++- .../Public/Models/AssistantsResult.swift | 11 ++ .../Protocols/OpenAIProtocol+Async.swift | 21 ++- .../Protocols/OpenAIProtocol+Combine.swift | 4 +- .../Public/Protocols/OpenAIProtocol.swift | 130 ++++++++++++++++-- Tests/OpenAITests/OpenAITests.swift | 26 +++- Tests/OpenAITests/OpenAITestsCombine.swift | 6 +- 16 files changed, 312 insertions(+), 82 deletions(-) diff --git a/Demo/App/APIProvidedView.swift b/Demo/App/APIProvidedView.swift index 6abdc0c2..c9362a21 100644 --- a/Demo/App/APIProvidedView.swift +++ b/Demo/App/APIProvidedView.swift @@ -61,6 +61,7 @@ struct APIProvidedView: View { let client = OpenAI(apiToken: newApiKey) chatStore.openAIClient = client imageStore.openAIClient = client + assistantStore.openAIClient = client miscStore.openAIClient = client } } diff --git a/Demo/DemoChat/Sources/AssistantStore.swift b/Demo/DemoChat/Sources/AssistantStore.swift index e41a55b3..f667c12f 100644 --- a/Demo/DemoChat/Sources/AssistantStore.swift +++ b/Demo/DemoChat/Sources/AssistantStore.swift @@ -29,17 +29,26 @@ public final class AssistantStore: ObservableObject { @MainActor func createAssistant(name: String, description: String, instructions: String, codeInterpreter: Bool, retrievel: Bool, fileIds: [String]? = nil) async -> String? 
{ do { - var tools = [Tool]() - if codeInterpreter { - tools.append(Tool(toolType: "code_interpreter")) - } - if retrievel { - tools.append(Tool(toolType: "retrieval")) - } + let tools = createToolsArray(codeInterpreter: codeInterpreter, retrieval: retrievel) + let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools:tools, fileIds: fileIds) + let response = try await openAIClient.assistants(query: query, method: "POST", after: nil) + + // Returns assistantId + return response.id + + } catch { + // TODO: Better error handling + print(error.localizedDescription) + } + return nil + } - // TODO: Replace with actual gpt-4-1106-preview model. - let query = AssistantsQuery(model: Model("gpt-4-1106-preview"), name: name, description: description, instructions: instructions, tools:tools, fileIds: fileIds) - let response = try await openAIClient.assistants(query: query, method: "POST") + @MainActor + func modifyAssistant(asstId: String, name: String, description: String, instructions: String, codeInterpreter: Bool, retrievel: Bool, fileIds: [String]? = nil) async -> String? { + do { + let tools = createToolsArray(codeInterpreter: codeInterpreter, retrieval: retrievel) + let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools:tools, fileIds: fileIds) + let response = try await openAIClient.assistantModify(query: query, asstId: asstId) // Returns assistantId return response.id @@ -52,18 +61,24 @@ public final class AssistantStore: ObservableObject { } @MainActor - func getAssistants(limit: Int) async -> [Assistant] { + func getAssistants(limit: Int = 20, after: String? = nil) async -> [Assistant] { do { - let response = try await openAIClient.assistants(query: nil, method: "GET") + let response = try await openAIClient.assistants(query: nil, method: "GET", after: after) var assistants = [Assistant]() for result in response.data ?? [] { - let codeInterpreter = response.tools?.filter { $0.toolType == "code_interpreter" }.first != nil - let retrieval = response.tools?.filter { $0.toolType == "retrieval" }.first != nil + let codeInterpreter = result.tools?.filter { $0.toolType == "code_interpreter" }.first != nil + let retrieval = result.tools?.filter { $0.toolType == "retrieval" }.first != nil + let fileIds = result.fileIds ?? 
[] - assistants.append(Assistant(id: result.id, name: result.name, description: result.description, instructions: result.instructions, codeInterpreter: codeInterpreter, retrieval: retrieval)) + assistants.append(Assistant(id: result.id, name: result.name, description: result.description, instructions: result.instructions, codeInterpreter: codeInterpreter, retrieval: retrieval, fileIds: fileIds)) + } + if after == nil { + availableAssistants = assistants + } + else { + availableAssistants = availableAssistants + assistants } - availableAssistants = assistants return assistants } catch { @@ -91,4 +106,15 @@ public final class AssistantStore: ObservableObject { return nil } } + + func createToolsArray(codeInterpreter: Bool, retrieval: Bool) -> [Tool] { + var tools = [Tool]() + if codeInterpreter { + tools.append(Tool(toolType: "code_interpreter")) + } + if retrieval { + tools.append(Tool(toolType: "retrieval")) + } + return tools + } } diff --git a/Demo/DemoChat/Sources/Models/Assistant.swift b/Demo/DemoChat/Sources/Models/Assistant.swift index d41dfeb0..f1f8dfee 100644 --- a/Demo/DemoChat/Sources/Models/Assistant.swift +++ b/Demo/DemoChat/Sources/Models/Assistant.swift @@ -8,13 +8,14 @@ import Foundation struct Assistant: Hashable { - init(id: String, name: String, description: String? = nil, instructions: String? = nil, codeInterpreter: Bool, retrieval: Bool) { + init(id: String, name: String, description: String? = nil, instructions: String? = nil, codeInterpreter: Bool, retrieval: Bool, fileIds: [String]? = nil) { self.id = id self.name = name self.description = description self.instructions = instructions self.codeInterpreter = codeInterpreter self.retrieval = retrieval + self.fileIds = fileIds } typealias ID = String @@ -23,7 +24,7 @@ struct Assistant: Hashable { let name: String let description: String? let instructions: String? - + let fileIds: [String]? var codeInterpreter: Bool var retrieval: Bool } diff --git a/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift b/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift index 681559e9..084b784c 100644 --- a/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift +++ b/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift @@ -14,6 +14,7 @@ struct AssistantModalContentView: View { @Binding var codeInterpreter: Bool @Binding var retrieval: Bool + @Binding var fileIds: [String] var modify: Bool @@ -26,7 +27,6 @@ struct AssistantModalContentView: View { var onCommit: () -> Void var onFileUpload: () -> Void - var body: some View { NavigationView { Form { @@ -70,7 +70,7 @@ struct AssistantModalContentView: View { } } } - .navigationTitle("Enter Assistant Details") + .navigationTitle("\(modify ? "Edit" : "Enter") Assistant Details") .navigationBarItems( leading: Button("Cancel") { dismiss() diff --git a/Demo/DemoChat/Sources/UI/AssistantsListView.swift b/Demo/DemoChat/Sources/UI/AssistantsListView.swift index 143e9411..16377649 100644 --- a/Demo/DemoChat/Sources/UI/AssistantsListView.swift +++ b/Demo/DemoChat/Sources/UI/AssistantsListView.swift @@ -8,20 +8,34 @@ import SwiftUI struct AssistantsListView: View { - @Binding var conversations: [Assistant] + @Binding var assistants: [Assistant] @Binding var selectedAssistantId: String? 
+ var onLoadMoreAssistants: () -> Void + @Binding var isLoadingMore: Bool var body: some View { - List( - $conversations, - editActions: [.delete], - selection: $selectedAssistantId - ) { $conversation in + VStack { + List( + $assistants, + editActions: [.delete], + selection: $selectedAssistantId + ) { $assistant in Text( - conversation.name + assistant.name ) .lineLimit(2) + .onAppear { + if assistant.id == assistants.last?.id { + onLoadMoreAssistants() + } + } + } + + if isLoadingMore { + ProgressView() + .padding() + } } .navigationTitle("Assistants") } diff --git a/Demo/DemoChat/Sources/UI/AssistantsView.swift b/Demo/DemoChat/Sources/UI/AssistantsView.swift index c216ab2f..fa079737 100644 --- a/Demo/DemoChat/Sources/UI/AssistantsView.swift +++ b/Demo/DemoChat/Sources/UI/AssistantsView.swift @@ -24,9 +24,12 @@ public struct AssistantsView: View { @State private var name: String = "" @State private var description: String = "" @State private var customInstructions: String = "" + @State private var fileIds: [String] = [] @State private var codeInterpreter: Bool = false @State private var retrieval: Bool = false + @State var isLoadingMore = false + @State private var isModalPresented = false public init(store: ChatStore, assistantStore: AssistantStore) { self.store = store @@ -37,11 +40,13 @@ public struct AssistantsView: View { ZStack { NavigationSplitView { AssistantsListView( - conversations: $assistantStore.availableAssistants, selectedAssistantId: Binding( + assistants: $assistantStore.availableAssistants, selectedAssistantId: Binding( get: { assistantStore.selectedAssistantId }, set: { newId in + guard newId != nil else { return } + assistantStore.selectAssistant(newId) let selectedAssistant = assistantStore.availableAssistants.filter { $0.id == assistantStore.selectedAssistantId }.first @@ -51,9 +56,12 @@ public struct AssistantsView: View { customInstructions = selectedAssistant?.instructions ?? "" codeInterpreter = selectedAssistant?.codeInterpreter ?? false retrieval = selectedAssistant?.retrieval ?? false + + isModalPresented = true - - }) + }), onLoadMoreAssistants: { + loadMoreAssistants() + }, isLoadingMore: $isLoadingMore ) .toolbar { ToolbarItem( @@ -62,7 +70,7 @@ public struct AssistantsView: View { Menu { Button("Get Assistants") { Task { - let _ = await assistantStore.getAssistants(limit: 20) + let _ = await assistantStore.getAssistants() } } } label: { @@ -73,12 +81,12 @@ public struct AssistantsView: View { } } } detail: { - // TODO: Allow modifying Assistant. - if let selectedAssistantId = assistantStore.selectedAssistantId { - + } + .sheet(isPresented: $isModalPresented) { + if let _ = assistantStore.selectedAssistantId { AssistantModalContentView(name: $name, description: $description, customInstructions: $customInstructions, - codeInterpreter: $codeInterpreter, retrieval: $retrieval, modify: true, isPickerPresented: $isPickerPresented, selectedFileURL: $fileURL) { + codeInterpreter: $codeInterpreter, retrieval: $retrieval, fileIds: $fileIds, modify: true, isPickerPresented: $isPickerPresented, selectedFileURL: $fileURL) { Task { await handleOKTap() } @@ -95,14 +103,15 @@ public struct AssistantsView: View { } func handleOKTap() async { + guard let selectedAssistantId = assistantStore.selectedAssistantId else { return print("Cannot modify assistant, not selected.") } - // When OK is tapped that means we should save the modified assistant and start a new thread. + // When OK is tapped that means we should save the modified assistant and start a new thread with it. 
var fileIds = [String]() if let fileId = uploadedFileId { fileIds.append(fileId) } - let asstId = await assistantStore.createAssistant(name: name, description: description, instructions: customInstructions, codeInterpreter: codeInterpreter, retrievel: retrieval, fileIds: fileIds.isEmpty ? nil : fileIds) + let asstId = await assistantStore.modifyAssistant(asstId: selectedAssistantId, name: name, description: description, instructions: customInstructions, codeInterpreter: codeInterpreter, retrievel: retrieval, fileIds: fileIds.isEmpty ? nil : fileIds) guard let asstId else { print("failed to create Assistant.") @@ -111,4 +120,17 @@ public struct AssistantsView: View { store.createConversation(type: .assistant, assistantId: asstId) } + + func loadMoreAssistants() { + guard !isLoadingMore else { return } + + isLoadingMore = true + let lastAssistantId = assistantStore.availableAssistants.last?.id ?? "" + + Task { + // Fetch more assistants and append to the list + let _ = await assistantStore.getAssistants(after: lastAssistantId) + isLoadingMore = false + } + } } diff --git a/Demo/DemoChat/Sources/UI/ChatView.swift b/Demo/DemoChat/Sources/UI/ChatView.swift index eac440c9..6bb74bc4 100644 --- a/Demo/DemoChat/Sources/UI/ChatView.swift +++ b/Demo/DemoChat/Sources/UI/ChatView.swift @@ -23,6 +23,7 @@ public struct ChatView: View { @State private var codeInterpreter: Bool = false @State private var retrieval: Bool = false + @State private var fileIds: [String] = [] @State private var isPickerPresented: Bool = false @@ -64,7 +65,6 @@ public struct ChatView: View { } label: { Image(systemName: "plus") } - .buttonStyle(.borderedProminent) } } @@ -92,7 +92,7 @@ public struct ChatView: View { } .sheet(isPresented: $isModalPresented) { AssistantModalContentView(name: $name, description: $description, customInstructions: $customInstructions, - codeInterpreter: $codeInterpreter, retrieval: $retrieval, modify: false, isPickerPresented: $isPickerPresented, selectedFileURL: $fileURL) { + codeInterpreter: $codeInterpreter, retrieval: $retrieval, fileIds: $fileIds, modify: false, isPickerPresented: $isPickerPresented, selectedFileURL: $fileURL) { Task { await handleOKTap() } @@ -111,16 +111,6 @@ public struct ChatView: View { } func handleOKTap() async { - // Reset state for Assistant creator. - name = "" - description = "" - customInstructions = "" - - codeInterpreter = false - retrieval = false - fileURL = nil - uploadedFileId = nil - var fileIds = [String]() if let fileId = uploadedFileId { fileIds.append(fileId) @@ -133,6 +123,16 @@ public struct ChatView: View { return } + // Reset state for Assistant creator. + name = "" + description = "" + customInstructions = "" + + codeInterpreter = false + retrieval = false + fileURL = nil + uploadedFileId = nil + store.createConversation(type: .assistant, assistantId: asstId) } } diff --git a/Demo/DemoChat/Sources/UI/DetailView.swift b/Demo/DemoChat/Sources/UI/DetailView.swift index 21f15c81..34258eab 100644 --- a/Demo/DemoChat/Sources/UI/DetailView.swift +++ b/Demo/DemoChat/Sources/UI/DetailView.swift @@ -71,9 +71,8 @@ struct DetailView: View { .navigationTitle(conversation.type == .assistant ? "Assistant: \(currentAssistantName())" : "Chat") .safeAreaInset(edge: .top) { HStack { - // TODO: Replace with actual gpt-4-1106-preview model. Text( - "Model: \(conversation.type == .assistant ? "gpt-4-1106-preview" : selectedChatModel)" + "Model: \(conversation.type == .assistant ? 
Model.gpt4_1106_preview : selectedChatModel)" ) .font(.caption) .foregroundColor(.secondary) diff --git a/Demo/DemoChat/Sources/UI/Images/ImageCreationView.swift b/Demo/DemoChat/Sources/UI/Images/ImageCreationView.swift index ca452b95..98ea5216 100644 --- a/Demo/DemoChat/Sources/UI/Images/ImageCreationView.swift +++ b/Demo/DemoChat/Sources/UI/Images/ImageCreationView.swift @@ -57,12 +57,12 @@ public struct ImageCreationView: View { if !$store.images.isEmpty { Section("Images") { ForEach($store.images, id: \.self) { image in - let urlString = image.wrappedValue.url - if let imageURL = URL(string: urlString ?? ""), UIApplication.shared.canOpenURL(imageURL) { + let urlString = image.wrappedValue.url ?? "" + if let imageURL = URL(string: urlString), UIApplication.shared.canOpenURL(imageURL) { LinkPreview(previewURL: imageURL) .aspectRatio(contentMode: .fit) } else { - Text(urlString ?? "") + Text(urlString) .foregroundStyle(.secondary) } } diff --git a/Sources/OpenAI/OpenAI.swift b/Sources/OpenAI/OpenAI.swift index b3963ff9..395fca92 100644 --- a/Sources/OpenAI/OpenAI.swift +++ b/Sources/OpenAI/OpenAI.swift @@ -77,14 +77,17 @@ final public class OpenAI: OpenAIProtocol { performRequest(request: JSONRequest(body: query, url: buildURL(path: .threads)), completion: completion) } - public func assistants(query: AssistantsQuery?, method: String, completion: @escaping (Result) -> Void) { - performRequest(request: JSONRequest(body: query, url: buildURL(path: .assistants), method: method), completion: completion) + public func assistants(query: AssistantsQuery?, method: String, after: String?, completion: @escaping (Result) -> Void) { + performRequest(request: JSONRequest(body: query, url: buildURL(path: .assistants, after: after), method: method), completion: completion) + } + + public func assistantModify(query: AssistantsQuery, asstId: String, completion: @escaping (Result) -> Void) { + performRequest(request: JSONRequest(body: query, url: buildAssistantURL(path: .assistantsModify, assistantId: asstId)), completion: completion) } public func files(query: FilesQuery, completion: @escaping (Result) -> Void) { performRequest(request: MultipartFormDataRequest(body: query, url: buildURL(path: .files)), completion: completion) } - // END UPDATES FROM 11-06-23 public func completions(query: CompletionsQuery, completion: @escaping (Result) -> Void) { @@ -210,11 +213,14 @@ extension OpenAI { extension OpenAI { - func buildURL(path: String) -> URL { + func buildURL(path: String, after: String? = nil) -> URL { var components = URLComponents() components.scheme = "https" components.host = configuration.host components.path = path + if let after { + components.queryItems = [URLQueryItem(name: "after", value: after)] + } return components.url! } @@ -237,15 +243,22 @@ extension OpenAI { .replacingOccurrences(of: "RUN_ID", with: runId) return components.url! } + + func buildAssistantURL(path: String, assistantId: String) -> URL { + var components = URLComponents() + components.scheme = "https" + components.host = configuration.host + components.path = path.replacingOccurrences(of: "ASST_ID", with: assistantId) + + return components.url! 
+ } } typealias APIPath = String extension APIPath { // 1106 static let assistants = "/v1/assistants" - // TODO: Implement Assistant Modify static let assistantsModify = "/v1/assistants/ASST_ID" - static let threads = "/v1/threads" static let runs = "/v1/threads/THREAD_ID/runs" static let runRetrieve = "/v1/threads/THREAD_ID/runs/RUN_ID" diff --git a/Sources/OpenAI/Public/Models/AssistantsResult.swift b/Sources/OpenAI/Public/Models/AssistantsResult.swift index 58644e74..39ad2a5e 100644 --- a/Sources/OpenAI/Public/Models/AssistantsResult.swift +++ b/Sources/OpenAI/Public/Models/AssistantsResult.swift @@ -26,5 +26,16 @@ public struct AssistantsResult: Codable, Equatable { public let name: String public let description: String? public let instructions: String? + public let tools: [Tool]? + public let fileIds: [String]? + + enum CodingKeys: String, CodingKey { + case id + case name + case description + case instructions + case tools + case fileIds = "file_ids" + } } } diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift index 2f28282f..bc0e04e5 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift @@ -217,10 +217,27 @@ public extension OpenAIProtocol { // 1106 func assistants( query: AssistantsQuery?, - method: String + method: String, + after: String? ) async throws -> AssistantsResult { try await withCheckedThrowingContinuation { continuation in - assistants(query: query, method: method) { result in + assistants(query: query, method: method, after: after) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + + func assistantModify( + query: AssistantsQuery, + asstId: String + ) async throws -> AssistantsResult { + try await withCheckedThrowingContinuation { continuation in + assistantModify(query: query, asstId: asstId) { result in switch result { case let .success(success): return continuation.resume(returning: success) diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift index 45add1c7..0853a654 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift @@ -128,9 +128,9 @@ public extension OpenAIProtocol { } // 1106 - func assistants(query: AssistantsQuery?, method: String) -> AnyPublisher { + func assistants(query: AssistantsQuery?, method: String, after: String?) -> AnyPublisher { Future { - assistants(query: query, method: method, completion: $0) + assistants(query: query, method: method, after: after, completion: $0) } .eraseToAnyPublisher() } diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift index faed65b6..16719754 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift @@ -231,28 +231,138 @@ public protocol OpenAIProtocol { **/ func audioTranslations(query: AudioTranslationQuery, completion: @escaping (Result) -> Void) - //1106 + /// + // The following functions represent new functionality added to OpenAI Beta on 11-06-23 + /// + /// + /** + This function sends a assistants query to the OpenAI API and creates an assistant. 
The Assistants API in this usage enables you to create an assistant. - // TODO: Assistant Docs - func assistants(query: AssistantsQuery?, method: String, completion: @escaping (Result) -> Void) - - // TODO: Threads Docs + Example: Create Assistant + ``` + let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, fileIds: fileIds) + openAI.assistants(query: query) { result in + //Handle response here + } + ``` + + Example: List Assistants + ``` + openAI.assistants(query: nil, method: "GET") { result in + //Handle response here + } + ``` + + - Parameter query: The `AssistantsQuery?` instance, containing the information required for the assistant request. Passing nil is used for GET form of request. + - Parameter method: The method to use with the HTTP request. Supports POST (default) and GET. + - Parameter completion: The completion handler to be executed upon completion of the assistant request. + Returns a `Result` of type `AssistantsResult` if successful, or an `Error` if an error occurs. + **/ + func assistants(query: AssistantsQuery?, method: String, after: String?, completion: @escaping (Result) -> Void) + + /** + This function sends a assistants query to the OpenAI API and modifies an assistant. The Assistants API in this usage enables you to modify an assistant. + + Example: Create Assistant + ``` + let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, fileIds: fileIds) + openAI.assistantModify(query: query, asstId: "asst_1234") { result in + //Handle response here + } + ``` + + - Parameter query: The `AssistantsQuery` instance, containing the information required for the assistant request. + - Parameter asstId: The assistant id for the assistant to modify. + - Parameter completion: The completion handler to be executed upon completion of the assistant request. + Returns a `Result` of type `AssistantsResult` if successful, or an `Error` if an error occurs. + **/ + func assistantModify(query: AssistantsQuery, asstId: String, completion: @escaping (Result) -> Void) + + /** + This function sends a threads query to the OpenAI API and creates a thread. The Threads API in this usage enables you to create a thread. + + ``` + let threadsQuery = ThreadsQuery(messages: [Chat(role: message.role, content: message.content)]) + openAI.threads(query: threadsQuery) { result in + //Handle response here + } + + ``` + - Parameter query: The `ThreadsQuery` instance, containing the information required for the threads request. + - Parameter completion: The completion handler to be executed upon completion of the threads request. + Returns a `Result` of type `ThreadsResult` if successful, or an `Error` if an error occurs. + **/ func threads(query: ThreadsQuery, completion: @escaping (Result) -> Void) - // TODO: Runs Docs + /** + This function sends a runs query to the OpenAI API and creates a run. The Runs API in this usage enables you to create a run. + + ``` + let runsQuery = RunsQuery(assistantId: currentAssistantId) + openAI.runs(threadId: threadsResult.id, query: runsQuery) { result in + //Handle response here + } + ``` + + - Parameter threadId: The thread id for the thread to run. + - Parameter query: The `RunsQuery` instance, containing the information required for the runs request. + - Parameter completion: The completion handler to be executed upon completion of the runs request. 
+ Returns a `Result` of type `RunsResult` if successful, or an `Error` if an error occurs. + **/ func runs(threadId: String, query: RunsQuery, completion: @escaping (Result) -> Void) - // TODO: Runs Retrieve Docs + /** + This function sends a thread id and run id to the OpenAI API and retrieves a run. The Runs API in this usage enables you to retrieve a run. + + ``` + openAI.runRetrieve(threadId: currentThreadId, runId: currentRunId) { result in + //Handle response here + } + ``` + - Parameter threadId: The thread id for the thread to run. + - Parameter runId: The run id for the run to retrieve. + - Parameter completion: The completion handler to be executed upon completion of the runRetrieve request. + Returns a `Result` of type `RunRetreiveResult` if successful, or an `Error` if an error occurs. + **/ func runRetrieve(threadId: String, runId: String, completion: @escaping (Result) -> Void) - // TODO: Threads Messages Docs + /** + This function sends a thread id and run id to the OpenAI API and retrieves a threads messages. + The Thread API in this usage enables you to retrieve a threads messages. + + + ``` + openAI.threadsMessages(threadId: currentThreadId, before: nil) { result in + //Handle response here + } + ``` + + - Parameter threadId: The thread id for the thread to retrieve messages from. + - Parameter before: String?: The message id for the message that defines your place in the list of messages. Pass nil to get all. + - Parameter completion: The completion handler to be executed upon completion of the threadsMessages request. + Returns a `Result` of type `ThreadsMessagesResult` if successful, or an `Error` if an error occurs. + **/ func threadsMessages(threadId: String, before: String?, completion: @escaping (Result) -> Void) - // TODO: Threads Add Message Docs + /** + This function sends a thread id and message contents to the OpenAI API and returns a run. + + ``` + let query = ThreadAddMessageQuery(role: message.role.rawValue, content: message.content) + openAI.threadsAddMessage(threadId: currentThreadId, query: query) { result in + //Handle response here + } + ``` + + - Parameter threadId: The thread id for the thread to run. + - Parameter query: The `ThreadAddMessageQuery` instance, containing the information required for the threads request. + - Parameter completion: The completion handler to be executed upon completion of the runRetrieve request. + Returns a `Result` of type `ThreadAddMessageResult` if successful, or an `Error` if an error occurs. 
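Taken together, these endpoints form one conversation loop: create a thread, run an assistant on it, poll the run, then read the thread's messages. A minimal async sketch of that loop, using only the signatures declared here (the names `openAI` and `assistantId` and the fixed one-second polling delay are illustrative placeholders):

```
// A sketch only: `openAI` may be any OpenAIProtocol instance and `assistantId`
// is the id of an existing assistant.
func askAssistant(openAI: OpenAIProtocol, assistantId: String, question: String) async throws -> [String] {
    // 1. Create a thread seeded with the user's message.
    let thread = try await openAI.threads(query: ThreadsQuery(messages: [Chat(role: .user, content: question)]))

    // 2. Run the assistant on that thread.
    let run = try await openAI.runs(threadId: thread.id, query: RunsQuery(assistantId: assistantId))

    // 3. Poll the run until it completes (real code should also stop on "failed" or "cancelled").
    while true {
        let status = try await openAI.runRetrieve(threadId: thread.id, runId: run.id).status
        if status == "completed" { break }
        try await Task.sleep(nanoseconds: 1_000_000_000)
    }

    // 4. Read back the messages the run produced.
    let messages = try await openAI.threadsMessages(threadId: thread.id, before: nil)
    return messages.data.flatMap { message in message.content.map { $0.text.value } }
}
```

Follow-up turns would call `threadsAddMessage(threadId:query:)` on the same thread and then start another run.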
+ **/ func threadsAddMessage(threadId: String, query: ThreadAddMessageQuery, completion: @escaping (Result) -> Void) // TODO: Files Docs func files(query: FilesQuery, completion: @escaping (Result) -> Void) - // 1106 end + // END new functionality added to OpenAI Beta on 11-06-23 end } diff --git a/Tests/OpenAITests/OpenAITests.swift b/Tests/OpenAITests/OpenAITests.swift index 8097af58..424f41f1 100644 --- a/Tests/OpenAITests/OpenAITests.swift +++ b/Tests/OpenAITests/OpenAITests.swift @@ -351,21 +351,37 @@ class OpenAITests: XCTestCase { // 1106 func testAssistantQuery() async throws { - let query = AssistantsQuery(model: Model("gpt-4-1106-preview"), name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: []) - let expectedResult = AssistantsResult(id: "asst_1234", data: [AssistantsResult.AssistantContent(id: "asst_9876", name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.")], tools: []) + let query = AssistantsQuery(model: .gpt4_1106_preview, name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: []) + let expectedResult = AssistantsResult(id: "asst_1234", data: [AssistantsResult.AssistantContent(id: "asst_9876", name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: nil, fileIds: nil)], tools: []) try self.stub(result: expectedResult) - let result = try await openAI.assistants(query: query, method: "POST") + let result = try await openAI.assistants(query: query, method: "POST", after: nil) XCTAssertEqual(result, expectedResult) } func testAssistantQueryError() async throws { - let query = AssistantsQuery(model: Model("gpt-4-1106-preview"), name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: []) + let query = AssistantsQuery(model: .gpt4_1106_preview, name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: []) let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") self.stub(error: inError) - let apiError: APIError = try await XCTExpectError { try await openAI.assistants(query: query, method: "POST") } + let apiError: APIError = try await XCTExpectError { try await openAI.assistants(query: query, method: "POST", after: nil) } + XCTAssertEqual(inError, apiError) + } + + func testListAssistantQuery() async throws { + let expectedResult = AssistantsResult(id: nil, data: [AssistantsResult.AssistantContent(id: "asst_9876", name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: nil, fileIds: nil)], tools: nil) + try self.stub(result: expectedResult) + + let result = try await openAI.assistants(query: nil, method: "GET", after: nil) + XCTAssertEqual(result, expectedResult) + } + + func testListAssistantQueryError() async throws { + let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") + self.stub(error: inError) + + let apiError: APIError = try await XCTExpectError { try await openAI.assistants(query: nil, method: "GET", after: nil) } XCTAssertEqual(inError, apiError) } diff --git a/Tests/OpenAITests/OpenAITestsCombine.swift b/Tests/OpenAITests/OpenAITestsCombine.swift index 66aea66b..421b1612 100644 --- a/Tests/OpenAITests/OpenAITestsCombine.swift +++ b/Tests/OpenAITests/OpenAITestsCombine.swift @@ -126,11 +126,11 @@ final 
class OpenAITestsCombine: XCTestCase { // 1106 func testAssistantQuery() throws { - let query = AssistantsQuery(model: Model("gpt-4-1106-preview"), name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: []) - let expectedResult = AssistantsResult(id: "asst_1234", data: [AssistantsResult.AssistantContent(id: "asst_9876", name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.")], tools: []) + let query = AssistantsQuery(model: .gpt4_1106_preview, name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: []) + let expectedResult = AssistantsResult(id: "asst_1234", data: [AssistantsResult.AssistantContent(id: "asst_9876", name: "My New Assistant", description: "Assistant Description", instructions: "You are a helpful assistant.", tools: nil, fileIds: nil)], tools: []) try self.stub(result: expectedResult) - let result = try awaitPublisher(openAI.assistants(query: query, method: "POST")) + let result = try awaitPublisher(openAI.assistants(query: query, method: "POST", after: nil)) XCTAssertEqual(result, expectedResult) } From 8be981addace713105b028cf351e8face231c2d1 Mon Sep 17 00:00:00 2001 From: Chris Dillard Date: Tue, 21 Nov 2023 16:21:32 -0700 Subject: [PATCH 5/9] demo: enhancement: Handle local message replacement, README update --- Demo/DemoChat/Sources/ChatStore.swift | 46 +++++-- Demo/DemoChat/Sources/Models/Message.swift | 3 + .../UI/AssistantModalContentView.swift | 18 ++- Demo/DemoChat/Sources/UI/AssistantsView.swift | 129 ++++++++++++------ Demo/DemoChat/Sources/UI/ChatView.swift | 65 --------- README.md | 119 ++++++++++++++++ .../Public/Protocols/OpenAIProtocol.swift | 23 +++- 7 files changed, 287 insertions(+), 116 deletions(-) diff --git a/Demo/DemoChat/Sources/ChatStore.swift b/Demo/DemoChat/Sources/ChatStore.swift index 19a67a7b..8f9a792f 100644 --- a/Demo/DemoChat/Sources/ChatStore.swift +++ b/Demo/DemoChat/Sources/ChatStore.swift @@ -85,6 +85,11 @@ public final class ChatStore: ObservableObject { // First message in an assistant thread. if conversations[conversationIndex].messages.count == 0 { + + var localMessage = message + localMessage.isLocal = true + conversations[conversationIndex].messages.append(localMessage) + do { let threadsQuery = ThreadsQuery(messages: [Chat(role: message.role, content: message.content)]) let threadsResult = try await openAIClient.threads(query: threadsQuery) @@ -103,6 +108,11 @@ public final class ChatStore: ObservableObject { } // Subsequent messages on the assistant thread. else { + + var localMessage = message + localMessage.isLocal = true + conversations[conversationIndex].messages.append(localMessage) + do { guard let currentThreadId else { return print("No thread to add message to.")} @@ -209,7 +219,7 @@ public final class ChatStore: ObservableObject { } } - // Polling + // Start Polling section func startPolling(conversationId: Conversation.ID, runId: String, threadId: String) { currentRunId = runId currentThreadId = threadId @@ -239,11 +249,12 @@ public final class ChatStore: ObservableObject { switch result.status { // Get threadsMesages. case "completed": - stopPolling() - + DispatchQueue.main.async { + self.stopPolling() + } var before: String? 
- if let lastMessageId = self.conversations[conversationIndex].messages.last?.id { - before = lastMessageId + if let lastNonLocalMessage = self.conversations[conversationIndex].messages.last(where: { $0.isLocal == false }) { + before = lastNonLocalMessage.id } let result = try await openAIClient.threadsMessages(threadId: currentThreadId ?? "", before: before) @@ -252,18 +263,37 @@ public final class ChatStore: ObservableObject { for item in result.data.reversed() { let role = item.role for innerItem in item.content { - let message = Message(id: item.id, role: Chat.Role(rawValue: role) ?? .user, content: innerItem.text.value, createdAt: Date()) - self.conversations[conversationIndex].messages.append(message) + let message = Message( + id: item.id, + role: Chat.Role(rawValue: role) ?? .user, + content: innerItem.text.value, + createdAt: Date(), + isLocal: false // Messages from the server are not local + ) + // Check if this message from the API matches a local message + if let localMessageIndex = self.conversations[conversationIndex].messages.firstIndex(where: { $0.isLocal == true }) { + + // Replace the local message with the API message + self.conversations[conversationIndex].messages[localMessageIndex] = message + } else { + // This is a new message from the server, append it + + self.conversations[conversationIndex].messages.append(message) + } } } } break case "failed": - stopPolling() + DispatchQueue.main.async { + + self.stopPolling() + } break default: break } } } + // END Polling section } diff --git a/Demo/DemoChat/Sources/Models/Message.swift b/Demo/DemoChat/Sources/Models/Message.swift index afea9099..35c44d53 100644 --- a/Demo/DemoChat/Sources/Models/Message.swift +++ b/Demo/DemoChat/Sources/Models/Message.swift @@ -13,6 +13,9 @@ struct Message { var role: Chat.Role var content: String var createdAt: Date + + var isLocal: Bool? + } extension Message: Equatable, Codable, Hashable, Identifiable {} diff --git a/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift b/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift index 084b784c..267801ba 100644 --- a/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift +++ b/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift @@ -8,6 +8,11 @@ import SwiftUI struct AssistantModalContentView: View { + enum Mode { + case modify + case create + } + @Binding var name: String @Binding var description: String @Binding var customInstructions: String @@ -15,14 +20,15 @@ struct AssistantModalContentView: View { @Binding var codeInterpreter: Bool @Binding var retrieval: Bool @Binding var fileIds: [String] + @Binding var isUploading: Bool var modify: Bool @Environment(\.dismiss) var dismiss @Binding var isPickerPresented: Bool + // If a file has been selected for uploading and is currently in progress, this is set. @Binding var selectedFileURL: URL? 
- var onCommit: () -> Void var onFileUpload: () -> Void @@ -49,6 +55,16 @@ struct AssistantModalContentView: View { Toggle(isOn: $retrieval, label: { Text("Retrieval") }) + + if !fileIds.isEmpty { + ForEach(fileIds, id: \.self) { url in + HStack { + Text("File: \(url)") + + } + } + } + if let selectedFileURL { HStack { Text("File: \(selectedFileURL.lastPathComponent)") diff --git a/Demo/DemoChat/Sources/UI/AssistantsView.swift b/Demo/DemoChat/Sources/UI/AssistantsView.swift index fa079737..a45fb04c 100644 --- a/Demo/DemoChat/Sources/UI/AssistantsView.swift +++ b/Demo/DemoChat/Sources/UI/AssistantsView.swift @@ -11,31 +11,37 @@ import SwiftUI public struct AssistantsView: View { @ObservedObject var store: ChatStore @ObservedObject var assistantStore: AssistantStore - + @Environment(\.dateProviderValue) var dateProvider @Environment(\.idProviderValue) var idProvider - + // state to select file @State private var isPickerPresented: Bool = false @State private var fileURL: URL? - @State private var uploadedFileId: String? - + // state to modify assistant @State private var name: String = "" @State private var description: String = "" @State private var customInstructions: String = "" @State private var fileIds: [String] = [] - + @State private var codeInterpreter: Bool = false @State private var retrieval: Bool = false @State var isLoadingMore = false @State private var isModalPresented = false - + @State private var isUploading = false + + //If a file is selected via the document picker, this is set. + @State var selectedFileURL: URL? + @State var uploadedFileId: String? + + @State var mode: AssistantModalContentView.Mode = .create + public init(store: ChatStore, assistantStore: AssistantStore) { self.store = store self.assistantStore = assistantStore } - + public var body: some View { ZStack { NavigationSplitView { @@ -43,22 +49,24 @@ public struct AssistantsView: View { assistants: $assistantStore.availableAssistants, selectedAssistantId: Binding( get: { assistantStore.selectedAssistantId - + }, set: { newId in guard newId != nil else { return } - + assistantStore.selectAssistant(newId) - + let selectedAssistant = assistantStore.availableAssistants.filter { $0.id == assistantStore.selectedAssistantId }.first - + name = selectedAssistant?.name ?? "" description = selectedAssistant?.description ?? "" customInstructions = selectedAssistant?.instructions ?? "" codeInterpreter = selectedAssistant?.codeInterpreter ?? false retrieval = selectedAssistant?.retrieval ?? false + fileIds = selectedAssistant?.fileIds ?? 
[] + mode = .modify isModalPresented = true - + }), onLoadMoreAssistants: { loadMoreAssistants() }, isLoadingMore: $isLoadingMore @@ -73,64 +81,107 @@ public struct AssistantsView: View { let _ = await assistantStore.getAssistants() } } + Button("Create Assistant") { + mode = .create + isModalPresented = true + } } label: { Image(systemName: "plus") } - + .buttonStyle(.borderedProminent) } } } detail: { - + } - .sheet(isPresented: $isModalPresented) { - if let _ = assistantStore.selectedAssistantId { - AssistantModalContentView(name: $name, description: $description, customInstructions: $customInstructions, - codeInterpreter: $codeInterpreter, retrieval: $retrieval, fileIds: $fileIds, modify: true, isPickerPresented: $isPickerPresented, selectedFileURL: $fileURL) { - Task { - await handleOKTap() + .sheet(isPresented: $isModalPresented, onDismiss: { + resetAssistantCreator() + }, content: { + AssistantModalContentView(name: $name, description: $description, customInstructions: $customInstructions, + codeInterpreter: $codeInterpreter, retrieval: $retrieval, fileIds: $fileIds, isUploading: $isUploading, modify: mode == .modify, isPickerPresented: $isPickerPresented, selectedFileURL: $selectedFileURL) { + Task { + await handleOKTap() + } + } onFileUpload: { + Task { + guard let selectedFileURL else { return } + + isUploading = true + uploadedFileId = await assistantStore.uploadFile(url: selectedFileURL) + isUploading = false + + if uploadedFileId == nil { + print("Failed to upload") + self.selectedFileURL = nil } - } onFileUpload: { - Task { - guard let fileURL else { return } - - uploadedFileId = await assistantStore.uploadFile(url: fileURL) + else { + // if successful upload , we can show it. + if let uploadedFileId = uploadedFileId { + self.selectedFileURL = nil + + fileIds += [uploadedFileId] + + print("Successful upload!") + } } } } - } + }) } } - + func handleOKTap() async { - guard let selectedAssistantId = assistantStore.selectedAssistantId else { return print("Cannot modify assistant, not selected.") } - // When OK is tapped that means we should save the modified assistant and start a new thread with it. - var fileIds = [String]() - if let fileId = uploadedFileId { - fileIds.append(fileId) + var mergedFileIds = [String]() + + mergedFileIds += fileIds + + let asstId: String? + + if mode == .create { + asstId = await assistantStore.createAssistant(name: name, description: description, instructions: customInstructions, codeInterpreter: codeInterpreter, retrievel: retrieval, fileIds: mergedFileIds.isEmpty ? nil : mergedFileIds) } - - let asstId = await assistantStore.modifyAssistant(asstId: selectedAssistantId, name: name, description: description, instructions: customInstructions, codeInterpreter: codeInterpreter, retrievel: retrieval, fileIds: fileIds.isEmpty ? nil : fileIds) - + // Modify + else { + guard let selectedAssistantId = assistantStore.selectedAssistantId else { return print("Cannot modify assistant, not selected.") } + + asstId = await assistantStore.modifyAssistant(asstId: selectedAssistantId, name: name, description: description, instructions: customInstructions, codeInterpreter: codeInterpreter, retrievel: retrieval, fileIds: mergedFileIds.isEmpty ? 
nil : mergedFileIds) + } + guard let asstId else { print("failed to create Assistant.") return } - + + resetAssistantCreator() + store.createConversation(type: .assistant, assistantId: asstId) } - + func loadMoreAssistants() { guard !isLoadingMore else { return } - + isLoadingMore = true let lastAssistantId = assistantStore.availableAssistants.last?.id ?? "" - + Task { // Fetch more assistants and append to the list let _ = await assistantStore.getAssistants(after: lastAssistantId) isLoadingMore = false } } + + func resetAssistantCreator() { + // Reset state for Assistant creator. + name = "" + description = "" + customInstructions = "" + + codeInterpreter = false + retrieval = false + selectedFileURL = nil + uploadedFileId = nil + fileIds = [] + } } diff --git a/Demo/DemoChat/Sources/UI/ChatView.swift b/Demo/DemoChat/Sources/UI/ChatView.swift index 6bb74bc4..1812ed26 100644 --- a/Demo/DemoChat/Sources/UI/ChatView.swift +++ b/Demo/DemoChat/Sources/UI/ChatView.swift @@ -15,22 +15,6 @@ public struct ChatView: View { @Environment(\.dateProviderValue) var dateProvider @Environment(\.idProviderValue) var idProvider - - @State private var isModalPresented = false - @State private var name: String = "" - @State private var description: String = "" - @State private var customInstructions: String = "" - - @State private var codeInterpreter: Bool = false - @State private var retrieval: Bool = false - @State private var fileIds: [String] = [] - - - @State private var isPickerPresented: Bool = false - @State private var fileURL: URL? - @State private var uploadedFileId: String? - - public init(store: ChatStore, assistantStore: AssistantStore) { self.store = store self.assistantStore = assistantStore @@ -52,15 +36,9 @@ public struct ChatView: View { ToolbarItem( placement: .primaryAction ) { - Menu { Button("Create Chat") { store.createConversation() - - } - Button("Create Assistant") { - isModalPresented = true - } } label: { Image(systemName: "plus") @@ -90,49 +68,6 @@ public struct ChatView: View { ) } } - .sheet(isPresented: $isModalPresented) { - AssistantModalContentView(name: $name, description: $description, customInstructions: $customInstructions, - codeInterpreter: $codeInterpreter, retrieval: $retrieval, fileIds: $fileIds, modify: false, isPickerPresented: $isPickerPresented, selectedFileURL: $fileURL) { - Task { - await handleOKTap() - } - } onFileUpload: { - Task { - guard let fileURL else { return } - - uploadedFileId = await assistantStore.uploadFile(url: fileURL) - if uploadedFileId == nil { - print("Failed to upload") - } - } - } - } - } - } - func handleOKTap() async { - - var fileIds = [String]() - if let fileId = uploadedFileId { - fileIds.append(fileId) } - - let asstId = await assistantStore.createAssistant(name: name, description: description, instructions: customInstructions, codeInterpreter: codeInterpreter, retrievel: retrieval, fileIds: fileIds.isEmpty ? nil : fileIds) - - guard let asstId else { - print("failed to create Assistant.") - return - } - - // Reset state for Assistant creator. 
- name = "" - description = "" - customInstructions = "" - - codeInterpreter = false - retrieval = false - fileURL = nil - uploadedFileId = nil - - store.createConversation(type: .assistant, assistantId: asstId) } } diff --git a/README.md b/README.md index f8b1eef5..8b0744d8 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,19 @@ This repository contains Swift community-maintained implementation over [OpenAI] - [Audio](#audio) - [Audio Transcriptions](#audio-transcriptions) - [Audio Translations](#audio-translations) + - [Assistants](#assistants) + - [Create Assistant](#create-assistant) + - [Modify Assistant](#modify-assistant) + - [List Assistants](#list-assistants) + - [Threads](#threads) + - [Create Thread](#create-thread) + - [Get Threads Messages](#get-threads-messages) + - [Add Message to Thread](#add-message-to-thread) + - [Runs](#runs) + - [Create Run](#create-run) + - [Retrieve Run](#retrieve-run) + - [Files](#files) + - [Upload File](#upload-file) - [Edits](#edits) - [Embeddings](#embeddings) - [Models](#models) @@ -515,6 +528,112 @@ let result = try await openAI.imageVariations(query: query) Review [Images Documentation](https://platform.openai.com/docs/api-reference/images) for more info. +### Assistants + +Review [Assistants Documentation](https://platform.openai.com/docs/api-reference/assistants) for more info. + +#### Create Assistant + +Example: Create Assistant +``` +let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, fileIds: fileIds) +openAI.assistants(query: query) { result in + //Handle response here +} +``` + +#### Modify Assistant + +Example: Modify Assistant +``` +let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, fileIds: fileIds) +openAI.assistantModify(query: query, asstId: "asst_1234") { result in + //Handle response here +} +``` + +#### List Assistants + +Example: List Assistants +``` +openAI.assistants(query: nil, method: "GET") { result in + //Handle response here +} +``` + +#### Threads + +Review [Threads Documentation](https://platform.openai.com/docs/api-reference/threads) for more info. + +##### Create Thread + +Example: Create Thread +``` +let threadsQuery = ThreadsQuery(messages: [Chat(role: message.role, content: message.content)]) +openAI.threads(query: threadsQuery) { result in + //Handle response here +} +``` + +##### Get Threads Messages + +Review [Messages Documentation](https://platform.openai.com/docs/api-reference/messages) for more info. + +Example: Get Threads Messages +``` +openAI.threadsMessages(threadId: currentThreadId, before: nil) { result in + //Handle response here +} +``` + +##### Add Message to Thread + +Example: Add Message to Thread +``` +let query = ThreadAddMessageQuery(role: message.role.rawValue, content: message.content) +openAI.threadsAddMessage(threadId: currentThreadId, query: query) { result in + //Handle response here +} +``` + +#### Runs + +Review [Runs Documentation](https://platform.openai.com/docs/api-reference/runs) for more info. 
+ +##### Create Run + +Example: Create Run +``` +let runsQuery = RunsQuery(assistantId: currentAssistantId) +openAI.runs(threadId: threadsResult.id, query: runsQuery) { result in + //Handle response here +} +``` + +##### Retrieve Run + +Example: Retrieve Run +``` +openAI.runRetrieve(threadId: currentThreadId, runId: currentRunId) { result in + //Handle response here +} +``` + +#### Files + +Review [Files Documentation](https://platform.openai.com/docs/api-reference/files) for more info. + +##### Upload file + +Example: Upload file +``` +let query = FilesQuery(purpose: "assistants", file: fileData, fileName: url.lastPathComponent, contentType: "application/pdf") +openAI.files(query: query) { result in + //Handle response here +} +``` + + ### Audio The speech to text API provides two endpoints, transcriptions and translations, based on our state-of-the-art open source large-v2 [Whisper model](https://openai.com/research/whisper). They can be used to: diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift index 16719754..95f33387 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift @@ -263,7 +263,7 @@ public protocol OpenAIProtocol { /** This function sends a assistants query to the OpenAI API and modifies an assistant. The Assistants API in this usage enables you to modify an assistant. - Example: Create Assistant + Example: Modify Assistant ``` let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, fileIds: fileIds) openAI.assistantModify(query: query, asstId: "asst_1234") { result in @@ -281,6 +281,7 @@ public protocol OpenAIProtocol { /** This function sends a threads query to the OpenAI API and creates a thread. The Threads API in this usage enables you to create a thread. + Example: Create Thread ``` let threadsQuery = ThreadsQuery(messages: [Chat(role: message.role, content: message.content)]) openAI.threads(query: threadsQuery) { result in @@ -297,6 +298,7 @@ public protocol OpenAIProtocol { /** This function sends a runs query to the OpenAI API and creates a run. The Runs API in this usage enables you to create a run. + Example: Create Run ``` let runsQuery = RunsQuery(assistantId: currentAssistantId) openAI.runs(threadId: threadsResult.id, query: runsQuery) { result in @@ -314,6 +316,7 @@ public protocol OpenAIProtocol { /** This function sends a thread id and run id to the OpenAI API and retrieves a run. The Runs API in this usage enables you to retrieve a run. + Example: Retrieve Run ``` openAI.runRetrieve(threadId: currentThreadId, runId: currentRunId) { result in //Handle response here @@ -330,7 +333,7 @@ public protocol OpenAIProtocol { This function sends a thread id and run id to the OpenAI API and retrieves a threads messages. The Thread API in this usage enables you to retrieve a threads messages. - + Example: Get Threads Messages ``` openAI.threadsMessages(threadId: currentThreadId, before: nil) { result in //Handle response here @@ -347,6 +350,7 @@ public protocol OpenAIProtocol { /** This function sends a thread id and message contents to the OpenAI API and returns a run. 
+ Example: Add Message to Thread ``` let query = ThreadAddMessageQuery(role: message.role.rawValue, content: message.content) openAI.threadsAddMessage(threadId: currentThreadId, query: query) { result in @@ -361,7 +365,20 @@ public protocol OpenAIProtocol { **/ func threadsAddMessage(threadId: String, query: ThreadAddMessageQuery, completion: @escaping (Result) -> Void) - // TODO: Files Docs + /** + This function sends a purpose string, file contents, and fileName contents to the OpenAI API and returns a file id result. + + Example: Upload file + ``` + let query = FilesQuery(purpose: "assistants", file: fileData, fileName: url.lastPathComponent, contentType: "application/pdf") + openAI.files(query: query) { result in + //Handle response here + } + ``` + - Parameter query: The `FilesQuery` instance, containing the information required for the files request. + - Parameter completion: The completion handler to be executed upon completion of the files request. + Returns a `Result` of type `FilesResult` if successful, or an `Error` if an error occurs. + **/ func files(query: FilesQuery, completion: @escaping (Result) -> Void) // END new functionality added to OpenAI Beta on 11-06-23 end From 5337e8ef2247b85158be0ad9a37a01e638d45b2e Mon Sep 17 00:00:00 2001 From: Chris Dillard Date: Fri, 8 Dec 2023 17:22:07 -0700 Subject: [PATCH 6/9] clean, runRetrieveSteps implemented, SupportedFileTypes implemented --- Demo/App/ContentView.swift | 8 +- Demo/DemoChat/Sources/AssistantStore.swift | 9 +- Demo/DemoChat/Sources/ChatStore.swift | 6 + Demo/DemoChat/Sources/SupportedFileType.swift | 92 ++++++++++++++ Demo/DemoChat/Sources/UI/AssistantsView.swift | 114 ++++++++++-------- Demo/DemoChat/Sources/UI/DetailView.swift | 7 ++ Demo/DemoChat/Sources/UI/DocumentPicker.swift | 5 +- Sources/OpenAI/OpenAI.swift | 11 +- Sources/OpenAI/Public/Models/FilesQuery.swift | 2 +- .../OpenAI/Public/Models/FilesResult.swift | 1 + .../Models/RunRetrieveStepsResult.swift | 42 +++++++ .../Protocols/OpenAIProtocol+Async.swift | 16 +++ .../Public/Protocols/OpenAIProtocol.swift | 17 +++ 13 files changed, 266 insertions(+), 64 deletions(-) create mode 100644 Demo/DemoChat/Sources/SupportedFileType.swift create mode 100644 Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift diff --git a/Demo/App/ContentView.swift b/Demo/App/ContentView.swift index 2029d6fb..091951c7 100644 --- a/Demo/App/ContentView.swift +++ b/Demo/App/ContentView.swift @@ -43,7 +43,7 @@ struct ContentView: View { .tabItem { Label("Transcribe", systemImage: "mic") } - .tag(1) + .tag(2) ImageView( store: imageStore @@ -51,15 +51,15 @@ struct ContentView: View { .tabItem { Label("Image", systemImage: "photo") } - .tag(2) - + .tag(3) + MiscView( store: miscStore ) .tabItem { Label("Misc", systemImage: "ellipsis") } - .tag(3) + .tag(4) } } } diff --git a/Demo/DemoChat/Sources/AssistantStore.swift b/Demo/DemoChat/Sources/AssistantStore.swift index f667c12f..cb39d739 100644 --- a/Demo/DemoChat/Sources/AssistantStore.swift +++ b/Demo/DemoChat/Sources/AssistantStore.swift @@ -32,6 +32,9 @@ public final class AssistantStore: ObservableObject { let tools = createToolsArray(codeInterpreter: codeInterpreter, retrieval: retrievel) let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools:tools, fileIds: fileIds) let response = try await openAIClient.assistants(query: query, method: "POST", after: nil) + + // Refresh assistants with one just created (or modified) + let _ = await getAssistants() // 
Returns assistantId return response.id @@ -95,10 +98,12 @@ public final class AssistantStore: ObservableObject { @MainActor func uploadFile(url: URL) async -> String? { do { + + let mimeType = url.mimeType() + let fileData = try Data(contentsOf: url) - // TODO: Support all the same types as openAI (not just pdf). - let result = try await openAIClient.files(query: FilesQuery(purpose: "assistants", file: fileData, fileName: url.lastPathComponent, contentType: "application/pdf")) + let result = try await openAIClient.files(query: FilesQuery(purpose: "assistants", file: fileData, fileName: url.lastPathComponent, contentType: mimeType)) return result.id } catch { diff --git a/Demo/DemoChat/Sources/ChatStore.swift b/Demo/DemoChat/Sources/ChatStore.swift index 8f9a792f..8dae353d 100644 --- a/Demo/DemoChat/Sources/ChatStore.swift +++ b/Demo/DemoChat/Sources/ChatStore.swift @@ -245,6 +245,12 @@ public final class ChatStore: ObservableObject { Task { let result = try await openAIClient.runRetrieve(threadId: currentThreadId ?? "", runId: currentRunId ?? "") + + // TESTING RETRIEVAL OF RUN STEPS + Task { + let stepsResult = try await openAIClient.runRetrieveSteps(threadId: currentThreadId ?? "", runId: currentRunId ?? "") + // print(stepsResult) + } switch result.status { // Get threadsMesages. diff --git a/Demo/DemoChat/Sources/SupportedFileType.swift b/Demo/DemoChat/Sources/SupportedFileType.swift new file mode 100644 index 00000000..dc604cc9 --- /dev/null +++ b/Demo/DemoChat/Sources/SupportedFileType.swift @@ -0,0 +1,92 @@ +// +// SupportedFileType.swift +// +// +// Created by Chris Dillard on 12/8/23. +// + +import Foundation +import UniformTypeIdentifiers + +struct SupportedFileType { + let fileFormat: String + let mimeType: String + let isCodeInterpreterSupported: Bool + let isRetrievalSupported: Bool +} + +let supportedFileTypes: [SupportedFileType] = [ + SupportedFileType(fileFormat: "c", mimeType: "text/x-c", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "cpp", mimeType: "text/x-c++", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "csv", mimeType: "application/csv", + isCodeInterpreterSupported: true, isRetrievalSupported: false), + SupportedFileType(fileFormat: "docx", mimeType: "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "html", mimeType: "text/html", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "java", mimeType: "text/x-java", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "json", mimeType: "application/json", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "md", mimeType: "text/markdown", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "pdf", mimeType: "application/pdf", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "php", mimeType: "text/x-php", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "pptx", mimeType: "application/vnd.openxmlformats-officedocument.presentationml.presentation", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "py", mimeType: "text/x-python", + isCodeInterpreterSupported: true, 
isRetrievalSupported: true), + SupportedFileType(fileFormat: "rb", mimeType: "text/x-ruby", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "tex", mimeType: "text/x-tex", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "txt", mimeType: "text/plain", + isCodeInterpreterSupported: true, isRetrievalSupported: true), + SupportedFileType(fileFormat: "css", mimeType: "text/css", + isCodeInterpreterSupported: true, isRetrievalSupported: false), + SupportedFileType(fileFormat: "jpeg", mimeType: "image/jpeg", + isCodeInterpreterSupported: true, isRetrievalSupported: false), + SupportedFileType(fileFormat: "jpg", mimeType: "image/jpeg", + isCodeInterpreterSupported: true, isRetrievalSupported: false), + SupportedFileType(fileFormat: "js", mimeType: "text/javascript", + isCodeInterpreterSupported: true, isRetrievalSupported: false), + SupportedFileType(fileFormat: "gif", mimeType: "image/gif", + isCodeInterpreterSupported: true, isRetrievalSupported: false), + SupportedFileType(fileFormat: "png", mimeType: "image/png", + isCodeInterpreterSupported: true, isRetrievalSupported: false), + SupportedFileType(fileFormat: "tar", mimeType: "application/x-tar", + isCodeInterpreterSupported: true, isRetrievalSupported: false), + SupportedFileType(fileFormat: "ts", mimeType: "application/typescript", + isCodeInterpreterSupported: true, isRetrievalSupported: false), + SupportedFileType(fileFormat: "xlsx", mimeType: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + isCodeInterpreterSupported: true, isRetrievalSupported: false), + SupportedFileType(fileFormat: "xml", mimeType: "application/xml", // or \"text/xml\" + isCodeInterpreterSupported: true, isRetrievalSupported: false), + SupportedFileType(fileFormat: "zip", mimeType: "application/zip", + isCodeInterpreterSupported: true, isRetrievalSupported: false) +] + +func supportedUITypes() -> [UTType] { + var supportedTypes: [UTType] = [] + + for supportedFileType in supportedFileTypes { + if let newType = UTType(filenameExtension: supportedFileType.fileFormat) { + supportedTypes += [newType] + } + } + + return supportedTypes +} + +extension URL { + func mimeType() -> String { + guard let utType = UTType(filenameExtension: self.pathExtension) else { + return "application/octet-stream" // Default type if unknown + } + return utType.preferredMIMEType ?? "application/octet-stream" + } +} diff --git a/Demo/DemoChat/Sources/UI/AssistantsView.swift b/Demo/DemoChat/Sources/UI/AssistantsView.swift index a45fb04c..e98bdc49 100644 --- a/Demo/DemoChat/Sources/UI/AssistantsView.swift +++ b/Demo/DemoChat/Sources/UI/AssistantsView.swift @@ -11,37 +11,37 @@ import SwiftUI public struct AssistantsView: View { @ObservedObject var store: ChatStore @ObservedObject var assistantStore: AssistantStore - + @Environment(\.dateProviderValue) var dateProvider @Environment(\.idProviderValue) var idProvider - + // state to select file @State private var isPickerPresented: Bool = false @State private var fileURL: URL? 
- + // state to modify assistant @State private var name: String = "" @State private var description: String = "" @State private var customInstructions: String = "" @State private var fileIds: [String] = [] - + @State private var codeInterpreter: Bool = false @State private var retrieval: Bool = false @State var isLoadingMore = false @State private var isModalPresented = false @State private var isUploading = false - + //If a file is selected via the document picker, this is set. @State var selectedFileURL: URL? @State var uploadedFileId: String? - + @State var mode: AssistantModalContentView.Mode = .create - + public init(store: ChatStore, assistantStore: AssistantStore) { self.store = store self.assistantStore = assistantStore } - + public var body: some View { ZStack { NavigationSplitView { @@ -49,24 +49,11 @@ public struct AssistantsView: View { assistants: $assistantStore.availableAssistants, selectedAssistantId: Binding( get: { assistantStore.selectedAssistantId - + }, set: { newId in guard newId != nil else { return } - - assistantStore.selectAssistant(newId) - - let selectedAssistant = assistantStore.availableAssistants.filter { $0.id == assistantStore.selectedAssistantId }.first - - name = selectedAssistant?.name ?? "" - description = selectedAssistant?.description ?? "" - customInstructions = selectedAssistant?.instructions ?? "" - codeInterpreter = selectedAssistant?.codeInterpreter ?? false - retrieval = selectedAssistant?.retrieval ?? false - fileIds = selectedAssistant?.fileIds ?? [] - - mode = .modify - isModalPresented = true - + + selectAssistant(newId: newId) }), onLoadMoreAssistants: { loadMoreAssistants() }, isLoadingMore: $isLoadingMore @@ -88,29 +75,30 @@ public struct AssistantsView: View { } label: { Image(systemName: "plus") } - + .buttonStyle(.borderedProminent) } } } detail: { - + } .sheet(isPresented: $isModalPresented, onDismiss: { resetAssistantCreator() }, content: { AssistantModalContentView(name: $name, description: $description, customInstructions: $customInstructions, - codeInterpreter: $codeInterpreter, retrieval: $retrieval, fileIds: $fileIds, isUploading: $isUploading, modify: mode == .modify, isPickerPresented: $isPickerPresented, selectedFileURL: $selectedFileURL) { + codeInterpreter: $codeInterpreter, retrieval: $retrieval, fileIds: $fileIds, + isUploading: $isUploading, modify: mode == .modify, isPickerPresented: $isPickerPresented, selectedFileURL: $selectedFileURL) { Task { await handleOKTap() } } onFileUpload: { Task { guard let selectedFileURL else { return } - + isUploading = true uploadedFileId = await assistantStore.uploadFile(url: selectedFileURL) isUploading = false - + if uploadedFileId == nil { print("Failed to upload") self.selectedFileURL = nil @@ -119,9 +107,9 @@ public struct AssistantsView: View { // if successful upload , we can show it. if let uploadedFileId = uploadedFileId { self.selectedFileURL = nil - + fileIds += [uploadedFileId] - + print("Successful upload!") } } @@ -130,58 +118,78 @@ public struct AssistantsView: View { }) } } - - func handleOKTap() async { - // When OK is tapped that means we should save the modified assistant and start a new thread with it. + + private func handleOKTap() async { + var mergedFileIds = [String]() - + mergedFileIds += fileIds - + let asstId: String? - - if mode == .create { + + switch mode { + // Create new Assistant and start a new conversation with it. 
+ case .create: asstId = await assistantStore.createAssistant(name: name, description: description, instructions: customInstructions, codeInterpreter: codeInterpreter, retrievel: retrieval, fileIds: mergedFileIds.isEmpty ? nil : mergedFileIds) - } - // Modify - else { + // Modify existing Assistant and start new conversation with it. + case .modify: guard let selectedAssistantId = assistantStore.selectedAssistantId else { return print("Cannot modify assistant, not selected.") } - + asstId = await assistantStore.modifyAssistant(asstId: selectedAssistantId, name: name, description: description, instructions: customInstructions, codeInterpreter: codeInterpreter, retrievel: retrieval, fileIds: mergedFileIds.isEmpty ? nil : mergedFileIds) } - + + // Reset Assistant Creator after attempted creation or modification. + resetAssistantCreator() + guard let asstId else { print("failed to create Assistant.") return } - - resetAssistantCreator() - + + // Create new local conversation to represent new thread. store.createConversation(type: .assistant, assistantId: asstId) } - - func loadMoreAssistants() { + + private func loadMoreAssistants() { guard !isLoadingMore else { return } - + isLoadingMore = true let lastAssistantId = assistantStore.availableAssistants.last?.id ?? "" - + Task { // Fetch more assistants and append to the list let _ = await assistantStore.getAssistants(after: lastAssistantId) isLoadingMore = false } } - - func resetAssistantCreator() { + + private func resetAssistantCreator() { // Reset state for Assistant creator. name = "" description = "" customInstructions = "" - + codeInterpreter = false retrieval = false selectedFileURL = nil uploadedFileId = nil fileIds = [] } + + private func selectAssistant(newId: String?) { + assistantStore.selectAssistant(newId) + + let selectedAssistant = assistantStore.availableAssistants.filter { $0.id == assistantStore.selectedAssistantId }.first + + name = selectedAssistant?.name ?? "" + description = selectedAssistant?.description ?? "" + customInstructions = selectedAssistant?.instructions ?? "" + codeInterpreter = selectedAssistant?.codeInterpreter ?? false + retrieval = selectedAssistant?.retrieval ?? false + fileIds = selectedAssistant?.fileIds ?? [] + + mode = .modify + isModalPresented = true + + } } diff --git a/Demo/DemoChat/Sources/UI/DetailView.swift b/Demo/DemoChat/Sources/UI/DetailView.swift index 34258eab..ee7b076f 100644 --- a/Demo/DemoChat/Sources/UI/DetailView.swift +++ b/Demo/DemoChat/Sources/UI/DetailView.swift @@ -54,6 +54,10 @@ struct DetailView: View { } .listRowSeparator(.hidden) } + // Tapping on the message bubble area should dismiss the keyboard. + .onTapGesture { + self.hideKeyboard() + } .listStyle(.plain) .animation(.default, value: conversation.messages) // .onChange(of: conversation) { newValue in @@ -226,6 +230,9 @@ struct DetailView: View { func currentAssistantName() -> String { availableAssistants.filter { conversation.assistantId == $0.id }.first?.name ?? 
"" } + func hideKeyboard() { + UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil) + } } struct ChatBubble: View { diff --git a/Demo/DemoChat/Sources/UI/DocumentPicker.swift b/Demo/DemoChat/Sources/UI/DocumentPicker.swift index 770ea0c3..3c960235 100644 --- a/Demo/DemoChat/Sources/UI/DocumentPicker.swift +++ b/Demo/DemoChat/Sources/UI/DocumentPicker.swift @@ -12,11 +12,10 @@ struct DocumentPicker: UIViewControllerRepresentable { var callback: (URL) -> Void func makeUIViewController(context: Context) -> UIDocumentPickerViewController { - // TODO: Support all the same file types as openAI. - let supportedTypes: [UTType] = [UTType.pdf] - let pickerViewController = UIDocumentPickerViewController(forOpeningContentTypes: supportedTypes, asCopy: true) + let pickerViewController = UIDocumentPickerViewController(forOpeningContentTypes: supportedUITypes(), asCopy: true) pickerViewController.allowsMultipleSelection = false pickerViewController.shouldShowFileExtensions = true + pickerViewController.delegate = context.coordinator return pickerViewController } diff --git a/Sources/OpenAI/OpenAI.swift b/Sources/OpenAI/OpenAI.swift index 8ac445f4..df0442d7 100644 --- a/Sources/OpenAI/OpenAI.swift +++ b/Sources/OpenAI/OpenAI.swift @@ -69,6 +69,10 @@ final public class OpenAI: OpenAIProtocol { performRequest(request: JSONRequest(body: nil, url: buildRunRetrieveURL(path: .runRetrieve, threadId: threadId, runId: runId), method: "GET"), completion: completion) } + public func runRetrieveSteps(threadId: String, runId: String, completion: @escaping (Result) -> Void) { + performRequest(request: JSONRequest(body: nil, url: buildRunRetrieveURL(path: .runRetrieveSteps, threadId: threadId, runId: runId), method: "GET"), completion: completion) + } + public func runs(threadId: String, query: RunsQuery, completion: @escaping (Result) -> Void) { performRequest(request: JSONRequest(body: query, url: buildRunsURL(path: .runs, threadId: threadId)), completion: completion) } @@ -173,7 +177,9 @@ extension OpenAI { do { let errorText = String(data: data, encoding: .utf8) - + + // print(errorText) + let decoded = try JSONDecoder().decode(ResultType.self, from: data) completion(.success(decoded)) } catch { @@ -305,8 +311,11 @@ extension APIPath { static let threads = "/v1/threads" static let runs = "/v1/threads/THREAD_ID/runs" static let runRetrieve = "/v1/threads/THREAD_ID/runs/RUN_ID" + static let runRetrieveSteps = "/v1/threads/THREAD_ID/runs/RUN_ID/steps" + static let threadsMessages = "/v1/threads/THREAD_ID/messages" static let files = "/v1/files" + // 1106 end static let completions = "/v1/completions" diff --git a/Sources/OpenAI/Public/Models/FilesQuery.swift b/Sources/OpenAI/Public/Models/FilesQuery.swift index 20ccfeb1..09d883dc 100644 --- a/Sources/OpenAI/Public/Models/FilesQuery.swift +++ b/Sources/OpenAI/Public/Models/FilesQuery.swift @@ -1,5 +1,5 @@ // -// AssistantsQuery.swift +// FilesQuery.swift // // // Created by Chris Dillard on 11/07/2023. 
diff --git a/Sources/OpenAI/Public/Models/FilesResult.swift b/Sources/OpenAI/Public/Models/FilesResult.swift index 3063c0fe..0799d8c4 100644 --- a/Sources/OpenAI/Public/Models/FilesResult.swift +++ b/Sources/OpenAI/Public/Models/FilesResult.swift @@ -10,5 +10,6 @@ import Foundation public struct FilesResult: Codable, Equatable { public let id: String + public let name: String } diff --git a/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift b/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift new file mode 100644 index 00000000..22b91d05 --- /dev/null +++ b/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift @@ -0,0 +1,42 @@ +// +// RunRetreiveStepsResult.swift +// +// +// Created by Chris Dillard on 11/07/2023. +// + +import Foundation + +public struct RunRetreiveStepsResult: Codable, Equatable { + + public struct StepDetailsTopLevel: Codable, Equatable { + + public let stepDetails: StepDetailsSecondLevel + + enum CodingKeys: String, CodingKey { + case stepDetails = "step_details" + } + + public struct StepDetailsSecondLevel: Codable, Equatable { + + public let toolCalls: [ToolCall]? + + enum CodingKeys: String, CodingKey { + case toolCalls = "tool_calls" + } + + public struct ToolCall: Codable, Equatable { + public let type: String + public let code: CodeToolCall? + + public struct CodeToolCall: Codable, Equatable { + public let input: String + public let outputs: [[String: String]] + + } + } + } + } + + public let data: [StepDetailsTopLevel] +} diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift index d8ca4cfe..7aa322f5 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift @@ -310,6 +310,22 @@ public extension OpenAIProtocol { } } + func runRetrieveSteps( + threadId: String, + runId: String + ) async throws -> RunRetreiveStepsResult { + try await withCheckedThrowingContinuation { continuation in + runRetrieveSteps(threadId: threadId, runId: runId) { result in + switch result { + case let .success(success): + return continuation.resume(returning: success) + case let .failure(failure): + return continuation.resume(throwing: failure) + } + } + } + } + func threadsMessages( threadId: String, before: String? diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift index e9fc85f8..7a69478d 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift @@ -346,6 +346,23 @@ public protocol OpenAIProtocol { **/ func runRetrieve(threadId: String, runId: String, completion: @escaping (Result) -> Void) + /** + This function sends a thread id and run id to the OpenAI API and retrieves a run. The Runs API in this usage enables you to retrieve a run. + + Example: Retrieve Run Steps + ``` + openAI.runRetrieveSteps(threadId: currentThreadId, runId: currentRunId) { result in + //Handle response here + } + ``` + - Parameter threadId: The thread id for the thread to run. + - Parameter runId: The run id for the run to retrieve. + - Parameter completion: The completion handler to be executed upon completion of the runRetrieve request. + Returns a `Result` of type `RunRetreiveStepsResult` if successful, or an `Error` if an error occurs. 
+ **/ + func runRetrieveSteps(threadId: String, runId: String, completion: @escaping (Result) -> Void) + + /** This function sends a thread id and run id to the OpenAI API and retrieves a threads messages. The Thread API in this usage enables you to retrieve a threads messages. From 82d1eda0cebf1bcb2355fcde81018e10f474ffc4 Mon Sep 17 00:00:00 2001 From: Chris Dillard Date: Mon, 11 Dec 2023 17:35:53 -0700 Subject: [PATCH 7/9] Handle run retrieve steps --- Demo/DemoChat/Sources/AssistantStore.swift | 4 +- Demo/DemoChat/Sources/ChatStore.swift | 134 ++++++++++++------ Demo/DemoChat/Sources/Models/Message.swift | 2 +- .../UI/AssistantModalContentView.swift | 17 ++- Demo/DemoChat/Sources/UI/AssistantsView.swift | 3 +- Sources/OpenAI/OpenAI.swift | 15 +- .../Models/RunRetrieveStepsResult.swift | 3 +- .../Protocols/OpenAIProtocol+Async.swift | 5 +- .../Public/Protocols/OpenAIProtocol.swift | 7 +- Tests/OpenAITests/OpenAITests.swift | 7 + 10 files changed, 129 insertions(+), 68 deletions(-) diff --git a/Demo/DemoChat/Sources/AssistantStore.swift b/Demo/DemoChat/Sources/AssistantStore.swift index cb39d739..1393fefb 100644 --- a/Demo/DemoChat/Sources/AssistantStore.swift +++ b/Demo/DemoChat/Sources/AssistantStore.swift @@ -96,7 +96,7 @@ public final class AssistantStore: ObservableObject { } @MainActor - func uploadFile(url: URL) async -> String? { + func uploadFile(url: URL) async -> FilesResult? { do { let mimeType = url.mimeType() @@ -104,7 +104,7 @@ public final class AssistantStore: ObservableObject { let fileData = try Data(contentsOf: url) let result = try await openAIClient.files(query: FilesQuery(purpose: "assistants", file: fileData, fileName: url.lastPathComponent, contentType: mimeType)) - return result.id + return result } catch { print("error = \(error)") diff --git a/Demo/DemoChat/Sources/ChatStore.swift b/Demo/DemoChat/Sources/ChatStore.swift index 8dae353d..f426c1b9 100644 --- a/Demo/DemoChat/Sources/ChatStore.swift +++ b/Demo/DemoChat/Sources/ChatStore.swift @@ -80,7 +80,7 @@ public final class ChatStore: ObservableObject { conversationId: conversationId, model: model ) - // For assistant case we send chats to thread and then poll, polling will receive sent chat + new assistant messages. + // For assistant case we send chats to thread and then poll, polling will receive sent chat + new assistant messages. case .assistant: // First message in an assistant thread. @@ -239,67 +239,107 @@ public final class ChatStore: ObservableObject { } private func timerFired() { - guard let conversationIndex = conversations.firstIndex(where: { $0.id == currentConversationId }) else { - return - } - Task { let result = try await openAIClient.runRetrieve(threadId: currentThreadId ?? "", runId: currentRunId ?? "") - + // TESTING RETRIEVAL OF RUN STEPS - Task { - let stepsResult = try await openAIClient.runRetrieveSteps(threadId: currentThreadId ?? "", runId: currentRunId ?? "") - // print(stepsResult) - } + handleRunRetrieveSteps() switch result.status { - // Get threadsMesages. + // Get threadsMesages. case "completed": - DispatchQueue.main.async { - self.stopPolling() - } - var before: String? - if let lastNonLocalMessage = self.conversations[conversationIndex].messages.last(where: { $0.isLocal == false }) { - before = lastNonLocalMessage.id - } - - let result = try await openAIClient.threadsMessages(threadId: currentThreadId ?? 
"", before: before) - - DispatchQueue.main.async { - for item in result.data.reversed() { - let role = item.role - for innerItem in item.content { - let message = Message( - id: item.id, - role: Chat.Role(rawValue: role) ?? .user, - content: innerItem.text.value, - createdAt: Date(), - isLocal: false // Messages from the server are not local - ) - // Check if this message from the API matches a local message - if let localMessageIndex = self.conversations[conversationIndex].messages.firstIndex(where: { $0.isLocal == true }) { - - // Replace the local message with the API message - self.conversations[conversationIndex].messages[localMessageIndex] = message - } else { - // This is a new message from the server, append it - - self.conversations[conversationIndex].messages.append(message) - } - } - } - } + handleCompleted() break case "failed": - DispatchQueue.main.async { - + // Handle more gracefully with a popup dialog or failure indicator + await MainActor.run { self.stopPolling() } break default: + // Handle additional statuses "requires_action", "queued" ?, "expired", "cancelled" + // https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps break } } } // END Polling section + + // This function is called when a thread is marked "completed" by the run status API. + private func handleCompleted() { + guard let conversationIndex = conversations.firstIndex(where: { $0.id == currentConversationId }) else { + return + } + Task { + await MainActor.run { + self.stopPolling() + } + // Once a thread is marked "completed" by the status API, we can retrieve the threads messages, including a pagins cursor representing the last message we received. + var before: String? + if let lastNonLocalMessage = self.conversations[conversationIndex].messages.last(where: { $0.isLocal == false }) { + before = lastNonLocalMessage.id + } + + let result = try await openAIClient.threadsMessages(threadId: currentThreadId ?? "", before: before) + + for item in result.data.reversed() { + let role = item.role + for innerItem in item.content { + let message = Message( + id: item.id, + role: Chat.Role(rawValue: role) ?? .user, + content: innerItem.text.value, + createdAt: Date(), + isLocal: false // Messages from the server are not local + ) + await MainActor.run { + // Check if this message from the API matches a local message + if let localMessageIndex = self.conversations[conversationIndex].messages.firstIndex(where: { $0.isLocal == true }) { + + // Replace the local message with the API message + self.conversations[conversationIndex].messages[localMessageIndex] = message + } else { + // This is a new message from the server, append it + self.conversations[conversationIndex].messages.append(message) + } + } + } + } + } + } + + // The run retrieval steps are fetched in a separate task. This request is fetched, checking for new run steps, each time the run is fetched. + private func handleRunRetrieveSteps() { + Task { + guard let conversationIndex = conversations.firstIndex(where: { $0.id == currentConversationId }) else { + return + } + var before: String? + if let lastRunStepMessage = self.conversations[conversationIndex].messages.last(where: { $0.isRunStep == true }) { + before = lastRunStepMessage.id + } + + let stepsResult = try await openAIClient.runRetrieveSteps(threadId: currentThreadId ?? "", runId: currentRunId ?? "", before: before) + + // Add Steps as + for item in stepsResult.data.reversed() { + for step in item.stepDetails.toolCalls?.reversed() ?? 
[] { + // TODO: Depending on the type of tool tha is used we can add additional information here + // ie: if its a retrieval: add file information, code_interpreter: add inputs and outputs info, or function: add arguemts and additional info. + + let runStepMessage = Message( + id: item.id, + role: .assistant, + content: "RUN STEP: \(step.type)", + createdAt: Date(), + isRunStep: true // Messages from the server are not local + ) + await MainActor.run { + + self.conversations[conversationIndex].messages.append(runStepMessage) + } + } + } + } + } } diff --git a/Demo/DemoChat/Sources/Models/Message.swift b/Demo/DemoChat/Sources/Models/Message.swift index 35c44d53..bfbc7b9b 100644 --- a/Demo/DemoChat/Sources/Models/Message.swift +++ b/Demo/DemoChat/Sources/Models/Message.swift @@ -15,7 +15,7 @@ struct Message { var createdAt: Date var isLocal: Bool? - + var isRunStep: Bool? } extension Message: Equatable, Codable, Hashable, Identifiable {} diff --git a/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift b/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift index 267801ba..efde0536 100644 --- a/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift +++ b/Demo/DemoChat/Sources/UI/AssistantModalContentView.swift @@ -57,10 +57,21 @@ struct AssistantModalContentView: View { }) if !fileIds.isEmpty { - ForEach(fileIds, id: \.self) { url in + ForEach(fileIds, id: \.self) { fileId in HStack { - Text("File: \(url)") - + // File Id of each file added to the assistant. + Text("File: \(fileId)") + Spacer() + // Button to remove fileId from the list of fileIds to be used when create or modify assistant. + Button(action: { + // Add action to remove the file from the list + if let index = fileIds.firstIndex(of: fileId) { + fileIds.remove(at: index) + } + }) { + Image(systemName: "xmark.circle.fill") // X button + .foregroundColor(.red) + } } } } diff --git a/Demo/DemoChat/Sources/UI/AssistantsView.swift b/Demo/DemoChat/Sources/UI/AssistantsView.swift index e98bdc49..d2668fdd 100644 --- a/Demo/DemoChat/Sources/UI/AssistantsView.swift +++ b/Demo/DemoChat/Sources/UI/AssistantsView.swift @@ -96,7 +96,8 @@ public struct AssistantsView: View { guard let selectedFileURL else { return } isUploading = true - uploadedFileId = await assistantStore.uploadFile(url: selectedFileURL) + let file = await assistantStore.uploadFile(url: selectedFileURL) + uploadedFileId = file?.id isUploading = false if uploadedFileId == nil { diff --git a/Sources/OpenAI/OpenAI.swift b/Sources/OpenAI/OpenAI.swift index df0442d7..d3d7e1a4 100644 --- a/Sources/OpenAI/OpenAI.swift +++ b/Sources/OpenAI/OpenAI.swift @@ -66,11 +66,11 @@ final public class OpenAI: OpenAIProtocol { } public func runRetrieve(threadId: String, runId: String, completion: @escaping (Result) -> Void) { - performRequest(request: JSONRequest(body: nil, url: buildRunRetrieveURL(path: .runRetrieve, threadId: threadId, runId: runId), method: "GET"), completion: completion) + performRequest(request: JSONRequest(body: nil, url: buildRunRetrieveURL(path: .runRetrieve, threadId: threadId, runId: runId, before: nil), method: "GET"), completion: completion) } - public func runRetrieveSteps(threadId: String, runId: String, completion: @escaping (Result) -> Void) { - performRequest(request: JSONRequest(body: nil, url: buildRunRetrieveURL(path: .runRetrieveSteps, threadId: threadId, runId: runId), method: "GET"), completion: completion) + public func runRetrieveSteps(threadId: String, runId: String, before: String?, completion: @escaping (Result) -> Void) { + 
performRequest(request: JSONRequest(body: nil, url: buildRunRetrieveURL(path: .runRetrieveSteps, threadId: threadId, runId: runId, before: before), method: "GET"), completion: completion) } public func runs(threadId: String, query: RunsQuery, completion: @escaping (Result) -> Void) { @@ -178,8 +178,6 @@ extension OpenAI { let errorText = String(data: data, encoding: .utf8) - // print(errorText) - let decoded = try JSONDecoder().decode(ResultType.self, from: data) completion(.success(decoded)) } catch { @@ -284,12 +282,15 @@ extension OpenAI { return components.url! } - func buildRunRetrieveURL(path: String, threadId: String, runId: String) -> URL { + func buildRunRetrieveURL(path: String, threadId: String, runId: String, before: String? = nil) -> URL { var components = URLComponents() components.scheme = "https" components.host = configuration.host components.path = path.replacingOccurrences(of: "THREAD_ID", with: threadId) .replacingOccurrences(of: "RUN_ID", with: runId) + if let before { + components.queryItems = [URLQueryItem(name: "before", value: before)] + } return components.url! } @@ -312,10 +313,8 @@ extension APIPath { static let runs = "/v1/threads/THREAD_ID/runs" static let runRetrieve = "/v1/threads/THREAD_ID/runs/RUN_ID" static let runRetrieveSteps = "/v1/threads/THREAD_ID/runs/RUN_ID/steps" - static let threadsMessages = "/v1/threads/THREAD_ID/messages" static let files = "/v1/files" - // 1106 end static let completions = "/v1/completions" diff --git a/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift b/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift index 22b91d05..ef99e549 100644 --- a/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift +++ b/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift @@ -10,10 +10,11 @@ import Foundation public struct RunRetreiveStepsResult: Codable, Equatable { public struct StepDetailsTopLevel: Codable, Equatable { - + public let id: String public let stepDetails: StepDetailsSecondLevel enum CodingKeys: String, CodingKey { + case id case stepDetails = "step_details" } diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift index 7aa322f5..7428e34f 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift @@ -312,10 +312,11 @@ public extension OpenAIProtocol { func runRetrieveSteps( threadId: String, - runId: String + runId: String, + before: String? ) async throws -> RunRetreiveStepsResult { try await withCheckedThrowingContinuation { continuation in - runRetrieveSteps(threadId: threadId, runId: runId) { result in + runRetrieveSteps(threadId: threadId, runId: runId, before: before) { result in switch result { case let .success(success): return continuation.resume(returning: success) diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift index 7a69478d..f178bd2b 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift @@ -347,7 +347,7 @@ public protocol OpenAIProtocol { func runRetrieve(threadId: String, runId: String, completion: @escaping (Result) -> Void) /** - This function sends a thread id and run id to the OpenAI API and retrieves a run. The Runs API in this usage enables you to retrieve a run. + This function sends a thread id and run id to the OpenAI API and retrieves a list of run steps. 
The Runs API in this usage enables you to retrieve a runs run steps. Example: Retrieve Run Steps ``` @@ -357,10 +357,11 @@ public protocol OpenAIProtocol { ``` - Parameter threadId: The thread id for the thread to run. - Parameter runId: The run id for the run to retrieve. + - Parameter before: String?: The message id for the run step that defines your place in the list of run steps. Pass nil to get all. - Parameter completion: The completion handler to be executed upon completion of the runRetrieve request. Returns a `Result` of type `RunRetreiveStepsResult` if successful, or an `Error` if an error occurs. **/ - func runRetrieveSteps(threadId: String, runId: String, completion: @escaping (Result) -> Void) + func runRetrieveSteps(threadId: String, runId: String, before: String?, completion: @escaping (Result) -> Void) /** @@ -375,7 +376,7 @@ public protocol OpenAIProtocol { ``` - Parameter threadId: The thread id for the thread to run. - - Parameter before: String?: The message id for the message taht defines your place in the list of messages. Pass nil to get all. + - Parameter before: String?: The message id for the message that defines your place in the list of messages. Pass nil to get all. - Parameter completion: The completion handler to be executed upon completion of the runRetrieve request. Returns a `Result` of type `ThreadsMessagesResult` if successful, or an `Error` if an error occurs. **/ diff --git a/Tests/OpenAITests/OpenAITests.swift b/Tests/OpenAITests/OpenAITests.swift index 167afcdb..a31373b8 100644 --- a/Tests/OpenAITests/OpenAITests.swift +++ b/Tests/OpenAITests/OpenAITests.swift @@ -476,6 +476,13 @@ class OpenAITests: XCTestCase { let completionsURL = openAI.buildRunRetrieveURL(path: .runRetrieve, threadId: "thread_4321", runId: "run_1234") XCTAssertEqual(completionsURL, URL(string: "https://my.host.com/v1/threads/thread_4321/runs/run_1234")) } + + func testCustomRunRetrieveStepsURLBuilt() { + let configuration = OpenAI.Configuration(token: "foo", organizationIdentifier: "bar", host: "my.host.com", timeoutInterval: 14) + let openAI = OpenAI(configuration: configuration, session: self.urlSession) + let completionsURL = openAI.buildRunRetrieveURL(path: .runRetrieveSteps, threadId: "thread_4321", runId: "run_1234") + XCTAssertEqual(completionsURL, URL(string: "https://my.host.com/v1/threads/thread_4321/runs/run_1234/steps")) + } // 1106 end } From 9fee0843e9c5441b767e7c8f389c54f3cb8edbf2 Mon Sep 17 00:00:00 2001 From: Chris Dillard Date: Mon, 11 Dec 2023 17:57:17 -0700 Subject: [PATCH 8/9] Assistant README, add run retrieve steps --- README.md | 240 ++++++++++++++++++++++++++++-------------------------- 1 file changed, 125 insertions(+), 115 deletions(-) diff --git a/README.md b/README.md index 7a76638a..13545443 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,15 @@ This repository contains Swift community-maintained implementation over [OpenAI] - [Audio Create Speech](#audio-create-speech) - [Audio Transcriptions](#audio-transcriptions) - [Audio Translations](#audio-translations) - - [Assistants](#assistants) + - [Edits](#edits) + - [Embeddings](#embeddings) + - [Models](#models) + - [List Models](#list-models) + - [Retrieve Model](#retrieve-model) + - [Moderations](#moderations) + - [Utilities](#utilities) + - [Combine Extensions](#combine-extensions) + - [Assistants (Beta)](#assistants) - [Create Assistant](#create-assistant) - [Modify Assistant](#modify-assistant) - [List Assistants](#list-assistants) @@ -38,16 +46,10 @@ This repository contains Swift 
community-maintained implementation over [OpenAI] - [Runs](#runs) - [Create Run](#create-run) - [Retrieve Run](#retrieve-run) + - [Retrieve Run Steps](#retrieve-run-steps) + - [Files](#files) - [Upload File](#upload-file) - - [Edits](#edits) - - [Embeddings](#embeddings) - - [Models](#models) - - [List Models](#list-models) - - [Retrieve Model](#retrieve-model) - - [Moderations](#moderations) - - [Utilities](#utilities) - - [Combine Extensions](#combine-extensions) - [Example Project](#example-project) - [Contribution Guidelines](#contribution-guidelines) - [Links](#links) @@ -529,112 +531,6 @@ let result = try await openAI.imageVariations(query: query) Review [Images Documentation](https://platform.openai.com/docs/api-reference/images) for more info. -### Assistants - -Review [Assistants Documentation](https://platform.openai.com/docs/api-reference/assistants) for more info. - -#### Create Assistant - -Example: Create Assistant -``` -let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, fileIds: fileIds) -openAI.assistants(query: query) { result in - //Handle response here -} -``` - -#### Modify Assistant - -Example: Modify Assistant -``` -let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, fileIds: fileIds) -openAI.assistantModify(query: query, asstId: "asst_1234") { result in - //Handle response here -} -``` - -#### List Assistants - -Example: List Assistants -``` -openAI.assistants(query: nil, method: "GET") { result in - //Handle response here -} -``` - -#### Threads - -Review [Threads Documentation](https://platform.openai.com/docs/api-reference/threads) for more info. - -##### Create Thread - -Example: Create Thread -``` -let threadsQuery = ThreadsQuery(messages: [Chat(role: message.role, content: message.content)]) -openAI.threads(query: threadsQuery) { result in - //Handle response here -} -``` - -##### Get Threads Messages - -Review [Messages Documentation](https://platform.openai.com/docs/api-reference/messages) for more info. - -Example: Get Threads Messages -``` -openAI.threadsMessages(threadId: currentThreadId, before: nil) { result in - //Handle response here -} -``` - -##### Add Message to Thread - -Example: Add Message to Thread -``` -let query = ThreadAddMessageQuery(role: message.role.rawValue, content: message.content) -openAI.threadsAddMessage(threadId: currentThreadId, query: query) { result in - //Handle response here -} -``` - -#### Runs - -Review [Runs Documentation](https://platform.openai.com/docs/api-reference/runs) for more info. - -##### Create Run - -Example: Create Run -``` -let runsQuery = RunsQuery(assistantId: currentAssistantId) -openAI.runs(threadId: threadsResult.id, query: runsQuery) { result in - //Handle response here -} -``` - -##### Retrieve Run - -Example: Retrieve Run -``` -openAI.runRetrieve(threadId: currentThreadId, runId: currentRunId) { result in - //Handle response here -} -``` - -#### Files - -Review [Files Documentation](https://platform.openai.com/docs/api-reference/files) for more info. 
- -##### Upload file - -Example: Upload file -``` -let query = FilesQuery(purpose: "assistants", file: fileData, fileName: url.lastPathComponent, contentType: "application/pdf") -openAI.files(query: query) { result in - //Handle response here -} -``` - - ### Audio The speech to text API provides two endpoints, transcriptions and translations, based on our state-of-the-art open source large-v2 [Whisper model](https://openai.com/research/whisper). They can be used to: @@ -1117,6 +1013,120 @@ func audioTranscriptions(query: AudioTranscriptionQuery) -> AnyPublisher AnyPublisher ``` +### Assistants + +Review [Assistants Documentation](https://platform.openai.com/docs/api-reference/assistants) for more info. + +#### Create Assistant + +Example: Create Assistant +``` +let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, fileIds: fileIds) +openAI.assistants(query: query) { result in + //Handle response here +} +``` + +#### Modify Assistant + +Example: Modify Assistant +``` +let query = AssistantsQuery(model: Model.gpt4_1106_preview, name: name, description: description, instructions: instructions, tools: tools, fileIds: fileIds) +openAI.assistantModify(query: query, asstId: "asst_1234") { result in + //Handle response here +} +``` + +#### List Assistants + +Example: List Assistants +``` +openAI.assistants(query: nil, method: "GET") { result in + //Handle response here +} +``` + +#### Threads + +Review [Threads Documentation](https://platform.openai.com/docs/api-reference/threads) for more info. + +##### Create Thread + +Example: Create Thread +``` +let threadsQuery = ThreadsQuery(messages: [Chat(role: message.role, content: message.content)]) +openAI.threads(query: threadsQuery) { result in + //Handle response here +} +``` + +##### Get Threads Messages + +Review [Messages Documentation](https://platform.openai.com/docs/api-reference/messages) for more info. + +Example: Get Threads Messages +``` +openAI.threadsMessages(threadId: currentThreadId, before: nil) { result in + //Handle response here +} +``` + +##### Add Message to Thread + +Example: Add Message to Thread +``` +let query = ThreadAddMessageQuery(role: message.role.rawValue, content: message.content) +openAI.threadsAddMessage(threadId: currentThreadId, query: query) { result in + //Handle response here +} +``` + +#### Runs + +Review [Runs Documentation](https://platform.openai.com/docs/api-reference/runs) for more info. + +##### Create Run + +Example: Create Run +``` +let runsQuery = RunsQuery(assistantId: currentAssistantId) +openAI.runs(threadId: threadsResult.id, query: runsQuery) { result in + //Handle response here +} +``` + +##### Retrieve Run + +Example: Retrieve Run +``` +openAI.runRetrieve(threadId: currentThreadId, runId: currentRunId) { result in + //Handle response here +} +``` + +##### Retrieve Run Steps + +Example: Retrieve Run Steps +``` +openAI.runRetrieveSteps(threadId: currentThreadId, runId: currentRunId, before: nil) { result in + //Handle response here +} +``` + +#### Files + +Review [Files Documentation](https://platform.openai.com/docs/api-reference/files) for more info. + +##### Upload file + +Example: Upload file +``` +let query = FilesQuery(purpose: "assistants", file: fileData, fileName: url.lastPathComponent, contentType: "application/pdf") +openAI.files(query: query) { result in + //Handle response here +} +``` + ## Example Project You can find example iOS application in [Demo](/Demo) folder. 
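The README examples above use completion handlers; the same endpoints also have async/await counterparts in `OpenAIProtocol+Async.swift`. Below is a minimal end-to-end sketch of the assistant flow. It assumes async overloads exist for `threads` and `runs` as they do for the other calls the demo already awaits, and the `ask` function, error type, and one-second polling interval are placeholders:

```
import OpenAI

struct RunFailedError: Error {}

// Sketch only: seed a thread, run an assistant against it, poll, then read the messages.
func ask(openAI: OpenAI, assistantId: String, prompt: String) async throws -> ThreadsMessagesResult {
    let thread = try await openAI.threads(query: ThreadsQuery(messages: [Chat(role: .user, content: prompt)]))
    let run = try await openAI.runs(threadId: thread.id, query: RunsQuery(assistantId: assistantId))

    // Runs execute asynchronously on the server, so poll until the status settles.
    while true {
        let status = try await openAI.runRetrieve(threadId: thread.id, runId: run.id).status
        if status == "completed" { break }
        if status == "failed" { throw RunFailedError() }
        try await Task.sleep(nanoseconds: 1_000_000_000)   // still "queued", "in_progress", etc.
    }

    // Fetch the thread's messages; pass a message id as `before` to page through them.
    return try await openAI.threadsMessages(threadId: thread.id, before: nil)
}
```

This mirrors what the demo's `ChatStore` does, except the demo polls on a `Timer` and maps the returned messages into its conversation model.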
From 3b5621476c62caa1627555437e24983ad41c9a74 Mon Sep 17 00:00:00 2001 From: Chris Dillard Date: Wed, 13 Dec 2023 07:52:36 -0700 Subject: [PATCH 9/9] display run retrieve steps in updating fashion for code_interpreter --- Demo/DemoChat/Sources/ChatStore.swift | 38 +++++++++++++------ Sources/OpenAI/OpenAI.swift | 2 +- .../Models/RunRetrieveStepsResult.swift | 14 ++++++- .../Public/Models/ThreadsMessagesResult.swift | 18 +++++++-- 4 files changed, 56 insertions(+), 16 deletions(-) diff --git a/Demo/DemoChat/Sources/ChatStore.swift b/Demo/DemoChat/Sources/ChatStore.swift index f426c1b9..99f62696 100644 --- a/Demo/DemoChat/Sources/ChatStore.swift +++ b/Demo/DemoChat/Sources/ChatStore.swift @@ -288,7 +288,7 @@ public final class ChatStore: ObservableObject { let message = Message( id: item.id, role: Chat.Role(rawValue: role) ?? .user, - content: innerItem.text.value, + content: innerItem.text?.value ?? "", createdAt: Date(), isLocal: false // Messages from the server are not local ) @@ -315,28 +315,44 @@ public final class ChatStore: ObservableObject { return } var before: String? - if let lastRunStepMessage = self.conversations[conversationIndex].messages.last(where: { $0.isRunStep == true }) { - before = lastRunStepMessage.id - } +// if let lastRunStepMessage = self.conversations[conversationIndex].messages.last(where: { $0.isRunStep == true }) { +// before = lastRunStepMessage.id +// } let stepsResult = try await openAIClient.runRetrieveSteps(threadId: currentThreadId ?? "", runId: currentRunId ?? "", before: before) - // Add Steps as for item in stepsResult.data.reversed() { - for step in item.stepDetails.toolCalls?.reversed() ?? [] { + let toolCalls = item.stepDetails.toolCalls?.reversed() ?? [] + + for step in toolCalls { // TODO: Depending on the type of tool tha is used we can add additional information here // ie: if its a retrieval: add file information, code_interpreter: add inputs and outputs info, or function: add arguemts and additional info. + let msgContent: String + switch step.type { + case "retrieval": + msgContent = "RUN STEP: \(step.type)" + + case "code_interpreter": + msgContent = "code_interpreter\ninput:\n\(step.code?.input ?? "")\noutputs: \(step.code?.outputs?.first?.logs ?? 
"")" + default: + msgContent = "RUN STEP: \(step.type)" + + } let runStepMessage = Message( - id: item.id, + id: step.id, role: .assistant, - content: "RUN STEP: \(step.type)", + content: msgContent, createdAt: Date(), - isRunStep: true // Messages from the server are not local + isRunStep: true ) await MainActor.run { - - self.conversations[conversationIndex].messages.append(runStepMessage) + if let localMessageIndex = self.conversations[conversationIndex].messages.firstIndex(where: { $0.isRunStep == true && $0.id == step.id }) { + self.conversations[conversationIndex].messages[localMessageIndex] = runStepMessage + } + else { + self.conversations[conversationIndex].messages.append(runStepMessage) + } } } } diff --git a/Sources/OpenAI/OpenAI.swift b/Sources/OpenAI/OpenAI.swift index d3d7e1a4..f9eaf606 100644 --- a/Sources/OpenAI/OpenAI.swift +++ b/Sources/OpenAI/OpenAI.swift @@ -177,7 +177,7 @@ extension OpenAI { do { let errorText = String(data: data, encoding: .utf8) - + let decoded = try JSONDecoder().decode(ResultType.self, from: data) completion(.success(decoded)) } catch { diff --git a/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift b/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift index ef99e549..5bbd47bb 100644 --- a/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift +++ b/Sources/OpenAI/Public/Models/RunRetrieveStepsResult.swift @@ -27,13 +27,25 @@ public struct RunRetreiveStepsResult: Codable, Equatable { } public struct ToolCall: Codable, Equatable { + public let id: String public let type: String public let code: CodeToolCall? + enum CodingKeys: String, CodingKey { + case id + case type + case code = "code_interpreter" + } + public struct CodeToolCall: Codable, Equatable { public let input: String - public let outputs: [[String: String]] + public let outputs: [CodeToolCallOutput]? + + public struct CodeToolCallOutput: Codable, Equatable { + public let type: String + public let logs: String? + } } } } diff --git a/Sources/OpenAI/Public/Models/ThreadsMessagesResult.swift b/Sources/OpenAI/Public/Models/ThreadsMessagesResult.swift index c8c58c5d..975f897f 100644 --- a/Sources/OpenAI/Public/Models/ThreadsMessagesResult.swift +++ b/Sources/OpenAI/Public/Models/ThreadsMessagesResult.swift @@ -15,21 +15,33 @@ public struct ThreadsMessagesResult: Codable, Equatable { public struct ThreadsMessageContentText: Codable, Equatable { - public let value: String - + public let value: String? + enum CodingKeys: String, CodingKey { case value } } + public struct ImageFileContentText: Codable, Equatable { + + public let fildId: String + + enum CodingKeys: String, CodingKey { + case fildId = "file_id" + } + } + public let type: String - public let text: ThreadsMessageContentText + public let text: ThreadsMessageContentText? + public let imageFile: ThreadsMessageContentText? enum CodingKeys: String, CodingKey { case type case text + case imageFile = "image_file" } } + public let id: String public let role: String