diff --git a/Playground/OKPlayground/Views/ChatView.swift b/Playground/OKPlayground/Views/ChatView.swift
index 384ff9a..e6a91f1 100644
--- a/Playground/OKPlayground/Views/ChatView.swift
+++ b/Playground/OKPlayground/Views/ChatView.swift
@@ -13,6 +13,7 @@ struct ChatView: View {
     @Environment(ViewModel.self) private var viewModel
     
     @State private var model: String? = nil
+    @State private var temperature: Double = 0.5
     @State private var prompt = ""
     @State private var response = ""
     @State private var cancellables = Set<AnyCancellable>()
@@ -20,14 +21,26 @@ struct ChatView: View {
     var body: some View {
         NavigationStack {
             Form {
-                Section {
-                    Picker("Model", selection: $model) {
+                Section("Model") {
+                    Picker("Selected Model", selection: $model) {
                         ForEach(viewModel.models, id: \.self) { model in
                             Text(model)
                                 .tag(model as String?)
                         }
                     }
-                    
+                }
+                
+                Section("Temperature") {
+                    Slider(value: $temperature, in: 0...1, step: 0.1) {
+                        Text("Temperature")
+                    } minimumValueLabel: {
+                        Text("0")
+                    } maximumValueLabel: {
+                        Text("1")
+                    }
+                }
+                
+                Section("Prompt") {
                     TextField("Prompt", text: $prompt)
                 }
                 
@@ -53,7 +66,8 @@ struct ChatView: View {
         guard let model = model else { return }
         
         let messages = [OKChatRequestData.Message(role: .user, content: prompt)]
-        let data = OKChatRequestData(model: model, messages: messages)
+        var data = OKChatRequestData(model: model, messages: messages)
+        data.options = OKCompletionOptions(temperature: temperature)
         
         Task {
             for try await chunk in viewModel.ollamaKit.chat(data: data) {
@@ -67,7 +81,8 @@ struct ChatView: View {
         guard let model = model else { return }
         
         let messages = [OKChatRequestData.Message(role: .user, content: prompt)]
-        let data = OKChatRequestData(model: model, messages: messages)
+        var data = OKChatRequestData(model: model, messages: messages)
+        data.options = OKCompletionOptions(temperature: temperature)
         
         viewModel.ollamaKit.chat(data: data)
             .sink { completion in
diff --git a/Playground/OKPlayground/Views/GenerateView.swift b/Playground/OKPlayground/Views/GenerateView.swift
index c1ec1f5..3923d8f 100644
--- a/Playground/OKPlayground/Views/GenerateView.swift
+++ b/Playground/OKPlayground/Views/GenerateView.swift
@@ -13,6 +13,7 @@ struct GenerateView: View {
    @Environment(ViewModel.self) private var viewModel
    
    @State private var model: String? = nil
+    @State private var temperature: Double = 0.5
    @State private var prompt = ""
    @State private var response = ""
    @State private var cancellables = Set<AnyCancellable>()
@@ -20,14 +21,26 @@ struct GenerateView: View {
    var body: some View {
        NavigationStack {
            Form {
-                Section {
-                    Picker("Model", selection: $model) {
+                Section("Model") {
+                    Picker("Selected Model", selection: $model) {
                        ForEach(viewModel.models, id: \.self) { model in
                            Text(model)
                                .tag(model as String?)
                        }
                    }
-                    
+                }
+                
+                Section("Temperature") {
+                    Slider(value: $temperature, in: 0...1, step: 0.1) {
+                        Text("Temperature")
+                    } minimumValueLabel: {
+                        Text("0")
+                    } maximumValueLabel: {
+                        Text("1")
+                    }
+                }
+                
+                Section("Prompt") {
                    TextField("Prompt", text: $prompt)
                }
                
@@ -52,7 +65,8 @@ struct GenerateView: View {
        self.response = ""
        
        guard let model = model else { return }
-        let data = OKGenerateRequestData(model: model, prompt: prompt)
+        var data = OKGenerateRequestData(model: model, prompt: prompt)
+        data.options = OKCompletionOptions(temperature: temperature)
        
        Task {
            for try await chunk in viewModel.ollamaKit.generate(data: data) {
@@ -65,7 +79,8 @@ struct GenerateView: View {
        self.response = ""
        
        guard let model = model else { return }
-        let data = OKGenerateRequestData(model: model, prompt: prompt)
+        var data = OKGenerateRequestData(model: model, prompt: prompt)
+        data.options = OKCompletionOptions(temperature: temperature)
        
        viewModel.ollamaKit.generate(data: data)
            .sink { completion in
diff --git a/Sources/OllamaKit/RequestData/Completion/OKCompletionOptions.swift b/Sources/OllamaKit/RequestData/Completion/OKCompletionOptions.swift
index d0649c4..260bab3 100644
--- a/Sources/OllamaKit/RequestData/Completion/OKCompletionOptions.swift
+++ b/Sources/OllamaKit/RequestData/Completion/OKCompletionOptions.swift
@@ -82,4 +82,21 @@ public struct OKCompletionOptions: Encodable {
    /// `minP` ensures that tokens below a certain probability threshold are excluded,
    /// focusing the model's output on more probable sequences. Default is 0.0, meaning no filtering.
    public var minP: Double?
+    
+    public init(mirostat: Int? = nil, mirostatEta: Double? = nil, mirostatTau: Double? = nil, numCtx: Int? = nil, repeatLastN: Int? = nil, repeatPenalty: Double? = nil, temperature: Double? = nil, seed: Int? = nil, stop: String? = nil, tfsZ: Double? = nil, numPredict: Int? = nil, topK: Int? = nil, topP: Double? = nil, minP: Double? = nil) {
+        self.mirostat = mirostat
+        self.mirostatEta = mirostatEta
+        self.mirostatTau = mirostatTau
+        self.numCtx = numCtx
+        self.repeatLastN = repeatLastN
+        self.repeatPenalty = repeatPenalty
+        self.temperature = temperature
+        self.seed = seed
+        self.stop = stop
+        self.tfsZ = tfsZ
+        self.numPredict = numPredict
+        self.topK = topK
+        self.topP = topP
+        self.minP = minP
+    }
 }
diff --git a/Sources/OllamaKit/RequestData/OKChatRequestData.swift b/Sources/OllamaKit/RequestData/OKChatRequestData.swift
index f7c32b2..75e683e 100644
--- a/Sources/OllamaKit/RequestData/OKChatRequestData.swift
+++ b/Sources/OllamaKit/RequestData/OKChatRequestData.swift
@@ -8,7 +8,7 @@
 import Foundation
 
 /// A structure that encapsulates data for chat requests to the Ollama API.
-public struct OKChatRequestData: Encodable {
+public struct OKChatRequestData {
     private let stream: Bool
     
     /// A string representing the model identifier to be used for the chat session.
@@ -60,3 +60,21 @@ public struct OKChatRequestData: Encodable {
         }
     }
 }
+
+extension OKChatRequestData: Encodable {
+    public func encode(to encoder: Encoder) throws {
+        var container = encoder.container(keyedBy: CodingKeys.self)
+        try container.encode(stream, forKey: .stream)
+        try container.encode(model, forKey: .model)
+        try container.encode(messages, forKey: .messages)
+        try container.encodeIfPresent(tools, forKey: .tools)
+        
+        if let options {
+            try options.encode(to: encoder)
+        }
+    }
+    
+    private enum CodingKeys: String, CodingKey {
+        case stream, model, messages, tools
+    }
+}
diff --git a/Sources/OllamaKit/RequestData/OKGenerateRequestData.swift b/Sources/OllamaKit/RequestData/OKGenerateRequestData.swift
index 029dd37..d3a2100 100644
--- a/Sources/OllamaKit/RequestData/OKGenerateRequestData.swift
+++ b/Sources/OllamaKit/RequestData/OKGenerateRequestData.swift
@@ -8,7 +8,7 @@
 import Foundation
 
 /// A structure that encapsulates the data required for generating responses using the Ollama API.
-public struct OKGenerateRequestData: Encodable {
+public struct OKGenerateRequestData {
     private let stream: Bool
     
     /// A string representing the identifier of the model.
@@ -36,3 +36,23 @@ public struct OKGenerateRequestData: Encodable {
         self.images = images
     }
 }
+
+extension OKGenerateRequestData: Encodable {
+    public func encode(to encoder: Encoder) throws {
+        var container = encoder.container(keyedBy: CodingKeys.self)
+        try container.encode(stream, forKey: .stream)
+        try container.encode(model, forKey: .model)
+        try container.encode(prompt, forKey: .prompt)
+        try container.encodeIfPresent(images, forKey: .images)
+        try container.encodeIfPresent(system, forKey: .system)
+        try container.encodeIfPresent(context, forKey: .context)
+        
+        if let options {
+            try options.encode(to: encoder)
+        }
+    }
+    
+    private enum CodingKeys: String, CodingKey {
+        case stream, model, prompt, images, system, context
+    }
+}