diff --git a/Cargo.lock b/Cargo.lock index ac2b69d3882e..1cacda506129 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -430,48 +430,6 @@ dependencies = [ "regex-syntax 0.8.5", ] -[[package]] -name = "askama" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d4744ed2eef2645831b441d8f5459689ade2ab27c854488fbab1fbe94fce1a7" -dependencies = [ - "askama_derive", - "itoa", - "percent-encoding", - "serde", - "serde_json", -] - -[[package]] -name = "askama_derive" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d661e0f57be36a5c14c48f78d09011e67e0cb618f269cca9f2fd8d15b68c46ac" -dependencies = [ - "askama_parser", - "basic-toml", - "memchr", - "proc-macro2", - "quote", - "rustc-hash 2.1.1", - "serde", - "serde_derive", - "syn 2.0.99", -] - -[[package]] -name = "askama_parser" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf315ce6524c857bb129ff794935cf6d42c82a6cff60526fe2a63593de4d0d4f" -dependencies = [ - "memchr", - "serde", - "serde_derive", - "winnow", -] - [[package]] name = "assert-json-diff" version = "2.0.2" @@ -482,19 +440,6 @@ dependencies = [ "serde_json", ] -[[package]] -name = "async-compat" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bab94bde396a3f7b4962e396fdad640e241ed797d4d8d77fc8c237d14c58fc0" -dependencies = [ - "futures-core", - "futures-io", - "once_cell", - "pin-project-lite", - "tokio", -] - [[package]] name = "async-compression" version = "0.4.20" @@ -614,17 +559,6 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 
0.1.19", - "libc", - "winapi", -] - [[package]] name = "autocfg" version = "1.4.0" @@ -1191,15 +1125,6 @@ dependencies = [ "vsimd", ] -[[package]] -name = "basic-toml" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba62675e8242a4c4e806d12f11d136e626e6c8361d6b829310732241652a178a" -dependencies = [ - "serde", -] - [[package]] name = "bat" version = "0.24.0" @@ -1210,7 +1135,7 @@ dependencies = [ "bincode", "bugreport", "bytesize", - "clap 4.5.31", + "clap", "clircle", "console", "content_inspector", @@ -1516,20 +1441,6 @@ dependencies = [ "serde_json", ] -[[package]] -name = "cargo_metadata" -version = "0.19.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" -dependencies = [ - "camino", - "cargo-platform", - "semver", - "serde", - "serde_json", - "thiserror 2.0.12", -] - [[package]] name = "cast" version = "0.3.0" @@ -1545,25 +1456,6 @@ dependencies = [ "cipher", ] -[[package]] -name = "cbindgen" -version = "0.24.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b922faaf31122819ec80c4047cc684c6979a087366c069611e33649bf98e18d" -dependencies = [ - "clap 3.2.25", - "heck 0.4.1", - "indexmap 1.9.3", - "log", - "proc-macro2", - "quote", - "serde", - "serde_json", - "syn 1.0.109", - "tempfile", - "toml 0.5.11", -] - [[package]] name = "cc" version = "1.2.16" @@ -1714,21 +1606,6 @@ dependencies = [ "libloading", ] -[[package]] -name = "clap" -version = "3.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" -dependencies = [ - "atty", - "bitflags 1.3.2", - "clap_lex 0.2.4", - "indexmap 1.9.3", - "strsim 0.10.0", - "termcolor", - "textwrap", -] - [[package]] name = "clap" version = "4.5.31" @@ -1747,8 +1624,8 @@ checksum = "5589e0cba072e0f3d23791efac0fd8627b49c829c196a492e88168e6a669d863" 
dependencies = [ "anstream", "anstyle", - "clap_lex 0.7.4", - "strsim 0.11.1", + "clap_lex", + "strsim", "terminal_size", ] @@ -1764,15 +1641,6 @@ dependencies = [ "syn 2.0.99", ] -[[package]] -name = "clap_lex" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" -dependencies = [ - "os_str_bytes", -] - [[package]] name = "clap_lex" version = "0.7.4" @@ -1788,7 +1656,7 @@ dependencies = [ "console", "indicatif", "once_cell", - "strsim 0.11.1", + "strsim", "textwrap", "zeroize", ] @@ -1879,7 +1747,7 @@ dependencies = [ "rust-ini", "serde", "serde_json", - "toml 0.8.20", + "toml", "yaml-rust2", ] @@ -2046,7 +1914,7 @@ dependencies = [ "anes", "cast", "ciborium", - "clap 4.5.31", + "clap", "criterion-plot", "is-terminal", "itertools 0.10.5", @@ -2191,7 +2059,7 @@ dependencies = [ "ident_case", "proc-macro2", "quote", - "strsim 0.11.1", + "strsim", "syn 2.0.99", ] @@ -3090,15 +2958,6 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" -[[package]] -name = "fs-err" -version = "2.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41" -dependencies = [ - "autocfg", -] - [[package]] name = "fs2" version = "0.4.3" @@ -3341,17 +3200,6 @@ dependencies = [ "regex-syntax 0.8.5", ] -[[package]] -name = "goblin" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b363a30c165f666402fe6a3024d3bec7ebc898f96a4a23bd1c99f8dbf3f4f47" -dependencies = [ - "log", - "plain", - "scroll", -] - [[package]] name = "google-apis-common" version = "7.0.0" @@ -3461,7 +3309,7 @@ dependencies = [ "fs2", "futures", "include_dir", - "indoc 2.0.6", + "indoc", "jsonschema", "jsonwebtoken", "keyring", @@ -3521,7 +3369,7 @@ dependencies = [ 
"serde", "serde_json", "tokio", - "toml 0.8.20", + "toml", "tracing", "tracing-subscriber", "winapi", @@ -3538,7 +3386,7 @@ dependencies = [ "bat", "bytes", "chrono", - "clap 4.5.31", + "clap", "cliclack", "console", "dirs 5.0.1", @@ -3581,50 +3429,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "goose-ffi" -version = "1.1.0" -dependencies = [ - "cbindgen", - "futures", - "goose", - "libc", - "once_cell", - "serde", - "serde_json", - "tokio", -] - -[[package]] -name = "goose-llm" -version = "1.1.0" -dependencies = [ - "anyhow", - "async-trait", - "base64 0.21.7", - "chrono", - "criterion", - "ctor", - "dotenvy", - "goose", - "include_dir", - "indoc 1.0.9", - "lazy_static", - "minijinja", - "once_cell", - "regex", - "reqwest 0.12.12", - "serde", - "serde_json", - "smallvec", - "tempfile", - "thiserror 1.0.69", - "tokio", - "tracing", - "uniffi", - "url", -] - [[package]] name = "goose-mcp" version = "1.1.0" @@ -3645,7 +3449,7 @@ dependencies = [ "ignore", "image 0.24.9", "include_dir", - "indoc 2.0.6", + "indoc", "keyring", "kill_tree", "lazy_static", @@ -3688,7 +3492,7 @@ dependencies = [ "base64 0.21.7", "bytes", "chrono", - "clap 4.5.31", + "clap", "config", "dirs 6.0.0", "etcetera", @@ -3828,15 +3632,6 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.3.9" @@ -4389,12 +4184,6 @@ dependencies = [ "web-time", ] -[[package]] -name = "indoc" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306" - [[package]] name = "indoc" version = "2.0.6" @@ -6092,12 +5881,6 @@ 
dependencies = [ "hashbrown 0.14.5", ] -[[package]] -name = "os_str_bytes" -version = "6.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" - [[package]] name = "outref" version = "0.5.2" @@ -6293,7 +6076,7 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ - "siphasher 1.0.1", + "siphasher", ] [[package]] @@ -6334,12 +6117,6 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" -[[package]] -name = "plain" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" - [[package]] name = "plist" version = "1.7.0" @@ -7481,26 +7258,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" -[[package]] -name = "scroll" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ab8598aa408498679922eff7fa985c25d58a90771bd6be794434c5277eab1a6" -dependencies = [ - "scroll_derive", -] - -[[package]] -name = "scroll_derive" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1783eabc414609e28a5ba76aee5ddd52199f7107a0b24c2e9746a1ecc34a683d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.99", -] - [[package]] name = "sct" version = "0.7.1" @@ -7822,12 +7579,6 @@ dependencies = [ "time", ] -[[package]] -name = "siphasher" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" - [[package]] name = "siphasher" version = "1.0.1" @@ 
-7841,7 +7592,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16d23b015676c90a0f01c197bfdc786c20342c73a0afdda9025adb0bc42940a8" dependencies = [ "bytecount", - "cargo_metadata 0.14.2", + "cargo_metadata", "error-chain", "glob", "pulldown-cmark", @@ -7872,9 +7623,6 @@ name = "smallvec" version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" -dependencies = [ - "serde", -] [[package]] name = "smawk" @@ -7969,12 +7717,6 @@ version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e51f1e89f093f99e7432c491c382b88a6860a5adbe6bf02574bf0a08efff1978" -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - [[package]] name = "strsim" version = "0.11.1" @@ -8137,7 +7879,7 @@ dependencies = [ "cfg-expr", "heck 0.5.0", "pkg-config", - "toml 0.8.20", + "toml", "version-compare", ] @@ -8680,15 +8422,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "toml" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" -dependencies = [ - "serde", -] - [[package]] name = "toml" version = "0.8.20" @@ -8996,128 +8729,6 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" -[[package]] -name = "uniffi" -version = "0.29.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dcd1d240101ba3b9d7532ae86d9cb64d9a7ff63e13a2b7b9e94a32a601d8233" -dependencies = [ - "anyhow", - "camino", - "cargo_metadata 0.19.2", - "clap 4.5.31", - "uniffi_bindgen", - "uniffi_core", - "uniffi_macros", - "uniffi_pipeline", -] - -[[package]] -name = 
"uniffi_bindgen" -version = "0.29.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d0525f06d749ea80d8049dc0bb038bb87941e3d909eefa76b6f0a5589b59ac5" -dependencies = [ - "anyhow", - "askama", - "camino", - "cargo_metadata 0.19.2", - "fs-err", - "glob", - "goblin", - "heck 0.5.0", - "indexmap 2.7.1", - "once_cell", - "serde", - "tempfile", - "textwrap", - "toml 0.5.11", - "uniffi_internal_macros", - "uniffi_meta", - "uniffi_pipeline", - "uniffi_udl", -] - -[[package]] -name = "uniffi_core" -version = "0.29.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fa8eb4d825b4ed095cb13483cba6927c3002b9eb603cef9b7688758cc3772e" -dependencies = [ - "anyhow", - "async-compat", - "bytes", - "once_cell", - "static_assertions", -] - -[[package]] -name = "uniffi_internal_macros" -version = "0.29.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83b547d69d699e52f2129fde4b57ae0d00b5216e59ed5b56097c95c86ba06095" -dependencies = [ - "anyhow", - "indexmap 2.7.1", - "proc-macro2", - "quote", - "syn 2.0.99", -] - -[[package]] -name = "uniffi_macros" -version = "0.29.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00f1de72edc8cb9201c7d650e3678840d143e4499004571aac49e6cb1b17da43" -dependencies = [ - "camino", - "fs-err", - "once_cell", - "proc-macro2", - "quote", - "serde", - "syn 2.0.99", - "toml 0.5.11", - "uniffi_meta", -] - -[[package]] -name = "uniffi_meta" -version = "0.29.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acc9204632f6a555b2cba7c8852c5523bc1aa5f3eff605c64af5054ea28b72e" -dependencies = [ - "anyhow", - "siphasher 0.3.11", - "uniffi_internal_macros", - "uniffi_pipeline", -] - -[[package]] -name = "uniffi_pipeline" -version = "0.29.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b5336a9a925b358183837d31541d12590b7fcec373256d3770de02dff24c69" -dependencies = [ - "anyhow", - 
"heck 0.5.0", - "indexmap 2.7.1", - "tempfile", - "uniffi_internal_macros", -] - -[[package]] -name = "uniffi_udl" -version = "0.29.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f95e73373d85f04736bc51997d3e6855721144ec4384cae9ca8513c80615e129" -dependencies = [ - "anyhow", - "textwrap", - "uniffi_meta", - "weedle2", -] - [[package]] name = "unsafe-libyaml" version = "0.2.11" @@ -9452,15 +9063,6 @@ dependencies = [ "rustls-pki-types", ] -[[package]] -name = "weedle2" -version = "5.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "998d2c24ec099a87daf9467808859f9d82b61f1d9c9701251aea037f514eae0e" -dependencies = [ - "nom", -] - [[package]] name = "weezl" version = "0.1.8" diff --git a/Justfile b/Justfile index 4625b8bb3c39..d9e71581d8b0 100644 --- a/Justfile +++ b/Justfile @@ -444,23 +444,3 @@ win-total-rls *allparam: just win-bld-rls{{allparam}} just win-run-rls -### Build and run the Kotlin example with -### auto-generated bindings for goose-llm -kotlin-example: - # Build Rust dylib and generate Kotlin bindings - cargo build -p goose-llm - cargo run --features=uniffi/cli --bin uniffi-bindgen generate \ - --library ./target/debug/libgoose_llm.dylib --language kotlin --out-dir bindings/kotlin - - # Compile and run the Kotlin example - cd bindings/kotlin/ && kotlinc \ - example/Usage.kt \ - uniffi/goose_llm/goose_llm.kt \ - -classpath "libs/kotlin-stdlib-1.9.0.jar:libs/kotlinx-coroutines-core-jvm-1.7.3.jar:libs/jna-5.13.0.jar" \ - -include-runtime \ - -d example.jar - - cd bindings/kotlin/ && java \ - -Djna.library.path=$HOME/Development/goose/target/debug \ - -classpath "example.jar:libs/kotlin-stdlib-1.9.0.jar:libs/kotlinx-coroutines-core-jvm-1.7.3.jar:libs/jna-5.13.0.jar" \ - UsageKt diff --git a/bindings/kotlin/example/RuntimeStats.kt b/bindings/kotlin/example/RuntimeStats.kt deleted file mode 100644 index 688d382fb9c6..000000000000 --- a/bindings/kotlin/example/RuntimeStats.kt +++ /dev/null @@ 
-1,115 +0,0 @@ -import kotlin.system.measureNanoTime -import kotlinx.coroutines.runBlocking -import uniffi.goose_llm.* - -import java.net.URI -import java.net.http.HttpClient -import java.net.http.HttpRequest -import java.net.http.HttpResponse - -/* ---------- Goose helpers ---------- */ - -fun buildProviderConfig(host: String, token: String): String = - """{ "host": "$host", "token": "$token" }""" - -suspend fun timeGooseCall( - modelCfg: ModelConfig, - providerName: String, - providerCfg: String -): Pair { - - val req = createCompletionRequest( - providerName, - providerCfg, - modelCfg, - systemPreamble = "You are a helpful assistant.", - messages = listOf( - Message( - Role.USER, - System.currentTimeMillis() / 1000, - listOf(MessageContent.Text(TextContent("Write me a 1000 word chapter about learning Go vs Rust in the world of LLMs and AI."))) - ) - ), - extensions = emptyList() - ) - - lateinit var resp: CompletionResponse - val wallMs = measureNanoTime { resp = completion(req) } / 1_000_000.0 - return wallMs to resp -} - -/* ---------- OpenAI helpers ---------- */ - -fun timeOpenAiCall(client: HttpClient, apiKey: String): Double { - val body = """ - { - "model": "gpt-4.1", - "max_tokens": 500, - "messages": [ - {"role": "system", "content": "You are a helpful assistant."}, - {"role": "user", "content": "Write me a 1000 word chapter about learning Go vs Rust in the world of LLMs and AI."} - ] - } - """.trimIndent() - - val request = HttpRequest.newBuilder() - .uri(URI.create("https://api.openai.com/v1/chat/completions")) - .header("Authorization", "Bearer $apiKey") - .header("Content-Type", "application/json") - .POST(HttpRequest.BodyPublishers.ofString(body)) - .build() - - val wallMs = measureNanoTime { - client.send(request, HttpResponse.BodyHandlers.ofString()) - } / 1_000_000.0 - - return wallMs -} - -/* ---------- main ---------- */ - -fun main() = runBlocking { - /* Goose provider setup */ - val providerName = "databricks" - val host = 
System.getenv("DATABRICKS_HOST") ?: error("DATABRICKS_HOST not set") - val token = System.getenv("DATABRICKS_TOKEN") ?: error("DATABRICKS_TOKEN not set") - val providerCfg = buildProviderConfig(host, token) - - /* OpenAI setup */ - val openAiKey = System.getenv("OPENAI_API_KEY") ?: error("OPENAI_API_KEY not set") - val httpClient = HttpClient.newBuilder().build() - - val gooseModels = listOf("goose-claude-4-sonnet", "goose-gpt-4-1") - val runsPerModel = 3 - - /* --- Goose timing --- */ - for (model in gooseModels) { - val maxTokens = 500 - val cfg = ModelConfig(model, 100_000u, 0.0f, maxTokens) - var wallSum = 0.0 - var gooseSum = 0.0 - - println("=== Goose: $model ===") - repeat(runsPerModel) { run -> - val (wall, resp) = timeGooseCall(cfg, providerName, providerCfg) - val gooseMs = resp.runtimeMetrics.totalTimeSec * 1_000 - val overhead = wall - gooseMs - wallSum += wall - gooseSum += gooseMs - println("run ${run + 1}: wall = %.1f ms | goose-llm = %.1f ms | overhead = %.1f ms" - .format(wall, gooseMs, overhead)) - } - println("-- avg wall = %.1f ms | avg overhead = %.1f ms --\n" - .format(wallSum / runsPerModel, (wallSum - gooseSum) / runsPerModel)) - } - - /* --- OpenAI direct timing --- */ - var oaSum = 0.0 - println("=== OpenAI: gpt-4.1 (direct HTTPS) ===") - repeat(runsPerModel) { run -> - val wall = timeOpenAiCall(httpClient, openAiKey) - oaSum += wall - println("run ${run + 1}: wall = %.1f ms".format(wall)) - } - println("-- avg wall = %.1f ms --".format(oaSum / runsPerModel)) -} diff --git a/bindings/kotlin/example/Usage.kt b/bindings/kotlin/example/Usage.kt deleted file mode 100644 index 90ee002d9e99..000000000000 --- a/bindings/kotlin/example/Usage.kt +++ /dev/null @@ -1,228 +0,0 @@ -import java.io.File -import java.util.Base64 -import kotlinx.coroutines.runBlocking -import uniffi.goose_llm.* - -/* ---------- shared helpers ---------- */ - -fun buildProviderConfig(host: String, token: String, imageFormat: String = "OpenAi"): String = """ -{ - "host": 
"$host", - "token": "$token", - "image_format": "$imageFormat" -} -""".trimIndent() - -fun calculatorExtension(): ExtensionConfig { - val calculatorTool = createToolConfig( - name = "calculator", - description = "Perform basic arithmetic operations", - inputSchema = """ - { - "type": "object", - "required": ["operation", "numbers"], - "properties": { - "operation": { - "type": "string", - "enum": ["add", "subtract", "multiply", "divide"], - "description": "The arithmetic operation to perform" - }, - "numbers": { - "type": "array", - "items": { "type": "number" }, - "description": "List of numbers to operate on in order" - } - } - } - """.trimIndent(), - approvalMode = ToolApprovalMode.AUTO - ) - return ExtensionConfig( - name = "calculator_extension", - instructions = "This extension provides a calculator tool.", - tools = listOf(calculatorTool) - ) -} - -/* ---------- demos ---------- */ - -suspend fun runCalculatorDemo( - modelConfig: ModelConfig, - providerName: String, - providerConfig: String -) { - val now = System.currentTimeMillis() / 1000 - val msgs = listOf( - // same conversation you already had - Message(Role.USER, now, listOf(MessageContent.Text(TextContent("What is 7 x 6?")))), - Message(Role.ASSISTANT, now + 2, listOf(MessageContent.ToolReq( - ToolRequest( - id = "calc1", - toolCall = """ - { - "status": "success", - "value": { - "name": "calculator_extension__toolname", - "arguments": { "operation": "doesnotexist", "numbers": [7,6] }, - "needsApproval": false - } - } - """.trimIndent() - )))), - Message(Role.USER, now + 3, listOf(MessageContent.ToolResp( - ToolResponse( - id = "calc1", - toolResult = """ - { - "status": "error", - "error": "Invalid value for operation: 'doesnotexist'. 
Valid values are: ['add','subtract','multiply','divide']" - } - """.trimIndent() - )))), - Message(Role.ASSISTANT, now + 4, listOf(MessageContent.ToolReq( - ToolRequest( - id = "calc1", - toolCall = """ - { - "status": "success", - "value": { - "name": "calculator_extension__toolname", - "arguments": { "operation": "multiply", "numbers": [7,6] }, - "needsApproval": false - } - } - """.trimIndent() - )))), - Message(Role.USER, now + 5, listOf(MessageContent.ToolResp( - ToolResponse( - id = "calc1", - toolResult = """ - { - "status": "success", - "value": [ { "type": "text", "text": "42" } ] - } - """.trimIndent() - )))) - ) - - /* one-shot prompt with error */ - val reqErr = createCompletionRequest( - providerName, providerConfig, modelConfig, - "You are a helpful assistant.", - messages = listOf(msgs.first()), - extensions = listOf(calculatorExtension()) - ) - println("\n[${modelConfig.modelName}] Calculator (single-msg) → ${completion(reqErr).message}") - - /* full conversation */ - val reqAll = createCompletionRequest( - providerName, providerConfig, modelConfig, - "You are a helpful assistant.", - messages = msgs, - extensions = listOf(calculatorExtension()) - ) - println("[${modelConfig.modelName}] Calculator (full chat) → ${completion(reqAll).message}") -} - -suspend fun runImageExample( - modelConfig: ModelConfig, - providerName: String, - providerConfig: String -) { - val imagePath = "../../crates/goose/examples/test_assets/test_image.png" - val base64Image = Base64.getEncoder().encodeToString(File(imagePath).readBytes()) - val now = System.currentTimeMillis() / 1000 - - val msgs = listOf( - Message(Role.USER, now, listOf( - MessageContent.Text(TextContent("What is in this image?")), - MessageContent.Image(ImageContent(base64Image, "image/png")) - )), - ) - - val req = createCompletionRequest( - providerName, providerConfig, modelConfig, - "You are a helpful assistant. 
Please describe any text you see in the image.", - messages = msgs, - extensions = emptyList() - ) - - println("\n[${modelConfig.modelName}] Image example → ${completion(req).message}") -} - -suspend fun runPromptOverride( - modelConfig: ModelConfig, - providerName: String, - providerConfig: String -) { - val now = System.currentTimeMillis() / 1000 - val req = createCompletionRequest( - providerName, providerConfig, modelConfig, - systemPreamble = null, - systemPromptOverride = "You are a bot named Tile Creator. Your task is to create a tile based on the user's input.", - messages = listOf( - Message(Role.USER, now, listOf(MessageContent.Text(TextContent("What's your name?")))) - ), - extensions = emptyList() - ) - println("\n[${modelConfig.modelName}] Prompt override → ${completion(req).message}") -} - -suspend fun runUiExtraction(providerName: String, providerConfig: String) { - val schema = /* same JSON schema as before */ """ - { - "type":"object", - "properties":{ - "type":{"type":"string","enum":["div","button","header","section","field","form"]}, - "label":{"type":"string"}, - "children":{"type":"array","items":{"${'$'}ref":"#"}}, - "attributes":{"type":"array","items":{"type":"object","properties":{"name":{"type":"string"},"value":{"type":"string"}},"required":["name","value"],"additionalProperties":false}} - }, - "required":["type","label","children","attributes"], - "additionalProperties":false - } - """.trimIndent() - - val messages = listOf( - Message(Role.USER, System.currentTimeMillis()/1000, - listOf(MessageContent.Text(TextContent("Make a User Profile Form")))) - ) - - val res = generateStructuredOutputs( - providerName, providerConfig, - systemPrompt = "You are a UI generator AI. 
Convert the user input into a JSON-driven UI.", - messages = messages, - schema = schema - ) - println("\n[UI-Extraction] → $res") -} - -/* ---------- entry-point ---------- */ - -fun main() = runBlocking { - /* --- provider setup --- */ - val providerName = "databricks" - val host = System.getenv("DATABRICKS_HOST") ?: error("DATABRICKS_HOST not set") - val token = System.getenv("DATABRICKS_TOKEN") ?: error("DATABRICKS_TOKEN not set") - val providerConfig = buildProviderConfig(host, token) - - println("Provider: $providerName") - println("Config : $providerConfig\n") - - /* --- run demos for each model --- */ - // NOTE: `claude-3-5-haiku` does NOT support images - val modelNames = listOf("kgoose-gpt-4o", "goose-claude-4-sonnet") - - for (name in modelNames) { - val modelConfig = ModelConfig(name, 100000u, 0.1f, 200) - println("\n===== Running demos for model: $name =====") - - runCalculatorDemo(modelConfig, providerName, providerConfig) - runImageExample(modelConfig, providerName, providerConfig) - runPromptOverride(modelConfig, providerName, providerConfig) - println("===== End demos for $name =====\n") - } - - /* UI extraction is model-agnostic, so run it once */ - runUiExtraction(providerName, providerConfig) -} diff --git a/bindings/kotlin/uniffi/goose_llm/goose_llm.kt b/bindings/kotlin/uniffi/goose_llm/goose_llm.kt deleted file mode 100644 index f01956947261..000000000000 --- a/bindings/kotlin/uniffi/goose_llm/goose_llm.kt +++ /dev/null @@ -1,3096 +0,0 @@ -// This file was autogenerated by some hot garbage in the `uniffi` crate. -// Trust me, you don't want to mess with it! - -@file:Suppress("NAME_SHADOWING") - -package uniffi.goose_llm - -// Common helper code. -// -// Ideally this would live in a separate .kt file where it can be unittested etc -// in isolation, and perhaps even published as a re-useable package. -// -// However, it's important that the details of how this helper code works (e.g. 
the -// way that different builtin types are passed across the FFI) exactly match what's -// expected by the Rust code on the other side of the interface. In practice right -// now that means coming from the exact some version of `uniffi` that was used to -// compile the Rust component. The easiest way to ensure this is to bundle the Kotlin -// helpers directly inline like we're doing here. - -import com.sun.jna.Callback -import com.sun.jna.Library -import com.sun.jna.Native -import com.sun.jna.Pointer -import com.sun.jna.Structure -import com.sun.jna.ptr.* -import kotlinx.coroutines.CancellableContinuation -import kotlinx.coroutines.suspendCancellableCoroutine -import java.nio.ByteBuffer -import java.nio.ByteOrder -import java.nio.CharBuffer -import java.nio.charset.CodingErrorAction -import java.util.concurrent.ConcurrentHashMap -import java.util.concurrent.atomic.AtomicLong -import kotlin.coroutines.resume - -// This is a helper for safely working with byte buffers returned from the Rust code. -// A rust-owned buffer is represented by its capacity, its current length, and a -// pointer to the underlying data. - -/** - * @suppress - */ -@Structure.FieldOrder("capacity", "len", "data") -open class RustBuffer : Structure() { - // Note: `capacity` and `len` are actually `ULong` values, but JVM only supports signed values. - // When dealing with these fields, make sure to call `toULong()`. - @JvmField var capacity: Long = 0 - - @JvmField var len: Long = 0 - - @JvmField var data: Pointer? = null - - class ByValue : - RustBuffer(), - Structure.ByValue - - class ByReference : - RustBuffer(), - Structure.ByReference - - internal fun setValue(other: RustBuffer) { - capacity = other.capacity - len = other.len - data = other.data - } - - companion object { - internal fun alloc(size: ULong = 0UL) = - uniffiRustCall { status -> - // Note: need to convert the size to a `Long` value to make this work with JVM. 
- UniffiLib.INSTANCE.ffi_goose_llm_rustbuffer_alloc(size.toLong(), status) - }.also { - if (it.data == null) { - throw RuntimeException("RustBuffer.alloc() returned null data pointer (size=$size)") - } - } - - internal fun create( - capacity: ULong, - len: ULong, - data: Pointer?, - ): RustBuffer.ByValue { - var buf = RustBuffer.ByValue() - buf.capacity = capacity.toLong() - buf.len = len.toLong() - buf.data = data - return buf - } - - internal fun free(buf: RustBuffer.ByValue) = - uniffiRustCall { status -> - UniffiLib.INSTANCE.ffi_goose_llm_rustbuffer_free(buf, status) - } - } - - @Suppress("TooGenericExceptionThrown") - fun asByteBuffer() = - this.data?.getByteBuffer(0, this.len.toLong())?.also { - it.order(ByteOrder.BIG_ENDIAN) - } -} - -/** - * The equivalent of the `*mut RustBuffer` type. - * Required for callbacks taking in an out pointer. - * - * Size is the sum of all values in the struct. - * - * @suppress - */ -class RustBufferByReference : ByReference(16) { - /** - * Set the pointed-to `RustBuffer` to the given value. - */ - fun setValue(value: RustBuffer.ByValue) { - // NOTE: The offsets are as they are in the C-like struct. - val pointer = getPointer() - pointer.setLong(0, value.capacity) - pointer.setLong(8, value.len) - pointer.setPointer(16, value.data) - } - - /** - * Get a `RustBuffer.ByValue` from this reference. - */ - fun getValue(): RustBuffer.ByValue { - val pointer = getPointer() - val value = RustBuffer.ByValue() - value.writeField("capacity", pointer.getLong(0)) - value.writeField("len", pointer.getLong(8)) - value.writeField("data", pointer.getLong(16)) - - return value - } -} - -// This is a helper for safely passing byte references into the rust code. -// It's not actually used at the moment, because there aren't many things that you -// can take a direct pointer to in the JVM, and if we're going to copy something -// then we might as well copy it into a `RustBuffer`. But it's here for API -// completeness. 
- -@Structure.FieldOrder("len", "data") -internal open class ForeignBytes : Structure() { - @JvmField var len: Int = 0 - - @JvmField var data: Pointer? = null - - class ByValue : - ForeignBytes(), - Structure.ByValue -} - -/** - * The FfiConverter interface handles converter types to and from the FFI - * - * All implementing objects should be public to support external types. When a - * type is external we need to import it's FfiConverter. - * - * @suppress - */ -public interface FfiConverter { - // Convert an FFI type to a Kotlin type - fun lift(value: FfiType): KotlinType - - // Convert an Kotlin type to an FFI type - fun lower(value: KotlinType): FfiType - - // Read a Kotlin type from a `ByteBuffer` - fun read(buf: ByteBuffer): KotlinType - - // Calculate bytes to allocate when creating a `RustBuffer` - // - // This must return at least as many bytes as the write() function will - // write. It can return more bytes than needed, for example when writing - // Strings we can't know the exact bytes needed until we the UTF-8 - // encoding, so we pessimistically allocate the largest size possible (3 - // bytes per codepoint). Allocating extra bytes is not really a big deal - // because the `RustBuffer` is short-lived. - fun allocationSize(value: KotlinType): ULong - - // Write a Kotlin type to a `ByteBuffer` - fun write( - value: KotlinType, - buf: ByteBuffer, - ) - - // Lower a value into a `RustBuffer` - // - // This method lowers a value into a `RustBuffer` rather than the normal - // FfiType. It's used by the callback interface code. Callback interface - // returns are always serialized into a `RustBuffer` regardless of their - // normal FFI type. 
- fun lowerIntoRustBuffer(value: KotlinType): RustBuffer.ByValue { - val rbuf = RustBuffer.alloc(allocationSize(value)) - try { - val bbuf = - rbuf.data!!.getByteBuffer(0, rbuf.capacity).also { - it.order(ByteOrder.BIG_ENDIAN) - } - write(value, bbuf) - rbuf.writeField("len", bbuf.position().toLong()) - return rbuf - } catch (e: Throwable) { - RustBuffer.free(rbuf) - throw e - } - } - - // Lift a value from a `RustBuffer`. - // - // This here mostly because of the symmetry with `lowerIntoRustBuffer()`. - // It's currently only used by the `FfiConverterRustBuffer` class below. - fun liftFromRustBuffer(rbuf: RustBuffer.ByValue): KotlinType { - val byteBuf = rbuf.asByteBuffer()!! - try { - val item = read(byteBuf) - if (byteBuf.hasRemaining()) { - throw RuntimeException("junk remaining in buffer after lifting, something is very wrong!!") - } - return item - } finally { - RustBuffer.free(rbuf) - } - } -} - -/** - * FfiConverter that uses `RustBuffer` as the FfiType - * - * @suppress - */ -public interface FfiConverterRustBuffer : FfiConverter { - override fun lift(value: RustBuffer.ByValue) = liftFromRustBuffer(value) - - override fun lower(value: KotlinType) = lowerIntoRustBuffer(value) -} -// A handful of classes and functions to support the generated data structures. -// This would be a good candidate for isolating in its own ffi-support lib. 
- -internal const val UNIFFI_CALL_SUCCESS = 0.toByte() -internal const val UNIFFI_CALL_ERROR = 1.toByte() -internal const val UNIFFI_CALL_UNEXPECTED_ERROR = 2.toByte() - -@Structure.FieldOrder("code", "error_buf") -internal open class UniffiRustCallStatus : Structure() { - @JvmField var code: Byte = 0 - - @JvmField var error_buf: RustBuffer.ByValue = RustBuffer.ByValue() - - class ByValue : - UniffiRustCallStatus(), - Structure.ByValue - - fun isSuccess(): Boolean = code == UNIFFI_CALL_SUCCESS - - fun isError(): Boolean = code == UNIFFI_CALL_ERROR - - fun isPanic(): Boolean = code == UNIFFI_CALL_UNEXPECTED_ERROR - - companion object { - fun create( - code: Byte, - errorBuf: RustBuffer.ByValue, - ): UniffiRustCallStatus.ByValue { - val callStatus = UniffiRustCallStatus.ByValue() - callStatus.code = code - callStatus.error_buf = errorBuf - return callStatus - } - } -} - -class InternalException( - message: String, -) : kotlin.Exception(message) - -/** - * Each top-level error class has a companion object that can lift the error from the call status's rust buffer - * - * @suppress - */ -interface UniffiRustCallStatusErrorHandler { - fun lift(error_buf: RustBuffer.ByValue): E -} - -// Helpers for calling Rust -// In practice we usually need to be synchronized to call this safely, so it doesn't -// synchronize itself - -// Call a rust function that returns a Result<>. 
Pass in the Error class companion that corresponds to the Err -private inline fun uniffiRustCallWithError( - errorHandler: UniffiRustCallStatusErrorHandler, - callback: (UniffiRustCallStatus) -> U, -): U { - var status = UniffiRustCallStatus() - val return_value = callback(status) - uniffiCheckCallStatus(errorHandler, status) - return return_value -} - -// Check UniffiRustCallStatus and throw an error if the call wasn't successful -private fun uniffiCheckCallStatus( - errorHandler: UniffiRustCallStatusErrorHandler, - status: UniffiRustCallStatus, -) { - if (status.isSuccess()) { - return - } else if (status.isError()) { - throw errorHandler.lift(status.error_buf) - } else if (status.isPanic()) { - // when the rust code sees a panic, it tries to construct a rustbuffer - // with the message. but if that code panics, then it just sends back - // an empty buffer. - if (status.error_buf.len > 0) { - throw InternalException(FfiConverterString.lift(status.error_buf)) - } else { - throw InternalException("Rust panic") - } - } else { - throw InternalException("Unknown rust call status: $status.code") - } -} - -/** - * UniffiRustCallStatusErrorHandler implementation for times when we don't expect a CALL_ERROR - * - * @suppress - */ -object UniffiNullRustCallStatusErrorHandler : UniffiRustCallStatusErrorHandler { - override fun lift(error_buf: RustBuffer.ByValue): InternalException { - RustBuffer.free(error_buf) - return InternalException("Unexpected CALL_ERROR") - } -} - -// Call a rust function that returns a plain value -private inline fun uniffiRustCall(callback: (UniffiRustCallStatus) -> U): U = - uniffiRustCallWithError(UniffiNullRustCallStatusErrorHandler, callback) - -internal inline fun uniffiTraitInterfaceCall( - callStatus: UniffiRustCallStatus, - makeCall: () -> T, - writeReturn: (T) -> Unit, -) { - try { - writeReturn(makeCall()) - } catch (e: kotlin.Exception) { - callStatus.code = UNIFFI_CALL_UNEXPECTED_ERROR - callStatus.error_buf = 
FfiConverterString.lower(e.toString()) - } -} - -internal inline fun uniffiTraitInterfaceCallWithError( - callStatus: UniffiRustCallStatus, - makeCall: () -> T, - writeReturn: (T) -> Unit, - lowerError: (E) -> RustBuffer.ByValue, -) { - try { - writeReturn(makeCall()) - } catch (e: kotlin.Exception) { - if (e is E) { - callStatus.code = UNIFFI_CALL_ERROR - callStatus.error_buf = lowerError(e) - } else { - callStatus.code = UNIFFI_CALL_UNEXPECTED_ERROR - callStatus.error_buf = FfiConverterString.lower(e.toString()) - } - } -} - -// Map handles to objects -// -// This is used pass an opaque 64-bit handle representing a foreign object to the Rust code. -internal class UniffiHandleMap { - private val map = ConcurrentHashMap() - private val counter = - java.util.concurrent.atomic - .AtomicLong(0) - - val size: Int - get() = map.size - - // Insert a new object into the handle map and get a handle for it - fun insert(obj: T): Long { - val handle = counter.getAndAdd(1) - map.put(handle, obj) - return handle - } - - // Get an object from the handle map - fun get(handle: Long): T = map.get(handle) ?: throw InternalException("UniffiHandleMap.get: Invalid handle") - - // Remove an entry from the handlemap and get the Kotlin object back - fun remove(handle: Long): T = map.remove(handle) ?: throw InternalException("UniffiHandleMap: Invalid handle") -} - -// Contains loading, initialization code, -// and the FFI Function declarations in a com.sun.jna.Library. 
-@Synchronized -private fun findLibraryName(componentName: String): String { - val libOverride = System.getProperty("uniffi.component.$componentName.libraryOverride") - if (libOverride != null) { - return libOverride - } - return "goose_llm" -} - -private inline fun loadIndirect(componentName: String): Lib = - Native.load(findLibraryName(componentName), Lib::class.java) - -// Define FFI callback types -internal interface UniffiRustFutureContinuationCallback : com.sun.jna.Callback { - fun callback( - `data`: Long, - `pollResult`: Byte, - ) -} - -internal interface UniffiForeignFutureFree : com.sun.jna.Callback { - fun callback(`handle`: Long) -} - -internal interface UniffiCallbackInterfaceFree : com.sun.jna.Callback { - fun callback(`handle`: Long) -} - -@Structure.FieldOrder("handle", "free") -internal open class UniffiForeignFuture( - @JvmField internal var `handle`: Long = 0.toLong(), - @JvmField internal var `free`: UniffiForeignFutureFree? = null, -) : Structure() { - class UniffiByValue( - `handle`: Long = 0.toLong(), - `free`: UniffiForeignFutureFree? 
= null, - ) : UniffiForeignFuture(`handle`, `free`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFuture) { - `handle` = other.`handle` - `free` = other.`free` - } -} - -@Structure.FieldOrder("returnValue", "callStatus") -internal open class UniffiForeignFutureStructU8( - @JvmField internal var `returnValue`: Byte = 0.toByte(), - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `returnValue`: Byte = 0.toByte(), - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : UniffiForeignFutureStructU8(`returnValue`, `callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructU8) { - `returnValue` = other.`returnValue` - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompleteU8 : com.sun.jna.Callback { - fun callback( - `callbackData`: Long, - `result`: UniffiForeignFutureStructU8.UniffiByValue, - ) -} - -@Structure.FieldOrder("returnValue", "callStatus") -internal open class UniffiForeignFutureStructI8( - @JvmField internal var `returnValue`: Byte = 0.toByte(), - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `returnValue`: Byte = 0.toByte(), - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : UniffiForeignFutureStructI8(`returnValue`, `callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructI8) { - `returnValue` = other.`returnValue` - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompleteI8 : com.sun.jna.Callback { - fun callback( - `callbackData`: Long, - `result`: UniffiForeignFutureStructI8.UniffiByValue, - ) -} - -@Structure.FieldOrder("returnValue", "callStatus") -internal open class UniffiForeignFutureStructU16( - @JvmField internal var 
`returnValue`: Short = 0.toShort(), - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `returnValue`: Short = 0.toShort(), - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : UniffiForeignFutureStructU16(`returnValue`, `callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructU16) { - `returnValue` = other.`returnValue` - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompleteU16 : com.sun.jna.Callback { - fun callback( - `callbackData`: Long, - `result`: UniffiForeignFutureStructU16.UniffiByValue, - ) -} - -@Structure.FieldOrder("returnValue", "callStatus") -internal open class UniffiForeignFutureStructI16( - @JvmField internal var `returnValue`: Short = 0.toShort(), - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `returnValue`: Short = 0.toShort(), - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : UniffiForeignFutureStructI16(`returnValue`, `callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructI16) { - `returnValue` = other.`returnValue` - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompleteI16 : com.sun.jna.Callback { - fun callback( - `callbackData`: Long, - `result`: UniffiForeignFutureStructI16.UniffiByValue, - ) -} - -@Structure.FieldOrder("returnValue", "callStatus") -internal open class UniffiForeignFutureStructU32( - @JvmField internal var `returnValue`: Int = 0, - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `returnValue`: Int = 0, - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : 
UniffiForeignFutureStructU32(`returnValue`, `callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructU32) { - `returnValue` = other.`returnValue` - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompleteU32 : com.sun.jna.Callback { - fun callback( - `callbackData`: Long, - `result`: UniffiForeignFutureStructU32.UniffiByValue, - ) -} - -@Structure.FieldOrder("returnValue", "callStatus") -internal open class UniffiForeignFutureStructI32( - @JvmField internal var `returnValue`: Int = 0, - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `returnValue`: Int = 0, - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : UniffiForeignFutureStructI32(`returnValue`, `callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructI32) { - `returnValue` = other.`returnValue` - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompleteI32 : com.sun.jna.Callback { - fun callback( - `callbackData`: Long, - `result`: UniffiForeignFutureStructI32.UniffiByValue, - ) -} - -@Structure.FieldOrder("returnValue", "callStatus") -internal open class UniffiForeignFutureStructU64( - @JvmField internal var `returnValue`: Long = 0.toLong(), - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `returnValue`: Long = 0.toLong(), - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : UniffiForeignFutureStructU64(`returnValue`, `callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructU64) { - `returnValue` = other.`returnValue` - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompleteU64 : com.sun.jna.Callback { - fun callback( - 
`callbackData`: Long, - `result`: UniffiForeignFutureStructU64.UniffiByValue, - ) -} - -@Structure.FieldOrder("returnValue", "callStatus") -internal open class UniffiForeignFutureStructI64( - @JvmField internal var `returnValue`: Long = 0.toLong(), - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `returnValue`: Long = 0.toLong(), - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : UniffiForeignFutureStructI64(`returnValue`, `callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructI64) { - `returnValue` = other.`returnValue` - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompleteI64 : com.sun.jna.Callback { - fun callback( - `callbackData`: Long, - `result`: UniffiForeignFutureStructI64.UniffiByValue, - ) -} - -@Structure.FieldOrder("returnValue", "callStatus") -internal open class UniffiForeignFutureStructF32( - @JvmField internal var `returnValue`: Float = 0.0f, - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `returnValue`: Float = 0.0f, - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : UniffiForeignFutureStructF32(`returnValue`, `callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructF32) { - `returnValue` = other.`returnValue` - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompleteF32 : com.sun.jna.Callback { - fun callback( - `callbackData`: Long, - `result`: UniffiForeignFutureStructF32.UniffiByValue, - ) -} - -@Structure.FieldOrder("returnValue", "callStatus") -internal open class UniffiForeignFutureStructF64( - @JvmField internal var `returnValue`: Double = 0.0, - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = 
UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `returnValue`: Double = 0.0, - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : UniffiForeignFutureStructF64(`returnValue`, `callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructF64) { - `returnValue` = other.`returnValue` - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompleteF64 : com.sun.jna.Callback { - fun callback( - `callbackData`: Long, - `result`: UniffiForeignFutureStructF64.UniffiByValue, - ) -} - -@Structure.FieldOrder("returnValue", "callStatus") -internal open class UniffiForeignFutureStructPointer( - @JvmField internal var `returnValue`: Pointer = Pointer.NULL, - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `returnValue`: Pointer = Pointer.NULL, - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : UniffiForeignFutureStructPointer(`returnValue`, `callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructPointer) { - `returnValue` = other.`returnValue` - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompletePointer : com.sun.jna.Callback { - fun callback( - `callbackData`: Long, - `result`: UniffiForeignFutureStructPointer.UniffiByValue, - ) -} - -@Structure.FieldOrder("returnValue", "callStatus") -internal open class UniffiForeignFutureStructRustBuffer( - @JvmField internal var `returnValue`: RustBuffer.ByValue = RustBuffer.ByValue(), - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `returnValue`: RustBuffer.ByValue = RustBuffer.ByValue(), - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : 
UniffiForeignFutureStructRustBuffer(`returnValue`, `callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructRustBuffer) { - `returnValue` = other.`returnValue` - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompleteRustBuffer : com.sun.jna.Callback { - fun callback( - `callbackData`: Long, - `result`: UniffiForeignFutureStructRustBuffer.UniffiByValue, - ) -} - -@Structure.FieldOrder("callStatus") -internal open class UniffiForeignFutureStructVoid( - @JvmField internal var `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), -) : Structure() { - class UniffiByValue( - `callStatus`: UniffiRustCallStatus.ByValue = UniffiRustCallStatus.ByValue(), - ) : UniffiForeignFutureStructVoid(`callStatus`), - Structure.ByValue - - internal fun uniffiSetValue(other: UniffiForeignFutureStructVoid) { - `callStatus` = other.`callStatus` - } -} - -internal interface UniffiForeignFutureCompleteVoid : com.sun.jna.Callback { - fun callback( - `callbackData`: Long, - `result`: UniffiForeignFutureStructVoid.UniffiByValue, - ) -} - -// For large crates we prevent `MethodTooLargeException` (see #2340) -// N.B. the name of the extension is very misleading, since it is -// rather `InterfaceTooLargeException`, caused by too many methods -// in the interface for large crates. -// -// By splitting the otherwise huge interface into two parts -// * UniffiLib -// * IntegrityCheckingUniffiLib (this) -// we allow for ~2x as many methods in the UniffiLib interface. -// -// The `ffi_uniffi_contract_version` method and all checksum methods are put -// into `IntegrityCheckingUniffiLib` and these methods are called only once, -// when the library is loaded. 
-internal interface IntegrityCheckingUniffiLib : Library { - // Integrity check functions only - fun uniffi_goose_llm_checksum_func_completion(): Short - - fun uniffi_goose_llm_checksum_func_create_completion_request(): Short - - fun uniffi_goose_llm_checksum_func_create_tool_config(): Short - - fun uniffi_goose_llm_checksum_func_generate_session_name(): Short - - fun uniffi_goose_llm_checksum_func_generate_structured_outputs(): Short - - fun uniffi_goose_llm_checksum_func_generate_tooltip(): Short - - fun uniffi_goose_llm_checksum_func_print_messages(): Short - - fun ffi_goose_llm_uniffi_contract_version(): Int -} - -// A JNA Library to expose the extern-C FFI definitions. -// This is an implementation detail which will be called internally by the public API. -internal interface UniffiLib : Library { - companion object { - internal val INSTANCE: UniffiLib by lazy { - val componentName = "goose_llm" - // For large crates we prevent `MethodTooLargeException` (see #2340) - // N.B. the name of the extension is very misleading, since it is - // rather `InterfaceTooLargeException`, caused by too many methods - // in the interface for large crates. - // - // By splitting the otherwise huge interface into two parts - // * UniffiLib (this) - // * IntegrityCheckingUniffiLib - // And all checksum methods are put into `IntegrityCheckingUniffiLib` - // we allow for ~2x as many methods in the UniffiLib interface. - // - // Thus we first load the library with `loadIndirect` as `IntegrityCheckingUniffiLib` - // so that we can (optionally!) call `uniffiCheckApiChecksums`... - loadIndirect(componentName) - .also { lib: IntegrityCheckingUniffiLib -> - uniffiCheckContractApiVersion(lib) - uniffiCheckApiChecksums(lib) - } - // ... and then we load the library as `UniffiLib` - // N.B. we cannot use `loadIndirect` once and then try to cast it to `UniffiLib` - // => results in `java.lang.ClassCastException: com.sun.proxy.$Proxy cannot be cast to ...` - // error. 
So we must call `loadIndirect` twice. For crates large enough - // to trigger this issue, the performance impact is negligible, running on - // a macOS M1 machine the `loadIndirect` call takes ~50ms. - val lib = loadIndirect(componentName) - // No need to check the contract version and checksums, since - // we already did that with `IntegrityCheckingUniffiLib` above. - // Loading of library with integrity check done. - lib - } - } - - // FFI functions - fun uniffi_goose_llm_fn_func_completion(`req`: RustBuffer.ByValue): Long - - fun uniffi_goose_llm_fn_func_create_completion_request( - `providerName`: RustBuffer.ByValue, - `providerConfig`: RustBuffer.ByValue, - `modelConfig`: RustBuffer.ByValue, - `systemPreamble`: RustBuffer.ByValue, - `systemPromptOverride`: RustBuffer.ByValue, - `messages`: RustBuffer.ByValue, - `extensions`: RustBuffer.ByValue, - `requestId`: RustBuffer.ByValue, - uniffi_out_err: UniffiRustCallStatus, - ): RustBuffer.ByValue - - fun uniffi_goose_llm_fn_func_create_tool_config( - `name`: RustBuffer.ByValue, - `description`: RustBuffer.ByValue, - `inputSchema`: RustBuffer.ByValue, - `approvalMode`: RustBuffer.ByValue, - uniffi_out_err: UniffiRustCallStatus, - ): RustBuffer.ByValue - - fun uniffi_goose_llm_fn_func_generate_session_name( - `providerName`: RustBuffer.ByValue, - `providerConfig`: RustBuffer.ByValue, - `messages`: RustBuffer.ByValue, - `requestId`: RustBuffer.ByValue, - ): Long - - fun uniffi_goose_llm_fn_func_generate_structured_outputs( - `providerName`: RustBuffer.ByValue, - `providerConfig`: RustBuffer.ByValue, - `systemPrompt`: RustBuffer.ByValue, - `messages`: RustBuffer.ByValue, - `schema`: RustBuffer.ByValue, - `requestId`: RustBuffer.ByValue, - ): Long - - fun uniffi_goose_llm_fn_func_generate_tooltip( - `providerName`: RustBuffer.ByValue, - `providerConfig`: RustBuffer.ByValue, - `messages`: RustBuffer.ByValue, - `requestId`: RustBuffer.ByValue, - ): Long - - fun uniffi_goose_llm_fn_func_print_messages( - `messages`: 
RustBuffer.ByValue, - uniffi_out_err: UniffiRustCallStatus, - ): Unit - - fun ffi_goose_llm_rustbuffer_alloc( - `size`: Long, - uniffi_out_err: UniffiRustCallStatus, - ): RustBuffer.ByValue - - fun ffi_goose_llm_rustbuffer_from_bytes( - `bytes`: ForeignBytes.ByValue, - uniffi_out_err: UniffiRustCallStatus, - ): RustBuffer.ByValue - - fun ffi_goose_llm_rustbuffer_free( - `buf`: RustBuffer.ByValue, - uniffi_out_err: UniffiRustCallStatus, - ): Unit - - fun ffi_goose_llm_rustbuffer_reserve( - `buf`: RustBuffer.ByValue, - `additional`: Long, - uniffi_out_err: UniffiRustCallStatus, - ): RustBuffer.ByValue - - fun ffi_goose_llm_rust_future_poll_u8( - `handle`: Long, - `callback`: UniffiRustFutureContinuationCallback, - `callbackData`: Long, - ): Unit - - fun ffi_goose_llm_rust_future_cancel_u8(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_free_u8(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_complete_u8( - `handle`: Long, - uniffi_out_err: UniffiRustCallStatus, - ): Byte - - fun ffi_goose_llm_rust_future_poll_i8( - `handle`: Long, - `callback`: UniffiRustFutureContinuationCallback, - `callbackData`: Long, - ): Unit - - fun ffi_goose_llm_rust_future_cancel_i8(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_free_i8(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_complete_i8( - `handle`: Long, - uniffi_out_err: UniffiRustCallStatus, - ): Byte - - fun ffi_goose_llm_rust_future_poll_u16( - `handle`: Long, - `callback`: UniffiRustFutureContinuationCallback, - `callbackData`: Long, - ): Unit - - fun ffi_goose_llm_rust_future_cancel_u16(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_free_u16(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_complete_u16( - `handle`: Long, - uniffi_out_err: UniffiRustCallStatus, - ): Short - - fun ffi_goose_llm_rust_future_poll_i16( - `handle`: Long, - `callback`: UniffiRustFutureContinuationCallback, - `callbackData`: Long, - ): Unit - - fun ffi_goose_llm_rust_future_cancel_i16(`handle`: Long): 
Unit - - fun ffi_goose_llm_rust_future_free_i16(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_complete_i16( - `handle`: Long, - uniffi_out_err: UniffiRustCallStatus, - ): Short - - fun ffi_goose_llm_rust_future_poll_u32( - `handle`: Long, - `callback`: UniffiRustFutureContinuationCallback, - `callbackData`: Long, - ): Unit - - fun ffi_goose_llm_rust_future_cancel_u32(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_free_u32(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_complete_u32( - `handle`: Long, - uniffi_out_err: UniffiRustCallStatus, - ): Int - - fun ffi_goose_llm_rust_future_poll_i32( - `handle`: Long, - `callback`: UniffiRustFutureContinuationCallback, - `callbackData`: Long, - ): Unit - - fun ffi_goose_llm_rust_future_cancel_i32(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_free_i32(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_complete_i32( - `handle`: Long, - uniffi_out_err: UniffiRustCallStatus, - ): Int - - fun ffi_goose_llm_rust_future_poll_u64( - `handle`: Long, - `callback`: UniffiRustFutureContinuationCallback, - `callbackData`: Long, - ): Unit - - fun ffi_goose_llm_rust_future_cancel_u64(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_free_u64(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_complete_u64( - `handle`: Long, - uniffi_out_err: UniffiRustCallStatus, - ): Long - - fun ffi_goose_llm_rust_future_poll_i64( - `handle`: Long, - `callback`: UniffiRustFutureContinuationCallback, - `callbackData`: Long, - ): Unit - - fun ffi_goose_llm_rust_future_cancel_i64(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_free_i64(`handle`: Long): Unit - - fun ffi_goose_llm_rust_future_complete_i64( - `handle`: Long, - uniffi_out_err: UniffiRustCallStatus, - ): Long - - fun ffi_goose_llm_rust_future_poll_f32( - `handle`: Long, - `callback`: UniffiRustFutureContinuationCallback, - `callbackData`: Long, - ): Unit - - fun ffi_goose_llm_rust_future_cancel_f32(`handle`: Long): Unit - - fun 
ffi_goose_llm_rust_future_free_f32(`handle`: Long): Unit

    fun ffi_goose_llm_rust_future_complete_f32(
        `handle`: Long,
        uniffi_out_err: UniffiRustCallStatus,
    ): Float

    fun ffi_goose_llm_rust_future_poll_f64(
        `handle`: Long,
        `callback`: UniffiRustFutureContinuationCallback,
        `callbackData`: Long,
    ): Unit

    fun ffi_goose_llm_rust_future_cancel_f64(`handle`: Long): Unit

    fun ffi_goose_llm_rust_future_free_f64(`handle`: Long): Unit

    fun ffi_goose_llm_rust_future_complete_f64(
        `handle`: Long,
        uniffi_out_err: UniffiRustCallStatus,
    ): Double

    fun ffi_goose_llm_rust_future_poll_pointer(
        `handle`: Long,
        `callback`: UniffiRustFutureContinuationCallback,
        `callbackData`: Long,
    ): Unit

    fun ffi_goose_llm_rust_future_cancel_pointer(`handle`: Long): Unit

    fun ffi_goose_llm_rust_future_free_pointer(`handle`: Long): Unit

    fun ffi_goose_llm_rust_future_complete_pointer(
        `handle`: Long,
        uniffi_out_err: UniffiRustCallStatus,
    ): Pointer

    fun ffi_goose_llm_rust_future_poll_rust_buffer(
        `handle`: Long,
        `callback`: UniffiRustFutureContinuationCallback,
        `callbackData`: Long,
    ): Unit

    fun ffi_goose_llm_rust_future_cancel_rust_buffer(`handle`: Long): Unit

    fun ffi_goose_llm_rust_future_free_rust_buffer(`handle`: Long): Unit

    fun ffi_goose_llm_rust_future_complete_rust_buffer(
        `handle`: Long,
        uniffi_out_err: UniffiRustCallStatus,
    ): RustBuffer.ByValue

    fun ffi_goose_llm_rust_future_poll_void(
        `handle`: Long,
        `callback`: UniffiRustFutureContinuationCallback,
        `callbackData`: Long,
    ): Unit

    fun ffi_goose_llm_rust_future_cancel_void(`handle`: Long): Unit

    fun ffi_goose_llm_rust_future_free_void(`handle`: Long): Unit

    fun ffi_goose_llm_rust_future_complete_void(
        `handle`: Long,
        uniffi_out_err: UniffiRustCallStatus,
    ): Unit
}

// NOTE(review): this region was reconstructed from newline-flattened, generics-stripped
// machine-generated UniFFI bindings. It is generated code — prefer regenerating with
// `uniffi-bindgen` over hand-editing; the generic type arguments below were restored
// from the UniFFI Kotlin template and should be confirmed against a fresh generation.

// Verify that the contract version baked into these bindings matches the one
// exposed by the loaded scaffolding dylib; a mismatch means bindings and library
// were generated from different uniffi versions.
private fun uniffiCheckContractApiVersion(lib: IntegrityCheckingUniffiLib) {
    // Get the bindings contract version from our ComponentInterface
    val bindings_contract_version = 29
    // Get the scaffolding contract version by calling the into the dylib
    val scaffolding_contract_version = lib.ffi_goose_llm_uniffi_contract_version()
    if (bindings_contract_version != scaffolding_contract_version) {
        throw RuntimeException("UniFFI contract version mismatch: try cleaning and rebuilding your project")
    }
}

// Each exported function carries a checksum of its signature; comparing them
// catches bindings that are stale relative to the compiled Rust library.
@Suppress("UNUSED_PARAMETER")
private fun uniffiCheckApiChecksums(lib: IntegrityCheckingUniffiLib) {
    if (lib.uniffi_goose_llm_checksum_func_completion() != 47457.toShort()) {
        throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
    }
    if (lib.uniffi_goose_llm_checksum_func_create_completion_request() != 15391.toShort()) {
        throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
    }
    if (lib.uniffi_goose_llm_checksum_func_create_tool_config() != 49910.toShort()) {
        throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
    }
    if (lib.uniffi_goose_llm_checksum_func_generate_session_name() != 34350.toShort()) {
        throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
    }
    if (lib.uniffi_goose_llm_checksum_func_generate_structured_outputs() != 4576.toShort()) {
        throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
    }
    if (lib.uniffi_goose_llm_checksum_func_generate_tooltip() != 36439.toShort()) {
        throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
    }
    if (lib.uniffi_goose_llm_checksum_func_print_messages() != 30278.toShort()) {
        throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
    }
}

/**
 * @suppress
 */
public fun uniffiEnsureInitialized() {
    UniffiLib.INSTANCE
}

// Async support
// Async return type handlers

internal const val UNIFFI_RUST_FUTURE_POLL_READY = 0.toByte()
internal const val UNIFFI_RUST_FUTURE_POLL_MAYBE_READY = 1.toByte()

// Maps the opaque `callbackData` handle passed over the FFI back to the suspended
// Kotlin coroutine waiting on the Rust future.
internal val uniffiContinuationHandleMap = UniffiHandleMap<CancellableContinuation<Byte>>()

// FFI type for Rust future continuations
internal object uniffiRustFutureContinuationCallbackImpl : UniffiRustFutureContinuationCallback {
    override fun callback(
        data: Long,
        pollResult: Byte,
    ) {
        uniffiContinuationHandleMap.remove(data).resume(pollResult)
    }
}

// Drive a Rust future to completion: poll (suspending the coroutine until the
// continuation callback fires) until READY, then complete and lift the result.
// The future handle is always freed, even if lifting or polling throws.
internal suspend fun <T, F, E : kotlin.Exception> uniffiRustCallAsync(
    rustFuture: Long,
    pollFunc: (Long, UniffiRustFutureContinuationCallback, Long) -> Unit,
    completeFunc: (Long, UniffiRustCallStatus) -> F,
    freeFunc: (Long) -> Unit,
    liftFunc: (F) -> T,
    errorHandler: UniffiRustCallStatusErrorHandler<E>,
): T {
    try {
        do {
            val pollResult =
                suspendCancellableCoroutine<Byte> { continuation ->
                    pollFunc(
                        rustFuture,
                        uniffiRustFutureContinuationCallbackImpl,
                        uniffiContinuationHandleMap.insert(continuation),
                    )
                }
        } while (pollResult != UNIFFI_RUST_FUTURE_POLL_READY)

        return liftFunc(
            uniffiRustCallWithError(errorHandler, { status -> completeFunc(rustFuture, status) }),
        )
    } finally {
        freeFunc(rustFuture)
    }
}

// Public interface members begin here.

// Interface implemented by anything that can contain an object reference.
//
// Such types expose a `destroy()` method that must be called to cleanly
// dispose of the contained objects. Failure to call this method may result
// in memory leaks.
//
// The easiest way to ensure this method is called is to use the `.use`
// helper method to execute a block and destroy the object at the end.
interface Disposable {
    fun destroy()

    companion object {
        // Recursively destroy any Disposable found directly in `args` or one
        // level deep inside lists, map values, and iterables.
        fun destroy(vararg args: Any?) {
            for (arg in args) {
                when (arg) {
                    is Disposable -> arg.destroy()
                    is ArrayList<*> -> {
                        for (idx in arg.indices) {
                            val element = arg[idx]
                            if (element is Disposable) {
                                element.destroy()
                            }
                        }
                    }
                    is Map<*, *> -> {
                        for (element in arg.values) {
                            if (element is Disposable) {
                                element.destroy()
                            }
                        }
                    }
                    is Iterable<*> -> {
                        for (element in arg) {
                            if (element is Disposable) {
                                element.destroy()
                            }
                        }
                    }
                }
            }
        }
    }
}

/**
 * Run `block` with this Disposable and destroy it afterwards, mirroring
 * Kotlin's `Closeable.use`. Exceptions from `destroy()` are swallowed so they
 * never mask the result (or exception) of `block`.
 *
 * @suppress
 */
inline fun <T : Disposable?, R> T.use(block: (T) -> R) =
    try {
        block(this)
    } finally {
        try {
            // N.B. our implementation is on the nullable type `Disposable?`.
            this?.destroy()
        } catch (e: Throwable) {
            // swallow
        }
    }

/**
 * Used to instantiate an interface without an actual pointer, for fakes in tests, mostly.
 *
 * @suppress
 * */
object NoPointer

/**
 * @suppress
 */
public object FfiConverterUInt : FfiConverter<UInt, Int> {
    override fun lift(value: Int): UInt = value.toUInt()

    override fun read(buf: ByteBuffer): UInt = lift(buf.getInt())

    override fun lower(value: UInt): Int = value.toInt()

    override fun allocationSize(value: UInt) = 4UL

    override fun write(
        value: UInt,
        buf: ByteBuffer,
    ) {
        buf.putInt(value.toInt())
    }
}

/**
 * @suppress
 */
public object FfiConverterInt : FfiConverter<Int, Int> {
    override fun lift(value: Int): Int = value

    override fun read(buf: ByteBuffer): Int = buf.getInt()

    override fun lower(value: Int): Int = value

    override fun allocationSize(value: Int) = 4UL

    override fun write(
        value: Int,
        buf: ByteBuffer,
    ) {
        buf.putInt(value)
    }
}

/**
 * @suppress
 */
public object FfiConverterLong : FfiConverter<Long, Long> {
    override fun lift(value: Long): Long = value

    override fun read(buf: ByteBuffer): Long = buf.getLong()

    override fun lower(value: Long): Long = value

    override fun allocationSize(value: Long) = 8UL

    override fun write(
        value: Long,
        buf: ByteBuffer,
    ) {
        buf.putLong(value)
    }
}

/**
 * @suppress
 */
public object FfiConverterFloat : FfiConverter<Float, Float> {
    override fun lift(value: Float): Float = value

    override fun read(buf: ByteBuffer): Float = buf.getFloat()

    override fun lower(value: Float): Float = value

    override fun allocationSize(value: Float) = 4UL

    override fun write(
        value: Float,
        buf: ByteBuffer,
    ) {
        buf.putFloat(value)
    }
}

/**
 * @suppress
 */
public object FfiConverterDouble : FfiConverter<Double, Double> {
    override fun lift(value: Double): Double = value

    override fun read(buf: ByteBuffer): Double = buf.getDouble()

    override fun lower(value: Double): Double = value

    override fun allocationSize(value: Double) = 8UL

    override fun write(
        value: Double,
        buf: ByteBuffer,
    ) {
        buf.putDouble(value)
    }
}

/**
 * @suppress
 */
public object FfiConverterString : FfiConverter<String, RustBuffer.ByValue> {
    // Note: we don't inherit from FfiConverterRustBuffer, because we use a
    // special encoding when lowering/lifting. We can use `RustBuffer.len` to
    // store our length and avoid writing it out to the buffer.
    override fun lift(value: RustBuffer.ByValue): String {
        try {
            val byteArr = ByteArray(value.len.toInt())
            value.asByteBuffer()!!.get(byteArr)
            return byteArr.toString(Charsets.UTF_8)
        } finally {
            RustBuffer.free(value)
        }
    }

    override fun read(buf: ByteBuffer): String {
        val len = buf.getInt()
        val byteArr = ByteArray(len)
        buf.get(byteArr)
        return byteArr.toString(Charsets.UTF_8)
    }

    fun toUtf8(value: String): ByteBuffer {
        // Make sure we don't have invalid UTF-16, check for lone surrogates.
        return Charsets.UTF_8.newEncoder().run {
            onMalformedInput(CodingErrorAction.REPORT)
            encode(CharBuffer.wrap(value))
        }
    }

    override fun lower(value: String): RustBuffer.ByValue {
        val byteBuf = toUtf8(value)
        // Ideally we'd pass these bytes to `ffi_bytebuffer_from_bytes`, but doing so would require us
        // to copy them into a JNA `Memory`. So we might as well directly copy them into a `RustBuffer`.
        val rbuf = RustBuffer.alloc(byteBuf.limit().toULong())
        rbuf.asByteBuffer()!!.put(byteBuf)
        return rbuf
    }

    // We aren't sure exactly how many bytes our string will be once it's UTF-8
    // encoded. Allocate 3 bytes per UTF-16 code unit which will always be
    // enough.
    override fun allocationSize(value: String): ULong {
        val sizeForLength = 4UL
        val sizeForString = value.length.toULong() * 3UL
        return sizeForLength + sizeForString
    }

    override fun write(
        value: String,
        buf: ByteBuffer,
    ) {
        val byteBuf = toUtf8(value)
        buf.putInt(byteBuf.limit())
        buf.put(byteBuf)
    }
}

data class CompletionResponse(
    var `message`: Message,
    var `model`: kotlin.String,
    var `usage`: Usage,
    var `runtimeMetrics`: RuntimeMetrics,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeCompletionResponse : FfiConverterRustBuffer<CompletionResponse> {
    override fun read(buf: ByteBuffer): CompletionResponse =
        CompletionResponse(
            FfiConverterTypeMessage.read(buf),
            FfiConverterString.read(buf),
            FfiConverterTypeUsage.read(buf),
            FfiConverterTypeRuntimeMetrics.read(buf),
        )

    override fun allocationSize(value: CompletionResponse) =
        (
            FfiConverterTypeMessage.allocationSize(value.`message`) +
                FfiConverterString.allocationSize(value.`model`) +
                FfiConverterTypeUsage.allocationSize(value.`usage`) +
                FfiConverterTypeRuntimeMetrics.allocationSize(value.`runtimeMetrics`)
        )

    override fun write(
        value: CompletionResponse,
        buf: ByteBuffer,
    ) {
        FfiConverterTypeMessage.write(value.`message`, buf)
        FfiConverterString.write(value.`model`, buf)
        FfiConverterTypeUsage.write(value.`usage`, buf)
        FfiConverterTypeRuntimeMetrics.write(value.`runtimeMetrics`, buf)
    }
}

data class ExtensionConfig(
    var `name`: kotlin.String,
    var `instructions`: kotlin.String?,
    var `tools`: List<ToolConfig>,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeExtensionConfig : FfiConverterRustBuffer<ExtensionConfig> {
    override fun read(buf: ByteBuffer): ExtensionConfig =
        ExtensionConfig(
            FfiConverterString.read(buf),
            FfiConverterOptionalString.read(buf),
            FfiConverterSequenceTypeToolConfig.read(buf),
        )

    override fun allocationSize(value: ExtensionConfig) =
        (
            FfiConverterString.allocationSize(value.`name`) +
                FfiConverterOptionalString.allocationSize(value.`instructions`) +
                FfiConverterSequenceTypeToolConfig.allocationSize(value.`tools`)
        )

    override fun write(
        value: ExtensionConfig,
        buf: ByteBuffer,
    ) {
        FfiConverterString.write(value.`name`, buf)
        FfiConverterOptionalString.write(value.`instructions`, buf)
        FfiConverterSequenceTypeToolConfig.write(value.`tools`, buf)
    }
}

data class ImageContent(
    var `data`: kotlin.String,
    var `mimeType`: kotlin.String,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeImageContent : FfiConverterRustBuffer<ImageContent> {
    override fun read(buf: ByteBuffer): ImageContent =
        ImageContent(
            FfiConverterString.read(buf),
            FfiConverterString.read(buf),
        )

    override fun allocationSize(value: ImageContent) =
        (
            FfiConverterString.allocationSize(value.`data`) +
                FfiConverterString.allocationSize(value.`mimeType`)
        )

    override fun write(
        value: ImageContent,
        buf: ByteBuffer,
    ) {
        FfiConverterString.write(value.`data`, buf)
        FfiConverterString.write(value.`mimeType`, buf)
    }
}

/**
 * A message to or from an LLM
 */
data class Message(
    var `role`: Role,
    var `created`: kotlin.Long,
    var `content`: Contents,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeMessage : FfiConverterRustBuffer<Message> {
    override fun read(buf: ByteBuffer): Message =
        Message(
            FfiConverterTypeRole.read(buf),
            FfiConverterLong.read(buf),
            FfiConverterTypeContents.read(buf),
        )

    override fun allocationSize(value: Message) =
        (
            FfiConverterTypeRole.allocationSize(value.`role`) +
                FfiConverterLong.allocationSize(value.`created`) +
                FfiConverterTypeContents.allocationSize(value.`content`)
        )

    override fun write(
        value: Message,
        buf: ByteBuffer,
    ) {
        FfiConverterTypeRole.write(value.`role`, buf)
        FfiConverterLong.write(value.`created`, buf)
        FfiConverterTypeContents.write(value.`content`, buf)
    }
}

/**
 * Configuration for model-specific settings and limits
 */
data class ModelConfig(
    /**
     * The name of the model to use
     */
    var `modelName`: kotlin.String,
    /**
     * Optional explicit context limit that overrides any defaults
     */
    var `contextLimit`: kotlin.UInt?,
    /**
     * Optional temperature setting (0.0 - 1.0)
     */
    var `temperature`: kotlin.Float?,
    /**
     * Optional maximum tokens to generate
     */
    var `maxTokens`: kotlin.Int?,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeModelConfig : FfiConverterRustBuffer<ModelConfig> {
    override fun read(buf: ByteBuffer): ModelConfig =
        ModelConfig(
            FfiConverterString.read(buf),
            FfiConverterOptionalUInt.read(buf),
            FfiConverterOptionalFloat.read(buf),
            FfiConverterOptionalInt.read(buf),
        )

    override fun allocationSize(value: ModelConfig) =
        (
            FfiConverterString.allocationSize(value.`modelName`) +
                FfiConverterOptionalUInt.allocationSize(value.`contextLimit`) +
                FfiConverterOptionalFloat.allocationSize(value.`temperature`) +
                FfiConverterOptionalInt.allocationSize(value.`maxTokens`)
        )

    override fun write(
        value: ModelConfig,
        buf: ByteBuffer,
    ) {
        FfiConverterString.write(value.`modelName`, buf)
        FfiConverterOptionalUInt.write(value.`contextLimit`, buf)
        FfiConverterOptionalFloat.write(value.`temperature`, buf)
        FfiConverterOptionalInt.write(value.`maxTokens`, buf)
    }
}

data class ProviderCompleteResponse(
    var `message`: Message,
    var `model`: kotlin.String,
    var `usage`: Usage,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeProviderCompleteResponse : FfiConverterRustBuffer<ProviderCompleteResponse> {
    override fun read(buf: ByteBuffer): ProviderCompleteResponse =
        ProviderCompleteResponse(
            FfiConverterTypeMessage.read(buf),
            FfiConverterString.read(buf),
            FfiConverterTypeUsage.read(buf),
        )

    override fun allocationSize(value: ProviderCompleteResponse) =
        (
            FfiConverterTypeMessage.allocationSize(value.`message`) +
                FfiConverterString.allocationSize(value.`model`) +
                FfiConverterTypeUsage.allocationSize(value.`usage`)
        )

    override fun write(
        value: ProviderCompleteResponse,
        buf: ByteBuffer,
    ) {
        FfiConverterTypeMessage.write(value.`message`, buf)
        FfiConverterString.write(value.`model`, buf)
        FfiConverterTypeUsage.write(value.`usage`, buf)
    }
}

/**
 * Response from a structured‐extraction call
 */
data class ProviderExtractResponse(
    /**
     * The extracted JSON object
     */
    var `data`: Value,
    /**
     * Which model produced it
     */
    var `model`: kotlin.String,
    /**
     * Token usage stats
     */
    var `usage`: Usage,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeProviderExtractResponse : FfiConverterRustBuffer<ProviderExtractResponse> {
    override fun read(buf: ByteBuffer): ProviderExtractResponse =
        ProviderExtractResponse(
            FfiConverterTypeValue.read(buf),
            FfiConverterString.read(buf),
            FfiConverterTypeUsage.read(buf),
        )

    override fun allocationSize(value: ProviderExtractResponse) =
        (
            FfiConverterTypeValue.allocationSize(value.`data`) +
                FfiConverterString.allocationSize(value.`model`) +
                FfiConverterTypeUsage.allocationSize(value.`usage`)
        )

    override fun write(
        value: ProviderExtractResponse,
        buf: ByteBuffer,
    ) {
        FfiConverterTypeValue.write(value.`data`, buf)
        FfiConverterString.write(value.`model`, buf)
        FfiConverterTypeUsage.write(value.`usage`, buf)
    }
}

data class RedactedThinkingContent(
    var `data`: kotlin.String,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeRedactedThinkingContent : FfiConverterRustBuffer<RedactedThinkingContent> {
    override fun read(buf: ByteBuffer): RedactedThinkingContent =
        RedactedThinkingContent(
            FfiConverterString.read(buf),
        )

    override fun allocationSize(value: RedactedThinkingContent) =
        (
            FfiConverterString.allocationSize(value.`data`)
        )

    override fun write(
        value: RedactedThinkingContent,
        buf: ByteBuffer,
    ) {
        FfiConverterString.write(value.`data`, buf)
    }
}

data class RuntimeMetrics(
    var `totalTimeSec`: kotlin.Float,
    var `totalTimeSecProvider`: kotlin.Float,
    var `tokensPerSecond`: kotlin.Double?,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeRuntimeMetrics : FfiConverterRustBuffer<RuntimeMetrics> {
    override fun read(buf: ByteBuffer): RuntimeMetrics =
        RuntimeMetrics(
            FfiConverterFloat.read(buf),
            FfiConverterFloat.read(buf),
            FfiConverterOptionalDouble.read(buf),
        )

    override fun allocationSize(value: RuntimeMetrics) =
        (
            FfiConverterFloat.allocationSize(value.`totalTimeSec`) +
                FfiConverterFloat.allocationSize(value.`totalTimeSecProvider`) +
                FfiConverterOptionalDouble.allocationSize(value.`tokensPerSecond`)
        )

    override fun write(
        value: RuntimeMetrics,
        buf: ByteBuffer,
    ) {
        FfiConverterFloat.write(value.`totalTimeSec`, buf)
        FfiConverterFloat.write(value.`totalTimeSecProvider`, buf)
        FfiConverterOptionalDouble.write(value.`tokensPerSecond`, buf)
    }
}

data class TextContent(
    var `text`: kotlin.String,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeTextContent : FfiConverterRustBuffer<TextContent> {
    override fun read(buf: ByteBuffer): TextContent =
        TextContent(
            FfiConverterString.read(buf),
        )

    override fun allocationSize(value: TextContent) =
        (
            FfiConverterString.allocationSize(value.`text`)
        )

    override fun write(
        value: TextContent,
        buf: ByteBuffer,
    ) {
        FfiConverterString.write(value.`text`, buf)
    }
}

data class ThinkingContent(
    var `thinking`: kotlin.String,
    var `signature`: kotlin.String,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeThinkingContent : FfiConverterRustBuffer<ThinkingContent> {
    override fun read(buf: ByteBuffer): ThinkingContent =
        ThinkingContent(
            FfiConverterString.read(buf),
            FfiConverterString.read(buf),
        )

    override fun allocationSize(value: ThinkingContent) =
        (
            FfiConverterString.allocationSize(value.`thinking`) +
                FfiConverterString.allocationSize(value.`signature`)
        )

    override fun write(
        value: ThinkingContent,
        buf: ByteBuffer,
    ) {
        FfiConverterString.write(value.`thinking`, buf)
        FfiConverterString.write(value.`signature`, buf)
    }
}

data class ToolConfig(
    var `name`: kotlin.String,
    var `description`: kotlin.String,
    var `inputSchema`: Value,
    var `approvalMode`: ToolApprovalMode,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeToolConfig : FfiConverterRustBuffer<ToolConfig> {
    override fun read(buf: ByteBuffer): ToolConfig =
        ToolConfig(
            FfiConverterString.read(buf),
            FfiConverterString.read(buf),
            FfiConverterTypeValue.read(buf),
            FfiConverterTypeToolApprovalMode.read(buf),
        )

    override fun allocationSize(value: ToolConfig) =
        (
            FfiConverterString.allocationSize(value.`name`) +
                FfiConverterString.allocationSize(value.`description`) +
                FfiConverterTypeValue.allocationSize(value.`inputSchema`) +
                FfiConverterTypeToolApprovalMode.allocationSize(value.`approvalMode`)
        )

    override fun write(
        value: ToolConfig,
        buf: ByteBuffer,
    ) {
        FfiConverterString.write(value.`name`, buf)
        FfiConverterString.write(value.`description`, buf)
        FfiConverterTypeValue.write(value.`inputSchema`, buf)
        FfiConverterTypeToolApprovalMode.write(value.`approvalMode`, buf)
    }
}

data class ToolRequest(
    var `id`: kotlin.String,
    var `toolCall`: ToolRequestToolCall,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeToolRequest : FfiConverterRustBuffer<ToolRequest> {
    override fun read(buf: ByteBuffer): ToolRequest =
        ToolRequest(
            FfiConverterString.read(buf),
            FfiConverterTypeToolRequestToolCall.read(buf),
        )

    override fun allocationSize(value: ToolRequest) =
        (
            FfiConverterString.allocationSize(value.`id`) +
                FfiConverterTypeToolRequestToolCall.allocationSize(value.`toolCall`)
        )

    override fun write(
        value: ToolRequest,
        buf: ByteBuffer,
    ) {
        FfiConverterString.write(value.`id`, buf)
        FfiConverterTypeToolRequestToolCall.write(value.`toolCall`, buf)
    }
}

data class ToolResponse(
    var `id`: kotlin.String,
    var `toolResult`: ToolResponseToolResult,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeToolResponse : FfiConverterRustBuffer<ToolResponse> {
    override fun read(buf: ByteBuffer): ToolResponse =
        ToolResponse(
            FfiConverterString.read(buf),
            FfiConverterTypeToolResponseToolResult.read(buf),
        )

    override fun allocationSize(value: ToolResponse) =
        (
            FfiConverterString.allocationSize(value.`id`) +
                FfiConverterTypeToolResponseToolResult.allocationSize(value.`toolResult`)
        )

    override fun write(
        value: ToolResponse,
        buf: ByteBuffer,
    ) {
        FfiConverterString.write(value.`id`, buf)
        FfiConverterTypeToolResponseToolResult.write(value.`toolResult`, buf)
    }
}

data class Usage(
    var `inputTokens`: kotlin.Int?,
    var `outputTokens`: kotlin.Int?,
    var `totalTokens`: kotlin.Int?,
) {
    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeUsage : FfiConverterRustBuffer<Usage> {
    override fun read(buf: ByteBuffer): Usage =
        Usage(
            FfiConverterOptionalInt.read(buf),
            FfiConverterOptionalInt.read(buf),
            FfiConverterOptionalInt.read(buf),
        )

    override fun allocationSize(value: Usage) =
        (
            FfiConverterOptionalInt.allocationSize(value.`inputTokens`) +
                FfiConverterOptionalInt.allocationSize(value.`outputTokens`) +
                FfiConverterOptionalInt.allocationSize(value.`totalTokens`)
        )

    override fun write(
        value: Usage,
        buf: ByteBuffer,
    ) {
        FfiConverterOptionalInt.write(value.`inputTokens`, buf)
        FfiConverterOptionalInt.write(value.`outputTokens`, buf)
        FfiConverterOptionalInt.write(value.`totalTokens`, buf)
    }
}

// "Flat" error: only the variant discriminant is lowered; the message string is
// read on lift but never written back, which is why allocationSize is a
// constant 4 bytes below.
sealed class CompletionException(
    message: String,
) : kotlin.Exception(message) {
    class UnknownProvider(
        message: String,
    ) : CompletionException(message)

    class Provider(
        message: String,
    ) : CompletionException(message)

    class Template(
        message: String,
    ) : CompletionException(message)

    class Json(
        message: String,
    ) : CompletionException(message)

    class ToolNotFound(
        message: String,
    ) : CompletionException(message)

    companion object ErrorHandler : UniffiRustCallStatusErrorHandler<CompletionException> {
        override fun lift(error_buf: RustBuffer.ByValue): CompletionException = FfiConverterTypeCompletionError.lift(error_buf)
    }
}

/**
 * @suppress
 */
public object FfiConverterTypeCompletionError : FfiConverterRustBuffer<CompletionException> {
    override fun read(buf: ByteBuffer): CompletionException =
        when (buf.getInt()) {
            1 -> CompletionException.UnknownProvider(FfiConverterString.read(buf))
            2 -> CompletionException.Provider(FfiConverterString.read(buf))
            3 -> CompletionException.Template(FfiConverterString.read(buf))
            4 -> CompletionException.Json(FfiConverterString.read(buf))
            5 -> CompletionException.ToolNotFound(FfiConverterString.read(buf))
            else -> throw RuntimeException("invalid error enum value, something is very wrong!!")
        }

    override fun allocationSize(value: CompletionException): ULong = 4UL

    override fun write(
        value: CompletionException,
        buf: ByteBuffer,
    ) {
        when (value) {
            is CompletionException.UnknownProvider -> {
                buf.putInt(1)
                Unit
            }
            is CompletionException.Provider -> {
                buf.putInt(2)
                Unit
            }
            is CompletionException.Template -> {
                buf.putInt(3)
                Unit
            }
            is CompletionException.Json -> {
                buf.putInt(4)
                Unit
            }
            is CompletionException.ToolNotFound -> {
                buf.putInt(5)
                Unit
            }
        }.let { /* this makes the `when` an expression, which ensures it is exhaustive */ }
    }
}
// NOTE(review): reconstructed from newline-flattened, generics-stripped UniFFI
// generated code; generic arguments restored from the UniFFI Kotlin template.
// Regenerate with `uniffi-bindgen` rather than hand-editing.

sealed class Content {
    data class Text(
        val v1: TextContent,
    ) : Content() {
        companion object
    }

    data class Image(
        val v1: ImageContent,
    ) : Content() {
        companion object
    }

    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeContent : FfiConverterRustBuffer<Content> {
    override fun read(buf: ByteBuffer): Content =
        when (buf.getInt()) {
            1 ->
                Content.Text(
                    FfiConverterTypeTextContent.read(buf),
                )
            2 ->
                Content.Image(
                    FfiConverterTypeImageContent.read(buf),
                )
            else -> throw RuntimeException("invalid enum value, something is very wrong!!")
        }

    override fun allocationSize(value: Content) =
        when (value) {
            is Content.Text -> {
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                (
                    4UL +
                        FfiConverterTypeTextContent.allocationSize(value.v1)
                )
            }
            is Content.Image -> {
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                (
                    4UL +
                        FfiConverterTypeImageContent.allocationSize(value.v1)
                )
            }
        }

    override fun write(
        value: Content,
        buf: ByteBuffer,
    ) {
        when (value) {
            is Content.Text -> {
                buf.putInt(1)
                FfiConverterTypeTextContent.write(value.v1, buf)
                Unit
            }
            is Content.Image -> {
                buf.putInt(2)
                FfiConverterTypeImageContent.write(value.v1, buf)
                Unit
            }
        }.let { /* this makes the `when` an expression, which ensures it is exhaustive */ }
    }
}

/**
 * Content passed inside a message, which can be both simple content and tool content
 */
sealed class MessageContent {
    data class Text(
        val v1: TextContent,
    ) : MessageContent() {
        companion object
    }

    data class Image(
        val v1: ImageContent,
    ) : MessageContent() {
        companion object
    }

    data class ToolReq(
        val v1: ToolRequest,
    ) : MessageContent() {
        companion object
    }

    data class ToolResp(
        val v1: ToolResponse,
    ) : MessageContent() {
        companion object
    }

    data class Thinking(
        val v1: ThinkingContent,
    ) : MessageContent() {
        companion object
    }

    data class RedactedThinking(
        val v1: RedactedThinkingContent,
    ) : MessageContent() {
        companion object
    }

    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeMessageContent : FfiConverterRustBuffer<MessageContent> {
    override fun read(buf: ByteBuffer): MessageContent =
        when (buf.getInt()) {
            1 ->
                MessageContent.Text(
                    FfiConverterTypeTextContent.read(buf),
                )
            2 ->
                MessageContent.Image(
                    FfiConverterTypeImageContent.read(buf),
                )
            3 ->
                MessageContent.ToolReq(
                    FfiConverterTypeToolRequest.read(buf),
                )
            4 ->
                MessageContent.ToolResp(
                    FfiConverterTypeToolResponse.read(buf),
                )
            5 ->
                MessageContent.Thinking(
                    FfiConverterTypeThinkingContent.read(buf),
                )
            6 ->
                MessageContent.RedactedThinking(
                    FfiConverterTypeRedactedThinkingContent.read(buf),
                )
            else -> throw RuntimeException("invalid enum value, something is very wrong!!")
        }

    override fun allocationSize(value: MessageContent) =
        when (value) {
            is MessageContent.Text -> {
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                (
                    4UL +
                        FfiConverterTypeTextContent.allocationSize(value.v1)
                )
            }
            is MessageContent.Image -> {
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                (
                    4UL +
                        FfiConverterTypeImageContent.allocationSize(value.v1)
                )
            }
            is MessageContent.ToolReq -> {
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                (
                    4UL +
                        FfiConverterTypeToolRequest.allocationSize(value.v1)
                )
            }
            is MessageContent.ToolResp -> {
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                (
                    4UL +
                        FfiConverterTypeToolResponse.allocationSize(value.v1)
                )
            }
            is MessageContent.Thinking -> {
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                (
                    4UL +
                        FfiConverterTypeThinkingContent.allocationSize(value.v1)
                )
            }
            is MessageContent.RedactedThinking -> {
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                (
                    4UL +
                        FfiConverterTypeRedactedThinkingContent.allocationSize(value.v1)
                )
            }
        }

    override fun write(
        value: MessageContent,
        buf: ByteBuffer,
    ) {
        when (value) {
            is MessageContent.Text -> {
                buf.putInt(1)
                FfiConverterTypeTextContent.write(value.v1, buf)
                Unit
            }
            is MessageContent.Image -> {
                buf.putInt(2)
                FfiConverterTypeImageContent.write(value.v1, buf)
                Unit
            }
            is MessageContent.ToolReq -> {
                buf.putInt(3)
                FfiConverterTypeToolRequest.write(value.v1, buf)
                Unit
            }
            is MessageContent.ToolResp -> {
                buf.putInt(4)
                FfiConverterTypeToolResponse.write(value.v1, buf)
                Unit
            }
            is MessageContent.Thinking -> {
                buf.putInt(5)
                FfiConverterTypeThinkingContent.write(value.v1, buf)
                Unit
            }
            is MessageContent.RedactedThinking -> {
                buf.putInt(6)
                FfiConverterTypeRedactedThinkingContent.write(value.v1, buf)
                Unit
            }
        }.let { /* this makes the `when` an expression, which ensures it is exhaustive */ }
    }
}

sealed class ProviderException : kotlin.Exception() {
    class Authentication(
        val v1: kotlin.String,
    ) : ProviderException() {
        override val message
            get() = "v1=${ v1 }"
    }

    class ContextLengthExceeded(
        val v1: kotlin.String,
    ) : ProviderException() {
        override val message
            get() = "v1=${ v1 }"
    }

    class RateLimitExceeded(
        val v1: kotlin.String,
    ) : ProviderException() {
        override val message
            get() = "v1=${ v1 }"
    }

    class ServerException(
        val v1: kotlin.String,
    ) : ProviderException() {
        override val message
            get() = "v1=${ v1 }"
    }

    class RequestFailed(
        val v1: kotlin.String,
    ) : ProviderException() {
        override val message
            get() = "v1=${ v1 }"
    }

    class ExecutionException(
        val v1: kotlin.String,
    ) : ProviderException() {
        override val message
            get() = "v1=${ v1 }"
    }

    class UsageException(
        val v1: kotlin.String,
    ) : ProviderException() {
        override val message
            get() = "v1=${ v1 }"
    }

    class ResponseParseException(
        val v1: kotlin.String,
    ) : ProviderException() {
        override val message
            get() = "v1=${ v1 }"
    }

    companion object ErrorHandler : UniffiRustCallStatusErrorHandler<ProviderException> {
        override fun lift(error_buf: RustBuffer.ByValue): ProviderException = FfiConverterTypeProviderError.lift(error_buf)
    }
}

/**
 * @suppress
 */
public object FfiConverterTypeProviderError : FfiConverterRustBuffer<ProviderException> {
    override fun read(buf: ByteBuffer): ProviderException =
        when (buf.getInt()) {
            1 ->
                ProviderException.Authentication(
                    FfiConverterString.read(buf),
                )
            2 ->
                ProviderException.ContextLengthExceeded(
                    FfiConverterString.read(buf),
                )
            3 ->
                ProviderException.RateLimitExceeded(
                    FfiConverterString.read(buf),
                )
            4 ->
                ProviderException.ServerException(
                    FfiConverterString.read(buf),
                )
            5 ->
                ProviderException.RequestFailed(
                    FfiConverterString.read(buf),
                )
            6 ->
                ProviderException.ExecutionException(
                    FfiConverterString.read(buf),
                )
            7 ->
                ProviderException.UsageException(
                    FfiConverterString.read(buf),
                )
            8 ->
                ProviderException.ResponseParseException(
                    FfiConverterString.read(buf),
                )
            else -> throw RuntimeException("invalid error enum value, something is very wrong!!")
        }

    override fun allocationSize(value: ProviderException): ULong =
        when (value) {
            is ProviderException.Authentication -> (
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                4UL +
                    FfiConverterString.allocationSize(value.v1)
            )
            is ProviderException.ContextLengthExceeded -> (
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                4UL +
                    FfiConverterString.allocationSize(value.v1)
            )
            is ProviderException.RateLimitExceeded -> (
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                4UL +
                    FfiConverterString.allocationSize(value.v1)
            )
            is ProviderException.ServerException -> (
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                4UL +
                    FfiConverterString.allocationSize(value.v1)
            )
            is ProviderException.RequestFailed -> (
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                4UL +
                    FfiConverterString.allocationSize(value.v1)
            )
            is ProviderException.ExecutionException -> (
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                4UL +
                    FfiConverterString.allocationSize(value.v1)
            )
            is ProviderException.UsageException -> (
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                4UL +
                    FfiConverterString.allocationSize(value.v1)
            )
            is ProviderException.ResponseParseException -> (
                // Add the size for the Int that specifies the variant plus the size needed for all fields
                4UL +
                    FfiConverterString.allocationSize(value.v1)
            )
        }

    override fun write(
        value: ProviderException,
        buf: ByteBuffer,
    ) {
        when (value) {
            is ProviderException.Authentication -> {
                buf.putInt(1)
                FfiConverterString.write(value.v1, buf)
                Unit
            }
            is ProviderException.ContextLengthExceeded -> {
                buf.putInt(2)
                FfiConverterString.write(value.v1, buf)
                Unit
            }
            is ProviderException.RateLimitExceeded -> {
                buf.putInt(3)
                FfiConverterString.write(value.v1, buf)
                Unit
            }
            is ProviderException.ServerException -> {
                buf.putInt(4)
                FfiConverterString.write(value.v1, buf)
                Unit
            }
            is ProviderException.RequestFailed -> {
                buf.putInt(5)
                FfiConverterString.write(value.v1, buf)
                Unit
            }
            is ProviderException.ExecutionException -> {
                buf.putInt(6)
                FfiConverterString.write(value.v1, buf)
                Unit
            }
            is ProviderException.UsageException -> {
                buf.putInt(7)
                FfiConverterString.write(value.v1, buf)
                Unit
            }
            is ProviderException.ResponseParseException -> {
                buf.putInt(8)
                FfiConverterString.write(value.v1, buf)
                Unit
            }
        }.let { /* this makes the `when` an expression, which ensures it is exhaustive */ }
    }
}

enum class Role {
    USER,
    ASSISTANT,
    ;

    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeRole : FfiConverterRustBuffer<Role> {
    // Variant discriminants on the wire are 1-based; Kotlin ordinals are 0-based.
    override fun read(buf: ByteBuffer) =
        try {
            Role.values()[buf.getInt() - 1]
        } catch (e: IndexOutOfBoundsException) {
            throw RuntimeException("invalid enum value, something is very wrong!!", e)
        }

    override fun allocationSize(value: Role) = 4UL

    override fun write(
        value: Role,
        buf: ByteBuffer,
    ) {
        buf.putInt(value.ordinal + 1)
    }
}

enum class ToolApprovalMode {
    AUTO,
    MANUAL,
    SMART,
    ;

    companion object
}

/**
 * @suppress
 */
public object FfiConverterTypeToolApprovalMode : FfiConverterRustBuffer<ToolApprovalMode> {
    // Variant discriminants on the wire are 1-based; Kotlin ordinals are 0-based.
    override fun read(buf: ByteBuffer) =
        try {
            ToolApprovalMode.values()[buf.getInt() - 1]
        } catch (e: IndexOutOfBoundsException) {
            throw RuntimeException("invalid enum value, something is very wrong!!", e)
        }

    override fun allocationSize(value: ToolApprovalMode) = 4UL

    override fun write(
        value: ToolApprovalMode,
        buf: ByteBuffer,
    ) {
        buf.putInt(value.ordinal + 1)
    }
}

sealed class ToolException : kotlin.Exception() {
    class InvalidParameters(
        val v1: kotlin.String,
    ) : ToolException() {
        override val message
            get() = "v1=${ v1 }"
    }

    class ExecutionException(
        val v1: kotlin.String,
    ) : ToolException() {
        override val message
            get() = "v1=${ v1 }"
    }

    class SchemaException(
        val v1: kotlin.String,
    ) : ToolException() {
        override val message
            get() = "v1=${ v1 }"
    }

    class NotFound(
        val v1: kotlin.String,
    ) : ToolException() {
        override val message
            get() = "v1=${ v1 }"
    }

    companion object ErrorHandler : UniffiRustCallStatusErrorHandler<ToolException> {
        override fun lift(error_buf: RustBuffer.ByValue): ToolException
= FfiConverterTypeToolError.lift(error_buf) - } -} - -/** - * @suppress - */ -public object FfiConverterTypeToolError : FfiConverterRustBuffer { - override fun read(buf: ByteBuffer): ToolException = - when (buf.getInt()) { - 1 -> - ToolException.InvalidParameters( - FfiConverterString.read(buf), - ) - 2 -> - ToolException.ExecutionException( - FfiConverterString.read(buf), - ) - 3 -> - ToolException.SchemaException( - FfiConverterString.read(buf), - ) - 4 -> - ToolException.NotFound( - FfiConverterString.read(buf), - ) - else -> throw RuntimeException("invalid error enum value, something is very wrong!!") - } - - override fun allocationSize(value: ToolException): ULong = - when (value) { - is ToolException.InvalidParameters -> ( - // Add the size for the Int that specifies the variant plus the size needed for all fields - 4UL + - FfiConverterString.allocationSize(value.v1) - ) - is ToolException.ExecutionException -> ( - // Add the size for the Int that specifies the variant plus the size needed for all fields - 4UL + - FfiConverterString.allocationSize(value.v1) - ) - is ToolException.SchemaException -> ( - // Add the size for the Int that specifies the variant plus the size needed for all fields - 4UL + - FfiConverterString.allocationSize(value.v1) - ) - is ToolException.NotFound -> ( - // Add the size for the Int that specifies the variant plus the size needed for all fields - 4UL + - FfiConverterString.allocationSize(value.v1) - ) - } - - override fun write( - value: ToolException, - buf: ByteBuffer, - ) { - when (value) { - is ToolException.InvalidParameters -> { - buf.putInt(1) - FfiConverterString.write(value.v1, buf) - Unit - } - is ToolException.ExecutionException -> { - buf.putInt(2) - FfiConverterString.write(value.v1, buf) - Unit - } - is ToolException.SchemaException -> { - buf.putInt(3) - FfiConverterString.write(value.v1, buf) - Unit - } - is ToolException.NotFound -> { - buf.putInt(4) - FfiConverterString.write(value.v1, buf) - Unit - } - }.let { /* 
this makes the `when` an expression, which ensures it is exhaustive */ } - } -} - -/** - * @suppress - */ -public object FfiConverterOptionalUInt : FfiConverterRustBuffer { - override fun read(buf: ByteBuffer): kotlin.UInt? { - if (buf.get().toInt() == 0) { - return null - } - return FfiConverterUInt.read(buf) - } - - override fun allocationSize(value: kotlin.UInt?): ULong { - if (value == null) { - return 1UL - } else { - return 1UL + FfiConverterUInt.allocationSize(value) - } - } - - override fun write( - value: kotlin.UInt?, - buf: ByteBuffer, - ) { - if (value == null) { - buf.put(0) - } else { - buf.put(1) - FfiConverterUInt.write(value, buf) - } - } -} - -/** - * @suppress - */ -public object FfiConverterOptionalInt : FfiConverterRustBuffer { - override fun read(buf: ByteBuffer): kotlin.Int? { - if (buf.get().toInt() == 0) { - return null - } - return FfiConverterInt.read(buf) - } - - override fun allocationSize(value: kotlin.Int?): ULong { - if (value == null) { - return 1UL - } else { - return 1UL + FfiConverterInt.allocationSize(value) - } - } - - override fun write( - value: kotlin.Int?, - buf: ByteBuffer, - ) { - if (value == null) { - buf.put(0) - } else { - buf.put(1) - FfiConverterInt.write(value, buf) - } - } -} - -/** - * @suppress - */ -public object FfiConverterOptionalFloat : FfiConverterRustBuffer { - override fun read(buf: ByteBuffer): kotlin.Float? { - if (buf.get().toInt() == 0) { - return null - } - return FfiConverterFloat.read(buf) - } - - override fun allocationSize(value: kotlin.Float?): ULong { - if (value == null) { - return 1UL - } else { - return 1UL + FfiConverterFloat.allocationSize(value) - } - } - - override fun write( - value: kotlin.Float?, - buf: ByteBuffer, - ) { - if (value == null) { - buf.put(0) - } else { - buf.put(1) - FfiConverterFloat.write(value, buf) - } - } -} - -/** - * @suppress - */ -public object FfiConverterOptionalDouble : FfiConverterRustBuffer { - override fun read(buf: ByteBuffer): kotlin.Double? 
{ - if (buf.get().toInt() == 0) { - return null - } - return FfiConverterDouble.read(buf) - } - - override fun allocationSize(value: kotlin.Double?): ULong { - if (value == null) { - return 1UL - } else { - return 1UL + FfiConverterDouble.allocationSize(value) - } - } - - override fun write( - value: kotlin.Double?, - buf: ByteBuffer, - ) { - if (value == null) { - buf.put(0) - } else { - buf.put(1) - FfiConverterDouble.write(value, buf) - } - } -} - -/** - * @suppress - */ -public object FfiConverterOptionalString : FfiConverterRustBuffer { - override fun read(buf: ByteBuffer): kotlin.String? { - if (buf.get().toInt() == 0) { - return null - } - return FfiConverterString.read(buf) - } - - override fun allocationSize(value: kotlin.String?): ULong { - if (value == null) { - return 1UL - } else { - return 1UL + FfiConverterString.allocationSize(value) - } - } - - override fun write( - value: kotlin.String?, - buf: ByteBuffer, - ) { - if (value == null) { - buf.put(0) - } else { - buf.put(1) - FfiConverterString.write(value, buf) - } - } -} - -/** - * @suppress - */ -public object FfiConverterSequenceTypeExtensionConfig : FfiConverterRustBuffer> { - override fun read(buf: ByteBuffer): List { - val len = buf.getInt() - return List(len) { - FfiConverterTypeExtensionConfig.read(buf) - } - } - - override fun allocationSize(value: List): ULong { - val sizeForLength = 4UL - val sizeForItems = value.map { FfiConverterTypeExtensionConfig.allocationSize(it) }.sum() - return sizeForLength + sizeForItems - } - - override fun write( - value: List, - buf: ByteBuffer, - ) { - buf.putInt(value.size) - value.iterator().forEach { - FfiConverterTypeExtensionConfig.write(it, buf) - } - } -} - -/** - * @suppress - */ -public object FfiConverterSequenceTypeMessage : FfiConverterRustBuffer> { - override fun read(buf: ByteBuffer): List { - val len = buf.getInt() - return List(len) { - FfiConverterTypeMessage.read(buf) - } - } - - override fun allocationSize(value: List): ULong { - val 
sizeForLength = 4UL - val sizeForItems = value.map { FfiConverterTypeMessage.allocationSize(it) }.sum() - return sizeForLength + sizeForItems - } - - override fun write( - value: List, - buf: ByteBuffer, - ) { - buf.putInt(value.size) - value.iterator().forEach { - FfiConverterTypeMessage.write(it, buf) - } - } -} - -/** - * @suppress - */ -public object FfiConverterSequenceTypeToolConfig : FfiConverterRustBuffer> { - override fun read(buf: ByteBuffer): List { - val len = buf.getInt() - return List(len) { - FfiConverterTypeToolConfig.read(buf) - } - } - - override fun allocationSize(value: List): ULong { - val sizeForLength = 4UL - val sizeForItems = value.map { FfiConverterTypeToolConfig.allocationSize(it) }.sum() - return sizeForLength + sizeForItems - } - - override fun write( - value: List, - buf: ByteBuffer, - ) { - buf.putInt(value.size) - value.iterator().forEach { - FfiConverterTypeToolConfig.write(it, buf) - } - } -} - -/** - * @suppress - */ -public object FfiConverterSequenceTypeMessageContent : FfiConverterRustBuffer> { - override fun read(buf: ByteBuffer): List { - val len = buf.getInt() - return List(len) { - FfiConverterTypeMessageContent.read(buf) - } - } - - override fun allocationSize(value: List): ULong { - val sizeForLength = 4UL - val sizeForItems = value.map { FfiConverterTypeMessageContent.allocationSize(it) }.sum() - return sizeForLength + sizeForItems - } - - override fun write( - value: List, - buf: ByteBuffer, - ) { - buf.putInt(value.size) - value.iterator().forEach { - FfiConverterTypeMessageContent.write(it, buf) - } - } -} - -/** - * Typealias from the type name used in the UDL file to the builtin type. This - * is needed because the UDL type name is used in function/method signatures. - * It's also what we have an external type that references a custom type. 
- */ -public typealias CompletionRequest = kotlin.String -public typealias FfiConverterTypeCompletionRequest = FfiConverterString - -/** - * Typealias from the type name used in the UDL file to the builtin type. This - * is needed because the UDL type name is used in function/method signatures. - * It's also what we have an external type that references a custom type. - */ -public typealias Contents = List -public typealias FfiConverterTypeContents = FfiConverterSequenceTypeMessageContent - -/** - * Typealias from the type name used in the UDL file to the builtin type. This - * is needed because the UDL type name is used in function/method signatures. - * It's also what we have an external type that references a custom type. - */ -public typealias ToolRequestToolCall = kotlin.String -public typealias FfiConverterTypeToolRequestToolCall = FfiConverterString - -/** - * Typealias from the type name used in the UDL file to the builtin type. This - * is needed because the UDL type name is used in function/method signatures. - * It's also what we have an external type that references a custom type. - */ -public typealias ToolResponseToolResult = kotlin.String -public typealias FfiConverterTypeToolResponseToolResult = FfiConverterString - -/** - * Typealias from the type name used in the UDL file to the builtin type. This - * is needed because the UDL type name is used in function/method signatures. - * It's also what we have an external type that references a custom type. 
- */ -public typealias Value = kotlin.String -public typealias FfiConverterTypeValue = FfiConverterString - -/** - * Public API for the Goose LLM completion function - */ -@Throws(CompletionException::class) -@Suppress("ASSIGNED_BUT_NEVER_ACCESSED_VARIABLE") -suspend fun `completion`(`req`: CompletionRequest): CompletionResponse = - uniffiRustCallAsync( - UniffiLib.INSTANCE.uniffi_goose_llm_fn_func_completion(FfiConverterTypeCompletionRequest.lower(`req`)), - { future, callback, continuation -> UniffiLib.INSTANCE.ffi_goose_llm_rust_future_poll_rust_buffer(future, callback, continuation) }, - { future, continuation -> UniffiLib.INSTANCE.ffi_goose_llm_rust_future_complete_rust_buffer(future, continuation) }, - { future -> UniffiLib.INSTANCE.ffi_goose_llm_rust_future_free_rust_buffer(future) }, - // lift function - { FfiConverterTypeCompletionResponse.lift(it) }, - // Error FFI converter - CompletionException.ErrorHandler, - ) - -fun `createCompletionRequest`( - `providerName`: kotlin.String, - `providerConfig`: Value, - `modelConfig`: ModelConfig, - `systemPreamble`: kotlin.String? = null, - `systemPromptOverride`: kotlin.String? = null, - `messages`: List, - `extensions`: List, - `requestId`: kotlin.String? 
= null, -): CompletionRequest = - FfiConverterTypeCompletionRequest.lift( - uniffiRustCall { _status -> - UniffiLib.INSTANCE.uniffi_goose_llm_fn_func_create_completion_request( - FfiConverterString.lower(`providerName`), - FfiConverterTypeValue.lower(`providerConfig`), - FfiConverterTypeModelConfig.lower(`modelConfig`), - FfiConverterOptionalString.lower(`systemPreamble`), - FfiConverterOptionalString.lower(`systemPromptOverride`), - FfiConverterSequenceTypeMessage.lower(`messages`), - FfiConverterSequenceTypeExtensionConfig.lower(`extensions`), - FfiConverterOptionalString.lower(`requestId`), - _status, - ) - }, - ) - -fun `createToolConfig`( - `name`: kotlin.String, - `description`: kotlin.String, - `inputSchema`: Value, - `approvalMode`: ToolApprovalMode, -): ToolConfig = - FfiConverterTypeToolConfig.lift( - uniffiRustCall { _status -> - UniffiLib.INSTANCE.uniffi_goose_llm_fn_func_create_tool_config( - FfiConverterString.lower(`name`), - FfiConverterString.lower(`description`), - FfiConverterTypeValue.lower(`inputSchema`), - FfiConverterTypeToolApprovalMode.lower(`approvalMode`), - _status, - ) - }, - ) - -/** - * Generates a short (≤4 words) session name - */ -@Throws(ProviderException::class) -@Suppress("ASSIGNED_BUT_NEVER_ACCESSED_VARIABLE") -suspend fun `generateSessionName`( - `providerName`: kotlin.String, - `providerConfig`: Value, - `messages`: List, - `requestId`: kotlin.String? 
= null, -): kotlin.String = - uniffiRustCallAsync( - UniffiLib.INSTANCE.uniffi_goose_llm_fn_func_generate_session_name( - FfiConverterString.lower(`providerName`), - FfiConverterTypeValue.lower(`providerConfig`), - FfiConverterSequenceTypeMessage.lower(`messages`), - FfiConverterOptionalString.lower(`requestId`), - ), - { future, callback, continuation -> UniffiLib.INSTANCE.ffi_goose_llm_rust_future_poll_rust_buffer(future, callback, continuation) }, - { future, continuation -> UniffiLib.INSTANCE.ffi_goose_llm_rust_future_complete_rust_buffer(future, continuation) }, - { future -> UniffiLib.INSTANCE.ffi_goose_llm_rust_future_free_rust_buffer(future) }, - // lift function - { FfiConverterString.lift(it) }, - // Error FFI converter - ProviderException.ErrorHandler, - ) - -/** - * Generates a structured output based on the provided schema, - * system prompt and user messages. - */ -@Throws(ProviderException::class) -@Suppress("ASSIGNED_BUT_NEVER_ACCESSED_VARIABLE") -suspend fun `generateStructuredOutputs`( - `providerName`: kotlin.String, - `providerConfig`: Value, - `systemPrompt`: kotlin.String, - `messages`: List, - `schema`: Value, - `requestId`: kotlin.String? 
= null, -): ProviderExtractResponse = - uniffiRustCallAsync( - UniffiLib.INSTANCE.uniffi_goose_llm_fn_func_generate_structured_outputs( - FfiConverterString.lower(`providerName`), - FfiConverterTypeValue.lower(`providerConfig`), - FfiConverterString.lower(`systemPrompt`), - FfiConverterSequenceTypeMessage.lower(`messages`), - FfiConverterTypeValue.lower(`schema`), - FfiConverterOptionalString.lower(`requestId`), - ), - { future, callback, continuation -> UniffiLib.INSTANCE.ffi_goose_llm_rust_future_poll_rust_buffer(future, callback, continuation) }, - { future, continuation -> UniffiLib.INSTANCE.ffi_goose_llm_rust_future_complete_rust_buffer(future, continuation) }, - { future -> UniffiLib.INSTANCE.ffi_goose_llm_rust_future_free_rust_buffer(future) }, - // lift function - { FfiConverterTypeProviderExtractResponse.lift(it) }, - // Error FFI converter - ProviderException.ErrorHandler, - ) - -/** - * Generates a tooltip summarizing the last two messages in the session, - * including any tool calls or results. - */ -@Throws(ProviderException::class) -@Suppress("ASSIGNED_BUT_NEVER_ACCESSED_VARIABLE") -suspend fun `generateTooltip`( - `providerName`: kotlin.String, - `providerConfig`: Value, - `messages`: List, - `requestId`: kotlin.String? 
= null, -): kotlin.String = - uniffiRustCallAsync( - UniffiLib.INSTANCE.uniffi_goose_llm_fn_func_generate_tooltip( - FfiConverterString.lower(`providerName`), - FfiConverterTypeValue.lower(`providerConfig`), - FfiConverterSequenceTypeMessage.lower(`messages`), - FfiConverterOptionalString.lower(`requestId`), - ), - { future, callback, continuation -> UniffiLib.INSTANCE.ffi_goose_llm_rust_future_poll_rust_buffer(future, callback, continuation) }, - { future, continuation -> UniffiLib.INSTANCE.ffi_goose_llm_rust_future_complete_rust_buffer(future, continuation) }, - { future -> UniffiLib.INSTANCE.ffi_goose_llm_rust_future_free_rust_buffer(future) }, - // lift function - { FfiConverterString.lift(it) }, - // Error FFI converter - ProviderException.ErrorHandler, - ) - -fun `printMessages`(`messages`: List) = - uniffiRustCall { _status -> - UniffiLib.INSTANCE.uniffi_goose_llm_fn_func_print_messages( - FfiConverterSequenceTypeMessage.lower(`messages`), - _status, - ) - } diff --git a/bindings/python/goose_llm.py b/bindings/python/goose_llm.py deleted file mode 100644 index 6fe87ab7d7b6..000000000000 --- a/bindings/python/goose_llm.py +++ /dev/null @@ -1,3001 +0,0 @@ - - -# This file was autogenerated by some hot garbage in the `uniffi` crate. -# Trust me, you don't want to mess with it! - -# Common helper code. -# -# Ideally this would live in a separate .py file where it can be unittested etc -# in isolation, and perhaps even published as a re-useable package. -# -# However, it's important that the details of how this helper code works (e.g. the -# way that different builtin types are passed across the FFI) exactly match what's -# expected by the rust code on the other side of the interface. In practice right -# now that means coming from the exact some version of `uniffi` that was used to -# compile the rust component. The easiest way to ensure this is to bundle the Python -# helpers directly inline like we're doing here. 
- -from __future__ import annotations -import os -import sys -import ctypes -import enum -import struct -import contextlib -import datetime -import threading -import itertools -import traceback -import typing -import asyncio -import platform - -# Used for default argument values -_DEFAULT = object() # type: typing.Any - - -class _UniffiRustBuffer(ctypes.Structure): - _fields_ = [ - ("capacity", ctypes.c_uint64), - ("len", ctypes.c_uint64), - ("data", ctypes.POINTER(ctypes.c_char)), - ] - - @staticmethod - def default(): - return _UniffiRustBuffer(0, 0, None) - - @staticmethod - def alloc(size): - return _uniffi_rust_call(_UniffiLib.ffi_goose_llm_rustbuffer_alloc, size) - - @staticmethod - def reserve(rbuf, additional): - return _uniffi_rust_call(_UniffiLib.ffi_goose_llm_rustbuffer_reserve, rbuf, additional) - - def free(self): - return _uniffi_rust_call(_UniffiLib.ffi_goose_llm_rustbuffer_free, self) - - def __str__(self): - return "_UniffiRustBuffer(capacity={}, len={}, data={})".format( - self.capacity, - self.len, - self.data[0:self.len] - ) - - @contextlib.contextmanager - def alloc_with_builder(*args): - """Context-manger to allocate a buffer using a _UniffiRustBufferBuilder. - - The allocated buffer will be automatically freed if an error occurs, ensuring that - we don't accidentally leak it. - """ - builder = _UniffiRustBufferBuilder() - try: - yield builder - except: - builder.discard() - raise - - @contextlib.contextmanager - def consume_with_stream(self): - """Context-manager to consume a buffer using a _UniffiRustBufferStream. - - The _UniffiRustBuffer will be freed once the context-manager exits, ensuring that we don't - leak it even if an error occurs. 
- """ - try: - s = _UniffiRustBufferStream.from_rust_buffer(self) - yield s - if s.remaining() != 0: - raise RuntimeError("junk data left in buffer at end of consume_with_stream") - finally: - self.free() - - @contextlib.contextmanager - def read_with_stream(self): - """Context-manager to read a buffer using a _UniffiRustBufferStream. - - This is like consume_with_stream, but doesn't free the buffer afterwards. - It should only be used with borrowed `_UniffiRustBuffer` data. - """ - s = _UniffiRustBufferStream.from_rust_buffer(self) - yield s - if s.remaining() != 0: - raise RuntimeError("junk data left in buffer at end of read_with_stream") - -class _UniffiForeignBytes(ctypes.Structure): - _fields_ = [ - ("len", ctypes.c_int32), - ("data", ctypes.POINTER(ctypes.c_char)), - ] - - def __str__(self): - return "_UniffiForeignBytes(len={}, data={})".format(self.len, self.data[0:self.len]) - - -class _UniffiRustBufferStream: - """ - Helper for structured reading of bytes from a _UniffiRustBuffer - """ - - def __init__(self, data, len): - self.data = data - self.len = len - self.offset = 0 - - @classmethod - def from_rust_buffer(cls, buf): - return cls(buf.data, buf.len) - - def remaining(self): - return self.len - self.offset - - def _unpack_from(self, size, format): - if self.offset + size > self.len: - raise InternalError("read past end of rust buffer") - value = struct.unpack(format, self.data[self.offset:self.offset+size])[0] - self.offset += size - return value - - def read(self, size): - if self.offset + size > self.len: - raise InternalError("read past end of rust buffer") - data = self.data[self.offset:self.offset+size] - self.offset += size - return data - - def read_i8(self): - return self._unpack_from(1, ">b") - - def read_u8(self): - return self._unpack_from(1, ">B") - - def read_i16(self): - return self._unpack_from(2, ">h") - - def read_u16(self): - return self._unpack_from(2, ">H") - - def read_i32(self): - return self._unpack_from(4, ">i") - - def 
read_u32(self): - return self._unpack_from(4, ">I") - - def read_i64(self): - return self._unpack_from(8, ">q") - - def read_u64(self): - return self._unpack_from(8, ">Q") - - def read_float(self): - v = self._unpack_from(4, ">f") - return v - - def read_double(self): - return self._unpack_from(8, ">d") - -class _UniffiRustBufferBuilder: - """ - Helper for structured writing of bytes into a _UniffiRustBuffer. - """ - - def __init__(self): - self.rbuf = _UniffiRustBuffer.alloc(16) - self.rbuf.len = 0 - - def finalize(self): - rbuf = self.rbuf - self.rbuf = None - return rbuf - - def discard(self): - if self.rbuf is not None: - rbuf = self.finalize() - rbuf.free() - - @contextlib.contextmanager - def _reserve(self, num_bytes): - if self.rbuf.len + num_bytes > self.rbuf.capacity: - self.rbuf = _UniffiRustBuffer.reserve(self.rbuf, num_bytes) - yield None - self.rbuf.len += num_bytes - - def _pack_into(self, size, format, value): - with self._reserve(size): - # XXX TODO: I feel like I should be able to use `struct.pack_into` here but can't figure it out. 
- for i, byte in enumerate(struct.pack(format, value)): - self.rbuf.data[self.rbuf.len + i] = byte - - def write(self, value): - with self._reserve(len(value)): - for i, byte in enumerate(value): - self.rbuf.data[self.rbuf.len + i] = byte - - def write_i8(self, v): - self._pack_into(1, ">b", v) - - def write_u8(self, v): - self._pack_into(1, ">B", v) - - def write_i16(self, v): - self._pack_into(2, ">h", v) - - def write_u16(self, v): - self._pack_into(2, ">H", v) - - def write_i32(self, v): - self._pack_into(4, ">i", v) - - def write_u32(self, v): - self._pack_into(4, ">I", v) - - def write_i64(self, v): - self._pack_into(8, ">q", v) - - def write_u64(self, v): - self._pack_into(8, ">Q", v) - - def write_float(self, v): - self._pack_into(4, ">f", v) - - def write_double(self, v): - self._pack_into(8, ">d", v) - - def write_c_size_t(self, v): - self._pack_into(ctypes.sizeof(ctypes.c_size_t) , "@N", v) -# A handful of classes and functions to support the generated data structures. -# This would be a good candidate for isolating in its own ffi-support lib. - -class InternalError(Exception): - pass - -class _UniffiRustCallStatus(ctypes.Structure): - """ - Error runtime. 
- """ - _fields_ = [ - ("code", ctypes.c_int8), - ("error_buf", _UniffiRustBuffer), - ] - - # These match the values from the uniffi::rustcalls module - CALL_SUCCESS = 0 - CALL_ERROR = 1 - CALL_UNEXPECTED_ERROR = 2 - - @staticmethod - def default(): - return _UniffiRustCallStatus(code=_UniffiRustCallStatus.CALL_SUCCESS, error_buf=_UniffiRustBuffer.default()) - - def __str__(self): - if self.code == _UniffiRustCallStatus.CALL_SUCCESS: - return "_UniffiRustCallStatus(CALL_SUCCESS)" - elif self.code == _UniffiRustCallStatus.CALL_ERROR: - return "_UniffiRustCallStatus(CALL_ERROR)" - elif self.code == _UniffiRustCallStatus.CALL_UNEXPECTED_ERROR: - return "_UniffiRustCallStatus(CALL_UNEXPECTED_ERROR)" - else: - return "_UniffiRustCallStatus()" - -def _uniffi_rust_call(fn, *args): - # Call a rust function - return _uniffi_rust_call_with_error(None, fn, *args) - -def _uniffi_rust_call_with_error(error_ffi_converter, fn, *args): - # Call a rust function and handle any errors - # - # This function is used for rust calls that return Result<> and therefore can set the CALL_ERROR status code. - # error_ffi_converter must be set to the _UniffiConverter for the error class that corresponds to the result. 
- call_status = _UniffiRustCallStatus.default() - - args_with_error = args + (ctypes.byref(call_status),) - result = fn(*args_with_error) - _uniffi_check_call_status(error_ffi_converter, call_status) - return result - -def _uniffi_check_call_status(error_ffi_converter, call_status): - if call_status.code == _UniffiRustCallStatus.CALL_SUCCESS: - pass - elif call_status.code == _UniffiRustCallStatus.CALL_ERROR: - if error_ffi_converter is None: - call_status.error_buf.free() - raise InternalError("_uniffi_rust_call_with_error: CALL_ERROR, but error_ffi_converter is None") - else: - raise error_ffi_converter.lift(call_status.error_buf) - elif call_status.code == _UniffiRustCallStatus.CALL_UNEXPECTED_ERROR: - # When the rust code sees a panic, it tries to construct a _UniffiRustBuffer - # with the message. But if that code panics, then it just sends back - # an empty buffer. - if call_status.error_buf.len > 0: - msg = _UniffiConverterString.lift(call_status.error_buf) - else: - msg = "Unknown rust panic" - raise InternalError(msg) - else: - raise InternalError("Invalid _UniffiRustCallStatus code: {}".format( - call_status.code)) - -def _uniffi_trait_interface_call(call_status, make_call, write_return_value): - try: - return write_return_value(make_call()) - except Exception as e: - call_status.code = _UniffiRustCallStatus.CALL_UNEXPECTED_ERROR - call_status.error_buf = _UniffiConverterString.lower(repr(e)) - -def _uniffi_trait_interface_call_with_error(call_status, make_call, write_return_value, error_type, lower_error): - try: - try: - return write_return_value(make_call()) - except error_type as e: - call_status.code = _UniffiRustCallStatus.CALL_ERROR - call_status.error_buf = lower_error(e) - except Exception as e: - call_status.code = _UniffiRustCallStatus.CALL_UNEXPECTED_ERROR - call_status.error_buf = _UniffiConverterString.lower(repr(e)) -class _UniffiHandleMap: - """ - A map where inserting, getting and removing data is synchronized with a lock. 
- """ - - def __init__(self): - # type Handle = int - self._map = {} # type: Dict[Handle, Any] - self._lock = threading.Lock() - self._counter = itertools.count() - - def insert(self, obj): - with self._lock: - handle = next(self._counter) - self._map[handle] = obj - return handle - - def get(self, handle): - try: - with self._lock: - return self._map[handle] - except KeyError: - raise InternalError("_UniffiHandleMap.get: Invalid handle") - - def remove(self, handle): - try: - with self._lock: - return self._map.pop(handle) - except KeyError: - raise InternalError("_UniffiHandleMap.remove: Invalid handle") - - def __len__(self): - return len(self._map) -# Types conforming to `_UniffiConverterPrimitive` pass themselves directly over the FFI. -class _UniffiConverterPrimitive: - @classmethod - def lift(cls, value): - return value - - @classmethod - def lower(cls, value): - return value - -class _UniffiConverterPrimitiveInt(_UniffiConverterPrimitive): - @classmethod - def check_lower(cls, value): - try: - value = value.__index__() - except Exception: - raise TypeError("'{}' object cannot be interpreted as an integer".format(type(value).__name__)) - if not isinstance(value, int): - raise TypeError("__index__ returned non-int (type {})".format(type(value).__name__)) - if not cls.VALUE_MIN <= value < cls.VALUE_MAX: - raise ValueError("{} requires {} <= value < {}".format(cls.CLASS_NAME, cls.VALUE_MIN, cls.VALUE_MAX)) - -class _UniffiConverterPrimitiveFloat(_UniffiConverterPrimitive): - @classmethod - def check_lower(cls, value): - try: - value = value.__float__() - except Exception: - raise TypeError("must be real number, not {}".format(type(value).__name__)) - if not isinstance(value, float): - raise TypeError("__float__ returned non-float (type {})".format(type(value).__name__)) - -# Helper class for wrapper types that will always go through a _UniffiRustBuffer. -# Classes should inherit from this and implement the `read` and `write` static methods. 
-class _UniffiConverterRustBuffer: - @classmethod - def lift(cls, rbuf): - with rbuf.consume_with_stream() as stream: - return cls.read(stream) - - @classmethod - def lower(cls, value): - with _UniffiRustBuffer.alloc_with_builder() as builder: - cls.write(value, builder) - return builder.finalize() - -# Contains loading, initialization code, and the FFI Function declarations. -# Define some ctypes FFI types that we use in the library - -""" -Function pointer for a Rust task, which a callback function that takes a opaque pointer -""" -_UNIFFI_RUST_TASK = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_int8) - -def _uniffi_future_callback_t(return_type): - """ - Factory function to create callback function types for async functions - """ - return ctypes.CFUNCTYPE(None, ctypes.c_uint64, return_type, _UniffiRustCallStatus) - -def _uniffi_load_indirect(): - """ - This is how we find and load the dynamic library provided by the component. - For now we just look it up by name. - """ - if sys.platform == "darwin": - libname = "lib{}.dylib" - elif sys.platform.startswith("win"): - # As of python3.8, ctypes does not seem to search $PATH when loading DLLs. - # We could use `os.add_dll_directory` to configure the search path, but - # it doesn't feel right to mess with application-wide settings. Let's - # assume that the `.dll` is next to the `.py` file and load by full path. 
- libname = os.path.join( - os.path.dirname(__file__), - "{}.dll", - ) - else: - # Anything else must be an ELF platform - Linux, *BSD, Solaris/illumos - libname = "lib{}.so" - - libname = libname.format("goose_llm") - path = os.path.join(os.path.dirname(__file__), libname) - lib = ctypes.cdll.LoadLibrary(path) - return lib - -def _uniffi_check_contract_api_version(lib): - # Get the bindings contract version from our ComponentInterface - bindings_contract_version = 29 - # Get the scaffolding contract version by calling the into the dylib - scaffolding_contract_version = lib.ffi_goose_llm_uniffi_contract_version() - if bindings_contract_version != scaffolding_contract_version: - raise InternalError("UniFFI contract version mismatch: try cleaning and rebuilding your project") - -def _uniffi_check_api_checksums(lib): - if lib.uniffi_goose_llm_checksum_func_completion() != 47457: - raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project") - if lib.uniffi_goose_llm_checksum_func_create_completion_request() != 51008: - raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project") - if lib.uniffi_goose_llm_checksum_func_create_tool_config() != 22809: - raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project") - if lib.uniffi_goose_llm_checksum_func_generate_session_name() != 9810: - raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project") - if lib.uniffi_goose_llm_checksum_func_generate_tooltip() != 15466: - raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project") - if lib.uniffi_goose_llm_checksum_func_print_messages() != 30278: - raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project") - -# A ctypes library to expose the extern-C FFI definitions. -# This is an implementation detail which will be called internally by the public API. 
- -_UniffiLib = _uniffi_load_indirect() -_UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK = ctypes.CFUNCTYPE(None,ctypes.c_uint64,ctypes.c_int8, -) -_UNIFFI_FOREIGN_FUTURE_FREE = ctypes.CFUNCTYPE(None,ctypes.c_uint64, -) -_UNIFFI_CALLBACK_INTERFACE_FREE = ctypes.CFUNCTYPE(None,ctypes.c_uint64, -) -class _UniffiForeignFuture(ctypes.Structure): - _fields_ = [ - ("handle", ctypes.c_uint64), - ("free", _UNIFFI_FOREIGN_FUTURE_FREE), - ] -class _UniffiForeignFutureStructU8(ctypes.Structure): - _fields_ = [ - ("return_value", ctypes.c_uint8), - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_U8 = ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructU8, -) -class _UniffiForeignFutureStructI8(ctypes.Structure): - _fields_ = [ - ("return_value", ctypes.c_int8), - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_I8 = ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructI8, -) -class _UniffiForeignFutureStructU16(ctypes.Structure): - _fields_ = [ - ("return_value", ctypes.c_uint16), - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_U16 = ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructU16, -) -class _UniffiForeignFutureStructI16(ctypes.Structure): - _fields_ = [ - ("return_value", ctypes.c_int16), - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_I16 = ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructI16, -) -class _UniffiForeignFutureStructU32(ctypes.Structure): - _fields_ = [ - ("return_value", ctypes.c_uint32), - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_U32 = ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructU32, -) -class _UniffiForeignFutureStructI32(ctypes.Structure): - _fields_ = [ - ("return_value", ctypes.c_int32), - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_I32 = 
ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructI32, -) -class _UniffiForeignFutureStructU64(ctypes.Structure): - _fields_ = [ - ("return_value", ctypes.c_uint64), - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_U64 = ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructU64, -) -class _UniffiForeignFutureStructI64(ctypes.Structure): - _fields_ = [ - ("return_value", ctypes.c_int64), - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_I64 = ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructI64, -) -class _UniffiForeignFutureStructF32(ctypes.Structure): - _fields_ = [ - ("return_value", ctypes.c_float), - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_F32 = ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructF32, -) -class _UniffiForeignFutureStructF64(ctypes.Structure): - _fields_ = [ - ("return_value", ctypes.c_double), - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_F64 = ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructF64, -) -class _UniffiForeignFutureStructPointer(ctypes.Structure): - _fields_ = [ - ("return_value", ctypes.c_void_p), - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_POINTER = ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructPointer, -) -class _UniffiForeignFutureStructRustBuffer(ctypes.Structure): - _fields_ = [ - ("return_value", _UniffiRustBuffer), - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_RUST_BUFFER = ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructRustBuffer, -) -class _UniffiForeignFutureStructVoid(ctypes.Structure): - _fields_ = [ - ("call_status", _UniffiRustCallStatus), - ] -_UNIFFI_FOREIGN_FUTURE_COMPLETE_VOID = ctypes.CFUNCTYPE(None,ctypes.c_uint64,_UniffiForeignFutureStructVoid, -) -_UniffiLib.uniffi_goose_llm_fn_func_completion.argtypes = ( - 
_UniffiRustBuffer, -) -_UniffiLib.uniffi_goose_llm_fn_func_completion.restype = ctypes.c_uint64 -_UniffiLib.uniffi_goose_llm_fn_func_create_completion_request.argtypes = ( - _UniffiRustBuffer, - _UniffiRustBuffer, - _UniffiRustBuffer, - _UniffiRustBuffer, - _UniffiRustBuffer, - _UniffiRustBuffer, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.uniffi_goose_llm_fn_func_create_completion_request.restype = _UniffiRustBuffer -_UniffiLib.uniffi_goose_llm_fn_func_create_tool_config.argtypes = ( - _UniffiRustBuffer, - _UniffiRustBuffer, - _UniffiRustBuffer, - _UniffiRustBuffer, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.uniffi_goose_llm_fn_func_create_tool_config.restype = _UniffiRustBuffer -_UniffiLib.uniffi_goose_llm_fn_func_generate_session_name.argtypes = ( - _UniffiRustBuffer, - _UniffiRustBuffer, - _UniffiRustBuffer, -) -_UniffiLib.uniffi_goose_llm_fn_func_generate_session_name.restype = ctypes.c_uint64 -_UniffiLib.uniffi_goose_llm_fn_func_generate_tooltip.argtypes = ( - _UniffiRustBuffer, - _UniffiRustBuffer, - _UniffiRustBuffer, -) -_UniffiLib.uniffi_goose_llm_fn_func_generate_tooltip.restype = ctypes.c_uint64 -_UniffiLib.uniffi_goose_llm_fn_func_print_messages.argtypes = ( - _UniffiRustBuffer, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.uniffi_goose_llm_fn_func_print_messages.restype = None -_UniffiLib.ffi_goose_llm_rustbuffer_alloc.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rustbuffer_alloc.restype = _UniffiRustBuffer -_UniffiLib.ffi_goose_llm_rustbuffer_from_bytes.argtypes = ( - _UniffiForeignBytes, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rustbuffer_from_bytes.restype = _UniffiRustBuffer -_UniffiLib.ffi_goose_llm_rustbuffer_free.argtypes = ( - _UniffiRustBuffer, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rustbuffer_free.restype = None -_UniffiLib.ffi_goose_llm_rustbuffer_reserve.argtypes = ( - _UniffiRustBuffer, - 
ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rustbuffer_reserve.restype = _UniffiRustBuffer -_UniffiLib.ffi_goose_llm_rust_future_poll_u8.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_poll_u8.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_u8.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_cancel_u8.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_u8.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_free_u8.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_u8.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_u8.restype = ctypes.c_uint8 -_UniffiLib.ffi_goose_llm_rust_future_poll_i8.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_poll_i8.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_i8.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_cancel_i8.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_i8.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_free_i8.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_i8.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_i8.restype = ctypes.c_int8 -_UniffiLib.ffi_goose_llm_rust_future_poll_u16.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_poll_u16.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_u16.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_cancel_u16.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_u16.argtypes = ( - ctypes.c_uint64, -) 
-_UniffiLib.ffi_goose_llm_rust_future_free_u16.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_u16.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_u16.restype = ctypes.c_uint16 -_UniffiLib.ffi_goose_llm_rust_future_poll_i16.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_poll_i16.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_i16.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_cancel_i16.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_i16.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_free_i16.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_i16.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_i16.restype = ctypes.c_int16 -_UniffiLib.ffi_goose_llm_rust_future_poll_u32.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_poll_u32.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_u32.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_cancel_u32.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_u32.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_free_u32.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_u32.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_u32.restype = ctypes.c_uint32 -_UniffiLib.ffi_goose_llm_rust_future_poll_i32.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_poll_i32.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_i32.argtypes = ( - ctypes.c_uint64, -) 
-_UniffiLib.ffi_goose_llm_rust_future_cancel_i32.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_i32.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_free_i32.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_i32.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_i32.restype = ctypes.c_int32 -_UniffiLib.ffi_goose_llm_rust_future_poll_u64.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_poll_u64.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_u64.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_cancel_u64.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_u64.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_free_u64.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_u64.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_u64.restype = ctypes.c_uint64 -_UniffiLib.ffi_goose_llm_rust_future_poll_i64.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_poll_i64.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_i64.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_cancel_i64.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_i64.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_free_i64.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_i64.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_i64.restype = ctypes.c_int64 -_UniffiLib.ffi_goose_llm_rust_future_poll_f32.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) 
-_UniffiLib.ffi_goose_llm_rust_future_poll_f32.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_f32.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_cancel_f32.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_f32.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_free_f32.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_f32.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_f32.restype = ctypes.c_float -_UniffiLib.ffi_goose_llm_rust_future_poll_f64.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_poll_f64.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_f64.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_cancel_f64.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_f64.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_free_f64.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_f64.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_f64.restype = ctypes.c_double -_UniffiLib.ffi_goose_llm_rust_future_poll_pointer.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_poll_pointer.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_pointer.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_cancel_pointer.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_pointer.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_free_pointer.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_pointer.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_pointer.restype = ctypes.c_void_p 
-_UniffiLib.ffi_goose_llm_rust_future_poll_rust_buffer.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_poll_rust_buffer.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_rust_buffer.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_cancel_rust_buffer.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_rust_buffer.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_free_rust_buffer.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_rust_buffer.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_rust_buffer.restype = _UniffiRustBuffer -_UniffiLib.ffi_goose_llm_rust_future_poll_void.argtypes = ( - ctypes.c_uint64, - _UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK, - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_poll_void.restype = None -_UniffiLib.ffi_goose_llm_rust_future_cancel_void.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_cancel_void.restype = None -_UniffiLib.ffi_goose_llm_rust_future_free_void.argtypes = ( - ctypes.c_uint64, -) -_UniffiLib.ffi_goose_llm_rust_future_free_void.restype = None -_UniffiLib.ffi_goose_llm_rust_future_complete_void.argtypes = ( - ctypes.c_uint64, - ctypes.POINTER(_UniffiRustCallStatus), -) -_UniffiLib.ffi_goose_llm_rust_future_complete_void.restype = None -_UniffiLib.uniffi_goose_llm_checksum_func_completion.argtypes = ( -) -_UniffiLib.uniffi_goose_llm_checksum_func_completion.restype = ctypes.c_uint16 -_UniffiLib.uniffi_goose_llm_checksum_func_create_completion_request.argtypes = ( -) -_UniffiLib.uniffi_goose_llm_checksum_func_create_completion_request.restype = ctypes.c_uint16 -_UniffiLib.uniffi_goose_llm_checksum_func_create_tool_config.argtypes = ( -) -_UniffiLib.uniffi_goose_llm_checksum_func_create_tool_config.restype = ctypes.c_uint16 
-_UniffiLib.uniffi_goose_llm_checksum_func_generate_session_name.argtypes = ( -) -_UniffiLib.uniffi_goose_llm_checksum_func_generate_session_name.restype = ctypes.c_uint16 -_UniffiLib.uniffi_goose_llm_checksum_func_generate_tooltip.argtypes = ( -) -_UniffiLib.uniffi_goose_llm_checksum_func_generate_tooltip.restype = ctypes.c_uint16 -_UniffiLib.uniffi_goose_llm_checksum_func_print_messages.argtypes = ( -) -_UniffiLib.uniffi_goose_llm_checksum_func_print_messages.restype = ctypes.c_uint16 -_UniffiLib.ffi_goose_llm_uniffi_contract_version.argtypes = ( -) -_UniffiLib.ffi_goose_llm_uniffi_contract_version.restype = ctypes.c_uint32 - -_uniffi_check_contract_api_version(_UniffiLib) -# _uniffi_check_api_checksums(_UniffiLib) - -# Public interface members begin here. - - -class _UniffiConverterUInt32(_UniffiConverterPrimitiveInt): - CLASS_NAME = "u32" - VALUE_MIN = 0 - VALUE_MAX = 2**32 - - @staticmethod - def read(buf): - return buf.read_u32() - - @staticmethod - def write(value, buf): - buf.write_u32(value) - -class _UniffiConverterInt32(_UniffiConverterPrimitiveInt): - CLASS_NAME = "i32" - VALUE_MIN = -2**31 - VALUE_MAX = 2**31 - - @staticmethod - def read(buf): - return buf.read_i32() - - @staticmethod - def write(value, buf): - buf.write_i32(value) - -class _UniffiConverterInt64(_UniffiConverterPrimitiveInt): - CLASS_NAME = "i64" - VALUE_MIN = -2**63 - VALUE_MAX = 2**63 - - @staticmethod - def read(buf): - return buf.read_i64() - - @staticmethod - def write(value, buf): - buf.write_i64(value) - -class _UniffiConverterFloat(_UniffiConverterPrimitiveFloat): - @staticmethod - def read(buf): - return buf.read_float() - - @staticmethod - def write(value, buf): - buf.write_float(value) - -class _UniffiConverterDouble(_UniffiConverterPrimitiveFloat): - @staticmethod - def read(buf): - return buf.read_double() - - @staticmethod - def write(value, buf): - buf.write_double(value) - -class _UniffiConverterString: - @staticmethod - def check_lower(value): - if not 
isinstance(value, str): - raise TypeError("argument must be str, not {}".format(type(value).__name__)) - return value - - @staticmethod - def read(buf): - size = buf.read_i32() - if size < 0: - raise InternalError("Unexpected negative string length") - utf8_bytes = buf.read(size) - return utf8_bytes.decode("utf-8") - - @staticmethod - def write(value, buf): - utf8_bytes = value.encode("utf-8") - buf.write_i32(len(utf8_bytes)) - buf.write(utf8_bytes) - - @staticmethod - def lift(buf): - with buf.consume_with_stream() as stream: - return stream.read(stream.remaining()).decode("utf-8") - - @staticmethod - def lower(value): - with _UniffiRustBuffer.alloc_with_builder() as builder: - builder.write(value.encode("utf-8")) - return builder.finalize() - - -class CompletionResponse: - message: "Message" - model: "str" - usage: "Usage" - runtime_metrics: "RuntimeMetrics" - def __init__(self, *, message: "Message", model: "str", usage: "Usage", runtime_metrics: "RuntimeMetrics"): - self.message = message - self.model = model - self.usage = usage - self.runtime_metrics = runtime_metrics - - def __str__(self): - return "CompletionResponse(message={}, model={}, usage={}, runtime_metrics={})".format(self.message, self.model, self.usage, self.runtime_metrics) - - def __eq__(self, other): - if self.message != other.message: - return False - if self.model != other.model: - return False - if self.usage != other.usage: - return False - if self.runtime_metrics != other.runtime_metrics: - return False - return True - -class _UniffiConverterTypeCompletionResponse(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return CompletionResponse( - message=_UniffiConverterTypeMessage.read(buf), - model=_UniffiConverterString.read(buf), - usage=_UniffiConverterTypeUsage.read(buf), - runtime_metrics=_UniffiConverterTypeRuntimeMetrics.read(buf), - ) - - @staticmethod - def check_lower(value): - _UniffiConverterTypeMessage.check_lower(value.message) - 
_UniffiConverterString.check_lower(value.model) - _UniffiConverterTypeUsage.check_lower(value.usage) - _UniffiConverterTypeRuntimeMetrics.check_lower(value.runtime_metrics) - - @staticmethod - def write(value, buf): - _UniffiConverterTypeMessage.write(value.message, buf) - _UniffiConverterString.write(value.model, buf) - _UniffiConverterTypeUsage.write(value.usage, buf) - _UniffiConverterTypeRuntimeMetrics.write(value.runtime_metrics, buf) - - -class ExtensionConfig: - name: "str" - instructions: "typing.Optional[str]" - tools: "typing.List[ToolConfig]" - def __init__(self, *, name: "str", instructions: "typing.Optional[str]", tools: "typing.List[ToolConfig]"): - self.name = name - self.instructions = instructions - self.tools = tools - - def __str__(self): - return "ExtensionConfig(name={}, instructions={}, tools={})".format(self.name, self.instructions, self.tools) - - def __eq__(self, other): - if self.name != other.name: - return False - if self.instructions != other.instructions: - return False - if self.tools != other.tools: - return False - return True - -class _UniffiConverterTypeExtensionConfig(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return ExtensionConfig( - name=_UniffiConverterString.read(buf), - instructions=_UniffiConverterOptionalString.read(buf), - tools=_UniffiConverterSequenceTypeToolConfig.read(buf), - ) - - @staticmethod - def check_lower(value): - _UniffiConverterString.check_lower(value.name) - _UniffiConverterOptionalString.check_lower(value.instructions) - _UniffiConverterSequenceTypeToolConfig.check_lower(value.tools) - - @staticmethod - def write(value, buf): - _UniffiConverterString.write(value.name, buf) - _UniffiConverterOptionalString.write(value.instructions, buf) - _UniffiConverterSequenceTypeToolConfig.write(value.tools, buf) - - -class ImageContent: - data: "str" - mime_type: "str" - def __init__(self, *, data: "str", mime_type: "str"): - self.data = data - self.mime_type = mime_type - - def __str__(self): - 
return "ImageContent(data={}, mime_type={})".format(self.data, self.mime_type) - - def __eq__(self, other): - if self.data != other.data: - return False - if self.mime_type != other.mime_type: - return False - return True - -class _UniffiConverterTypeImageContent(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return ImageContent( - data=_UniffiConverterString.read(buf), - mime_type=_UniffiConverterString.read(buf), - ) - - @staticmethod - def check_lower(value): - _UniffiConverterString.check_lower(value.data) - _UniffiConverterString.check_lower(value.mime_type) - - @staticmethod - def write(value, buf): - _UniffiConverterString.write(value.data, buf) - _UniffiConverterString.write(value.mime_type, buf) - - -class Message: - """ - A message to or from an LLM - """ - - role: "Role" - created: "int" - content: "Contents" - def __init__(self, *, role: "Role", created: "int", content: "Contents"): - self.role = role - self.created = created - self.content = content - - def __str__(self): - return "Message(role={}, created={}, content={})".format(self.role, self.created, self.content) - - def __eq__(self, other): - if self.role != other.role: - return False - if self.created != other.created: - return False - if self.content != other.content: - return False - return True - -class _UniffiConverterTypeMessage(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return Message( - role=_UniffiConverterTypeRole.read(buf), - created=_UniffiConverterInt64.read(buf), - content=_UniffiConverterTypeContents.read(buf), - ) - - @staticmethod - def check_lower(value): - _UniffiConverterTypeRole.check_lower(value.role) - _UniffiConverterInt64.check_lower(value.created) - _UniffiConverterTypeContents.check_lower(value.content) - - @staticmethod - def write(value, buf): - _UniffiConverterTypeRole.write(value.role, buf) - _UniffiConverterInt64.write(value.created, buf) - _UniffiConverterTypeContents.write(value.content, buf) - - -class ModelConfig: - """ - 
Configuration for model-specific settings and limits - """ - - model_name: "str" - """ - The name of the model to use - """ - - context_limit: "typing.Optional[int]" - """ - Optional explicit context limit that overrides any defaults - """ - - temperature: "typing.Optional[float]" - """ - Optional temperature setting (0.0 - 1.0) - """ - - max_tokens: "typing.Optional[int]" - """ - Optional maximum tokens to generate - """ - - def __init__(self, *, model_name: "str", context_limit: "typing.Optional[int]", temperature: "typing.Optional[float]", max_tokens: "typing.Optional[int]"): - self.model_name = model_name - self.context_limit = context_limit - self.temperature = temperature - self.max_tokens = max_tokens - - def __str__(self): - return "ModelConfig(model_name={}, context_limit={}, temperature={}, max_tokens={})".format(self.model_name, self.context_limit, self.temperature, self.max_tokens) - - def __eq__(self, other): - if self.model_name != other.model_name: - return False - if self.context_limit != other.context_limit: - return False - if self.temperature != other.temperature: - return False - if self.max_tokens != other.max_tokens: - return False - return True - -class _UniffiConverterTypeModelConfig(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return ModelConfig( - model_name=_UniffiConverterString.read(buf), - context_limit=_UniffiConverterOptionalUInt32.read(buf), - temperature=_UniffiConverterOptionalFloat.read(buf), - max_tokens=_UniffiConverterOptionalInt32.read(buf), - ) - - @staticmethod - def check_lower(value): - _UniffiConverterString.check_lower(value.model_name) - _UniffiConverterOptionalUInt32.check_lower(value.context_limit) - _UniffiConverterOptionalFloat.check_lower(value.temperature) - _UniffiConverterOptionalInt32.check_lower(value.max_tokens) - - @staticmethod - def write(value, buf): - _UniffiConverterString.write(value.model_name, buf) - _UniffiConverterOptionalUInt32.write(value.context_limit, buf) - 
_UniffiConverterOptionalFloat.write(value.temperature, buf) - _UniffiConverterOptionalInt32.write(value.max_tokens, buf) - - -class ProviderCompleteResponse: - message: "Message" - model: "str" - usage: "Usage" - def __init__(self, *, message: "Message", model: "str", usage: "Usage"): - self.message = message - self.model = model - self.usage = usage - - def __str__(self): - return "ProviderCompleteResponse(message={}, model={}, usage={})".format(self.message, self.model, self.usage) - - def __eq__(self, other): - if self.message != other.message: - return False - if self.model != other.model: - return False - if self.usage != other.usage: - return False - return True - -class _UniffiConverterTypeProviderCompleteResponse(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return ProviderCompleteResponse( - message=_UniffiConverterTypeMessage.read(buf), - model=_UniffiConverterString.read(buf), - usage=_UniffiConverterTypeUsage.read(buf), - ) - - @staticmethod - def check_lower(value): - _UniffiConverterTypeMessage.check_lower(value.message) - _UniffiConverterString.check_lower(value.model) - _UniffiConverterTypeUsage.check_lower(value.usage) - - @staticmethod - def write(value, buf): - _UniffiConverterTypeMessage.write(value.message, buf) - _UniffiConverterString.write(value.model, buf) - _UniffiConverterTypeUsage.write(value.usage, buf) - - -class RedactedThinkingContent: - data: "str" - def __init__(self, *, data: "str"): - self.data = data - - def __str__(self): - return "RedactedThinkingContent(data={})".format(self.data) - - def __eq__(self, other): - if self.data != other.data: - return False - return True - -class _UniffiConverterTypeRedactedThinkingContent(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return RedactedThinkingContent( - data=_UniffiConverterString.read(buf), - ) - - @staticmethod - def check_lower(value): - _UniffiConverterString.check_lower(value.data) - - @staticmethod - def write(value, buf): - 
_UniffiConverterString.write(value.data, buf) - - -class RuntimeMetrics: - total_time_sec: "float" - total_time_sec_provider: "float" - tokens_per_second: "typing.Optional[float]" - def __init__(self, *, total_time_sec: "float", total_time_sec_provider: "float", tokens_per_second: "typing.Optional[float]"): - self.total_time_sec = total_time_sec - self.total_time_sec_provider = total_time_sec_provider - self.tokens_per_second = tokens_per_second - - def __str__(self): - return "RuntimeMetrics(total_time_sec={}, total_time_sec_provider={}, tokens_per_second={})".format(self.total_time_sec, self.total_time_sec_provider, self.tokens_per_second) - - def __eq__(self, other): - if self.total_time_sec != other.total_time_sec: - return False - if self.total_time_sec_provider != other.total_time_sec_provider: - return False - if self.tokens_per_second != other.tokens_per_second: - return False - return True - -class _UniffiConverterTypeRuntimeMetrics(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return RuntimeMetrics( - total_time_sec=_UniffiConverterFloat.read(buf), - total_time_sec_provider=_UniffiConverterFloat.read(buf), - tokens_per_second=_UniffiConverterOptionalDouble.read(buf), - ) - - @staticmethod - def check_lower(value): - _UniffiConverterFloat.check_lower(value.total_time_sec) - _UniffiConverterFloat.check_lower(value.total_time_sec_provider) - _UniffiConverterOptionalDouble.check_lower(value.tokens_per_second) - - @staticmethod - def write(value, buf): - _UniffiConverterFloat.write(value.total_time_sec, buf) - _UniffiConverterFloat.write(value.total_time_sec_provider, buf) - _UniffiConverterOptionalDouble.write(value.tokens_per_second, buf) - - -class TextContent: - text: "str" - def __init__(self, *, text: "str"): - self.text = text - - def __str__(self): - return "TextContent(text={})".format(self.text) - - def __eq__(self, other): - if self.text != other.text: - return False - return True - -class 
_UniffiConverterTypeTextContent(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return TextContent( - text=_UniffiConverterString.read(buf), - ) - - @staticmethod - def check_lower(value): - _UniffiConverterString.check_lower(value.text) - - @staticmethod - def write(value, buf): - _UniffiConverterString.write(value.text, buf) - - -class ThinkingContent: - thinking: "str" - signature: "str" - def __init__(self, *, thinking: "str", signature: "str"): - self.thinking = thinking - self.signature = signature - - def __str__(self): - return "ThinkingContent(thinking={}, signature={})".format(self.thinking, self.signature) - - def __eq__(self, other): - if self.thinking != other.thinking: - return False - if self.signature != other.signature: - return False - return True - -class _UniffiConverterTypeThinkingContent(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return ThinkingContent( - thinking=_UniffiConverterString.read(buf), - signature=_UniffiConverterString.read(buf), - ) - - @staticmethod - def check_lower(value): - _UniffiConverterString.check_lower(value.thinking) - _UniffiConverterString.check_lower(value.signature) - - @staticmethod - def write(value, buf): - _UniffiConverterString.write(value.thinking, buf) - _UniffiConverterString.write(value.signature, buf) - - -class ToolRequest: - id: "str" - tool_call: "ToolRequestToolCall" - def __init__(self, *, id: "str", tool_call: "ToolRequestToolCall"): - self.id = id - self.tool_call = tool_call - - def __str__(self): - return "ToolRequest(id={}, tool_call={})".format(self.id, self.tool_call) - - def __eq__(self, other): - if self.id != other.id: - return False - if self.tool_call != other.tool_call: - return False - return True - -class _UniffiConverterTypeToolRequest(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return ToolRequest( - id=_UniffiConverterString.read(buf), - tool_call=_UniffiConverterTypeToolRequestToolCall.read(buf), - ) - - @staticmethod - def 
check_lower(value): - _UniffiConverterString.check_lower(value.id) - _UniffiConverterTypeToolRequestToolCall.check_lower(value.tool_call) - - @staticmethod - def write(value, buf): - _UniffiConverterString.write(value.id, buf) - _UniffiConverterTypeToolRequestToolCall.write(value.tool_call, buf) - - -class ToolResponse: - id: "str" - tool_result: "ToolResponseToolResult" - def __init__(self, *, id: "str", tool_result: "ToolResponseToolResult"): - self.id = id - self.tool_result = tool_result - - def __str__(self): - return "ToolResponse(id={}, tool_result={})".format(self.id, self.tool_result) - - def __eq__(self, other): - if self.id != other.id: - return False - if self.tool_result != other.tool_result: - return False - return True - -class _UniffiConverterTypeToolResponse(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return ToolResponse( - id=_UniffiConverterString.read(buf), - tool_result=_UniffiConverterTypeToolResponseToolResult.read(buf), - ) - - @staticmethod - def check_lower(value): - _UniffiConverterString.check_lower(value.id) - _UniffiConverterTypeToolResponseToolResult.check_lower(value.tool_result) - - @staticmethod - def write(value, buf): - _UniffiConverterString.write(value.id, buf) - _UniffiConverterTypeToolResponseToolResult.write(value.tool_result, buf) - - -class Usage: - input_tokens: "typing.Optional[int]" - output_tokens: "typing.Optional[int]" - total_tokens: "typing.Optional[int]" - def __init__(self, *, input_tokens: "typing.Optional[int]", output_tokens: "typing.Optional[int]", total_tokens: "typing.Optional[int]"): - self.input_tokens = input_tokens - self.output_tokens = output_tokens - self.total_tokens = total_tokens - - def __str__(self): - return "Usage(input_tokens={}, output_tokens={}, total_tokens={})".format(self.input_tokens, self.output_tokens, self.total_tokens) - - def __eq__(self, other): - if self.input_tokens != other.input_tokens: - return False - if self.output_tokens != other.output_tokens: - return 
False - if self.total_tokens != other.total_tokens: - return False - return True - -class _UniffiConverterTypeUsage(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - return Usage( - input_tokens=_UniffiConverterOptionalInt32.read(buf), - output_tokens=_UniffiConverterOptionalInt32.read(buf), - total_tokens=_UniffiConverterOptionalInt32.read(buf), - ) - - @staticmethod - def check_lower(value): - _UniffiConverterOptionalInt32.check_lower(value.input_tokens) - _UniffiConverterOptionalInt32.check_lower(value.output_tokens) - _UniffiConverterOptionalInt32.check_lower(value.total_tokens) - - @staticmethod - def write(value, buf): - _UniffiConverterOptionalInt32.write(value.input_tokens, buf) - _UniffiConverterOptionalInt32.write(value.output_tokens, buf) - _UniffiConverterOptionalInt32.write(value.total_tokens, buf) - - -# CompletionError -# We want to define each variant as a nested class that's also a subclass, -# which is tricky in Python. To accomplish this we're going to create each -# class separately, then manually add the child classes to the base class's -# __dict__. All of this happens in dummy class to avoid polluting the module -# namespace. 
-class CompletionError(Exception): - pass - -_UniffiTempCompletionError = CompletionError - -class CompletionError: # type: ignore - class UnknownProvider(_UniffiTempCompletionError): - - def __repr__(self): - return "CompletionError.UnknownProvider({})".format(repr(str(self))) - _UniffiTempCompletionError.UnknownProvider = UnknownProvider # type: ignore - class Provider(_UniffiTempCompletionError): - - def __repr__(self): - return "CompletionError.Provider({})".format(repr(str(self))) - _UniffiTempCompletionError.Provider = Provider # type: ignore - class Template(_UniffiTempCompletionError): - - def __repr__(self): - return "CompletionError.Template({})".format(repr(str(self))) - _UniffiTempCompletionError.Template = Template # type: ignore - class Json(_UniffiTempCompletionError): - - def __repr__(self): - return "CompletionError.Json({})".format(repr(str(self))) - _UniffiTempCompletionError.Json = Json # type: ignore - class ToolNotFound(_UniffiTempCompletionError): - - def __repr__(self): - return "CompletionError.ToolNotFound({})".format(repr(str(self))) - _UniffiTempCompletionError.ToolNotFound = ToolNotFound # type: ignore - -CompletionError = _UniffiTempCompletionError # type: ignore -del _UniffiTempCompletionError - - -class _UniffiConverterTypeCompletionError(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - variant = buf.read_i32() - if variant == 1: - return CompletionError.UnknownProvider( - _UniffiConverterString.read(buf), - ) - if variant == 2: - return CompletionError.Provider( - _UniffiConverterString.read(buf), - ) - if variant == 3: - return CompletionError.Template( - _UniffiConverterString.read(buf), - ) - if variant == 4: - return CompletionError.Json( - _UniffiConverterString.read(buf), - ) - if variant == 5: - return CompletionError.ToolNotFound( - _UniffiConverterString.read(buf), - ) - raise InternalError("Raw enum value doesn't match any cases") - - @staticmethod - def check_lower(value): - if isinstance(value, 
CompletionError.UnknownProvider): - return - if isinstance(value, CompletionError.Provider): - return - if isinstance(value, CompletionError.Template): - return - if isinstance(value, CompletionError.Json): - return - if isinstance(value, CompletionError.ToolNotFound): - return - - @staticmethod - def write(value, buf): - if isinstance(value, CompletionError.UnknownProvider): - buf.write_i32(1) - if isinstance(value, CompletionError.Provider): - buf.write_i32(2) - if isinstance(value, CompletionError.Template): - buf.write_i32(3) - if isinstance(value, CompletionError.Json): - buf.write_i32(4) - if isinstance(value, CompletionError.ToolNotFound): - buf.write_i32(5) - - - - - -class Content: - def __init__(self): - raise RuntimeError("Content cannot be instantiated directly") - - # Each enum variant is a nested class of the enum itself. - class TEXT: - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __str__(self): - return f"Content.TEXT{self._values!r}" - - def __eq__(self, other): - if not other.is_TEXT(): - return False - return self._values == other._values - class IMAGE: - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __str__(self): - return f"Content.IMAGE{self._values!r}" - - def __eq__(self, other): - if not other.is_IMAGE(): - return False - return self._values == other._values - - - # For each variant, we have `is_NAME` and `is_name` methods for easily checking - # whether an instance is that variant. 
- def is_TEXT(self) -> bool: - return isinstance(self, Content.TEXT) - def is_text(self) -> bool: - return isinstance(self, Content.TEXT) - def is_IMAGE(self) -> bool: - return isinstance(self, Content.IMAGE) - def is_image(self) -> bool: - return isinstance(self, Content.IMAGE) - - -# Now, a little trick - we make each nested variant class be a subclass of the main -# enum class, so that method calls and instance checks etc will work intuitively. -# We might be able to do this a little more neatly with a metaclass, but this'll do. -Content.TEXT = type("Content.TEXT", (Content.TEXT, Content,), {}) # type: ignore -Content.IMAGE = type("Content.IMAGE", (Content.IMAGE, Content,), {}) # type: ignore - - - - -class _UniffiConverterTypeContent(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - variant = buf.read_i32() - if variant == 1: - return Content.TEXT( - _UniffiConverterTypeTextContent.read(buf), - ) - if variant == 2: - return Content.IMAGE( - _UniffiConverterTypeImageContent.read(buf), - ) - raise InternalError("Raw enum value doesn't match any cases") - - @staticmethod - def check_lower(value): - if value.is_TEXT(): - _UniffiConverterTypeTextContent.check_lower(value._values[0]) - return - if value.is_IMAGE(): - _UniffiConverterTypeImageContent.check_lower(value._values[0]) - return - raise ValueError(value) - - @staticmethod - def write(value, buf): - if value.is_TEXT(): - buf.write_i32(1) - _UniffiConverterTypeTextContent.write(value._values[0], buf) - if value.is_IMAGE(): - buf.write_i32(2) - _UniffiConverterTypeImageContent.write(value._values[0], buf) - - - - - - - -class MessageContent: - """ - Content passed inside a message, which can be both simple content and tool content - """ - - def __init__(self): - raise RuntimeError("MessageContent cannot be instantiated directly") - - # Each enum variant is a nested class of the enum itself. 
- class TEXT: - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __str__(self): - return f"MessageContent.TEXT{self._values!r}" - - def __eq__(self, other): - if not other.is_TEXT(): - return False - return self._values == other._values - class IMAGE: - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __str__(self): - return f"MessageContent.IMAGE{self._values!r}" - - def __eq__(self, other): - if not other.is_IMAGE(): - return False - return self._values == other._values - class TOOL_REQ: - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __str__(self): - return f"MessageContent.TOOL_REQ{self._values!r}" - - def __eq__(self, other): - if not other.is_TOOL_REQ(): - return False - return self._values == other._values - class TOOL_RESP: - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __str__(self): - return f"MessageContent.TOOL_RESP{self._values!r}" - - def __eq__(self, other): - if not other.is_TOOL_RESP(): - return False - return self._values == other._values - class THINKING: - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __str__(self): - return f"MessageContent.THINKING{self._values!r}" - - def __eq__(self, other): - if not other.is_THINKING(): - return False - 
return self._values == other._values - class REDACTED_THINKING: - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __str__(self): - return f"MessageContent.REDACTED_THINKING{self._values!r}" - - def __eq__(self, other): - if not other.is_REDACTED_THINKING(): - return False - return self._values == other._values - - - # For each variant, we have `is_NAME` and `is_name` methods for easily checking - # whether an instance is that variant. - def is_TEXT(self) -> bool: - return isinstance(self, MessageContent.TEXT) - def is_text(self) -> bool: - return isinstance(self, MessageContent.TEXT) - def is_IMAGE(self) -> bool: - return isinstance(self, MessageContent.IMAGE) - def is_image(self) -> bool: - return isinstance(self, MessageContent.IMAGE) - def is_TOOL_REQ(self) -> bool: - return isinstance(self, MessageContent.TOOL_REQ) - def is_tool_req(self) -> bool: - return isinstance(self, MessageContent.TOOL_REQ) - def is_TOOL_RESP(self) -> bool: - return isinstance(self, MessageContent.TOOL_RESP) - def is_tool_resp(self) -> bool: - return isinstance(self, MessageContent.TOOL_RESP) - def is_THINKING(self) -> bool: - return isinstance(self, MessageContent.THINKING) - def is_thinking(self) -> bool: - return isinstance(self, MessageContent.THINKING) - def is_REDACTED_THINKING(self) -> bool: - return isinstance(self, MessageContent.REDACTED_THINKING) - def is_redacted_thinking(self) -> bool: - return isinstance(self, MessageContent.REDACTED_THINKING) - - -# Now, a little trick - we make each nested variant class be a subclass of the main -# enum class, so that method calls and instance checks etc will work intuitively. -# We might be able to do this a little more neatly with a metaclass, but this'll do. 
-MessageContent.TEXT = type("MessageContent.TEXT", (MessageContent.TEXT, MessageContent,), {}) # type: ignore -MessageContent.IMAGE = type("MessageContent.IMAGE", (MessageContent.IMAGE, MessageContent,), {}) # type: ignore -MessageContent.TOOL_REQ = type("MessageContent.TOOL_REQ", (MessageContent.TOOL_REQ, MessageContent,), {}) # type: ignore -MessageContent.TOOL_RESP = type("MessageContent.TOOL_RESP", (MessageContent.TOOL_RESP, MessageContent,), {}) # type: ignore -MessageContent.THINKING = type("MessageContent.THINKING", (MessageContent.THINKING, MessageContent,), {}) # type: ignore -MessageContent.REDACTED_THINKING = type("MessageContent.REDACTED_THINKING", (MessageContent.REDACTED_THINKING, MessageContent,), {}) # type: ignore - - - - -class _UniffiConverterTypeMessageContent(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - variant = buf.read_i32() - if variant == 1: - return MessageContent.TEXT( - _UniffiConverterTypeTextContent.read(buf), - ) - if variant == 2: - return MessageContent.IMAGE( - _UniffiConverterTypeImageContent.read(buf), - ) - if variant == 3: - return MessageContent.TOOL_REQ( - _UniffiConverterTypeToolRequest.read(buf), - ) - if variant == 4: - return MessageContent.TOOL_RESP( - _UniffiConverterTypeToolResponse.read(buf), - ) - if variant == 5: - return MessageContent.THINKING( - _UniffiConverterTypeThinkingContent.read(buf), - ) - if variant == 6: - return MessageContent.REDACTED_THINKING( - _UniffiConverterTypeRedactedThinkingContent.read(buf), - ) - raise InternalError("Raw enum value doesn't match any cases") - - @staticmethod - def check_lower(value): - if value.is_TEXT(): - _UniffiConverterTypeTextContent.check_lower(value._values[0]) - return - if value.is_IMAGE(): - _UniffiConverterTypeImageContent.check_lower(value._values[0]) - return - if value.is_TOOL_REQ(): - _UniffiConverterTypeToolRequest.check_lower(value._values[0]) - return - if value.is_TOOL_RESP(): - 
_UniffiConverterTypeToolResponse.check_lower(value._values[0]) - return - if value.is_THINKING(): - _UniffiConverterTypeThinkingContent.check_lower(value._values[0]) - return - if value.is_REDACTED_THINKING(): - _UniffiConverterTypeRedactedThinkingContent.check_lower(value._values[0]) - return - raise ValueError(value) - - @staticmethod - def write(value, buf): - if value.is_TEXT(): - buf.write_i32(1) - _UniffiConverterTypeTextContent.write(value._values[0], buf) - if value.is_IMAGE(): - buf.write_i32(2) - _UniffiConverterTypeImageContent.write(value._values[0], buf) - if value.is_TOOL_REQ(): - buf.write_i32(3) - _UniffiConverterTypeToolRequest.write(value._values[0], buf) - if value.is_TOOL_RESP(): - buf.write_i32(4) - _UniffiConverterTypeToolResponse.write(value._values[0], buf) - if value.is_THINKING(): - buf.write_i32(5) - _UniffiConverterTypeThinkingContent.write(value._values[0], buf) - if value.is_REDACTED_THINKING(): - buf.write_i32(6) - _UniffiConverterTypeRedactedThinkingContent.write(value._values[0], buf) - - - - -# ProviderError -# We want to define each variant as a nested class that's also a subclass, -# which is tricky in Python. To accomplish this we're going to create each -# class separately, then manually add the child classes to the base class's -# __dict__. All of this happens in dummy class to avoid polluting the module -# namespace. 
-class ProviderError(Exception): - pass - -_UniffiTempProviderError = ProviderError - -class ProviderError: # type: ignore - class Authentication(_UniffiTempProviderError): - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - if not isinstance(values[0], str): - raise TypeError(f"unexpected type for tuple element 0 - expected 'str', got '{type(values[0])}'") - super().__init__(", ".join(map(repr, values))) - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __repr__(self): - return "ProviderError.Authentication({})".format(str(self)) - _UniffiTempProviderError.Authentication = Authentication # type: ignore - class ContextLengthExceeded(_UniffiTempProviderError): - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - if not isinstance(values[0], str): - raise TypeError(f"unexpected type for tuple element 0 - expected 'str', got '{type(values[0])}'") - super().__init__(", ".join(map(repr, values))) - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __repr__(self): - return "ProviderError.ContextLengthExceeded({})".format(str(self)) - _UniffiTempProviderError.ContextLengthExceeded = ContextLengthExceeded # type: ignore - class RateLimitExceeded(_UniffiTempProviderError): - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - if not isinstance(values[0], str): - raise TypeError(f"unexpected type for tuple element 0 - expected 'str', got '{type(values[0])}'") - super().__init__(", ".join(map(repr, values))) - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __repr__(self): - return "ProviderError.RateLimitExceeded({})".format(str(self)) - _UniffiTempProviderError.RateLimitExceeded = RateLimitExceeded # type: ignore - class 
ServerError(_UniffiTempProviderError): - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - if not isinstance(values[0], str): - raise TypeError(f"unexpected type for tuple element 0 - expected 'str', got '{type(values[0])}'") - super().__init__(", ".join(map(repr, values))) - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __repr__(self): - return "ProviderError.ServerError({})".format(str(self)) - _UniffiTempProviderError.ServerError = ServerError # type: ignore - class RequestFailed(_UniffiTempProviderError): - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - if not isinstance(values[0], str): - raise TypeError(f"unexpected type for tuple element 0 - expected 'str', got '{type(values[0])}'") - super().__init__(", ".join(map(repr, values))) - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __repr__(self): - return "ProviderError.RequestFailed({})".format(str(self)) - _UniffiTempProviderError.RequestFailed = RequestFailed # type: ignore - class ExecutionError(_UniffiTempProviderError): - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - if not isinstance(values[0], str): - raise TypeError(f"unexpected type for tuple element 0 - expected 'str', got '{type(values[0])}'") - super().__init__(", ".join(map(repr, values))) - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __repr__(self): - return "ProviderError.ExecutionError({})".format(str(self)) - _UniffiTempProviderError.ExecutionError = ExecutionError # type: ignore - class UsageError(_UniffiTempProviderError): - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - if not isinstance(values[0], str): - raise 
TypeError(f"unexpected type for tuple element 0 - expected 'str', got '{type(values[0])}'") - super().__init__(", ".join(map(repr, values))) - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __repr__(self): - return "ProviderError.UsageError({})".format(str(self)) - _UniffiTempProviderError.UsageError = UsageError # type: ignore - class ResponseParseError(_UniffiTempProviderError): - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - if not isinstance(values[0], str): - raise TypeError(f"unexpected type for tuple element 0 - expected 'str', got '{type(values[0])}'") - super().__init__(", ".join(map(repr, values))) - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __repr__(self): - return "ProviderError.ResponseParseError({})".format(str(self)) - _UniffiTempProviderError.ResponseParseError = ResponseParseError # type: ignore - -ProviderError = _UniffiTempProviderError # type: ignore -del _UniffiTempProviderError - - -class _UniffiConverterTypeProviderError(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - variant = buf.read_i32() - if variant == 1: - return ProviderError.Authentication( - _UniffiConverterString.read(buf), - ) - if variant == 2: - return ProviderError.ContextLengthExceeded( - _UniffiConverterString.read(buf), - ) - if variant == 3: - return ProviderError.RateLimitExceeded( - _UniffiConverterString.read(buf), - ) - if variant == 4: - return ProviderError.ServerError( - _UniffiConverterString.read(buf), - ) - if variant == 5: - return ProviderError.RequestFailed( - _UniffiConverterString.read(buf), - ) - if variant == 6: - return ProviderError.ExecutionError( - _UniffiConverterString.read(buf), - ) - if variant == 7: - return ProviderError.UsageError( - _UniffiConverterString.read(buf), - ) - if variant == 8: - return ProviderError.ResponseParseError( - 
_UniffiConverterString.read(buf), - ) - raise InternalError("Raw enum value doesn't match any cases") - - @staticmethod - def check_lower(value): - if isinstance(value, ProviderError.Authentication): - _UniffiConverterString.check_lower(value._values[0]) - return - if isinstance(value, ProviderError.ContextLengthExceeded): - _UniffiConverterString.check_lower(value._values[0]) - return - if isinstance(value, ProviderError.RateLimitExceeded): - _UniffiConverterString.check_lower(value._values[0]) - return - if isinstance(value, ProviderError.ServerError): - _UniffiConverterString.check_lower(value._values[0]) - return - if isinstance(value, ProviderError.RequestFailed): - _UniffiConverterString.check_lower(value._values[0]) - return - if isinstance(value, ProviderError.ExecutionError): - _UniffiConverterString.check_lower(value._values[0]) - return - if isinstance(value, ProviderError.UsageError): - _UniffiConverterString.check_lower(value._values[0]) - return - if isinstance(value, ProviderError.ResponseParseError): - _UniffiConverterString.check_lower(value._values[0]) - return - - @staticmethod - def write(value, buf): - if isinstance(value, ProviderError.Authentication): - buf.write_i32(1) - _UniffiConverterString.write(value._values[0], buf) - if isinstance(value, ProviderError.ContextLengthExceeded): - buf.write_i32(2) - _UniffiConverterString.write(value._values[0], buf) - if isinstance(value, ProviderError.RateLimitExceeded): - buf.write_i32(3) - _UniffiConverterString.write(value._values[0], buf) - if isinstance(value, ProviderError.ServerError): - buf.write_i32(4) - _UniffiConverterString.write(value._values[0], buf) - if isinstance(value, ProviderError.RequestFailed): - buf.write_i32(5) - _UniffiConverterString.write(value._values[0], buf) - if isinstance(value, ProviderError.ExecutionError): - buf.write_i32(6) - _UniffiConverterString.write(value._values[0], buf) - if isinstance(value, ProviderError.UsageError): - buf.write_i32(7) - 
_UniffiConverterString.write(value._values[0], buf) - if isinstance(value, ProviderError.ResponseParseError): - buf.write_i32(8) - _UniffiConverterString.write(value._values[0], buf) - - - - - -class Role(enum.Enum): - USER = 0 - - ASSISTANT = 1 - - - -class _UniffiConverterTypeRole(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - variant = buf.read_i32() - if variant == 1: - return Role.USER - if variant == 2: - return Role.ASSISTANT - raise InternalError("Raw enum value doesn't match any cases") - - @staticmethod - def check_lower(value): - if value == Role.USER: - return - if value == Role.ASSISTANT: - return - raise ValueError(value) - - @staticmethod - def write(value, buf): - if value == Role.USER: - buf.write_i32(1) - if value == Role.ASSISTANT: - buf.write_i32(2) - - - - - - - -class ToolApprovalMode(enum.Enum): - AUTO = 0 - - MANUAL = 1 - - SMART = 2 - - - -class _UniffiConverterTypeToolApprovalMode(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - variant = buf.read_i32() - if variant == 1: - return ToolApprovalMode.AUTO - if variant == 2: - return ToolApprovalMode.MANUAL - if variant == 3: - return ToolApprovalMode.SMART - raise InternalError("Raw enum value doesn't match any cases") - - @staticmethod - def check_lower(value): - if value == ToolApprovalMode.AUTO: - return - if value == ToolApprovalMode.MANUAL: - return - if value == ToolApprovalMode.SMART: - return - raise ValueError(value) - - @staticmethod - def write(value, buf): - if value == ToolApprovalMode.AUTO: - buf.write_i32(1) - if value == ToolApprovalMode.MANUAL: - buf.write_i32(2) - if value == ToolApprovalMode.SMART: - buf.write_i32(3) - - - - -# ToolError -# We want to define each variant as a nested class that's also a subclass, -# which is tricky in Python. To accomplish this we're going to create each -# class separately, then manually add the child classes to the base class's -# __dict__. 
All of this happens in dummy class to avoid polluting the module -# namespace. -class ToolError(Exception): - pass - -_UniffiTempToolError = ToolError - -class ToolError: # type: ignore - class InvalidParameters(_UniffiTempToolError): - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - if not isinstance(values[0], str): - raise TypeError(f"unexpected type for tuple element 0 - expected 'str', got '{type(values[0])}'") - super().__init__(", ".join(map(repr, values))) - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __repr__(self): - return "ToolError.InvalidParameters({})".format(str(self)) - _UniffiTempToolError.InvalidParameters = InvalidParameters # type: ignore - class ExecutionError(_UniffiTempToolError): - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - if not isinstance(values[0], str): - raise TypeError(f"unexpected type for tuple element 0 - expected 'str', got '{type(values[0])}'") - super().__init__(", ".join(map(repr, values))) - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __repr__(self): - return "ToolError.ExecutionError({})".format(str(self)) - _UniffiTempToolError.ExecutionError = ExecutionError # type: ignore - class SchemaError(_UniffiTempToolError): - def __init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - if not isinstance(values[0], str): - raise TypeError(f"unexpected type for tuple element 0 - expected 'str', got '{type(values[0])}'") - super().__init__(", ".join(map(repr, values))) - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __repr__(self): - return "ToolError.SchemaError({})".format(str(self)) - _UniffiTempToolError.SchemaError = SchemaError # type: ignore - class NotFound(_UniffiTempToolError): - def 
__init__(self, *values): - if len(values) != 1: - raise TypeError(f"Expected 1 arguments, found {len(values)}") - if not isinstance(values[0], str): - raise TypeError(f"unexpected type for tuple element 0 - expected 'str', got '{type(values[0])}'") - super().__init__(", ".join(map(repr, values))) - self._values = values - - def __getitem__(self, index): - return self._values[index] - - def __repr__(self): - return "ToolError.NotFound({})".format(str(self)) - _UniffiTempToolError.NotFound = NotFound # type: ignore - -ToolError = _UniffiTempToolError # type: ignore -del _UniffiTempToolError - - -class _UniffiConverterTypeToolError(_UniffiConverterRustBuffer): - @staticmethod - def read(buf): - variant = buf.read_i32() - if variant == 1: - return ToolError.InvalidParameters( - _UniffiConverterString.read(buf), - ) - if variant == 2: - return ToolError.ExecutionError( - _UniffiConverterString.read(buf), - ) - if variant == 3: - return ToolError.SchemaError( - _UniffiConverterString.read(buf), - ) - if variant == 4: - return ToolError.NotFound( - _UniffiConverterString.read(buf), - ) - raise InternalError("Raw enum value doesn't match any cases") - - @staticmethod - def check_lower(value): - if isinstance(value, ToolError.InvalidParameters): - _UniffiConverterString.check_lower(value._values[0]) - return - if isinstance(value, ToolError.ExecutionError): - _UniffiConverterString.check_lower(value._values[0]) - return - if isinstance(value, ToolError.SchemaError): - _UniffiConverterString.check_lower(value._values[0]) - return - if isinstance(value, ToolError.NotFound): - _UniffiConverterString.check_lower(value._values[0]) - return - - @staticmethod - def write(value, buf): - if isinstance(value, ToolError.InvalidParameters): - buf.write_i32(1) - _UniffiConverterString.write(value._values[0], buf) - if isinstance(value, ToolError.ExecutionError): - buf.write_i32(2) - _UniffiConverterString.write(value._values[0], buf) - if isinstance(value, ToolError.SchemaError): - 
buf.write_i32(3) - _UniffiConverterString.write(value._values[0], buf) - if isinstance(value, ToolError.NotFound): - buf.write_i32(4) - _UniffiConverterString.write(value._values[0], buf) - - - -class _UniffiConverterOptionalUInt32(_UniffiConverterRustBuffer): - @classmethod - def check_lower(cls, value): - if value is not None: - _UniffiConverterUInt32.check_lower(value) - - @classmethod - def write(cls, value, buf): - if value is None: - buf.write_u8(0) - return - - buf.write_u8(1) - _UniffiConverterUInt32.write(value, buf) - - @classmethod - def read(cls, buf): - flag = buf.read_u8() - if flag == 0: - return None - elif flag == 1: - return _UniffiConverterUInt32.read(buf) - else: - raise InternalError("Unexpected flag byte for optional type") - - - -class _UniffiConverterOptionalInt32(_UniffiConverterRustBuffer): - @classmethod - def check_lower(cls, value): - if value is not None: - _UniffiConverterInt32.check_lower(value) - - @classmethod - def write(cls, value, buf): - if value is None: - buf.write_u8(0) - return - - buf.write_u8(1) - _UniffiConverterInt32.write(value, buf) - - @classmethod - def read(cls, buf): - flag = buf.read_u8() - if flag == 0: - return None - elif flag == 1: - return _UniffiConverterInt32.read(buf) - else: - raise InternalError("Unexpected flag byte for optional type") - - - -class _UniffiConverterOptionalFloat(_UniffiConverterRustBuffer): - @classmethod - def check_lower(cls, value): - if value is not None: - _UniffiConverterFloat.check_lower(value) - - @classmethod - def write(cls, value, buf): - if value is None: - buf.write_u8(0) - return - - buf.write_u8(1) - _UniffiConverterFloat.write(value, buf) - - @classmethod - def read(cls, buf): - flag = buf.read_u8() - if flag == 0: - return None - elif flag == 1: - return _UniffiConverterFloat.read(buf) - else: - raise InternalError("Unexpected flag byte for optional type") - - - -class _UniffiConverterOptionalDouble(_UniffiConverterRustBuffer): - @classmethod - def check_lower(cls, 
value): - if value is not None: - _UniffiConverterDouble.check_lower(value) - - @classmethod - def write(cls, value, buf): - if value is None: - buf.write_u8(0) - return - - buf.write_u8(1) - _UniffiConverterDouble.write(value, buf) - - @classmethod - def read(cls, buf): - flag = buf.read_u8() - if flag == 0: - return None - elif flag == 1: - return _UniffiConverterDouble.read(buf) - else: - raise InternalError("Unexpected flag byte for optional type") - - - -class _UniffiConverterOptionalString(_UniffiConverterRustBuffer): - @classmethod - def check_lower(cls, value): - if value is not None: - _UniffiConverterString.check_lower(value) - - @classmethod - def write(cls, value, buf): - if value is None: - buf.write_u8(0) - return - - buf.write_u8(1) - _UniffiConverterString.write(value, buf) - - @classmethod - def read(cls, buf): - flag = buf.read_u8() - if flag == 0: - return None - elif flag == 1: - return _UniffiConverterString.read(buf) - else: - raise InternalError("Unexpected flag byte for optional type") - - - -class _UniffiConverterSequenceTypeExtensionConfig(_UniffiConverterRustBuffer): - @classmethod - def check_lower(cls, value): - for item in value: - _UniffiConverterTypeExtensionConfig.check_lower(item) - - @classmethod - def write(cls, value, buf): - items = len(value) - buf.write_i32(items) - for item in value: - _UniffiConverterTypeExtensionConfig.write(item, buf) - - @classmethod - def read(cls, buf): - count = buf.read_i32() - if count < 0: - raise InternalError("Unexpected negative sequence length") - - return [ - _UniffiConverterTypeExtensionConfig.read(buf) for i in range(count) - ] - - - -class _UniffiConverterSequenceTypeMessage(_UniffiConverterRustBuffer): - @classmethod - def check_lower(cls, value): - for item in value: - _UniffiConverterTypeMessage.check_lower(item) - - @classmethod - def write(cls, value, buf): - items = len(value) - buf.write_i32(items) - for item in value: - _UniffiConverterTypeMessage.write(item, buf) - - @classmethod - 
def read(cls, buf): - count = buf.read_i32() - if count < 0: - raise InternalError("Unexpected negative sequence length") - - return [ - _UniffiConverterTypeMessage.read(buf) for i in range(count) - ] - - - -class _UniffiConverterSequenceTypeMessageContent(_UniffiConverterRustBuffer): - @classmethod - def check_lower(cls, value): - for item in value: - _UniffiConverterTypeMessageContent.check_lower(item) - - @classmethod - def write(cls, value, buf): - items = len(value) - buf.write_i32(items) - for item in value: - _UniffiConverterTypeMessageContent.write(item, buf) - - @classmethod - def read(cls, buf): - count = buf.read_i32() - if count < 0: - raise InternalError("Unexpected negative sequence length") - - return [ - _UniffiConverterTypeMessageContent.read(buf) for i in range(count) - ] - - - -class _UniffiConverterSequenceTypeToolConfig(_UniffiConverterRustBuffer): - @classmethod - def check_lower(cls, value): - for item in value: - _UniffiConverterTypeToolConfig.check_lower(item) - - @classmethod - def write(cls, value, buf): - items = len(value) - buf.write_i32(items) - for item in value: - _UniffiConverterTypeToolConfig.write(item, buf) - - @classmethod - def read(cls, buf): - count = buf.read_i32() - if count < 0: - raise InternalError("Unexpected negative sequence length") - - return [ - _UniffiConverterTypeToolConfig.read(buf) for i in range(count) - ] - - -class _UniffiConverterTypeCompletionRequest: - @staticmethod - def write(value, buf): - _UniffiConverterString.write(value, buf) - - @staticmethod - def read(buf): - return _UniffiConverterString.read(buf) - - @staticmethod - def lift(value): - return _UniffiConverterString.lift(value) - - @staticmethod - def check_lower(value): - return _UniffiConverterString.check_lower(value) - - @staticmethod - def lower(value): - return _UniffiConverterString.lower(value) - - -class _UniffiConverterTypeContents: - @staticmethod - def write(value, buf): - _UniffiConverterSequenceTypeMessageContent.write(value, buf) 
- - @staticmethod - def read(buf): - return _UniffiConverterSequenceTypeMessageContent.read(buf) - - @staticmethod - def lift(value): - return _UniffiConverterSequenceTypeMessageContent.lift(value) - - @staticmethod - def check_lower(value): - return _UniffiConverterSequenceTypeMessageContent.check_lower(value) - - @staticmethod - def lower(value): - return _UniffiConverterSequenceTypeMessageContent.lower(value) - - -class _UniffiConverterTypeJsonValueFfi: - @staticmethod - def write(value, buf): - _UniffiConverterString.write(value, buf) - - @staticmethod - def read(buf): - return _UniffiConverterString.read(buf) - - @staticmethod - def lift(value): - return _UniffiConverterString.lift(value) - - @staticmethod - def check_lower(value): - return _UniffiConverterString.check_lower(value) - - @staticmethod - def lower(value): - return _UniffiConverterString.lower(value) - - -class _UniffiConverterTypeToolConfig: - @staticmethod - def write(value, buf): - _UniffiConverterString.write(value, buf) - - @staticmethod - def read(buf): - return _UniffiConverterString.read(buf) - - @staticmethod - def lift(value): - return _UniffiConverterString.lift(value) - - @staticmethod - def check_lower(value): - return _UniffiConverterString.check_lower(value) - - @staticmethod - def lower(value): - return _UniffiConverterString.lower(value) - - -class _UniffiConverterTypeToolRequestToolCall: - @staticmethod - def write(value, buf): - _UniffiConverterString.write(value, buf) - - @staticmethod - def read(buf): - return _UniffiConverterString.read(buf) - - @staticmethod - def lift(value): - return _UniffiConverterString.lift(value) - - @staticmethod - def check_lower(value): - return _UniffiConverterString.check_lower(value) - - @staticmethod - def lower(value): - return _UniffiConverterString.lower(value) - - -class _UniffiConverterTypeToolResponseToolResult: - @staticmethod - def write(value, buf): - _UniffiConverterString.write(value, buf) - - @staticmethod - def read(buf): - return 
_UniffiConverterString.read(buf) - - @staticmethod - def lift(value): - return _UniffiConverterString.lift(value) - - @staticmethod - def check_lower(value): - return _UniffiConverterString.check_lower(value) - - @staticmethod - def lower(value): - return _UniffiConverterString.lower(value) - -# objects. -CompletionRequest = str -Contents = typing.List[MessageContent] -JsonValueFfi = str -ToolConfig = str -ToolRequestToolCall = str -ToolResponseToolResult = str - -# Async support# RustFuturePoll values -_UNIFFI_RUST_FUTURE_POLL_READY = 0 -_UNIFFI_RUST_FUTURE_POLL_MAYBE_READY = 1 - -# Stores futures for _uniffi_continuation_callback -_UniffiContinuationHandleMap = _UniffiHandleMap() - -_UNIFFI_GLOBAL_EVENT_LOOP = None - -""" -Set the event loop to use for async functions - -This is needed if some async functions run outside of the eventloop, for example: - - A non-eventloop thread is spawned, maybe from `EventLoop.run_in_executor` or maybe from the - Rust code spawning its own thread. - - The Rust code calls an async callback method from a sync callback function, using something - like `pollster` to block on the async call. - -In this case, we need an event loop to run the Python async function, but there's no eventloop set -for the thread. Use `uniffi_set_event_loop` to force an eventloop to be used in this case. -""" -def uniffi_set_event_loop(eventloop: asyncio.BaseEventLoop): - global _UNIFFI_GLOBAL_EVENT_LOOP - _UNIFFI_GLOBAL_EVENT_LOOP = eventloop - -def _uniffi_get_event_loop(): - if _UNIFFI_GLOBAL_EVENT_LOOP is not None: - return _UNIFFI_GLOBAL_EVENT_LOOP - else: - return asyncio.get_running_loop() - -# Continuation callback for async functions -# lift the return value or error and resolve the future, causing the async function to resume. 
-@_UNIFFI_RUST_FUTURE_CONTINUATION_CALLBACK -def _uniffi_continuation_callback(future_ptr, poll_code): - (eventloop, future) = _UniffiContinuationHandleMap.remove(future_ptr) - eventloop.call_soon_threadsafe(_uniffi_set_future_result, future, poll_code) - -def _uniffi_set_future_result(future, poll_code): - if not future.cancelled(): - future.set_result(poll_code) - -async def _uniffi_rust_call_async(rust_future, ffi_poll, ffi_complete, ffi_free, lift_func, error_ffi_converter): - try: - eventloop = _uniffi_get_event_loop() - - # Loop and poll until we see a _UNIFFI_RUST_FUTURE_POLL_READY value - while True: - future = eventloop.create_future() - ffi_poll( - rust_future, - _uniffi_continuation_callback, - _UniffiContinuationHandleMap.insert((eventloop, future)), - ) - poll_code = await future - if poll_code == _UNIFFI_RUST_FUTURE_POLL_READY: - break - - return lift_func( - _uniffi_rust_call_with_error(error_ffi_converter, ffi_complete, rust_future) - ) - finally: - ffi_free(rust_future) -async def completion(req: "CompletionRequest") -> "CompletionResponse": - - """ - Public API for the Goose LLM completion function - """ - - _UniffiConverterTypeCompletionRequest.check_lower(req) - - return await _uniffi_rust_call_async( - _UniffiLib.uniffi_goose_llm_fn_func_completion( - _UniffiConverterTypeCompletionRequest.lower(req)), - _UniffiLib.ffi_goose_llm_rust_future_poll_rust_buffer, - _UniffiLib.ffi_goose_llm_rust_future_complete_rust_buffer, - _UniffiLib.ffi_goose_llm_rust_future_free_rust_buffer, - # lift function - _UniffiConverterTypeCompletionResponse.lift, - - # Error FFI converter -_UniffiConverterTypeCompletionError, - - ) - -def create_completion_request(provider_name: "str",provider_config: "JsonValueFfi",model_config: "ModelConfig",system_preamble: "str",messages: "typing.List[Message]",extensions: "typing.List[ExtensionConfig]") -> "CompletionRequest": - _UniffiConverterString.check_lower(provider_name) - - 
_UniffiConverterTypeJsonValueFfi.check_lower(provider_config) - - _UniffiConverterTypeModelConfig.check_lower(model_config) - - _UniffiConverterString.check_lower(system_preamble) - - _UniffiConverterSequenceTypeMessage.check_lower(messages) - - _UniffiConverterSequenceTypeExtensionConfig.check_lower(extensions) - - return _UniffiConverterTypeCompletionRequest.lift(_uniffi_rust_call(_UniffiLib.uniffi_goose_llm_fn_func_create_completion_request, - _UniffiConverterString.lower(provider_name), - _UniffiConverterTypeJsonValueFfi.lower(provider_config), - _UniffiConverterTypeModelConfig.lower(model_config), - _UniffiConverterString.lower(system_preamble), - _UniffiConverterSequenceTypeMessage.lower(messages), - _UniffiConverterSequenceTypeExtensionConfig.lower(extensions))) - - -def create_tool_config(name: "str",description: "str",input_schema: "JsonValueFfi",approval_mode: "ToolApprovalMode") -> "ToolConfig": - _UniffiConverterString.check_lower(name) - - _UniffiConverterString.check_lower(description) - - _UniffiConverterTypeJsonValueFfi.check_lower(input_schema) - - _UniffiConverterTypeToolApprovalMode.check_lower(approval_mode) - - return _UniffiConverterTypeToolConfig.lift(_uniffi_rust_call(_UniffiLib.uniffi_goose_llm_fn_func_create_tool_config, - _UniffiConverterString.lower(name), - _UniffiConverterString.lower(description), - _UniffiConverterTypeJsonValueFfi.lower(input_schema), - _UniffiConverterTypeToolApprovalMode.lower(approval_mode))) - -async def generate_session_name(provider_name: "str",provider_config: "JsonValueFfi",messages: "typing.List[Message]") -> "str": - - """ - Generates a short (≤4 words) session name - """ - - _UniffiConverterString.check_lower(provider_name) - - _UniffiConverterTypeJsonValueFfi.check_lower(provider_config) - - _UniffiConverterSequenceTypeMessage.check_lower(messages) - - return await _uniffi_rust_call_async( - _UniffiLib.uniffi_goose_llm_fn_func_generate_session_name( - _UniffiConverterString.lower(provider_name), - 
_UniffiConverterTypeJsonValueFfi.lower(provider_config), - _UniffiConverterSequenceTypeMessage.lower(messages)), - _UniffiLib.ffi_goose_llm_rust_future_poll_rust_buffer, - _UniffiLib.ffi_goose_llm_rust_future_complete_rust_buffer, - _UniffiLib.ffi_goose_llm_rust_future_free_rust_buffer, - # lift function - _UniffiConverterString.lift, - - # Error FFI converter -_UniffiConverterTypeProviderError, - - ) -async def generate_tooltip(provider_name: "str",provider_config: "JsonValueFfi",messages: "typing.List[Message]") -> "str": - - """ - Generates a tooltip summarizing the last two messages in the session, - including any tool calls or results. - """ - - _UniffiConverterString.check_lower(provider_name) - - _UniffiConverterTypeJsonValueFfi.check_lower(provider_config) - - _UniffiConverterSequenceTypeMessage.check_lower(messages) - - return await _uniffi_rust_call_async( - _UniffiLib.uniffi_goose_llm_fn_func_generate_tooltip( - _UniffiConverterString.lower(provider_name), - _UniffiConverterTypeJsonValueFfi.lower(provider_config), - _UniffiConverterSequenceTypeMessage.lower(messages)), - _UniffiLib.ffi_goose_llm_rust_future_poll_rust_buffer, - _UniffiLib.ffi_goose_llm_rust_future_complete_rust_buffer, - _UniffiLib.ffi_goose_llm_rust_future_free_rust_buffer, - # lift function - _UniffiConverterString.lift, - - # Error FFI converter -_UniffiConverterTypeProviderError, - - ) - -def print_messages(messages: "typing.List[Message]") -> None: - _UniffiConverterSequenceTypeMessage.check_lower(messages) - - _uniffi_rust_call(_UniffiLib.uniffi_goose_llm_fn_func_print_messages, - _UniffiConverterSequenceTypeMessage.lower(messages)) - - -__all__ = [ - "InternalError", - "CompletionError", - "Content", - "MessageContent", - "ProviderError", - "Role", - "ToolApprovalMode", - "ToolError", - "CompletionResponse", - "ExtensionConfig", - "ImageContent", - "Message", - "ModelConfig", - "ProviderCompleteResponse", - "RedactedThinkingContent", - "RuntimeMetrics", - "TextContent", - 
"ThinkingContent", - "ToolRequest", - "ToolResponse", - "Usage", - "completion", - "create_completion_request", - "create_tool_config", - "generate_session_name", - "generate_tooltip", - "print_messages", -] - diff --git a/bindings/python/usage.py b/bindings/python/usage.py deleted file mode 100644 index bdfb39181211..000000000000 --- a/bindings/python/usage.py +++ /dev/null @@ -1,133 +0,0 @@ -import asyncio -import os -import time -from goose_llm import ( - Message, MessageContent, TextContent, ToolRequest, ToolResponse, - Role, ModelConfig, ToolApprovalMode, - create_tool_config, ExtensionConfig, - generate_session_name, generate_tooltip, - create_completion_request, completion -) - -async def main(): - now = int(time.time()) - - # 1) User sends a plain-text prompt - messages = [ - Message( - role=Role.USER, - created=now, - content=[MessageContent.TEXT(TextContent(text="What is 7 x 6?"))] - ), - - # 2) Assistant makes a tool request - Message( - role=Role.ASSISTANT, - created=now + 2, - content=[MessageContent.TOOL_REQ(ToolRequest( - id="calc1", - tool_call=""" - { - "status": "success", - "value": { - "name": "calculator_extension__toolname", - "arguments": { - "operation": "multiply", - "numbers": [7, 6] - }, - "needsApproval": false - } - } - """ - ))] - ), - - # 3) User sends tool result - Message( - role=Role.USER, - created=now + 3, - content=[MessageContent.TOOL_RESP(ToolResponse( - id="calc1", - tool_result=""" - { - "status": "success", - "value": [ - {"type": "text", "text": "42"} - ] - } - """ - ))] - ) - ] - - provider_name = "databricks" - provider_config = f'''{{ - "host": "{os.environ.get("DATABRICKS_HOST")}", - "token": "{os.environ.get("DATABRICKS_TOKEN")}" - }}''' - - print(f"Provider Name: {provider_name}") - print(f"Provider Config: {provider_config}") - - session_name = await generate_session_name(provider_name, provider_config, messages) - print(f"\nSession Name: {session_name}") - - tooltip = await generate_tooltip(provider_name, 
provider_config, messages) - print(f"\nTooltip: {tooltip}") - - model_config = ModelConfig( - model_name="goose-gpt-4-1", - max_tokens=500, - temperature=0.1, - context_limit=4096, - ) - - calculator_tool = create_tool_config( - name="calculator", - description="Perform basic arithmetic operations", - input_schema=""" - { - "type": "object", - "required": ["operation", "numbers"], - "properties": { - "operation": { - "type": "string", - "enum": ["add", "subtract", "multiply", "divide"], - "description": "The arithmetic operation to perform" - }, - "numbers": { - "type": "array", - "items": { "type": "number" }, - "description": "List of numbers to operate on in order" - } - } - } - """, - approval_mode=ToolApprovalMode.AUTO - ) - - calculator_extension = ExtensionConfig( - name="calculator_extension", - instructions="This extension provides a calculator tool.", - tools=[calculator_tool] - ) - - system_preamble = "You are a helpful assistant." - extensions = [calculator_extension] - - req = create_completion_request( - provider_name, - provider_config, - model_config, - system_preamble, - messages, - extensions - ) - - resp = await completion(req) - print(f"\nCompletion Response:\n{resp.message}") - print(f"Msg content: {resp.message.content[0][0]}") - - -if __name__ == "__main__": - asyncio.run(main()) \ No newline at end of file diff --git a/crates/goose-ffi/Cargo.toml b/crates/goose-ffi/Cargo.toml deleted file mode 100644 index f5e430f6dbcd..000000000000 --- a/crates/goose-ffi/Cargo.toml +++ /dev/null @@ -1,28 +0,0 @@ -[package] -name = "goose-ffi" -build = "build.rs" -version.workspace = true -edition.workspace = true -authors.workspace = true -license.workspace = true -repository.workspace = true -description.workspace = true - -[lints] -workspace = true - -[lib] -name = "goose_ffi" -crate-type = ["cdylib"] - -[dependencies] -goose = { path = "../goose" } -futures = "0.3" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -tokio = { version 
= "1", features = ["full"] } -libc = "0.2" -once_cell = "1.18" - -[build-dependencies] -cbindgen = "0.24.0" diff --git a/crates/goose-ffi/README.md b/crates/goose-ffi/README.md deleted file mode 100644 index acb5b8458539..000000000000 --- a/crates/goose-ffi/README.md +++ /dev/null @@ -1,127 +0,0 @@ -# Goose FFI - -Foreign Function Interface (FFI) for the Goose AI agent framework, allowing integration with other programming languages. - -## Overview - -The Goose FFI library provides C-compatible bindings for the Goose AI agent framework, enabling you to: - -- Create and manage Goose agents from any language with C FFI support -- Configure and use the Databricks AI provider for now but is extensible to other providers as needed -- Send messages to agents and receive responses - -## Building - -To build the FFI library, you'll need Rust and Cargo installed. Then run: - -```bash -# Build the library in debug mode -cargo build --package goose-ffi - -# Build the library in release mode (recommended for production) -cargo build --release --package goose-ffi -``` - -This will generate a dynamic library (.so, .dll, or .dylib depending on your platform) in the `target` directory, and automatically generate the C header file in the `include` directory. - -You can also build cross-platform binaries using cross command. For example to build for linux x86_64 architecture from Mac would require running - -```bash -CROSS_BUILD_OPTS="--platform linux/amd64 --no-cache" CROSS_CONTAINER_OPTS="--platform linux/amd64" cross build -p goose-ffi --release --target x86_64-unknown-linux-gnu --no-default-features -``` -Note that this works only for gnu linux as it requires glibc. - -## Generated C Header - -The library uses cbindgen to automatically generate a C header file (`goose_ffi.h`) during the build process. This header contains all the necessary types and function declarations to use the library from C or any language with C FFI support. 
- -## Examples - -The FFI library includes examples in multiple languages to demonstrate how to use it. - -### Python Example - -The `examples/goose_agent.py` demonstrates using the FFI library from Python with ctypes. It shows: - -1. How to create a proper Python wrapper around the Goose FFI interface -2. Loading the shared library dynamically based on platform -3. Setting up C-compatible structures -4. Creating an object-oriented API for easier use - -Note: Tool callback functionality shown in earlier versions is not currently available and will be implemented in a future release. - -To run the Python example: - -```bash -# First, build the FFI library -cargo build --release --package goose-ffi - -# Then set the environment variables & run the example -DATABRICKS_HOST=... DATABRICKS_API_KEY=... python crates/goose-ffi/examples/goose_agent.py -``` - -You need to have Python 3.6+ installed with the `ctypes` module (included in standard library). - - -``` -> Tell me about the Eiffel Tower -``` - -The agent will respond with information about the Eiffel Tower. - -## Using from Other Languages - -The Goose FFI library can be used from many programming languages with C FFI support, including: - -- Python (via ctypes or cffi) -- JavaScript/Node.js (via node-ffi) -- Ruby (via fiddle) -- C#/.NET (via P/Invoke) -- Go (via cgo) -- Java / Kotlin (via JNA or JNI) - -Check the documentation for FFI support in your language of choice for details on how to load and use a C shared library. - -## Provider Configuration - -The FFI interface uses a provider type enumeration to specify which AI provider to use: - -```c -// C enum (defined in examples/simple_agent.c) -typedef enum { - PROVIDER_DATABRICKS = 0, // Databricks AI provider -} ProviderType; -``` - -```python -# Python enum (defined in examples/goose_agent.py) -class ProviderType(IntEnum): - DATABRICKS = 0 # Databricks AI provider -``` - -Currently, only the Databricks provider (provider_type = 0) is supported. 
If you attempt to use any other provider type, an error will be returned. - -### Environment-based Configuration - -The library supports configuration via environment variables, which makes it easier to use in containerized or CI/CD environments without hardcoding credentials: - -#### Databricks Provider (type = 0) - -``` -DATABRICKS_API_KEY=dapi... # Databricks API key -DATABRICKS_HOST=... # Databricks host URL (e.g., "https://your-workspace.cloud.databricks.com") -``` - -These environment variables will be used automatically if you don't provide the corresponding parameters when creating an agent. - -## Thread Safety - -The FFI library is designed to be thread-safe. Each agent instance is independent, and tools callbacks are handled in a thread-safe manner. However, the same agent instance should not be used from multiple threads simultaneously without external synchronization. - -## Error Handling - -Functions that can fail return either null pointers or special result structures that indicate success or failure. Always check return values and clean up resources using the appropriate free functions. - -## Memory Management - -The FFI interface handles memory allocation and deallocation. Use the provided free functions (like `goose_free_string` and `goose_free_async_result`) to release resources when you're done with them. 
diff --git a/crates/goose-ffi/build.rs b/crates/goose-ffi/build.rs deleted file mode 100644 index a26c3409cdf4..000000000000 --- a/crates/goose-ffi/build.rs +++ /dev/null @@ -1,48 +0,0 @@ -use std::env; -use std::path::PathBuf; - -fn main() { - let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); - - let config = cbindgen::Config { - language: cbindgen::Language::C, - documentation: true, - header: Some( - r#" -#ifndef GOOSE_FFI_H -#define GOOSE_FFI_H - -/* Goose FFI - C interface for the Goose AI agent framework */ -"# - .trim_start() - .to_string(), - ), - trailer: Some("#endif // GOOSE_FFI_H".to_string()), - includes: vec![], - sys_includes: vec!["stdint.h".to_string(), "stdbool.h".to_string()], - export: cbindgen::ExportConfig { - prefix: Some("goose_".to_string()), - ..Default::default() - }, - documentation_style: cbindgen::DocumentationStyle::C, - enumeration: cbindgen::EnumConfig { - prefix_with_name: true, - derive_helper_methods: true, - ..Default::default() - }, - ..Default::default() - }; - - let bindings = cbindgen::Builder::new() - .with_crate(&crate_dir) - .with_config(config) - .generate() - .expect("Unable to generate bindings"); - - let out_path = PathBuf::from(&crate_dir).join("include"); - std::fs::create_dir_all(&out_path).expect("Failed to create include directory"); - bindings.write_to_file(out_path.join("goose_ffi.h")); - - println!("cargo:rerun-if-changed=src/lib.rs"); - println!("cargo:rerun-if-changed=build.rs"); -} diff --git a/crates/goose-ffi/examples/goose_agent.py b/crates/goose-ffi/examples/goose_agent.py deleted file mode 100644 index 76f3fed534ff..000000000000 --- a/crates/goose-ffi/examples/goose_agent.py +++ /dev/null @@ -1,116 +0,0 @@ -#!/usr/bin/env python3 -""" -Python example for using the Goose FFI interface. - -This example demonstrates how to: -1. Load the Goose FFI library -2. Create an agent with a provider -3. Add a tool extension -4. Send messages to the agent -5. 
Handle tool calls and responses -""" - -import ctypes -import os -import platform -from ctypes import c_char_p, c_bool, c_uint32, c_void_p, Structure, POINTER - -class ProviderType: - DATABRICKS = 0 - -# Platform-specific dynamic lib name -if platform.system() == "Darwin": - LIB_NAME = "libgoose_ffi.dylib" -elif platform.system() == "Linux": - LIB_NAME = "libgoose_ffi.so" -elif platform.system() == "Windows": - LIB_NAME = "goose_ffi.dll" -else: - raise RuntimeError("Unsupported platform") - -# Adjust to your actual build output directory -LIB_PATH = os.path.join(os.path.dirname(__file__), "../../..", "target", "debug", LIB_NAME) - -# Load library -goose = ctypes.CDLL(LIB_PATH) - -# Forward declaration for goose_Agent -class goose_Agent(Structure): - pass - -# Agent pointer type -goose_AgentPtr = POINTER(goose_Agent) - -# C struct mappings -class ProviderConfig(Structure): - _fields_ = [ - ("provider_type", c_uint32), - ("api_key", c_char_p), - ("model_name", c_char_p), - ("host", c_char_p), - ] - -class AsyncResult(Structure): - _fields_ = [ - ("succeeded", c_bool), - ("error_message", c_char_p), - ] - -# Function signatures -goose.goose_agent_new.argtypes = [POINTER(ProviderConfig)] -goose.goose_agent_new.restype = goose_AgentPtr - -goose.goose_agent_free.argtypes = [goose_AgentPtr] -goose.goose_agent_free.restype = None - -goose.goose_agent_send_message.argtypes = [goose_AgentPtr, c_char_p] -goose.goose_agent_send_message.restype = c_void_p - -goose.goose_free_string.argtypes = [c_void_p] -goose.goose_free_string.restype = None - -goose.goose_free_async_result.argtypes = [POINTER(AsyncResult)] -goose.goose_free_async_result.restype = None - -class GooseAgent: - def __init__(self, provider_type=ProviderType.DATABRICKS, api_key=None, model_name=None, host=None): - self.config = ProviderConfig( - provider_type=provider_type, - api_key=api_key.encode("utf-8") if api_key else None, - model_name=model_name.encode("utf-8") if model_name else None, - 
host=host.encode("utf-8") if host else None, - ) - self.agent = goose.goose_agent_new(ctypes.byref(self.config)) - if not self.agent: - raise RuntimeError("Failed to create Goose agent") - - def __del__(self): - if getattr(self, "agent", None): - goose.goose_agent_free(self.agent) - - def send_message(self, message: str) -> str: - msg = message.encode("utf-8") - response_ptr = goose.goose_agent_send_message(self.agent, msg) - if not response_ptr: - return "Error or NULL response from agent" - response = ctypes.string_at(response_ptr).decode("utf-8") - # Free the string using the proper C function provided by the library - # This correctly releases memory allocated by the Rust side - goose.goose_free_string(response_ptr) - return response - -def main(): - api_key = os.getenv("DATABRICKS_API_KEY") - host = os.getenv("DATABRICKS_HOST") - agent = GooseAgent(api_key=api_key, model_name="claude-3-7-sonnet", host=host) - - print("Type a message (or 'quit' to exit):") - while True: - user_input = input("> ") - if user_input.lower() in ("quit", "exit"): - break - reply = agent.send_message(user_input) - print(f"Agent: {reply}\n") - -if __name__ == "__main__": - main() diff --git a/crates/goose-ffi/include/goose_ffi.h b/crates/goose-ffi/include/goose_ffi.h deleted file mode 100644 index 283e1471785b..000000000000 --- a/crates/goose-ffi/include/goose_ffi.h +++ /dev/null @@ -1,145 +0,0 @@ -#ifndef GOOSE_FFI_H -#define GOOSE_FFI_H - -/* Goose FFI - C interface for the Goose AI agent framework */ - - -#include -#include -#include -#include -#include -#include - -/* - Provider Type enumeration - Currently only Databricks is supported - */ -enum goose_ProviderType { - /* - Databricks AI provider - */ - goose_ProviderType_Databricks = 0, -}; -typedef uint32_t goose_ProviderType; - -/* - Result type for async operations - - - succeeded: true if the operation succeeded, false otherwise - - error_message: Error message if succeeded is false, NULL otherwise - */ -typedef struct 
goose_AsyncResult { - bool succeeded; - char *error_message; -} goose_AsyncResult; - -/* - Pointer type for the agent - */ -typedef goose_Agent *goose_AgentPtr; - -/* - Provider configuration used to initialize an AI provider - - - provider_type: Provider type (0 = Databricks, other values will produce an error) - - api_key: Provider API key (null for default from environment variables) - - model_name: Model name to use (null for provider default) - - host: Provider host URL (null for default from environment variables) - */ -typedef struct goose_ProviderConfigFFI { - goose_ProviderType provider_type; - const char *api_key; - const char *model_name; - const char *host; -} goose_ProviderConfigFFI; - -/* - Free an async result structure - - This function frees the memory allocated for an AsyncResult structure, - including any error message it contains. - - # Safety - - The result pointer must be a valid pointer returned by a goose FFI function, - or NULL. - */ -void goose_free_async_result(struct goose_AsyncResult *result); - -/* - Create a new agent with the given provider configuration - - # Parameters - - - config: Provider configuration - - # Returns - - A new agent pointer, or a null pointer if creation failed - - # Safety - - The config pointer must be valid or NULL. The resulting agent must be freed - with goose_agent_free when no longer needed. - */ -goose_AgentPtr goose_agent_new(const struct goose_ProviderConfigFFI *config); - -/* - Free an agent - - This function frees the memory allocated for an agent. - - # Parameters - - - agent_ptr: Agent pointer returned by goose_agent_new - - # Safety - - The agent_ptr must be a valid pointer returned by goose_agent_new, - or have a null internal pointer. The agent_ptr must not be used after - calling this function. - */ -void goose_agent_free(goose_AgentPtr agent_ptr); - -/* - Send a message to the agent and get the response - - This function sends a message to the agent and returns the response. 
- Tool handling is not yet supported and will be implemented in a future commit - so this may change significantly - - # Parameters - - - agent_ptr: Agent pointer - - message: Message to send - - # Returns - - A C string with the agent's response, or NULL on error. - This string must be freed with goose_free_string when no longer needed. - - # Safety - - The agent_ptr must be a valid pointer returned by goose_agent_new. - The message must be a valid C string. - */ -char *goose_agent_send_message(goose_AgentPtr agent_ptr, const char *message); - -/* - Free a string allocated by goose FFI functions - - This function frees memory allocated for strings returned by goose FFI functions. - - # Parameters - - - s: String to free - - # Safety - - The string must have been allocated by a goose FFI function, or be NULL. - The string must not be used after calling this function. - */ -void goose_free_string(char *s); - -#endif // GOOSE_FFI_H diff --git a/crates/goose-ffi/src/lib.rs b/crates/goose-ffi/src/lib.rs deleted file mode 100644 index 6bcf3abe1695..000000000000 --- a/crates/goose-ffi/src/lib.rs +++ /dev/null @@ -1,311 +0,0 @@ -use std::ffi::{c_char, CStr, CString}; -use std::ptr; -use std::sync::Arc; - -use futures::StreamExt; -use goose::agents::{Agent, AgentEvent}; -use goose::message::Message; -use goose::model::ModelConfig; -use goose::providers::databricks::DatabricksProvider; -use once_cell::sync::OnceCell; -use tokio::runtime::Runtime; - -// This class is in alpha and not yet ready for production use -// and the API is not yet stable. Use at your own risk. 
- -// Thread-safe global runtime -static RUNTIME: OnceCell = OnceCell::new(); - -// Get or initialize the global runtime -fn get_runtime() -> &'static Runtime { - RUNTIME.get_or_init(|| { - // Runtime with all features enabled - Runtime::new().expect("Failed to create Tokio runtime") - }) -} - -/// Pointer type for the agent -pub type AgentPtr = *mut Agent; -/// Provider Type enumeration -/// Currently only Databricks is supported -#[repr(u32)] -#[derive(Debug, Clone, Copy)] -pub enum ProviderType { - /// Databricks AI provider - Databricks = 0, -} - -/// Provider configuration used to initialize an AI provider -/// -/// - provider_type: Provider type (0 = Databricks, other values will produce an error) -/// - api_key: Provider API key (null for default from environment variables) -/// - model_name: Model name to use (null for provider default) -/// - host: Provider host URL (null for default from environment variables) -#[repr(C)] -pub struct ProviderConfigFFI { - pub provider_type: ProviderType, - pub api_key: *const c_char, - pub model_name: *const c_char, - pub host: *const c_char, -} - -// Extension configuration will be implemented in a future commit - -/// Role enum for message participants -#[repr(u32)] -#[derive(Debug, Clone, Copy)] -pub enum MessageRole { - /// User message role - User = 0, - /// Assistant message role - Assistant = 1, - /// System message role - System = 2, -} - -/// Message structure for agent interactions -/// -/// - role: Message role (User, Assistant, or System) -/// - content: Text content of the message -#[repr(C)] -pub struct MessageFFI { - pub role: MessageRole, - pub content: *const c_char, -} - -// Tool callbacks will be implemented in a future commit - -/// Result type for async operations -/// -/// - succeeded: true if the operation succeeded, false otherwise -/// - error_message: Error message if succeeded is false, NULL otherwise -#[repr(C)] -pub struct AsyncResult { - pub succeeded: bool, - pub error_message: *mut c_char, 
-} - -/// Free an async result structure -/// -/// This function frees the memory allocated for an AsyncResult structure, -/// including any error message it contains. -/// -/// # Safety -/// -/// The result pointer must be a valid pointer returned by a goose FFI function, -/// or NULL. -#[no_mangle] -pub unsafe extern "C" fn goose_free_async_result(result: *mut AsyncResult) { - if !result.is_null() { - let result = &mut *result; - if !result.error_message.is_null() { - let _ = CString::from_raw(result.error_message); - } - let _ = Box::from_raw(result); - } -} - -/// Create a new agent with the given provider configuration -/// -/// # Parameters -/// -/// - config: Provider configuration -/// -/// # Returns -/// -/// A new agent pointer, or a null pointer if creation failed -/// -/// # Safety -/// -/// The config pointer must be valid or NULL. The resulting agent must be freed -/// with goose_agent_free when no longer needed. -#[no_mangle] -pub unsafe extern "C" fn goose_agent_new(config: *const ProviderConfigFFI) -> AgentPtr { - // Check for null pointer - if config.is_null() { - eprintln!("Error: config pointer is null"); - return ptr::null_mut(); - } - - let config = &*config; - - // We currently only support Databricks provider - // This match ensures future compiler errors if new provider types are added without handling - match config.provider_type { - ProviderType::Databricks => (), // Databricks provider is supported - } - - // Get api_key from config or environment - let api_key = if !config.api_key.is_null() { - CStr::from_ptr(config.api_key).to_string_lossy().to_string() - } else { - match std::env::var("DATABRICKS_API_KEY") { - Ok(key) => key, - Err(_) => { - eprintln!("Error: api_key not provided and DATABRICKS_API_KEY environment variable not set"); - return ptr::null_mut(); - } - } - }; - - // Check and get required model_name (no env fallback for model) - if config.model_name.is_null() { - eprintln!("Error: model_name is required but was null"); - 
return ptr::null_mut(); - } - let model_name = CStr::from_ptr(config.model_name) - .to_string_lossy() - .to_string(); - - // Get host from config or environment - let host = if !config.host.is_null() { - CStr::from_ptr(config.host).to_string_lossy().to_string() - } else { - match std::env::var("DATABRICKS_HOST") { - Ok(url) => url, - Err(_) => { - eprintln!( - "Error: host not provided and DATABRICKS_HOST environment variable not set" - ); - return ptr::null_mut(); - } - } - }; - - // Create model config with model name - let model_config = ModelConfig::new(model_name); - - // Create Databricks provider with required parameters - match DatabricksProvider::from_params(host, api_key, model_config) { - Ok(provider) => { - let agent = Agent::new(); - get_runtime().block_on(async { - let _ = agent.update_provider(Arc::new(provider)).await; - }); - Box::into_raw(Box::new(agent)) - } - Err(e) => { - eprintln!("Error creating Databricks provider: {:?}", e); - ptr::null_mut() - } - } -} - -/// Free an agent -/// -/// This function frees the memory allocated for an agent. -/// -/// # Parameters -/// -/// - agent_ptr: Agent pointer returned by goose_agent_new -/// -/// # Safety -/// -/// The agent_ptr must be a valid pointer returned by goose_agent_new, -/// or have a null internal pointer. The agent_ptr must not be used after -/// calling this function. -#[no_mangle] -pub unsafe extern "C" fn goose_agent_free(agent_ptr: AgentPtr) { - if !agent_ptr.is_null() { - let _ = Box::from_raw(agent_ptr); - } -} - -/// Send a message to the agent and get the response -/// -/// This function sends a message to the agent and returns the response. -/// Tool handling is not yet supported and will be implemented in a future commit -/// so this may change significantly -/// -/// # Parameters -/// -/// - agent_ptr: Agent pointer -/// - message: Message to send -/// -/// # Returns -/// -/// A C string with the agent's response, or NULL on error. 
-/// This string must be freed with goose_free_string when no longer needed. -/// -/// # Safety -/// -/// The agent_ptr must be a valid pointer returned by goose_agent_new. -/// The message must be a valid C string. -#[no_mangle] -pub unsafe extern "C" fn goose_agent_send_message( - agent_ptr: AgentPtr, - message: *const c_char, -) -> *mut c_char { - if agent_ptr.is_null() || message.is_null() { - return ptr::null_mut(); - } - - let agent = &mut *agent_ptr; - let message = CStr::from_ptr(message).to_string_lossy().to_string(); - - let messages = vec![Message::user().with_text(&message)]; - - // Block on the async call using our global runtime - let response = get_runtime().block_on(async { - let mut stream = match agent.reply(&messages, None, None).await { - Ok(stream) => stream, - Err(e) => return format!("Error getting reply from agent: {}", e), - }; - - let mut full_response = String::new(); - - while let Some(message_result) = stream.next().await { - match message_result { - Ok(AgentEvent::Message(message)) => { - // Get text or serialize to JSON - // Note: Message doesn't have as_text method, we'll serialize to JSON - if let Ok(json) = serde_json::to_string(&message) { - full_response.push_str(&json); - } - } - Ok(AgentEvent::McpNotification(_)) => { - // TODO: Handle MCP notifications. - } - Ok(AgentEvent::ModelChange { .. }) => { - // Model change events are informational, just continue - } - - Err(e) => { - full_response.push_str(&format!("\nError in message stream: {}", e)); - } - } - } - full_response - }); - - string_to_c_char(&response) -} - -// Tool schema creation will be implemented in a future commit - -/// Free a string allocated by goose FFI functions -/// -/// This function frees memory allocated for strings returned by goose FFI functions. -/// -/// # Parameters -/// -/// - s: String to free -/// -/// # Safety -/// -/// The string must have been allocated by a goose FFI function, or be NULL. 
-/// The string must not be used after calling this function. -#[no_mangle] -pub unsafe extern "C" fn goose_free_string(s: *mut c_char) { - if !s.is_null() { - let _ = CString::from_raw(s); - } -} - -// Helper function to convert a Rust string to a C char pointer -fn string_to_c_char(s: &str) -> *mut c_char { - match CString::new(s) { - Ok(c_string) => c_string.into_raw(), - Err(_) => ptr::null_mut(), - } -} diff --git a/crates/goose-llm/Cargo.toml b/crates/goose-llm/Cargo.toml deleted file mode 100644 index 9f8c851f5419..000000000000 --- a/crates/goose-llm/Cargo.toml +++ /dev/null @@ -1,73 +0,0 @@ -[package] -name = "goose-llm" -edition.workspace = true -version.workspace = true -authors.workspace = true -license.workspace = true -repository.workspace = true -description.workspace = true - -[lints] -workspace = true - -[lib] -crate-type = ["lib", "cdylib"] -name = "goose_llm" - -[dependencies] -goose = { path = "../goose" } -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -anyhow = "1.0" -thiserror = "1.0" -minijinja = "2.8.0" -include_dir = "0.7.4" -once_cell = "1.20.2" -chrono = { version = "0.4.38", features = ["serde"] } -reqwest = { version = "0.12.9", features = [ - "rustls-tls-native-roots", - "json", - "cookies", - "gzip", - "brotli", - "deflate", - "zstd", - "charset", - "http2", - "stream" - ], default-features = false } -async-trait = "0.1" -url = "2.5" -base64 = "0.21" -regex = "1.11.1" -tracing = "0.1" -smallvec = { version = "1.13", features = ["serde"] } -indoc = "1.0" -# https://github.com/mozilla/uniffi-rs/blob/c7f6caa3d1bf20f934346cefd8e82b5093f0dc6f/fixtures/futures/Cargo.toml#L22 -uniffi = { version = "0.29", features = ["tokio", "cli", "scaffolding-ffi-buffer-fns"] } -tokio = { version = "1.43", features = ["time", "sync"] } - -[dev-dependencies] -criterion = "0.5" -tempfile = "3.15.0" -dotenvy = "0.15.7" -lazy_static = "1.5" -ctor = "0.2.7" -tokio = { version = "1.43", features = ["full"] } - -[[bin]] -# 
https://mozilla.github.io/uniffi-rs/latest/tutorial/foreign_language_bindings.html -name = "uniffi-bindgen" -path = "uniffi-bindgen.rs" - -[[example]] -name = "simple" -path = "examples/simple.rs" - -[[example]] -name = "image" -path = "examples/image.rs" - -[[example]] -name = "prompt_override" -path = "examples/prompt_override.rs" diff --git a/crates/goose-llm/README.md b/crates/goose-llm/README.md deleted file mode 100644 index f32b61c3b6d1..000000000000 --- a/crates/goose-llm/README.md +++ /dev/null @@ -1,80 +0,0 @@ -## goose-llm - -This crate is meant to be used for foreign function interface (FFI). It's meant to be -stateless and contain logic related to providers and prompts: -- chat completion with model providers -- detecting read-only tools for smart approval -- methods for summarization / truncation - - -Run: -``` -cargo run -p goose-llm --example simple -``` - - -## Kotlin bindings - -Structure: -``` -. -└── crates - └── goose-llm/... -└── target - └── debug/libgoose_llm.dylib -├── bindings -│ └── kotlin -│ ├── example -│ │ └── Usage.kt ← your demo app -│ └── uniffi -│ └── goose_llm -│ └── goose_llm.kt ← auto-generated bindings -``` - - -#### Kotlin -> Rust: run example - -The following `just` command creates kotlin bindings, then compiles and runs an example. 
- -```bash -just kotlin-example -``` - -You will have to download jars in `bindings/kotlin/libs` directory (only the first time): -```bash -pushd bindings/kotlin/libs/ -curl -O https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.9.0/kotlin-stdlib-1.9.0.jar -curl -O https://repo1.maven.org/maven2/org/jetbrains/kotlinx/kotlinx-coroutines-core-jvm/1.7.3/kotlinx-coroutines-core-jvm-1.7.3.jar -curl -O https://repo1.maven.org/maven2/net/java/dev/jna/jna/5.13.0/jna-5.13.0.jar -popd -``` - -To just create the Kotlin bindings (for MacOS): - -```bash -# run from project root directory -cargo build -p goose-llm - -cargo run --features=uniffi/cli --bin uniffi-bindgen generate --library ./target/debug/libgoose_llm.dylib --language kotlin --out-dir bindings/kotlin -``` - -Creating `libgoose_llm.so` for Linux distribution: - -Use `cross` to build for the specific target and then create the bindings: -``` -# x86-64 GNU/Linux (kGoose uses this) -rustup target add x86_64-unknown-linux-gnu -cross build --release --target x86_64-unknown-linux-gnu -p goose-llm - -# The goose_llm.kt bindings produced should be the same whether we use 'libgoose_llm.dylib' or 'libgoose_llm.so' -cross run --features=uniffi/cli --bin uniffi-bindgen generate --library ./target/x86_64-unknown-linux-gnu/release/libgoose_llm.so --language kotlin --out-dir bindings/kotlin -``` - - -#### Python -> Rust: generate bindings, run example - -```bash -cargo run --features=uniffi/cli --bin uniffi-bindgen generate --library ./target/debug/libgoose_llm.dylib --language python --out-dir bindings/python - -DYLD_LIBRARY_PATH=./target/debug python bindings/python/usage.py -``` diff --git a/crates/goose-llm/examples/image.rs b/crates/goose-llm/examples/image.rs deleted file mode 100644 index 7c607713e9cf..000000000000 --- a/crates/goose-llm/examples/image.rs +++ /dev/null @@ -1,53 +0,0 @@ -use anyhow::Result; -use base64::{engine::general_purpose::STANDARD as BASE64, Engine as _}; -use goose_llm::{ - 
completion, - message::MessageContent, - types::completion::{CompletionRequest, CompletionResponse}, - Message, ModelConfig, -}; -use serde_json::json; -use std::{fs, vec}; - -#[tokio::main] -async fn main() -> Result<()> { - let provider = "databricks"; - let provider_config = json!({ - "host": std::env::var("DATABRICKS_HOST").expect("Missing DATABRICKS_HOST"), - "token": std::env::var("DATABRICKS_TOKEN").expect("Missing DATABRICKS_TOKEN"), - }); - let model_name = "goose-claude-4-sonnet"; // "gpt-4o"; - let model_config = ModelConfig::new(model_name.to_string()); - - let system_preamble = "You are a helpful assistant."; - - // Read and encode test image - let image_data = fs::read("examples/test_assets/test_image.png")?; - let base64_image = BASE64.encode(image_data); - - let user_msg = Message::user() - .with_text("What do you see in this image?") - .with_content(MessageContent::image(base64_image, "image/png")); - - let messages = vec![user_msg]; - - let completion_response: CompletionResponse = completion( - CompletionRequest::new( - provider.to_string(), - provider_config.clone(), - model_config.clone(), - Some(system_preamble.to_string()), - None, - messages, - vec![], - ) - .with_request_id("test-image-1".to_string()), - ) - .await?; - - // Print the response - println!("\nCompletion Response:"); - println!("{}", serde_json::to_string_pretty(&completion_response)?); - - Ok(()) -} diff --git a/crates/goose-llm/examples/prompt_override.rs b/crates/goose-llm/examples/prompt_override.rs deleted file mode 100644 index 3cebffc5198b..000000000000 --- a/crates/goose-llm/examples/prompt_override.rs +++ /dev/null @@ -1,48 +0,0 @@ -use std::vec; - -use anyhow::Result; -use goose_llm::{ - completion, - types::completion::{CompletionRequest, CompletionResponse}, - Message, ModelConfig, -}; -use serde_json::json; - -#[tokio::main] -async fn main() -> Result<()> { - let provider = "databricks"; - let provider_config = json!({ - "host": 
std::env::var("DATABRICKS_HOST").expect("Missing DATABRICKS_HOST"), - "token": std::env::var("DATABRICKS_TOKEN").expect("Missing DATABRICKS_TOKEN"), - }); - // let model_name = "goose-gpt-4-1"; // parallel tool calls - let model_name = "claude-3-5-haiku"; - let model_config = ModelConfig::new(model_name.to_string()); - - let system_prompt_override = "You are a helpful assistant. Talk in the style of pirates."; - - for text in ["How was your day?"] { - println!("\n---------------\n"); - println!("User Input: {text}"); - let messages = vec![ - Message::user().with_text("Hi there!"), - Message::assistant().with_text("How can I help?"), - Message::user().with_text(text), - ]; - let completion_response: CompletionResponse = completion(CompletionRequest::new( - provider.to_string(), - provider_config.clone(), - model_config.clone(), - None, - Some(system_prompt_override.to_string()), - messages.clone(), - vec![], - )) - .await?; - // Print the response - println!("\nCompletion Response:"); - println!("{}", serde_json::to_string_pretty(&completion_response)?); - } - - Ok(()) -} diff --git a/crates/goose-llm/examples/simple.rs b/crates/goose-llm/examples/simple.rs deleted file mode 100644 index efab4b0abc57..000000000000 --- a/crates/goose-llm/examples/simple.rs +++ /dev/null @@ -1,124 +0,0 @@ -use std::vec; - -use anyhow::Result; -use goose_llm::{ - completion, - extractors::generate_tooltip, - types::completion::{ - CompletionRequest, CompletionResponse, ExtensionConfig, ToolApprovalMode, ToolConfig, - }, - Message, ModelConfig, -}; -use serde_json::json; - -#[tokio::main] -async fn main() -> Result<()> { - let provider = "databricks"; - let provider_config = json!({ - "host": std::env::var("DATABRICKS_HOST").expect("Missing DATABRICKS_HOST"), - "token": std::env::var("DATABRICKS_TOKEN").expect("Missing DATABRICKS_TOKEN"), - }); - // let model_name = "goose-gpt-4-1"; // parallel tool calls - let model_name = "claude-3-5-haiku"; - let model_config = 
ModelConfig::new(model_name.to_string()); - - let calculator_tool = ToolConfig::new( - "calculator", - "Perform basic arithmetic operations", - json!({ - "type": "object", - "required": ["operation", "numbers"], - "properties": { - "operation": { - "type": "string", - "enum": ["add", "subtract", "multiply", "divide"], - "description": "The arithmetic operation to perform", - }, - "numbers": { - "type": "array", - "items": {"type": "number"}, - "description": "List of numbers to operate on in order", - } - } - }), - ToolApprovalMode::Auto, - ); - - let bash_tool = ToolConfig::new( - "bash_shell", - "Run a shell command", - json!({ - "type": "object", - "required": ["command"], - "properties": { - "command": { - "type": "string", - "description": "The shell command to execute" - } - } - }), - ToolApprovalMode::Manual, - ); - - let list_dir_tool = ToolConfig::new( - "list_directory", - "List files in a directory", - json!({ - "type": "object", - "required": ["path"], - "properties": { - "path": { - "type": "string", - "description": "The directory path to list files from" - } - } - }), - ToolApprovalMode::Auto, - ); - - let extensions = vec![ - ExtensionConfig::new( - "calculator_extension".to_string(), - Some("This extension provides a calculator tool.".to_string()), - vec![calculator_tool], - ), - ExtensionConfig::new( - "bash_extension".to_string(), - Some("This extension provides a bash shell tool.".to_string()), - vec![bash_tool, list_dir_tool], - ), - ]; - - let system_preamble = "You are a helpful assistant."; - - for text in [ - "Add 10037 + 23123 using calculator and also run 'date -u' using bash", - "List all files in the current directory", - ] { - println!("\n---------------\n"); - println!("User Input: {text}"); - let messages = vec![ - Message::user().with_text("Hi there!"), - Message::assistant().with_text("How can I help?"), - Message::user().with_text(text), - ]; - let completion_response: CompletionResponse = completion(CompletionRequest::new( - 
provider.to_string(), - provider_config.clone(), - model_config.clone(), - Some(system_preamble.to_string()), - None, - messages.clone(), - extensions.clone(), - )) - .await?; - // Print the response - println!("\nCompletion Response:"); - println!("{}", serde_json::to_string_pretty(&completion_response)?); - - let tooltip = generate_tooltip(provider, provider_config.clone(), &messages, None).await?; - println!("\nTooltip: {}", tooltip); - } - - Ok(()) -} diff --git a/crates/goose-llm/examples/test_assets/test_image.png b/crates/goose-llm/examples/test_assets/test_image.png deleted file mode 100644 index f72b65986d19..000000000000 Binary files a/crates/goose-llm/examples/test_assets/test_image.png and /dev/null differ diff --git a/crates/goose-llm/src/completion.rs b/crates/goose-llm/src/completion.rs deleted file mode 100644 index 13f09810b8c4..000000000000 --- a/crates/goose-llm/src/completion.rs +++ /dev/null @@ -1,168 +0,0 @@ -use std::{collections::HashMap, time::Instant}; - -use anyhow::Result; -use chrono::Utc; -use serde_json::Value; - -use crate::{ - message::{Message, MessageContent}, - prompt_template, - providers::create, - types::{ - completion::{ - CompletionError, CompletionRequest, CompletionResponse, ExtensionConfig, - RuntimeMetrics, ToolApprovalMode, ToolConfig, - }, - core::ToolCall, - }, -}; - -#[uniffi::export] -pub fn print_messages(messages: Vec) { - for msg in messages { - println!("[{:?} @ {}] {:?}", msg.role, msg.created, msg.content); - } -} - -/// Public API for the Goose LLM completion function -#[uniffi::export(async_runtime = "tokio")] -pub async fn completion(req: CompletionRequest) -> Result { - let start_total = Instant::now(); - - let provider = create( - &req.provider_name, - req.provider_config.clone(), - req.model_config.clone(), - ) - .map_err(|_| CompletionError::UnknownProvider(req.provider_name.to_string()))?; - - let system_prompt = construct_system_prompt( - &req.system_preamble, - &req.system_prompt_override, - 
&req.extensions, - )?; - let tools = collect_prefixed_tools(&req.extensions); - - // Call the LLM provider - let start_provider = Instant::now(); - let mut response = provider - .complete( - &system_prompt, - &req.messages, - &tools, - req.request_id.as_deref(), - ) - .await?; - let provider_elapsed_sec = start_provider.elapsed().as_secs_f32(); - let usage_tokens = response.usage.total_tokens; - - let tool_configs = collect_prefixed_tool_configs(&req.extensions); - update_needs_approval_for_tool_calls(&mut response.message, &tool_configs)?; - - Ok(CompletionResponse::new( - response.message, - response.model, - response.usage, - calculate_runtime_metrics(start_total, provider_elapsed_sec, usage_tokens), - )) -} - -/// Render the global `system.md` template with the provided context. -fn construct_system_prompt( - preamble: &Option, - prompt_override: &Option, - extensions: &[ExtensionConfig], -) -> Result { - // If both system_preamble and system_prompt_override are provided, then prompt_override takes precedence - // and we don't render the template using preamble and extensions. Just return the prompt_override as is. - if prompt_override.is_some() { - return Ok(prompt_override.clone().unwrap()); - } - - let system_preamble = { - if let Some(p) = preamble { - p - } else { - "You are a helpful assistant." - } - }; - - let mut context: HashMap<&str, Value> = HashMap::new(); - context.insert("system_preamble", Value::String(system_preamble.to_owned())); - context.insert("extensions", serde_json::to_value(extensions)?); - context.insert( - "current_date", - Value::String(Utc::now().format("%Y-%m-%d").to_string()), - ); - - Ok(prompt_template::render_global_file("system.md", &context)?) -} - -/// Determine if a tool call requires manual approval. 
-fn determine_needs_approval(config: &ToolConfig, _call: &ToolCall) -> bool { - match config.approval_mode { - ToolApprovalMode::Auto => false, - ToolApprovalMode::Manual => true, - ToolApprovalMode::Smart => { - // TODO: Implement smart approval logic later - true - } - } -} - -/// Set `needs_approval` on every tool call in the message. -/// Returns a `ToolNotFound` error if the corresponding `ToolConfig` is missing. -pub fn update_needs_approval_for_tool_calls( - message: &mut Message, - tool_configs: &HashMap, -) -> Result<(), CompletionError> { - for content in &mut message.content.iter_mut() { - if let MessageContent::ToolReq(req) = content { - if let Ok(call) = &mut req.tool_call.0 { - // Provide a clear error message when the tool config is missing - let config = tool_configs.get(&call.name).ok_or_else(|| { - CompletionError::ToolNotFound(format!( - "could not find tool config for '{}'", - call.name - )) - })?; - let needs_approval = determine_needs_approval(config, call); - call.set_needs_approval(needs_approval); - } - } - } - Ok(()) -} - -/// Collect all `Tool` instances from the extensions. -fn collect_prefixed_tools(extensions: &[ExtensionConfig]) -> Vec { - extensions - .iter() - .flat_map(|ext| ext.get_prefixed_tools()) - .collect() -} - -/// Collect all `ToolConfig` entries from the extensions into a map. -fn collect_prefixed_tool_configs(extensions: &[ExtensionConfig]) -> HashMap { - extensions - .iter() - .flat_map(|ext| ext.get_prefixed_tool_configs()) - .collect() -} - -/// Compute runtime metrics for the request. 
-fn calculate_runtime_metrics( - total_start: Instant, - provider_elapsed_sec: f32, - token_count: Option, -) -> RuntimeMetrics { - let total_ms = total_start.elapsed().as_secs_f32(); - let tokens_per_sec = token_count.and_then(|toks| { - if provider_elapsed_sec > 0.0 { - Some(toks as f64 / (provider_elapsed_sec as f64)) - } else { - None - } - }); - RuntimeMetrics::new(total_ms, provider_elapsed_sec, tokens_per_sec) -} diff --git a/crates/goose-llm/src/extractors/mod.rs b/crates/goose-llm/src/extractors/mod.rs deleted file mode 100644 index 6b5e3be5f21a..000000000000 --- a/crates/goose-llm/src/extractors/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod session_name; -mod tooltip; - -pub use session_name::generate_session_name; -pub use tooltip::generate_tooltip; diff --git a/crates/goose-llm/src/extractors/session_name.rs b/crates/goose-llm/src/extractors/session_name.rs deleted file mode 100644 index 0f35a7822097..000000000000 --- a/crates/goose-llm/src/extractors/session_name.rs +++ /dev/null @@ -1,110 +0,0 @@ -use crate::generate_structured_outputs; -use crate::providers::errors::ProviderError; -use crate::types::core::Role; -use crate::{message::Message, types::json_value_ffi::JsonValueFfi}; -use anyhow::Result; -use goose::utils::safe_truncate; -use indoc::indoc; -use serde_json::{json, Value}; - -const SESSION_NAME_EXAMPLES: &[&str] = &[ - "Research Synthesis", - "Sentiment Analysis", - "Performance Report", - "Feedback Collector", - "Accessibility Check", - "Design Reminder", - "Project Reminder", - "Launch Checklist", - "Metrics Monitor", - "Incident Response", - "Deploy Cabinet App", - "Design Reminder Alert", - "Generate Monthly Expense Report", - "Automate Incident Response Workflow", - "Analyze Brand Sentiment Trends", - "Monitor Device Health Issues", - "Collect UI Feedback Summary", - "Schedule Project Deadline Reminders", -]; - -fn build_system_prompt() -> String { - let examples = SESSION_NAME_EXAMPLES - .iter() - .map(|e| format!("- {}", e)) - 
.collect::>() - .join("\n"); - - indoc! {r#" - You are an assistant that crafts a concise session title. - Given the first couple user messages in the conversation so far, - reply with only a short name (up to 4 words) that best describes - this session's goal. - - Examples: - "#} - .to_string() - + &examples -} - -/// Generates a short (≤4 words) session name -#[uniffi::export(async_runtime = "tokio", default(request_id = None))] -pub async fn generate_session_name( - provider_name: &str, - provider_config: JsonValueFfi, - messages: &[Message], - request_id: Option, -) -> Result { - // Collect up to the first 3 user messages (truncated to 300 chars each) - let context: Vec = messages - .iter() - .filter(|m| m.role == Role::User) - .take(3) - .map(|m| { - let text = m.content.concat_text_str(); - safe_truncate(&text, 300) - }) - .collect(); - - if context.is_empty() { - return Err(ProviderError::ExecutionError( - "No user messages found to generate a session name.".to_string(), - )); - } - - let system_prompt = build_system_prompt(); - let user_msg_text = format!("Here are the user messages:\n{}", context.join("\n")); - - // Use `extract` with a simple string schema - let schema = json!({ - "type": "object", - "properties": { - "name": { "type": "string" } - }, - "required": ["name"], - "additionalProperties": false - }); - - let resp = generate_structured_outputs( - provider_name, - provider_config, - &system_prompt, - &[Message::user().with_text(&user_msg_text)], - schema, - request_id, - ) - .await?; - - let obj = resp - .data - .as_object() - .ok_or_else(|| ProviderError::ResponseParseError("Expected object".into()))?; - - let name = obj - .get("name") - .and_then(Value::as_str) - .ok_or_else(|| ProviderError::ResponseParseError("Missing or non-string name".into()))? 
- .to_string(); - - Ok(name) -} diff --git a/crates/goose-llm/src/extractors/tooltip.rs b/crates/goose-llm/src/extractors/tooltip.rs deleted file mode 100644 index 48336a546ea6..000000000000 --- a/crates/goose-llm/src/extractors/tooltip.rs +++ /dev/null @@ -1,171 +0,0 @@ -use crate::generate_structured_outputs; -use crate::message::{Message, MessageContent}; -use crate::providers::errors::ProviderError; -use crate::types::core::{Content, Role}; -use crate::types::json_value_ffi::JsonValueFfi; -use anyhow::Result; -use indoc::indoc; -use serde_json::{json, Value}; - -const TOOLTIP_EXAMPLES: &[&str] = &[ - "analyzing KPIs", - "detecting anomalies", - "building artifacts in Buildkite", - "categorizing issues", - "checking dependencies", - "collecting feedback", - "deploying changes in AWS", - "drafting report in Google Docs", - "extracting action items", - "generating insights", - "logging issues", - "monitoring tickets in Zendesk", - "notifying design team", - "running integration tests", - "scanning threads in Figma", - "sending reminders in Gmail", - "sending surveys", - "sharing with stakeholders", - "summarizing findings", - "transcribing meeting", - "tracking resolution", - "updating status in Linear", -]; - -fn build_system_prompt() -> String { - let examples = TOOLTIP_EXAMPLES - .iter() - .map(|e| format!("- {}", e)) - .collect::>() - .join("\n"); - - indoc! {r#" - You are an assistant that summarizes the recent conversation into a tooltip. - Given the last two messages, reply with only a short tooltip (up to 4 words) - describing what is happening now. - - Examples: - "#} - .to_string() - + &examples -} - -/// Generates a tooltip summarizing the last two messages in the session, -/// including any tool calls or results. 
-#[uniffi::export(async_runtime = "tokio", default(request_id = None))] -pub async fn generate_tooltip( - provider_name: &str, - provider_config: JsonValueFfi, - messages: &[Message], - request_id: Option, -) -> Result { - // Need at least two messages to generate a tooltip - if messages.len() < 2 { - return Err(ProviderError::ExecutionError( - "Need at least two messages to generate a tooltip".to_string(), - )); - } - - // Helper to render a single message's content - fn render_message(m: &Message) -> String { - let mut parts = Vec::new(); - for content in m.content.iter() { - match content { - MessageContent::Text(text_block) => { - let txt = text_block.text.trim(); - if !txt.is_empty() { - parts.push(txt.to_string()); - } - } - MessageContent::ToolReq(req) => { - if let Ok(tool_call) = &req.tool_call.0 { - parts.push(format!( - "called tool '{}' with args {}", - tool_call.name, tool_call.arguments - )); - } else if let Err(e) = &req.tool_call.0 { - parts.push(format!("tool request error: {}", e)); - } - } - MessageContent::ToolResp(resp) => match &resp.tool_result.0 { - Ok(contents) => { - let results: Vec = contents - .iter() - .map(|c| match c { - Content::Text(t) => t.text.clone(), - Content::Image(_) => "[image]".to_string(), - }) - .collect(); - parts.push(format!("tool responded with: {}", results.join(" "))); - } - Err(e) => { - parts.push(format!("tool error: {}", e)); - } - }, - _ => {} // ignore other variants - } - } - - let role = match m.role { - Role::User => "User", - Role::Assistant => "Assistant", - }; - - format!("{}: {}", role, parts.join("; ")) - } - - // Take the last two messages (in correct chronological order) - let rendered: Vec = messages - .iter() - .rev() - .take(2) - .map(render_message) - .collect::>() - .into_iter() - .rev() - .collect(); - - let system_prompt = build_system_prompt(); - - let user_msg_text = format!( - "Here are the last two messages:\n{}\n\nTooltip:", - rendered.join("\n") - ); - - // Schema wrapping our tooltip 
string - let schema = json!({ - "type": "object", - "properties": { - "tooltip": { "type": "string" } - }, - "required": ["tooltip"], - "additionalProperties": false - }); - - // Get the structured outputs - let resp = generate_structured_outputs( - provider_name, - provider_config, - &system_prompt, - &[Message::user().with_text(&user_msg_text)], - schema, - request_id, - ) - .await?; - - // Pull out the tooltip field - let obj = resp - .data - .as_object() - .ok_or_else(|| ProviderError::ResponseParseError("Expected JSON object".into()))?; - - let tooltip = obj - .get("tooltip") - .and_then(Value::as_str) - .ok_or_else(|| { - ProviderError::ResponseParseError("Missing or non-string `tooltip` field".into()) - })? - .to_string(); - - Ok(tooltip) -} diff --git a/crates/goose-llm/src/lib.rs b/crates/goose-llm/src/lib.rs deleted file mode 100644 index cd698356bcef..000000000000 --- a/crates/goose-llm/src/lib.rs +++ /dev/null @@ -1,15 +0,0 @@ -uniffi::setup_scaffolding!(); - -mod completion; -pub mod extractors; -pub mod message; -mod model; -mod prompt_template; -pub mod providers; -mod structured_outputs; -pub mod types; - -pub use completion::completion; -pub use message::Message; -pub use model::ModelConfig; -pub use structured_outputs::generate_structured_outputs; diff --git a/crates/goose-llm/src/message/contents.rs b/crates/goose-llm/src/message/contents.rs deleted file mode 100644 index 9c8f459f3c1c..000000000000 --- a/crates/goose-llm/src/message/contents.rs +++ /dev/null @@ -1,186 +0,0 @@ -use std::{iter::FromIterator, ops::Deref}; - -use crate::message::MessageContent; -use serde::{Deserialize, Serialize}; -use smallvec::SmallVec; - -/// Holds the heterogeneous fragments that make up one chat message. -/// -/// * Up to two items are stored inline on the stack. -/// * Falls back to a heap allocation only when necessary. 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default)] -#[serde(transparent)] -pub struct Contents(SmallVec<[MessageContent; 2]>); - -impl Contents { - /*---------------------------------------------------------- - * 1-line ergonomic helpers - *---------------------------------------------------------*/ - - pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, MessageContent> { - self.0.iter_mut() - } - - pub fn push(&mut self, item: impl Into) { - self.0.push(item.into()); - } - - pub fn texts(&self) -> impl Iterator { - self.0.iter().filter_map(|c| c.as_text()) - } - - pub fn concat_text_str(&self) -> String { - self.texts().collect::>().join("\n") - } - - /// Returns `true` if *any* item satisfies the predicate. - pub fn any_is

(&self, pred: P) -> bool - where - P: FnMut(&MessageContent) -> bool, - { - self.iter().any(pred) - } - - /// Returns `true` if *every* item satisfies the predicate. - pub fn all_are

(&self, pred: P) -> bool - where - P: FnMut(&MessageContent) -> bool, - { - self.iter().all(pred) - } -} - -impl From> for Contents { - fn from(v: Vec) -> Self { - Contents(SmallVec::from_vec(v)) - } -} - -impl FromIterator for Contents { - fn from_iter>(iter: I) -> Self { - Contents(SmallVec::from_iter(iter)) - } -} - -/*-------------------------------------------------------------- - * Allow &message.content to behave like a slice of fragments. - *-------------------------------------------------------------*/ -impl Deref for Contents { - type Target = [MessageContent]; - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -// — Register the contents type with UniFFI, converting to/from Vec — -// We need to do this because UniFFI’s FFI layer supports only primitive buffers (here Vec), -uniffi::custom_type!(Contents, Vec, { - lower: |contents: &Contents| { - contents.0.to_vec() - }, - try_lift: |contents: Vec| { - Ok(Contents::from(contents)) - }, -}); - -#[cfg(test)] -mod tests { - use super::*; - use crate::types::core::{Content, TextContent, ToolCall, ToolError}; - use serde_json::json; - - // ------------------------------------------------------------ - // Helpers - // ------------------------------------------------------------ - fn make_tool_req_ok(id: &str) -> MessageContent { - let call = ToolCall::new("echo", json!({"text": "hi"})); - MessageContent::tool_request(id, Ok(call).into()) - } - - fn make_tool_resp_ok(id: &str) -> MessageContent { - let body = vec![Content::Text(TextContent { - text: "done".into(), - })]; - MessageContent::tool_response(id, Ok(body).into()) - } - - fn make_tool_req_err(id: &str) -> MessageContent { - let err = ToolError::NotFound(format!( - "The provided function name '{}' had invalid characters", - "bad$name" - )); - MessageContent::tool_request(id, Err(err).into()) - } - - fn make_tool_resp_err(id: &str) -> MessageContent { - let err = ToolError::InvalidParameters("Could not interpret tool use parameters".into()); - 
MessageContent::tool_response(id, Err(err).into()) - } - - // ------------------------------------------------------------ - // Round-trip: success - // ------------------------------------------------------------ - #[test] - fn contents_roundtrip_ok() { - let items: Contents = vec![make_tool_req_ok("req-1"), make_tool_resp_ok("resp-1")].into(); - - // ---- serialise - let json_str = serde_json::to_string(&items).expect("serialise OK"); - println!("JSON: {:?}", json_str); - - assert!( - json_str.contains(r#""type":"toolReq""#) - && json_str.contains(r#""type":"toolResp""#) - && json_str.contains(r#""status":"success""#), - "JSON should contain both variants and success-status" - ); - - // ---- deserialise - let parsed: Contents = serde_json::from_str(&json_str).expect("deserialise OK"); - - assert_eq!(parsed, items, "full round-trip equality"); - } - - // ------------------------------------------------------------ - // Round-trip: error (all variants collapse to ExecutionError) - // ------------------------------------------------------------ - #[test] - fn contents_roundtrip_err() { - let original_items: Contents = - vec![make_tool_req_err("req-e"), make_tool_resp_err("resp-e")].into(); - - // ---- serialise - let json_str = serde_json::to_string(&original_items).expect("serialise OK"); - println!("JSON: {:?}", json_str); - - assert!(json_str.contains(r#""status":"error""#)); - - // ---- deserialise - let parsed: Contents = serde_json::from_str(&json_str).expect("deserialise OK"); - - // ─── validate structure ─────────────────────────────────── - assert_eq!(parsed.len(), 2); - - // ToolReq error - match &parsed[0] { - MessageContent::ToolReq(req) => match &*req.tool_call { - Err(ToolError::ExecutionError(msg)) => { - assert!(msg.contains("invalid characters")) - } - other => panic!("expected ExecutionError, got {:?}", other), - }, - other => panic!("expected ToolReq, got {:?}", other), - } - - // ToolResp error - match &parsed[1] { - 
MessageContent::ToolResp(resp) => match &*resp.tool_result { - Err(ToolError::ExecutionError(msg)) => { - assert!(msg.contains("interpret tool use parameters")) - } - other => panic!("expected ExecutionError, got {:?}", other), - }, - other => panic!("expected ToolResp, got {:?}", other), - } - } -} diff --git a/crates/goose-llm/src/message/message_content.rs b/crates/goose-llm/src/message/message_content.rs deleted file mode 100644 index 75daa3a825e7..000000000000 --- a/crates/goose-llm/src/message/message_content.rs +++ /dev/null @@ -1,465 +0,0 @@ -use serde::{Deserialize, Serialize}; -use serde_json::{self, Deserializer, Serializer}; - -use crate::message::tool_result_serde; -use crate::types::core::{Content, ImageContent, TextContent, ToolCall, ToolResult}; - -// — Newtype wrappers (local structs) so we satisfy Rust’s orphan rules — -// We need these because we can’t implement UniFFI’s FfiConverter directly on a type alias. - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct ToolRequestToolCall(#[serde(with = "tool_result_serde")] pub ToolResult); - -impl ToolRequestToolCall { - pub fn as_result(&self) -> &ToolResult { - &self.0 - } -} -impl std::ops::Deref for ToolRequestToolCall { - type Target = ToolResult; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl From> for ToolRequestToolCall { - fn from(res: Result) -> Self { - ToolRequestToolCall(res) - } -} - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct ToolResponseToolResult( - #[serde(with = "tool_result_serde")] pub ToolResult>, -); - -impl ToolResponseToolResult { - pub fn as_result(&self) -> &ToolResult> { - &self.0 - } -} -impl std::ops::Deref for ToolResponseToolResult { - type Target = ToolResult>; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl From, crate::types::core::ToolError>> for ToolResponseToolResult { - fn from(res: Result, crate::types::core::ToolError>) -> Self { - ToolResponseToolResult(res) - } -} - -// — Register the 
newtypes with UniFFI, converting via JSON strings — -// UniFFI’s FFI layer supports only primitive buffers (here String), so we JSON-serialize -// through our `tool_result_serde` to preserve the same success/error schema on both sides. -// see https://github.com/mozilla/uniffi-rs/issues/2533 - -uniffi::custom_type!(ToolRequestToolCall, String, { - lower: |wrapper: &ToolRequestToolCall| { - let mut buf = Vec::new(); - { - let mut ser = Serializer::new(&mut buf); - // note the borrow on wrapper.0 - tool_result_serde::serialize(&wrapper.0, &mut ser) - .expect("ToolRequestToolCall serialization failed"); - } - String::from_utf8(buf).expect("ToolRequestToolCall produced invalid UTF-8") - }, - try_lift: |s: String| { - let mut de = Deserializer::from_str(&s); - let result = tool_result_serde::deserialize(&mut de) - .map_err(anyhow::Error::new)?; - Ok(ToolRequestToolCall(result)) - }, -}); - -uniffi::custom_type!(ToolResponseToolResult, String, { - lower: |wrapper: &ToolResponseToolResult| { - let mut buf = Vec::new(); - { - let mut ser = Serializer::new(&mut buf); - // note the borrow on wrapper.0 - tool_result_serde::serialize(&wrapper.0, &mut ser) - .expect("ToolResponseToolResult serialization failed"); - } - String::from_utf8(buf).expect("ToolResponseToolResult produced invalid UTF-8") - }, - try_lift: |s: String| { - let mut de = Deserializer::from_str(&s); - let result = tool_result_serde::deserialize(&mut de) - .map_err(anyhow::Error::new)?; - Ok(ToolResponseToolResult(result)) - }, -}); - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, uniffi::Record)] -#[serde(rename_all = "camelCase")] -pub struct ToolRequest { - pub id: String, - pub tool_call: ToolRequestToolCall, -} - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, uniffi::Record)] -#[serde(rename_all = "camelCase")] -pub struct ToolResponse { - pub id: String, - pub tool_result: ToolResponseToolResult, -} - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, uniffi::Record)] 
-pub struct ThinkingContent { - pub thinking: String, - pub signature: String, -} - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, uniffi::Record)] -pub struct RedactedThinkingContent { - pub data: String, -} - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, uniffi::Enum)] -/// Content passed inside a message, which can be both simple content and tool content -#[serde(tag = "type", rename_all = "camelCase")] -pub enum MessageContent { - Text(TextContent), - Image(ImageContent), - ToolReq(ToolRequest), - ToolResp(ToolResponse), - Thinking(ThinkingContent), - RedactedThinking(RedactedThinkingContent), -} - -impl MessageContent { - pub fn text>(text: S) -> Self { - MessageContent::Text(TextContent { text: text.into() }) - } - - pub fn image, T: Into>(data: S, mime_type: T) -> Self { - MessageContent::Image(ImageContent { - data: data.into(), - mime_type: mime_type.into(), - }) - } - - pub fn tool_request>(id: S, tool_call: ToolRequestToolCall) -> Self { - MessageContent::ToolReq(ToolRequest { - id: id.into(), - tool_call, - }) - } - - pub fn tool_response>(id: S, tool_result: ToolResponseToolResult) -> Self { - MessageContent::ToolResp(ToolResponse { - id: id.into(), - tool_result, - }) - } - - pub fn thinking, S2: Into>(thinking: S1, signature: S2) -> Self { - MessageContent::Thinking(ThinkingContent { - thinking: thinking.into(), - signature: signature.into(), - }) - } - - pub fn redacted_thinking>(data: S) -> Self { - MessageContent::RedactedThinking(RedactedThinkingContent { data: data.into() }) - } - - pub fn as_tool_request(&self) -> Option<&ToolRequest> { - if let MessageContent::ToolReq(ref tool_request) = self { - Some(tool_request) - } else { - None - } - } - - pub fn as_tool_response(&self) -> Option<&ToolResponse> { - if let MessageContent::ToolResp(ref tool_response) = self { - Some(tool_response) - } else { - None - } - } - - pub fn as_tool_response_text(&self) -> Option { - if let Some(tool_response) = self.as_tool_response() { - 
if let Ok(contents) = &tool_response.tool_result.0 { - let texts: Vec = contents - .iter() - .filter_map(|content| content.as_text().map(String::from)) - .collect(); - if !texts.is_empty() { - return Some(texts.join("\n")); - } - } - } - None - } - - pub fn as_tool_request_id(&self) -> Option<&str> { - if let Self::ToolReq(r) = self { - Some(&r.id) - } else { - None - } - } - - pub fn as_tool_response_id(&self) -> Option<&str> { - if let Self::ToolResp(r) = self { - Some(&r.id) - } else { - None - } - } - - /// Get the text content if this is a TextContent variant - pub fn as_text(&self) -> Option<&str> { - match self { - MessageContent::Text(text) => Some(&text.text), - _ => None, - } - } - - /// Get the thinking content if this is a ThinkingContent variant - pub fn as_thinking(&self) -> Option<&ThinkingContent> { - match self { - MessageContent::Thinking(thinking) => Some(thinking), - _ => None, - } - } - - /// Get the redacted thinking content if this is a RedactedThinkingContent variant - pub fn as_redacted_thinking(&self) -> Option<&RedactedThinkingContent> { - match self { - MessageContent::RedactedThinking(redacted) => Some(redacted), - _ => None, - } - } - - pub fn is_text(&self) -> bool { - matches!(self, Self::Text(_)) - } - pub fn is_image(&self) -> bool { - matches!(self, Self::Image(_)) - } - pub fn is_tool_request(&self) -> bool { - matches!(self, Self::ToolReq(_)) - } - pub fn is_tool_response(&self) -> bool { - matches!(self, Self::ToolResp(_)) - } -} - -impl From for MessageContent { - fn from(content: Content) -> Self { - match content { - Content::Text(text) => MessageContent::Text(text), - Content::Image(image) => MessageContent::Image(image), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::types::core::{ToolCall, ToolError}; - use crate::UniFfiTag; - use serde_json::json; - use uniffi::{FfiConverter, RustBuffer}; - - // ---------- ToolRequestToolCall ---------------------------------------------------------- - - #[test] - 
fn tool_request_tool_call_roundtrip_ok() { - // Build a valid ToolCall - let call = ToolCall::new("my_function", json!({"a": 1, "b": 2})); - - // Wrap it in the new-type - let wrapper = ToolRequestToolCall::from(Ok(call.clone())); - - // Serialize → JSON - let json_str = serde_json::to_string(&wrapper).expect("serialize OK"); - assert!( - json_str.contains(r#""status":"success""#), - "must mark success" - ); - - // Deserialize ← JSON - let parsed: ToolRequestToolCall = serde_json::from_str(&json_str).expect("deserialize OK"); - - // Round-trip equality - assert_eq!(*parsed, Ok(call)); - } - - #[test] - fn tool_request_tool_call_roundtrip_err() { - // Typical failure variant that could come from `is_valid_function_name` - let err = ToolError::NotFound( - "The provided function name 'bad$name' had invalid characters".into(), - ); - - let wrapper = ToolRequestToolCall::from(Err(err.clone())); - - let json_str = serde_json::to_string(&wrapper).expect("serialize OK"); - assert!( - json_str.contains(r#""status":"error""#) && json_str.contains("invalid characters"), - "must mark error and carry message" - ); - - let parsed: ToolRequestToolCall = serde_json::from_str(&json_str).expect("deserialize OK"); - - match &*parsed { - Err(ToolError::ExecutionError(msg)) => { - assert!(msg.contains("invalid characters")) - } - other => panic!("expected ExecutionError, got {:?}", other), - } - } - - // ---------- ToolResponseToolResult ------------------------------------------------------- - - #[test] - fn tool_response_tool_result_roundtrip_ok() { - // Minimal content vector (one text item) - let content_vec = vec![Content::Text(TextContent { - text: "hello".into(), - })]; - - let wrapper = ToolResponseToolResult::from(Ok(content_vec.clone())); - - let json_str = serde_json::to_string(&wrapper).expect("serialize OK"); - assert!(json_str.contains(r#""status":"success""#)); - - let parsed: ToolResponseToolResult = - serde_json::from_str(&json_str).expect("deserialize OK"); - - 
assert_eq!(*parsed, Ok(content_vec)); - } - - #[test] - fn tool_response_tool_result_roundtrip_err() { - let err = ToolError::InvalidParameters("Could not interpret tool use parameters".into()); - - let wrapper = ToolResponseToolResult::from(Err(err.clone())); - - let json_str = serde_json::to_string(&wrapper).expect("serialize OK"); - assert!(json_str.contains(r#""status":"error""#)); - - let parsed: ToolResponseToolResult = - serde_json::from_str(&json_str).expect("deserialize OK"); - - match &*parsed { - Err(ToolError::ExecutionError(msg)) => { - assert!(msg.contains("interpret tool use")) - } - other => panic!("expected ExecutionError, got {:?}", other), - } - } - - // ---------- FFI (lower / lift) round-trips ---------------------------------------------- - // https://mozilla.github.io/uniffi-rs/latest/internals/lifting_and_lowering.html - - #[test] - fn ffi_roundtrip_tool_request_ok_and_err() { - // ---------- status: success ---------- - let ok_call = ToolCall::new("echo", json!({"text": "hi"})); - let ok_wrapper = ToolRequestToolCall::from(Ok(ok_call.clone())); - - // First lower → inspect JSON - let buf1: RustBuffer = - >::lower(ok_wrapper.clone()); - - let json_ok: String = - >::try_lift(buf1).expect("lift String OK"); - println!("ToolReq - Lowered JSON (status: success): {:?}", json_ok); - assert!(json_ok.contains(r#""status":"success""#)); - - // Second lower → round-trip wrapper - let buf2: RustBuffer = - >::lower(ok_wrapper.clone()); - - let lifted_ok = >::try_lift(buf2) - .expect("lift wrapper OK"); - println!( - "ToolReq - Lifted wrapper (status: success): {:?}", - lifted_ok - ); - assert_eq!(lifted_ok, ok_wrapper); - - // ---------- status: error ---------- - let err_call = ToolError::NotFound("no such function".into()); - let err_wrapper = ToolRequestToolCall::from(Err(err_call.clone())); - - let buf1: RustBuffer = - >::lower(err_wrapper.clone()); - let json_err: String = - >::try_lift(buf1).expect("lift String ERR"); - println!("ToolReq - Lowered 
JSON (status: error): {:?}", json_err); - assert!(json_err.contains(r#""status":"error""#)); - - let buf2: RustBuffer = - >::lower(err_wrapper.clone()); - let lifted_err = >::try_lift(buf2) - .expect("lift wrapper ERR"); - println!("ToolReq - Lifted wrapper (status: error): {:?}", lifted_err); - - match &*lifted_err { - Err(ToolError::ExecutionError(msg)) => { - assert!(msg.contains("no such function")) - } - other => panic!("expected ExecutionError, got {:?}", other), - } - } - - #[test] - fn ffi_roundtrip_tool_response_ok_and_err() { - // ---------- status: success ---------- - let body = vec![Content::Text(TextContent { - text: "done".into(), - })]; - let ok_wrapper = ToolResponseToolResult::from(Ok(body.clone())); - - let buf1: RustBuffer = - >::lower(ok_wrapper.clone()); - let json_ok: String = >::try_lift(buf1).unwrap(); - println!("ToolResp - Lowered JSON (status: success): {:?}", json_ok); - assert!(json_ok.contains(r#""status":"success""#)); - - let buf2: RustBuffer = - >::lower(ok_wrapper.clone()); - let lifted_ok = - >::try_lift(buf2).unwrap(); - println!( - "ToolResp - Lifted wrapper (status: success): {:?}", - lifted_ok - ); - assert_eq!(lifted_ok, ok_wrapper); - - // ---------- status: error ---------- - let err_call = ToolError::InvalidParameters("bad params".into()); - let err_wrapper = ToolResponseToolResult::from(Err(err_call.clone())); - - let buf1: RustBuffer = - >::lower(err_wrapper.clone()); - let json_err: String = >::try_lift(buf1).unwrap(); - println!("ToolResp - Lowered JSON (status: error): {:?}", json_err); - assert!(json_err.contains(r#""status":"error""#)); - - let buf2: RustBuffer = - >::lower(err_wrapper.clone()); - let lifted_err = - >::try_lift(buf2).unwrap(); - println!( - "ToolResp - Lifted wrapper (status: error): {:?}", - lifted_err - ); - - match &*lifted_err { - Err(ToolError::ExecutionError(msg)) => { - assert!(msg.contains("bad params")) - } - other => panic!("expected ExecutionError, got {:?}", other), - } - } -} diff 
--git a/crates/goose-llm/src/message/mod.rs b/crates/goose-llm/src/message/mod.rs deleted file mode 100644 index ac2aaf278f6f..000000000000 --- a/crates/goose-llm/src/message/mod.rs +++ /dev/null @@ -1,284 +0,0 @@ -//! Messages which represent the content sent back and forth to LLM provider -//! -//! We use these messages in the agent code, and interfaces which interact with -//! the agent. That let's us reuse message histories across different interfaces. -//! -//! The content of the messages uses MCP types to avoid additional conversions -//! when interacting with MCP servers. - -mod contents; -mod message_content; -mod tool_result_serde; - -pub use contents::Contents; -pub use message_content::{ - MessageContent, RedactedThinkingContent, ThinkingContent, ToolRequest, ToolRequestToolCall, - ToolResponse, ToolResponseToolResult, -}; - -use chrono::Utc; -use serde::{Deserialize, Serialize}; -use std::collections::HashSet; - -use crate::types::core::Role; - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, uniffi::Record)] -/// A message to or from an LLM -#[serde(rename_all = "camelCase")] -pub struct Message { - pub role: Role, - pub created: i64, - pub content: Contents, -} - -impl Message { - pub fn new(role: Role) -> Self { - Self { - role, - created: Utc::now().timestamp_millis(), - content: Contents::default(), - } - } - - /// Create a new user message with the current timestamp - pub fn user() -> Self { - Self::new(Role::User) - } - - /// Create a new assistant message with the current timestamp - pub fn assistant() -> Self { - Self::new(Role::Assistant) - } - - /// Add any item that implements Into to the message - pub fn with_content(mut self, item: impl Into) -> Self { - self.content.push(item); - self - } - - /// Add text content to the message - pub fn with_text>(self, text: S) -> Self { - self.with_content(MessageContent::text(text)) - } - - /// Add image content to the message - pub fn with_image, T: Into>(self, data: S, mime_type: T) -> Self 
{ - self.with_content(MessageContent::image(data, mime_type)) - } - - /// Add a tool request to the message - pub fn with_tool_request, T: Into>( - self, - id: S, - tool_call: T, - ) -> Self { - self.with_content(MessageContent::tool_request(id, tool_call.into())) - } - - /// Add a tool response to the message - pub fn with_tool_response>( - self, - id: S, - result: ToolResponseToolResult, - ) -> Self { - self.with_content(MessageContent::tool_response(id, result)) - } - - /// Add thinking content to the message - pub fn with_thinking, S2: Into>( - self, - thinking: S1, - signature: S2, - ) -> Self { - self.with_content(MessageContent::thinking(thinking, signature)) - } - - /// Add redacted thinking content to the message - pub fn with_redacted_thinking>(self, data: S) -> Self { - self.with_content(MessageContent::redacted_thinking(data)) - } - - /// Check if the message is a tool call - pub fn contains_tool_call(&self) -> bool { - self.content.any_is(MessageContent::is_tool_request) - } - - /// Check if the message is a tool response - pub fn contains_tool_response(&self) -> bool { - self.content.any_is(MessageContent::is_tool_response) - } - - /// Check if the message contains only text content - pub fn has_only_text_content(&self) -> bool { - self.content.all_are(MessageContent::is_text) - } - - /// Retrieves all tool `id` from ToolRequest messages - pub fn tool_request_ids(&self) -> HashSet<&str> { - self.content - .iter() - .filter_map(MessageContent::as_tool_request_id) - .collect() - } - - /// Retrieves all tool `id` from ToolResponse messages - pub fn tool_response_ids(&self) -> HashSet<&str> { - self.content - .iter() - .filter_map(MessageContent::as_tool_response_id) - .collect() - } - - /// Retrieves all tool `id` from the message - pub fn tool_ids(&self) -> HashSet<&str> { - self.tool_request_ids() - .into_iter() - .chain(self.tool_response_ids()) - .collect() - } -} - -#[cfg(test)] -mod tests { - use serde_json::{json, Value}; - - use super::*; - use 
crate::types::core::{ToolCall, ToolError}; - - #[test] - fn test_message_serialization() { - let message = Message::assistant() - .with_text("Hello, I'll help you with that.") - .with_tool_request( - "tool123", - Ok(ToolCall::new("test_tool", json!({"param": "value"}))), - ); - - let json_str = serde_json::to_string_pretty(&message).unwrap(); - println!("Serialized message: {}", json_str); - - // Parse back to Value to check structure - let value: Value = serde_json::from_str(&json_str).unwrap(); - println!( - "Read back serialized message: {}", - serde_json::to_string_pretty(&value).unwrap() - ); - - // Check top-level fields - assert_eq!(value["role"], "assistant"); - assert!(value["created"].is_i64()); - assert!(value["content"].is_array()); - - // Check content items - let content = &value["content"]; - - // First item should be text - assert_eq!(content[0]["type"], "text"); - assert_eq!(content[0]["text"], "Hello, I'll help you with that."); - - // Second item should be toolRequest - assert_eq!(content[1]["type"], "toolReq"); - assert_eq!(content[1]["id"], "tool123"); - - // Check tool_call serialization - assert_eq!(content[1]["toolCall"]["status"], "success"); - assert_eq!(content[1]["toolCall"]["value"]["name"], "test_tool"); - assert_eq!( - content[1]["toolCall"]["value"]["arguments"]["param"], - "value" - ); - } - - #[test] - fn test_error_serialization() { - let message = Message::assistant().with_tool_request( - "tool123", - Err(ToolError::ExecutionError( - "Something went wrong".to_string(), - )), - ); - - let json_str = serde_json::to_string_pretty(&message).unwrap(); - println!("Serialized error: {}", json_str); - - // Parse back to Value to check structure - let value: Value = serde_json::from_str(&json_str).unwrap(); - - // Check tool_call serialization with error - let tool_call = &value["content"][0]["toolCall"]; - assert_eq!(tool_call["status"], "error"); - assert_eq!(tool_call["error"], "Execution failed: Something went wrong"); - } - - #[test] 
- fn test_deserialization() { - // Create a JSON string with our new format - let json_str = r#"{ - "role": "assistant", - "created": 1740171566, - "content": [ - { - "type": "text", - "text": "I'll help you with that." - }, - { - "type": "toolReq", - "id": "tool123", - "toolCall": { - "status": "success", - "value": { - "name": "test_tool", - "arguments": {"param": "value"}, - "needsApproval": false - } - } - } - ] - }"#; - - let message: Message = serde_json::from_str(json_str).unwrap(); - - assert_eq!(message.role, Role::Assistant); - assert_eq!(message.created, 1740171566); - assert_eq!(message.content.len(), 2); - - // Check first content item - if let MessageContent::Text(text) = &message.content[0] { - assert_eq!(text.text, "I'll help you with that."); - } else { - panic!("Expected Text content"); - } - - // Check second content item - if let MessageContent::ToolReq(req) = &message.content[1] { - assert_eq!(req.id, "tool123"); - if let Ok(tool_call) = req.tool_call.as_result() { - assert_eq!(tool_call.name, "test_tool"); - assert_eq!(tool_call.arguments, json!({"param": "value"})); - } else { - panic!("Expected successful tool call"); - } - } else { - panic!("Expected ToolRequest content"); - } - } - - #[test] - fn test_message_with_text() { - let message = Message::user().with_text("Hello"); - assert_eq!(message.content.concat_text_str(), "Hello"); - } - - #[test] - fn test_message_with_tool_request() { - let tool_call = Ok(ToolCall::new("test_tool", json!({}))); - - let message = Message::assistant().with_tool_request("req1", tool_call); - assert!(message.contains_tool_call()); - assert!(!message.contains_tool_response()); - - let ids = message.tool_ids(); - assert_eq!(ids.len(), 1); - assert!(ids.contains("req1")); - } -} diff --git a/crates/goose-llm/src/message/tool_result_serde.rs b/crates/goose-llm/src/message/tool_result_serde.rs deleted file mode 100644 index 7f1143228deb..000000000000 --- a/crates/goose-llm/src/message/tool_result_serde.rs +++ 
/dev/null @@ -1,64 +0,0 @@ -use serde::{ser::SerializeStruct, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::types::core::{ToolError, ToolResult}; - -pub fn serialize(value: &ToolResult, serializer: S) -> Result -where - T: Serialize, - S: Serializer, -{ - match value { - Ok(val) => { - let mut state = serializer.serialize_struct("ToolResult", 2)?; - state.serialize_field("status", "success")?; - state.serialize_field("value", val)?; - state.end() - } - Err(err) => { - let mut state = serializer.serialize_struct("ToolResult", 2)?; - state.serialize_field("status", "error")?; - state.serialize_field("error", &err.to_string())?; - state.end() - } - } -} - -// For deserialization, let's use a simpler approach that works with the format we're serializing to -pub fn deserialize<'de, T, D>(deserializer: D) -> Result, D::Error> -where - T: Deserialize<'de>, - D: Deserializer<'de>, -{ - // Define a helper enum to handle the two possible formats - #[derive(Deserialize)] - #[serde(untagged)] - enum ResultFormat { - Success { status: String, value: T }, - Error { status: String, error: String }, - } - - let format = ResultFormat::deserialize(deserializer)?; - - match format { - ResultFormat::Success { status, value } => { - if status == "success" { - Ok(Ok(value)) - } else { - Err(serde::de::Error::custom(format!( - "Expected status 'success', got '{}'", - status - ))) - } - } - ResultFormat::Error { status, error } => { - if status == "error" { - Ok(Err(ToolError::ExecutionError(error))) - } else { - Err(serde::de::Error::custom(format!( - "Expected status 'error', got '{}'", - status - ))) - } - } - } -} diff --git a/crates/goose-llm/src/model.rs b/crates/goose-llm/src/model.rs deleted file mode 100644 index 5ad89ab2216d..000000000000 --- a/crates/goose-llm/src/model.rs +++ /dev/null @@ -1,119 +0,0 @@ -use serde::{Deserialize, Serialize}; - -const DEFAULT_CONTEXT_LIMIT: u32 = 128_000; - -/// Configuration for model-specific settings and limits 
-#[derive(Debug, Clone, Serialize, Deserialize, uniffi::Record)] -pub struct ModelConfig { - /// The name of the model to use - pub model_name: String, - /// Optional explicit context limit that overrides any defaults - pub context_limit: Option, - /// Optional temperature setting (0.0 - 1.0) - pub temperature: Option, - /// Optional maximum tokens to generate - pub max_tokens: Option, -} - -impl ModelConfig { - /// Create a new ModelConfig with the specified model name - /// - /// The context limit is set with the following precedence: - /// 1. Explicit context_limit if provided in config - /// 2. Model-specific default based on model name - /// 3. Global default (128_000) (in get_context_limit) - pub fn new(model_name: String) -> Self { - let context_limit = Self::get_model_specific_limit(&model_name); - - Self { - model_name, - context_limit, - temperature: None, - max_tokens: None, - } - } - - /// Get model-specific context limit based on model name - fn get_model_specific_limit(model_name: &str) -> Option { - // Implement some sensible defaults - match model_name { - // OpenAI models, https://platform.openai.com/docs/models#models-overview - name if name.contains("gpt-4o") => Some(128_000), - name if name.contains("gpt-4-turbo") => Some(128_000), - - // Anthropic models, https://docs.anthropic.com/en/docs/about-claude/models - name if name.contains("claude-3") => Some(200_000), - name if name.contains("claude-4") => Some(200_000), - - // Meta Llama models, https://github.com/meta-llama/llama-models/tree/main?tab=readme-ov-file#llama-models-1 - name if name.contains("llama3.2") => Some(128_000), - name if name.contains("llama3.3") => Some(128_000), - _ => None, - } - } - - /// Set an explicit context limit - pub fn with_context_limit(mut self, limit: Option) -> Self { - // Default is None and therefore DEFAULT_CONTEXT_LIMIT, only set - // if input is Some to allow passing through with_context_limit in - // configuration cases - if limit.is_some() { - 
self.context_limit = limit; - } - self - } - - /// Set the temperature - pub fn with_temperature(mut self, temp: Option) -> Self { - self.temperature = temp; - self - } - - /// Set the max tokens - pub fn with_max_tokens(mut self, tokens: Option) -> Self { - self.max_tokens = tokens; - self - } - - /// Get the context_limit for the current model - /// If none are defined, use the DEFAULT_CONTEXT_LIMIT - pub fn context_limit(&self) -> u32 { - self.context_limit.unwrap_or(DEFAULT_CONTEXT_LIMIT) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_model_config_context_limits() { - // Test explicit limit - let config = - ModelConfig::new("claude-3-opus".to_string()).with_context_limit(Some(150_000)); - assert_eq!(config.context_limit(), 150_000); - - // Test model-specific defaults - let config = ModelConfig::new("claude-3-opus".to_string()); - assert_eq!(config.context_limit(), 200_000); - - let config = ModelConfig::new("gpt-4-turbo".to_string()); - assert_eq!(config.context_limit(), 128_000); - - // Test fallback to default - let config = ModelConfig::new("unknown-model".to_string()); - assert_eq!(config.context_limit(), DEFAULT_CONTEXT_LIMIT); - } - - #[test] - fn test_model_config_settings() { - let config = ModelConfig::new("test-model".to_string()) - .with_temperature(Some(0.7)) - .with_max_tokens(Some(1000)) - .with_context_limit(Some(50_000)); - - assert_eq!(config.temperature, Some(0.7)); - assert_eq!(config.max_tokens, Some(1000)); - assert_eq!(config.context_limit, Some(50_000)); - } -} diff --git a/crates/goose-llm/src/prompt_template.rs b/crates/goose-llm/src/prompt_template.rs deleted file mode 100644 index eca9facb6e23..000000000000 --- a/crates/goose-llm/src/prompt_template.rs +++ /dev/null @@ -1,115 +0,0 @@ -use std::{ - path::PathBuf, - sync::{Arc, RwLock}, -}; - -use include_dir::{include_dir, Dir}; -use minijinja::{Environment, Error as MiniJinjaError, Value as MJValue}; -use once_cell::sync::Lazy; -use serde::Serialize; - -/// 
This directory will be embedded into the final binary. -/// Typically used to store "core" or "system" prompts. -static CORE_PROMPTS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/src/prompts"); - -/// A global MiniJinja environment storing the "core" prompts. -/// -/// - Loaded at startup from the `CORE_PROMPTS_DIR`. -/// - Ideal for "system" templates that don't change often. -/// - *Not* used for extension prompts (which are ephemeral). -static GLOBAL_ENV: Lazy>>> = Lazy::new(|| { - let mut env = Environment::new(); - - // Pre-load all core templates from the embedded dir. - for file in CORE_PROMPTS_DIR.files() { - let name = file.path().to_string_lossy().to_string(); - let source = String::from_utf8_lossy(file.contents()).to_string(); - - // Since we're using 'static lifetime for the Environment, we need to ensure - // the strings we add as templates live for the entire program duration. - // We can achieve this by leaking the strings (acceptable for initialization). - let static_name: &'static str = Box::leak(name.into_boxed_str()); - let static_source: &'static str = Box::leak(source.into_boxed_str()); - - if let Err(e) = env.add_template(static_name, static_source) { - println!("Failed to add template {}: {}", static_name, e); - } - } - - Arc::new(RwLock::new(env)) -}); - -/// Renders a prompt from the global environment by name. -/// -/// # Arguments -/// * `template_name` - The name of the template (usually the file path or a custom ID). -/// * `context_data` - Data to be inserted into the template (must be `Serialize`). -pub fn render_global_template( - template_name: &str, - context_data: &T, -) -> Result { - let env = GLOBAL_ENV.read().expect("GLOBAL_ENV lock poisoned"); - let tmpl = env.get_template(template_name)?; - let ctx = MJValue::from_serialize(context_data); - let rendered = tmpl.render(ctx)?; - Ok(rendered.trim().to_string()) -} - -/// Renders a file from `CORE_PROMPTS_DIR` within the global environment. 
-/// -/// # Arguments -/// * `template_file` - The file path within the embedded directory (e.g. "system.md"). -/// * `context_data` - Data to be inserted into the template (must be `Serialize`). -/// -/// This function **assumes** the file is already in `CORE_PROMPTS_DIR`. If it wasn't -/// added to the global environment at startup (due to parse errors, etc.), this will error out. -pub fn render_global_file( - template_file: impl Into, - context_data: &T, -) -> Result { - let file_path = template_file.into(); - let template_name = file_path.to_string_lossy().to_string(); - - render_global_template(&template_name, context_data) -} - -#[cfg(test)] -mod tests { - use super::*; - - /// For convenience in tests, define a small struct or use a HashMap to provide context. - #[derive(Serialize)] - struct TestContext { - name: String, - age: u32, - } - - #[test] - fn test_global_file_render() { - // "mock.md" should exist in the embedded CORE_PROMPTS_DIR - // and have placeholders for `name` and `age`. - let context = TestContext { - name: "Alice".to_string(), - age: 30, - }; - - let result = render_global_file("mock.md", &context).unwrap(); - // Assume mock.md content is something like: - // "This prompt is only used for testing.\n\nHello, {{ name }}! You are {{ age }} years old." - assert_eq!( - result, - "This prompt is only used for testing.\n\nHello, Alice! You are 30 years old." - ); - } - - #[test] - fn test_global_file_not_found() { - let context = TestContext { - name: "Unused".to_string(), - age: 99, - }; - - let result = render_global_file("non_existent.md", &context); - assert!(result.is_err(), "Should fail because file is missing"); - } -} diff --git a/crates/goose-llm/src/prompts/mock.md b/crates/goose-llm/src/prompts/mock.md deleted file mode 100644 index 46c1e708e42e..000000000000 --- a/crates/goose-llm/src/prompts/mock.md +++ /dev/null @@ -1,3 +0,0 @@ -This prompt is only used for testing. - -Hello, {{ name }}! You are {{ age }} years old. 
\ No newline at end of file diff --git a/crates/goose-llm/src/prompts/system.md b/crates/goose-llm/src/prompts/system.md deleted file mode 100644 index 4a2aacde7e8d..000000000000 --- a/crates/goose-llm/src/prompts/system.md +++ /dev/null @@ -1,34 +0,0 @@ -{{system_preamble}} - -The current date is {{current_date}}. - -Goose uses LLM providers with tool calling capability. You can be used with different language models (gpt-4o, claude-3.5-sonnet, o1, llama-3.2, deepseek-r1, etc). -These models have varying knowledge cut-off dates depending on when they were trained, but typically it's between 5-10 months prior to the current date. - -# Extensions - -Extensions allow other applications to provide context to Goose. Extensions connect Goose to different data sources and tools. - -{% if (extensions is defined) and extensions %} -Because you dynamically load extensions, your conversation history may refer -to interactions with extensions that are not currently active. The currently -active extensions are below. Each of these extensions provides tools that are -in your tool specification. - -{% for extension in extensions %} -## {{extension.name}} -{% if extension.instructions %}### Instructions -{{extension.instructions}}{% endif %} -{% endfor %} -{% else %} -No extensions are defined. You should let the user know that they should add extensions.{% endif %} - -# Response Guidelines - -- Use Markdown formatting for all responses. -- Follow best practices for Markdown, including: - - Using headers for organization. - - Bullet points for lists. - - Links formatted correctly, either as linked text (e.g., [this is linked text](https://example.com)) or automatic links using angle brackets (e.g., ). -- For code examples, use fenced code blocks by placing triple backticks (` ``` `) before and after the code. Include the language identifier after the opening backticks (e.g., ` ```python `) to enable syntax highlighting. 
-- Ensure clarity, conciseness, and proper formatting to enhance readability and usability. diff --git a/crates/goose-llm/src/providers/base.rs b/crates/goose-llm/src/providers/base.rs deleted file mode 100644 index 92a3948df28f..000000000000 --- a/crates/goose-llm/src/providers/base.rs +++ /dev/null @@ -1,135 +0,0 @@ -use anyhow::Result; -use async_trait::async_trait; -use serde::{Deserialize, Serialize}; - -use super::errors::ProviderError; -use crate::{message::Message, types::core::Tool}; - -#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize, uniffi::Record)] -pub struct Usage { - pub input_tokens: Option, - pub output_tokens: Option, - pub total_tokens: Option, -} - -impl Usage { - pub fn new( - input_tokens: Option, - output_tokens: Option, - total_tokens: Option, - ) -> Self { - Self { - input_tokens, - output_tokens, - total_tokens, - } - } -} - -#[derive(Debug, Clone, uniffi::Record)] -pub struct ProviderCompleteResponse { - pub message: Message, - pub model: String, - pub usage: Usage, -} - -impl ProviderCompleteResponse { - pub fn new(message: Message, model: String, usage: Usage) -> Self { - Self { - message, - model, - usage, - } - } -} - -/// Response from a structured‐extraction call -#[derive(Debug, Clone, uniffi::Record)] -pub struct ProviderExtractResponse { - /// The extracted JSON object - pub data: serde_json::Value, - /// Which model produced it - pub model: String, - /// Token usage stats - pub usage: Usage, -} - -impl ProviderExtractResponse { - pub fn new(data: serde_json::Value, model: String, usage: Usage) -> Self { - Self { data, model, usage } - } -} - -/// Base trait for AI providers (OpenAI, Anthropic, etc) -#[async_trait] -pub trait Provider: Send + Sync { - /// Generate the next message using the configured model and other parameters - /// - /// # Arguments - /// * `system` - The system prompt that guides the model's behavior - /// * `messages` - The conversation history as a sequence of messages - /// * `tools` - 
Optional list of tools the model can use - /// * `request_id` - Optional request ID (only used by some providers like Databricks) - /// - /// # Returns - /// A tuple containing the model's response message and provider usage statistics - /// - /// # Errors - /// ProviderError - /// - It's important to raise ContextLengthExceeded correctly since agent handles it - async fn complete( - &self, - system: &str, - messages: &[Message], - tools: &[Tool], - request_id: Option<&str>, - ) -> Result; - - /// Structured extraction: always JSON‐Schema - /// - /// # Arguments - /// * `system` – system prompt guiding the extraction task - /// * `messages` – conversation history - /// * `schema` – a JSON‐Schema for the expected output. - /// Will set strict=true for OpenAI & Databricks. - /// * `request_id` - Optional request ID (only used by some providers like Databricks) - /// - /// # Returns - /// A `ProviderExtractResponse` whose `data` is a JSON object matching `schema`. - /// - /// # Errors - /// * `ProviderError::ContextLengthExceeded` if the prompt is too large - /// * other `ProviderError` variants for API/network failures - async fn extract( - &self, - system: &str, - messages: &[Message], - schema: &serde_json::Value, - request_id: Option<&str>, - ) -> Result; -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_usage_creation() { - let usage = Usage::new(Some(10), Some(20), Some(30)); - assert_eq!(usage.input_tokens, Some(10)); - assert_eq!(usage.output_tokens, Some(20)); - assert_eq!(usage.total_tokens, Some(30)); - } - - #[test] - fn test_provider_complete_response_creation() { - let message = Message::user().with_text("Hello, world!"); - let usage = Usage::new(Some(10), Some(20), Some(30)); - let response = - ProviderCompleteResponse::new(message.clone(), "test_model".to_string(), usage.clone()); - - assert_eq!(response.message, message); - assert_eq!(response.model, "test_model"); - assert_eq!(response.usage, usage); - } -} diff --git 
a/crates/goose-llm/src/providers/databricks.rs b/crates/goose-llm/src/providers/databricks.rs deleted file mode 100644 index 0bfe2ffef67b..000000000000 --- a/crates/goose-llm/src/providers/databricks.rs +++ /dev/null @@ -1,327 +0,0 @@ -use std::time::Duration; - -use anyhow::Result; -use async_trait::async_trait; -use reqwest::{Client, StatusCode}; -use serde::{Deserialize, Serialize}; -use serde_json::{json, Value}; -use url::Url; - -use super::{ - errors::ProviderError, - formats::databricks::{create_request, get_usage, response_to_message}, - utils::{get_env, get_model, ImageFormat}, -}; -use crate::{ - message::Message, - model::ModelConfig, - providers::{Provider, ProviderCompleteResponse, ProviderExtractResponse, Usage}, - types::core::Tool, -}; - -pub const DATABRICKS_DEFAULT_MODEL: &str = "databricks-claude-3-7-sonnet"; -// Databricks can passthrough to a wide range of models, we only provide the default -pub const _DATABRICKS_KNOWN_MODELS: &[&str] = &[ - "databricks-meta-llama-3-3-70b-instruct", - "databricks-claude-3-7-sonnet", -]; - -fn default_timeout() -> u64 { - 60 -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DatabricksProviderConfig { - pub host: String, - pub token: String, - #[serde(default)] - pub image_format: ImageFormat, - #[serde(default = "default_timeout")] - pub timeout: u64, // timeout in seconds -} - -impl DatabricksProviderConfig { - pub fn new(host: String, token: String) -> Self { - Self { - host, - token, - image_format: ImageFormat::OpenAi, - timeout: default_timeout(), - } - } - - pub fn from_env() -> Self { - let host = get_env("DATABRICKS_HOST").expect("Missing DATABRICKS_HOST"); - let token = get_env("DATABRICKS_TOKEN").expect("Missing DATABRICKS_TOKEN"); - Self::new(host, token) - } -} - -#[derive(Debug)] -pub struct DatabricksProvider { - config: DatabricksProviderConfig, - model: ModelConfig, - client: Client, -} - -impl DatabricksProvider { - pub fn from_env(model: ModelConfig) -> Self { - let config = 
DatabricksProviderConfig::from_env(); - DatabricksProvider::from_config(config, model) - .expect("Failed to initialize DatabricksProvider") - } -} - -impl Default for DatabricksProvider { - fn default() -> Self { - let config = DatabricksProviderConfig::from_env(); - let model = ModelConfig::new(DATABRICKS_DEFAULT_MODEL.to_string()); - DatabricksProvider::from_config(config, model) - .expect("Failed to initialize DatabricksProvider") - } -} - -impl DatabricksProvider { - pub fn from_config(config: DatabricksProviderConfig, model: ModelConfig) -> Result { - let client = Client::builder() - .timeout(Duration::from_secs(config.timeout)) - .build()?; - - Ok(Self { - config, - model, - client, - }) - } - - async fn post(&self, payload: Value) -> Result { - let base_url = Url::parse(&self.config.host) - .map_err(|e| ProviderError::RequestFailed(format!("Invalid base URL: {e}")))?; - let path = format!("serving-endpoints/{}/invocations", self.model.model_name); - let url = base_url.join(&path).map_err(|e| { - ProviderError::RequestFailed(format!("Failed to construct endpoint URL: {e}")) - })?; - - let auth_header = format!("Bearer {}", &self.config.token); - let response = self - .client - .post(url) - .header("Authorization", auth_header) - .json(&payload) - .send() - .await?; - - let status = response.status(); - let payload: Option = response.json().await.ok(); - - match status { - StatusCode::OK => payload.ok_or_else(|| { - ProviderError::RequestFailed("Response body is not valid JSON".to_string()) - }), - StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => { - Err(ProviderError::Authentication(format!( - "Authentication failed. Please ensure your API keys are valid and have the required permissions. \ - Status: {}. 
Response: {:?}", - status, payload - ))) - } - StatusCode::BAD_REQUEST => { - // Databricks provides a generic 'error' but also includes 'external_model_message' which is provider specific - // We try to extract the error message from the payload and check for phrases that indicate context length exceeded - let payload_str = serde_json::to_string(&payload) - .unwrap_or_default() - .to_lowercase(); - let check_phrases = [ - "too long", - "context length", - "context_length_exceeded", - "reduce the length", - "token count", - "exceeds", - "exceed context limit", - "input length", - "max_tokens", - "decrease input length", - "context limit", - ]; - if check_phrases.iter().any(|c| payload_str.contains(c)) { - return Err(ProviderError::ContextLengthExceeded(payload_str)); - } - - let mut error_msg = "Unknown error".to_string(); - if let Some(payload) = &payload { - // try to convert message to string, if that fails use external_model_message - error_msg = payload - .get("message") - .and_then(|m| m.as_str()) - .or_else(|| { - payload - .get("external_model_message") - .and_then(|ext| ext.get("message")) - .and_then(|m| m.as_str()) - }) - .unwrap_or("Unknown error") - .to_string(); - } - - tracing::debug!( - "{}", - format!( - "Provider request failed with status: {}. Payload: {:?}", - status, payload - ) - ); - Err(ProviderError::RequestFailed(format!( - "Request failed with status: {}. Message: {}", - status, error_msg - ))) - } - StatusCode::TOO_MANY_REQUESTS => { - Err(ProviderError::RateLimitExceeded(format!("{:?}", payload))) - } - StatusCode::INTERNAL_SERVER_ERROR | StatusCode::SERVICE_UNAVAILABLE => { - Err(ProviderError::ServerError(format!("{:?}", payload))) - } - _ => { - tracing::debug!( - "{}", - format!( - "Provider request failed with status: {}. 
Payload: {:?}", - status, payload - ) - ); - Err(ProviderError::RequestFailed(format!( - "Request failed with status: {}", - status - ))) - } - } - } -} - -#[async_trait] -impl Provider for DatabricksProvider { - #[tracing::instrument( - skip(self, system, messages, tools), - fields(model_config, input, output, input_tokens, output_tokens, total_tokens) - )] - async fn complete( - &self, - system: &str, - messages: &[Message], - tools: &[Tool], - request_id: Option<&str>, - ) -> Result { - let mut payload = create_request( - &self.model, - system, - messages, - tools, - &self.config.image_format, - )?; - // Remove the model key which is part of the url with databricks - payload - .as_object_mut() - .expect("payload should have model key") - .remove("model"); - - // Add client_request_id if provided - if let Some(req_id) = request_id { - payload - .as_object_mut() - .expect("payload should be an object") - .insert( - "client_request_id".to_string(), - serde_json::Value::String(req_id.to_string()), - ); - } - - let response = self.post(payload.clone()).await?; - - // Parse response - let message = response_to_message(response.clone())?; - let usage = match get_usage(&response) { - Ok(usage) => usage, - Err(ProviderError::UsageError(e)) => { - tracing::debug!("Failed to get usage data: {}", e); - Usage::default() - } - Err(e) => return Err(e), - }; - let model = get_model(&response); - super::utils::emit_debug_trace(&self.model, &payload, &response, &usage); - - Ok(ProviderCompleteResponse::new(message, model, usage)) - } - - async fn extract( - &self, - system: &str, - messages: &[Message], - schema: &Value, - request_id: Option<&str>, - ) -> Result { - // 1. Build base payload (no tools) - let mut payload = create_request(&self.model, system, messages, &[], &ImageFormat::OpenAi)?; - - // 2. 
Inject strict JSON‐Schema wrapper - payload - .as_object_mut() - .expect("payload must be an object") - .insert( - "response_format".to_string(), - json!({ - "type": "json_schema", - "json_schema": { - "name": "extraction", - "schema": schema, - "strict": true - } - }), - ); - - // Add client_request_id if provided - if let Some(req_id) = request_id { - payload - .as_object_mut() - .expect("payload should be an object") - .insert( - "client_request_id".to_string(), - serde_json::Value::String(req_id.to_string()), - ); - } - - // 3. Call OpenAI - let response = self.post(payload.clone()).await?; - - // 4. Extract the assistant’s `content` and parse it into JSON - let msg = &response["choices"][0]["message"]; - let raw = msg.get("content").cloned().ok_or_else(|| { - ProviderError::ResponseParseError("Missing content in extract response".into()) - })?; - let data = match raw { - Value::String(s) => serde_json::from_str(&s) - .map_err(|e| ProviderError::ResponseParseError(format!("Invalid JSON: {}", e)))?, - Value::Object(_) | Value::Array(_) => raw, - other => { - return Err(ProviderError::ResponseParseError(format!( - "Unexpected content type: {:?}", - other - ))) - } - }; - - // 5. 
Gather usage & model info - let usage = match get_usage(&response) { - Ok(u) => u, - Err(ProviderError::UsageError(e)) => { - tracing::debug!("Failed to get usage in extract: {}", e); - Usage::default() - } - Err(e) => return Err(e), - }; - let model = get_model(&response); - - Ok(ProviderExtractResponse::new(data, model, usage)) - } -} diff --git a/crates/goose-llm/src/providers/errors.rs b/crates/goose-llm/src/providers/errors.rs deleted file mode 100644 index 826a6e116711..000000000000 --- a/crates/goose-llm/src/providers/errors.rs +++ /dev/null @@ -1,144 +0,0 @@ -use thiserror::Error; - -#[derive(Error, Debug, uniffi::Error)] -pub enum ProviderError { - #[error("Authentication error: {0}")] - Authentication(String), - - #[error("Context length exceeded: {0}")] - ContextLengthExceeded(String), - - #[error("Rate limit exceeded: {0}")] - RateLimitExceeded(String), - - #[error("Server error: {0}")] - ServerError(String), - - #[error("Request failed: {0}")] - RequestFailed(String), - - #[error("Execution error: {0}")] - ExecutionError(String), - - #[error("Usage data error: {0}")] - UsageError(String), - - #[error("Invalid response: {0}")] - ResponseParseError(String), -} - -impl From for ProviderError { - fn from(error: anyhow::Error) -> Self { - ProviderError::ExecutionError(error.to_string()) - } -} - -impl From for ProviderError { - fn from(error: reqwest::Error) -> Self { - ProviderError::ExecutionError(error.to_string()) - } -} - -#[derive(serde::Deserialize, Debug)] -pub struct OpenAIError { - #[serde(deserialize_with = "code_as_string")] - pub code: Option, - pub message: Option, - #[serde(rename = "type")] - pub error_type: Option, -} - -fn code_as_string<'de, D>(deserializer: D) -> Result, D::Error> -where - D: serde::Deserializer<'de>, -{ - use std::fmt; - - use serde::de::{self, Visitor}; - - struct CodeVisitor; - - impl<'de> Visitor<'de> for CodeVisitor { - type Value = Option; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - 
formatter.write_str("a string, a number, null, or none for the code field") - } - - fn visit_str(self, value: &str) -> Result - where - E: de::Error, - { - Ok(Some(value.to_string())) - } - - fn visit_u64(self, value: u64) -> Result - where - E: de::Error, - { - Ok(Some(value.to_string())) - } - - fn visit_none(self) -> Result - where - E: de::Error, - { - Ok(None) - } - - fn visit_unit(self) -> Result - where - E: de::Error, - { - Ok(None) - } - - fn visit_some(self, deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - deserializer.deserialize_any(CodeVisitor) - } - } - - deserializer.deserialize_option(CodeVisitor) -} - -impl OpenAIError { - pub fn is_context_length_exceeded(&self) -> bool { - if let Some(code) = &self.code { - code == "context_length_exceeded" || code == "string_above_max_length" - } else { - false - } - } -} - -impl std::fmt::Display for OpenAIError { - /// Format the error for display. - /// E.g. {"message": "Invalid API key", "code": "invalid_api_key", "type": "client_error"} - /// would be formatted as "Invalid API key (code: invalid_api_key, type: client_error)" - /// and {"message": "Foo"} as just "Foo", etc. 
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - if let Some(message) = &self.message { - write!(f, "{}", message)?; - } - let mut in_parenthesis = false; - if let Some(code) = &self.code { - write!(f, " (code: {}", code)?; - in_parenthesis = true; - } - if let Some(typ) = &self.error_type { - if in_parenthesis { - write!(f, ", type: {}", typ)?; - } else { - write!(f, " (type: {}", typ)?; - in_parenthesis = true; - } - } - if in_parenthesis { - write!(f, ")")?; - } - Ok(()) - } -} diff --git a/crates/goose-llm/src/providers/factory.rs b/crates/goose-llm/src/providers/factory.rs deleted file mode 100644 index a70be3d44ef8..000000000000 --- a/crates/goose-llm/src/providers/factory.rs +++ /dev/null @@ -1,29 +0,0 @@ -use std::sync::Arc; - -use anyhow::Result; - -use super::{ - base::Provider, - databricks::{DatabricksProvider, DatabricksProviderConfig}, - openai::{OpenAiProvider, OpenAiProviderConfig}, -}; -use crate::model::ModelConfig; - -pub fn create( - name: &str, - provider_config: serde_json::Value, - model: ModelConfig, -) -> Result> { - // We use Arc instead of Box to be able to clone for multiple async tasks - match name { - "openai" => { - let config: OpenAiProviderConfig = serde_json::from_value(provider_config)?; - Ok(Arc::new(OpenAiProvider::from_config(config, model)?)) - } - "databricks" => { - let config: DatabricksProviderConfig = serde_json::from_value(provider_config)?; - Ok(Arc::new(DatabricksProvider::from_config(config, model)?)) - } - _ => Err(anyhow::anyhow!("Unknown provider: {}", name)), - } -} diff --git a/crates/goose-llm/src/providers/formats/databricks.rs b/crates/goose-llm/src/providers/formats/databricks.rs deleted file mode 100644 index 37343f2ebe09..000000000000 --- a/crates/goose-llm/src/providers/formats/databricks.rs +++ /dev/null @@ -1,1059 +0,0 @@ -use anyhow::{anyhow, Error}; -use serde_json::{json, Value}; - -use crate::{ - message::{Message, MessageContent}, - model::ModelConfig, - providers::{ - 
base::Usage, - errors::ProviderError, - utils::{convert_image, is_valid_function_name, sanitize_function_name, ImageFormat}, - }, - types::core::{Content, Role, Tool, ToolCall, ToolError}, -}; - -/// Convert internal Message format to Databricks' API message specification -/// Databricks is mostly OpenAI compatible, but has some differences (reasoning type, etc) -/// some openai compatible endpoints use the anthropic image spec at the content level -/// even though the message structure is otherwise following openai, the enum switches this -pub fn format_messages(messages: &[Message], image_format: &ImageFormat) -> Vec { - let mut result = Vec::new(); - for message in messages { - let mut converted = json!({ - "role": message.role - }); - - let mut content_array = Vec::new(); - let mut has_tool_calls = false; - let mut has_multiple_content = false; - - for content in message.content.iter() { - match content { - MessageContent::Text(text) => { - if !text.text.is_empty() { - content_array.push(json!({ - "type": "text", - "text": text.text - })); - } - } - MessageContent::Image(image) => { - // Handle direct image content - let converted_image = convert_image(image, image_format); - content_array.push(converted_image); - } - MessageContent::Thinking(content) => { - has_multiple_content = true; - content_array.push(json!({ - "type": "reasoning", - "summary": [ - { - "type": "summary_text", - "text": content.thinking, - "signature": content.signature - } - ] - })); - } - MessageContent::RedactedThinking(content) => { - has_multiple_content = true; - content_array.push(json!({ - "type": "reasoning", - "summary": [ - { - "type": "summary_encrypted_text", - "data": content.data - } - ] - })); - } - MessageContent::ToolReq(request) => { - has_tool_calls = true; - match &request.tool_call.as_result() { - Ok(tool_call) => { - let sanitized_name = sanitize_function_name(&tool_call.name); - - // Get mutable access to the "tool_calls" field in the converted object - // If 
"tool_calls" doesn't exist, insert an empty JSON array - let tool_calls = converted - .as_object_mut() - .unwrap() - .entry("tool_calls") - .or_insert(json!([])); - - tool_calls.as_array_mut().unwrap().push(json!({ - "id": request.id, - "type": "function", - "function": { - "name": sanitized_name, - "arguments": tool_call.arguments.to_string(), - } - })); - } - Err(e) => { - content_array.push(json!({ - "type": "text", - "text": format!("Error: {}", e) - })); - } - } - } - MessageContent::ToolResp(response) => { - match &response.tool_result.0 { - Ok(contents) => { - // Process all content, replacing images with placeholder text - let mut tool_content = Vec::new(); - let mut image_messages = Vec::new(); - - for content in contents { - match content { - Content::Image(image) => { - // Add placeholder text in the tool response - tool_content.push(Content::text("This tool result included an image that is uploaded in the next message.")); - - // Create a separate image message - image_messages.push(json!({ - "role": "user", - "content": [convert_image(image, image_format)] - })); - } - _ => { - tool_content.push(content.clone()); - } - } - } - let tool_response_content: Value = json!(tool_content - .iter() - .map(|content| match content { - Content::Text(text) => text.text.clone(), - _ => String::new(), - }) - .collect::>() - .join(" ")); - - // Add tool response as a separate message - result.push(json!({ - "role": "tool", - "content": tool_response_content, - "tool_call_id": response.id - })); - // Then add any image messages that need to follow - result.extend(image_messages); - } - Err(e) => { - // A tool result error is shown as output so the model can interpret the error message - result.push(json!({ - "role": "tool", - "content": format!("The tool call returned the following error:\n{}", e), - "tool_call_id": response.id - })); - } - } - } - } - } - - if !content_array.is_empty() { - // If we only have a single text content and no other special content, - // use 
the simple string format - if content_array.len() == 1 - && !has_multiple_content - && content_array[0]["type"] == "text" - { - converted["content"] = json!(content_array[0]["text"]); - } else { - converted["content"] = json!(content_array); - } - } - - if !content_array.is_empty() || has_tool_calls { - result.push(converted); - } - } - - result -} - -/// Convert internal Tool format to OpenAI's API tool specification -/// https://docs.databricks.com/aws/en/machine-learning/foundation-model-apis/api-reference#functionobject -pub fn format_tools(tools: &[Tool]) -> anyhow::Result> { - let mut tool_names = std::collections::HashSet::new(); - let mut result = Vec::new(); - - for tool in tools { - if !tool_names.insert(&tool.name) { - return Err(anyhow!("Duplicate tool name: {}", tool.name)); - } - - let mut description = tool.description.clone(); - description.truncate(1024); - - // OpenAI's tool description max str len is 1024 - result.push(json!({ - "type": "function", - "function": { - "name": tool.name, - "description": description, - "parameters": tool.input_schema, - } - })); - } - - Ok(result) -} - -/// Convert Databricks' API response to internal Message format -pub fn response_to_message(response: Value) -> anyhow::Result { - let original = response["choices"][0]["message"].clone(); - let mut content: Vec = Vec::new(); - - // Handle array-based content - if let Some(content_array) = original.get("content").and_then(|c| c.as_array()) { - for content_item in content_array { - match content_item.get("type").and_then(|t| t.as_str()) { - Some("text") => { - if let Some(text) = content_item.get("text").and_then(|t| t.as_str()) { - content.push(MessageContent::text(text)); - } - } - Some("reasoning") => { - if let Some(summary_array) = - content_item.get("summary").and_then(|s| s.as_array()) - { - for summary in summary_array { - match summary.get("type").and_then(|t| t.as_str()) { - Some("summary_text") => { - let text = summary - .get("text") - .and_then(|t| 
t.as_str()) - .unwrap_or_default(); - let signature = summary - .get("signature") - .and_then(|s| s.as_str()) - .unwrap_or_default(); - content.push(MessageContent::thinking(text, signature)); - } - Some("summary_encrypted_text") => { - if let Some(data) = summary.get("data").and_then(|d| d.as_str()) - { - content.push(MessageContent::redacted_thinking(data)); - } - } - _ => continue, - } - } - } - } - _ => continue, - } - } - } else if let Some(text) = original.get("content").and_then(|t| t.as_str()) { - // Handle legacy single string content - content.push(MessageContent::text(text)); - } - - // Handle tool calls - if let Some(tool_calls) = original.get("tool_calls") { - if let Some(tool_calls_array) = tool_calls.as_array() { - for tool_call in tool_calls_array { - let id = tool_call["id"].as_str().unwrap_or_default().to_string(); - let function_name = tool_call["function"]["name"] - .as_str() - .unwrap_or_default() - .to_string(); - let mut arguments = tool_call["function"]["arguments"] - .as_str() - .unwrap_or_default() - .to_string(); - // If arguments is empty, we will have invalid json parsing error later. 
- if arguments.is_empty() { - arguments = "{}".to_string(); - } - - if !is_valid_function_name(&function_name) { - let error = ToolError::NotFound(format!( - "The provided function name '{}' had invalid characters, it must match this regex [a-zA-Z0-9_-]+", - function_name - )); - content.push(MessageContent::tool_request(id, Err(error).into())); - } else { - match serde_json::from_str::(&arguments) { - Ok(params) => { - content.push(MessageContent::tool_request( - id, - Ok(ToolCall::new(&function_name, params)).into(), - )); - } - Err(e) => { - let error = ToolError::InvalidParameters(format!( - "Could not interpret tool use parameters for id {}: {}", - id, e - )); - content.push(MessageContent::tool_request(id, Err(error).into())); - } - } - } - } - } - } - - Ok(Message { - role: Role::Assistant, - created: chrono::Utc::now().timestamp_millis(), - content: content.into(), - }) -} - -pub fn get_usage(data: &Value) -> Result { - let usage = data - .get("usage") - .ok_or_else(|| ProviderError::UsageError("No usage data in response".to_string()))?; - - let input_tokens = usage - .get("prompt_tokens") - .and_then(|v| v.as_i64()) - .map(|v| v as i32); - - let output_tokens = usage - .get("completion_tokens") - .and_then(|v| v.as_i64()) - .map(|v| v as i32); - - let total_tokens = usage - .get("total_tokens") - .and_then(|v| v.as_i64()) - .map(|v| v as i32) - .or_else(|| match (input_tokens, output_tokens) { - (Some(input), Some(output)) => Some(input + output), - _ => None, - }); - - Ok(Usage::new(input_tokens, output_tokens, total_tokens)) -} - -/// Validates and fixes tool schemas to ensure they have proper parameter structure. -/// If parameters exist, ensures they have properties and required fields, or removes parameters entirely. 
-pub fn validate_tool_schemas(tools: &mut [Value]) { - for tool in tools.iter_mut() { - if let Some(function) = tool.get_mut("function") { - if let Some(parameters) = function.get_mut("parameters") { - if parameters.is_object() { - ensure_valid_json_schema(parameters); - } - } - } - } -} - -/// Ensures that the given JSON value follows the expected JSON Schema structure. -fn ensure_valid_json_schema(schema: &mut Value) { - if let Some(params_obj) = schema.as_object_mut() { - // Check if this is meant to be an object type schema - let is_object_type = params_obj - .get("type") - .and_then(|t| t.as_str()) - .is_none_or(|t| t == "object"); // Default to true if no type is specified - - // Only apply full schema validation to object types - if is_object_type { - // Ensure required fields exist with default values - params_obj.entry("properties").or_insert_with(|| json!({})); - params_obj.entry("required").or_insert_with(|| json!([])); - params_obj.entry("type").or_insert_with(|| json!("object")); - - // Recursively validate properties if it exists - if let Some(properties) = params_obj.get_mut("properties") { - if let Some(properties_obj) = properties.as_object_mut() { - for (_key, prop) in properties_obj.iter_mut() { - if prop.is_object() - && prop.get("type").and_then(|t| t.as_str()) == Some("object") - { - ensure_valid_json_schema(prop); - } - } - } - } - } - } -} - -pub fn create_request( - model_config: &ModelConfig, - system: &str, - messages: &[Message], - tools: &[Tool], - image_format: &ImageFormat, -) -> anyhow::Result { - if model_config.model_name.starts_with("o1-mini") { - return Err(anyhow!( - "o1-mini model is not currently supported since Goose uses tool calling and o1-mini does not support it. Please use o1 or o3 models instead." 
- )); - } - - let model_name = model_config.model_name.to_string(); - let is_o1 = model_name.starts_with("o1") || model_name.starts_with("goose-o1"); - let is_o3 = model_name.starts_with("o3") || model_name.starts_with("goose-o3"); - let is_claude_3_7_sonnet = model_name.contains("claude-3-7-sonnet"); // can be goose- or databricks- - - // Only extract reasoning effort for O1/O3 models - let (model_name, reasoning_effort) = if is_o1 || is_o3 { - let parts: Vec<&str> = model_config.model_name.split('-').collect(); - let last_part = parts.last().unwrap(); - - match *last_part { - "low" | "medium" | "high" => { - let base_name = parts[..parts.len() - 1].join("-"); - (base_name, Some(last_part.to_string())) - } - _ => ( - model_config.model_name.to_string(), - Some("medium".to_string()), - ), - } - } else { - // For non-O family models, use the model name as is and no reasoning effort - (model_config.model_name.to_string(), None) - }; - - let system_message = json!({ - "role": if is_o1 || is_o3 { "developer" } else { "system" }, - "content": system - }); - - let messages_spec = format_messages(messages, image_format); - let mut tools_spec = if !tools.is_empty() { - format_tools(tools)? 
- } else { - vec![] - }; - - // Validate tool schemas - validate_tool_schemas(&mut tools_spec); - - let mut messages_array = vec![system_message]; - messages_array.extend(messages_spec); - - let mut payload = json!({ - "model": model_name, - "messages": messages_array - }); - - if let Some(effort) = reasoning_effort { - payload - .as_object_mut() - .unwrap() - .insert("reasoning_effort".to_string(), json!(effort)); - } - - if !tools_spec.is_empty() { - payload - .as_object_mut() - .unwrap() - .insert("tools".to_string(), json!(tools_spec)); - } - - // Add thinking parameters for Claude 3.7 Sonnet model when requested - let is_thinking_enabled = std::env::var("CLAUDE_THINKING_ENABLED").is_ok(); - if is_claude_3_7_sonnet && is_thinking_enabled { - // Minimum budget_tokens is 1024 - let budget_tokens = std::env::var("CLAUDE_THINKING_BUDGET") - .unwrap_or_else(|_| "16000".to_string()) - .parse() - .unwrap_or(16000); - - // For Claude models with thinking enabled, we need to add max_tokens + budget_tokens - // Default to 8192 (Claude max output) + budget if not specified - let max_completion_tokens = model_config.max_tokens.unwrap_or(8192); - payload.as_object_mut().unwrap().insert( - "max_tokens".to_string(), - json!(max_completion_tokens + budget_tokens), - ); - - payload.as_object_mut().unwrap().insert( - "thinking".to_string(), - json!({ - "type": "enabled", - "budget_tokens": budget_tokens - }), - ); - - // Temperature is fixed to 2 when using claude 3.7 thinking with Databricks - payload - .as_object_mut() - .unwrap() - .insert("temperature".to_string(), json!(2)); - } else { - // o1, o3 models currently don't support temperature - if !is_o1 && !is_o3 { - if let Some(temp) = model_config.temperature { - payload - .as_object_mut() - .unwrap() - .insert("temperature".to_string(), json!(temp)); - } - } - - // o1 models use max_completion_tokens instead of max_tokens - if let Some(tokens) = model_config.max_tokens { - let key = if is_o1 || is_o3 { - 
"max_completion_tokens" - } else { - "max_tokens" - }; - payload - .as_object_mut() - .unwrap() - .insert(key.to_string(), json!(tokens)); - } - } - - Ok(payload) -} - -#[cfg(test)] -mod tests { - use serde_json::json; - - use super::*; - use crate::types::core::Content; - - #[test] - fn test_validate_tool_schemas() { - // Test case 1: Empty parameters object - // Input JSON with an incomplete parameters object - let mut actual = vec![json!({ - "type": "function", - "function": { - "name": "test_func", - "description": "test description", - "parameters": { - "type": "object" - } - } - })]; - - // Run the function to validate and update schemas - validate_tool_schemas(&mut actual); - - // Expected JSON after validation - let expected = vec![json!({ - "type": "function", - "function": { - "name": "test_func", - "description": "test description", - "parameters": { - "type": "object", - "properties": {}, - "required": [] - } - } - })]; - - // Compare entire JSON structures instead of individual fields - assert_eq!(actual, expected); - - // Test case 2: Missing type field - let mut tools = vec![json!({ - "type": "function", - "function": { - "name": "test_func", - "description": "test description", - "parameters": { - "properties": {} - } - } - })]; - - validate_tool_schemas(&mut tools); - - let params = tools[0]["function"]["parameters"].as_object().unwrap(); - assert_eq!(params["type"], "object"); - - // Test case 3: Complete valid schema should remain unchanged - let original_schema = json!({ - "type": "function", - "function": { - "name": "test_func", - "description": "test description", - "parameters": { - "type": "object", - "properties": { - "location": { - "type": "string", - "description": "City and country" - } - }, - "required": ["location"] - } - } - }); - - let mut tools = vec![original_schema.clone()]; - validate_tool_schemas(&mut tools); - assert_eq!(tools[0], original_schema); - } - - const OPENAI_TOOL_USE_RESPONSE: &str = r#"{ - "choices": [{ - "role": 
"assistant", - "message": { - "tool_calls": [{ - "id": "1", - "function": { - "name": "example_fn", - "arguments": "{\"param\": \"value\"}" - } - }] - } - }], - "usage": { - "input_tokens": 10, - "output_tokens": 25, - "total_tokens": 35 - } - }"#; - - #[test] - fn test_format_messages() -> anyhow::Result<()> { - let message = Message::user().with_text("Hello"); - let spec = format_messages(&[message], &ImageFormat::OpenAi); - - assert_eq!(spec.len(), 1); - assert_eq!(spec[0]["role"], "user"); - assert_eq!(spec[0]["content"], "Hello"); - Ok(()) - } - - #[test] - fn test_format_tools() -> anyhow::Result<()> { - let tool = Tool::new( - "test_tool", - "A test tool", - json!({ - "type": "object", - "properties": { - "input": { - "type": "string", - "description": "Test parameter" - } - }, - "required": ["input"] - }), - ); - - let spec = format_tools(&[tool])?; - - assert_eq!(spec.len(), 1); - assert_eq!(spec[0]["type"], "function"); - assert_eq!(spec[0]["function"]["name"], "test_tool"); - Ok(()) - } - - #[test] - fn test_format_messages_complex() -> anyhow::Result<()> { - let mut messages = vec![ - Message::assistant().with_text("Hello!"), - Message::user().with_text("How are you?"), - Message::assistant().with_tool_request( - "tool1", - Ok(ToolCall::new("example", json!({"param1": "value1"}))), - ), - ]; - - // Get the ID from the tool request to use in the response - let tool_id = if let MessageContent::ToolReq(request) = &messages[2].content[0] { - request.id.clone() - } else { - panic!("should be tool request"); - }; - - messages.push( - Message::user().with_tool_response(tool_id, Ok(vec![Content::text("Result")]).into()), - ); - - let spec = format_messages(&messages, &ImageFormat::OpenAi); - - assert_eq!(spec.len(), 4); - assert_eq!(spec[0]["role"], "assistant"); - assert_eq!(spec[0]["content"], "Hello!"); - assert_eq!(spec[1]["role"], "user"); - assert_eq!(spec[1]["content"], "How are you?"); - assert_eq!(spec[2]["role"], "assistant"); - 
assert!(spec[2]["tool_calls"].is_array()); - assert_eq!(spec[3]["role"], "tool"); - assert_eq!(spec[3]["content"], "Result"); - assert_eq!(spec[3]["tool_call_id"], spec[2]["tool_calls"][0]["id"]); - - Ok(()) - } - - #[test] - fn test_format_messages_multiple_content() -> anyhow::Result<()> { - let mut messages = vec![Message::assistant().with_tool_request( - "tool1", - Ok(ToolCall::new("example", json!({"param1": "value1"}))), - )]; - - // Get the ID from the tool request to use in the response - let tool_id = if let MessageContent::ToolReq(request) = &messages[0].content[0] { - request.id.clone() - } else { - panic!("should be tool request"); - }; - - messages.push( - Message::user().with_tool_response(tool_id, Ok(vec![Content::text("Result")]).into()), - ); - - let spec = format_messages(&messages, &ImageFormat::OpenAi); - - assert_eq!(spec.len(), 2); - assert_eq!(spec[0]["role"], "assistant"); - assert!(spec[0]["tool_calls"].is_array()); - assert_eq!(spec[1]["role"], "tool"); - assert_eq!(spec[1]["content"], "Result"); - assert_eq!(spec[1]["tool_call_id"], spec[0]["tool_calls"][0]["id"]); - - Ok(()) - } - - #[test] - fn test_format_tools_duplicate() -> anyhow::Result<()> { - let tool1 = Tool::new( - "test_tool", - "Test tool", - json!({ - "type": "object", - "properties": { - "input": { - "type": "string", - "description": "Test parameter" - } - }, - "required": ["input"] - }), - ); - - let tool2 = Tool::new( - "test_tool", - "Test tool", - json!({ - "type": "object", - "properties": { - "input": { - "type": "string", - "description": "Test parameter" - } - }, - "required": ["input"] - }), - ); - - let result = format_tools(&[tool1, tool2]); - assert!(result.is_err()); - assert!(result - .unwrap_err() - .to_string() - .contains("Duplicate tool name")); - - Ok(()) - } - - #[test] - fn test_format_tools_empty() -> anyhow::Result<()> { - let spec = format_tools(&[])?; - assert!(spec.is_empty()); - Ok(()) - } - - #[test] - fn test_response_to_message_text() -> 
anyhow::Result<()> { - let response = json!({ - "choices": [{ - "role": "assistant", - "message": { - "content": "Hello from John Cena!" - } - }], - "usage": { - "input_tokens": 10, - "output_tokens": 25, - "total_tokens": 35 - } - }); - - let message = response_to_message(response)?; - assert_eq!(message.content.len(), 1); - if let MessageContent::Text(text) = &message.content[0] { - assert_eq!(text.text, "Hello from John Cena!"); - } else { - panic!("Expected Text content"); - } - assert!(matches!(message.role, Role::Assistant)); - - Ok(()) - } - - #[test] - fn test_response_to_message_valid_toolrequest() -> anyhow::Result<()> { - let response: Value = serde_json::from_str(OPENAI_TOOL_USE_RESPONSE)?; - let message = response_to_message(response)?; - - assert_eq!(message.content.len(), 1); - if let MessageContent::ToolReq(request) = &message.content[0] { - let tool_call = request.tool_call.as_ref().unwrap(); - assert_eq!(tool_call.name, "example_fn"); - assert_eq!(tool_call.arguments, json!({"param": "value"})); - } else { - panic!("Expected ToolRequest content"); - } - - Ok(()) - } - - #[test] - fn test_response_to_message_invalid_func_name() -> anyhow::Result<()> { - let mut response: Value = serde_json::from_str(OPENAI_TOOL_USE_RESPONSE)?; - response["choices"][0]["message"]["tool_calls"][0]["function"]["name"] = - json!("invalid fn"); - - let message = response_to_message(response)?; - - if let MessageContent::ToolReq(request) = &message.content[0] { - match &request.tool_call.as_result() { - Err(ToolError::NotFound(msg)) => { - assert!(msg.starts_with("The provided function name")); - } - _ => panic!("Expected ToolNotFound error"), - } - } else { - panic!("Expected ToolRequest content"); - } - - Ok(()) - } - - #[test] - fn test_response_to_message_json_decode_error() -> anyhow::Result<()> { - let mut response: Value = serde_json::from_str(OPENAI_TOOL_USE_RESPONSE)?; - response["choices"][0]["message"]["tool_calls"][0]["function"]["arguments"] = - 
json!("invalid json {"); - - let message = response_to_message(response)?; - - if let MessageContent::ToolReq(request) = &message.content[0] { - match &request.tool_call.as_result() { - Err(ToolError::InvalidParameters(msg)) => { - assert!(msg.starts_with("Could not interpret tool use parameters")); - } - _ => panic!("Expected InvalidParameters error"), - } - } else { - panic!("Expected ToolRequest content"); - } - - Ok(()) - } - - #[test] - fn test_response_to_message_empty_argument() -> anyhow::Result<()> { - let mut response: Value = serde_json::from_str(OPENAI_TOOL_USE_RESPONSE)?; - response["choices"][0]["message"]["tool_calls"][0]["function"]["arguments"] = - serde_json::Value::String("".to_string()); - - let message = response_to_message(response)?; - - if let MessageContent::ToolReq(request) = &message.content[0] { - let tool_call = request.tool_call.as_ref().unwrap(); - assert_eq!(tool_call.name, "example_fn"); - assert_eq!(tool_call.arguments, json!({})); - } else { - panic!("Expected ToolRequest content"); - } - - Ok(()) - } - - #[test] - fn test_create_request_gpt_4o() -> anyhow::Result<()> { - // Test default medium reasoning effort for O3 model - let model_config = ModelConfig { - model_name: "gpt-4o".to_string(), - context_limit: Some(4096), - temperature: None, - max_tokens: Some(1024), - }; - let request = create_request(&model_config, "system", &[], &[], &ImageFormat::OpenAi)?; - let obj = request.as_object().unwrap(); - let expected = json!({ - "model": "gpt-4o", - "messages": [ - { - "role": "system", - "content": "system" - } - ], - "max_tokens": 1024 - }); - - for (key, value) in expected.as_object().unwrap() { - assert_eq!(obj.get(key).unwrap(), value); - } - - Ok(()) - } - - #[test] - fn test_create_request_o1_default() -> anyhow::Result<()> { - // Test default medium reasoning effort for O1 model - let model_config = ModelConfig { - model_name: "o1".to_string(), - context_limit: Some(4096), - temperature: None, - max_tokens: Some(1024), - 
}; - let request = create_request(&model_config, "system", &[], &[], &ImageFormat::OpenAi)?; - let obj = request.as_object().unwrap(); - let expected = json!({ - "model": "o1", - "messages": [ - { - "role": "developer", - "content": "system" - } - ], - "reasoning_effort": "medium", - "max_completion_tokens": 1024 - }); - - for (key, value) in expected.as_object().unwrap() { - assert_eq!(obj.get(key).unwrap(), value); - } - - Ok(()) - } - - #[test] - fn test_create_request_o3_custom_reasoning_effort() -> anyhow::Result<()> { - // Test custom reasoning effort for O3 model - let model_config = ModelConfig { - model_name: "o3-mini-high".to_string(), - context_limit: Some(4096), - temperature: None, - max_tokens: Some(1024), - }; - let request = create_request(&model_config, "system", &[], &[], &ImageFormat::OpenAi)?; - let obj = request.as_object().unwrap(); - let expected = json!({ - "model": "o3-mini", - "messages": [ - { - "role": "developer", - "content": "system" - } - ], - "reasoning_effort": "high", - "max_completion_tokens": 1024 - }); - - for (key, value) in expected.as_object().unwrap() { - assert_eq!(obj.get(key).unwrap(), value); - } - - Ok(()) - } - - #[test] - fn test_response_to_message_claude_thinking() -> anyhow::Result<()> { - let response = json!({ - "model": "us.anthropic.claude-3-7-sonnet-20250219-v1:0", - "choices": [{ - "message": { - "role": "assistant", - "content": [ - { - "type": "reasoning", - "summary": [ - { - "type": "summary_text", - "text": "Test thinking content", - "signature": "test-signature" - } - ] - }, - { - "type": "text", - "text": "Regular text content" - } - ] - }, - "index": 0, - "finish_reason": "stop" - }] - }); - - let message = response_to_message(response)?; - assert_eq!(message.content.len(), 2); - - if let MessageContent::Thinking(thinking) = &message.content[0] { - assert_eq!(thinking.thinking, "Test thinking content"); - assert_eq!(thinking.signature, "test-signature"); - } else { - panic!("Expected Thinking 
content"); - } - - if let MessageContent::Text(text) = &message.content[1] { - assert_eq!(text.text, "Regular text content"); - } else { - panic!("Expected Text content"); - } - - Ok(()) - } - - #[test] - fn test_response_to_message_claude_encrypted_thinking() -> anyhow::Result<()> { - let response = json!({ - "model": "claude-3-7-sonnet-20250219", - "choices": [{ - "message": { - "role": "assistant", - "content": [ - { - "type": "reasoning", - "summary": [ - { - "type": "summary_encrypted_text", - "data": "E23sQFCkYIARgCKkATCHitsdf327Ber3v4NYUq2" - } - ] - }, - { - "type": "text", - "text": "Regular text content" - } - ] - }, - "index": 0, - "finish_reason": "stop" - }] - }); - - let message = response_to_message(response)?; - assert_eq!(message.content.len(), 2); - - if let MessageContent::RedactedThinking(redacted) = &message.content[0] { - assert_eq!(redacted.data, "E23sQFCkYIARgCKkATCHitsdf327Ber3v4NYUq2"); - } else { - panic!("Expected RedactedThinking content"); - } - - if let MessageContent::Text(text) = &message.content[1] { - assert_eq!(text.text, "Regular text content"); - } else { - panic!("Expected Text content"); - } - - Ok(()) - } -} diff --git a/crates/goose-llm/src/providers/formats/mod.rs b/crates/goose-llm/src/providers/formats/mod.rs deleted file mode 100644 index cf929f39cc48..000000000000 --- a/crates/goose-llm/src/providers/formats/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod databricks; -pub mod openai; diff --git a/crates/goose-llm/src/providers/formats/openai.rs b/crates/goose-llm/src/providers/formats/openai.rs deleted file mode 100644 index a2eb43b414eb..000000000000 --- a/crates/goose-llm/src/providers/formats/openai.rs +++ /dev/null @@ -1,846 +0,0 @@ -use anyhow::{anyhow, Error}; -use serde_json::{json, Value}; - -use crate::{ - message::{Message, MessageContent}, - model::ModelConfig, - providers::{ - base::Usage, - errors::ProviderError, - utils::{convert_image, is_valid_function_name, sanitize_function_name, ImageFormat}, - }, - 
types::core::{Content, Role, Tool, ToolCall, ToolError}, -}; - -/// Convert internal Message format to OpenAI's API message specification -/// some openai compatible endpoints use the anthropic image spec at the content level -/// even though the message structure is otherwise following openai, the enum switches this -pub fn format_messages(messages: &[Message], image_format: &ImageFormat) -> Vec { - let mut messages_spec = Vec::new(); - for message in messages { - let mut converted = json!({ - "role": message.role - }); - - let mut output = Vec::new(); - - for content in message.content.iter() { - match content { - MessageContent::Text(text) => { - if !text.text.is_empty() { - converted["content"] = json!(text.text); - } - } - MessageContent::Image(image) => { - // Handle direct image content - converted["content"] = json!([convert_image(image, image_format)]); - } - MessageContent::Thinking(_) => { - // Thinking blocks are not directly used in OpenAI format - continue; - } - MessageContent::RedactedThinking(_) => { - // Redacted thinking blocks are not directly used in OpenAI format - continue; - } - MessageContent::ToolReq(request) => match &request.tool_call.as_result() { - Ok(tool_call) => { - let sanitized_name = sanitize_function_name(&tool_call.name); - let tool_calls = converted - .as_object_mut() - .unwrap() - .entry("tool_calls") - .or_insert(json!([])); - - tool_calls.as_array_mut().unwrap().push(json!({ - "id": request.id, - "type": "function", - "function": { - "name": sanitized_name, - "arguments": tool_call.arguments.to_string(), - } - })); - } - Err(e) => { - output.push(json!({ - "role": "tool", - "content": format!("Error: {}", e), - "tool_call_id": request.id - })); - } - }, - MessageContent::ToolResp(response) => { - match &response.tool_result.0 { - Ok(contents) => { - // Process all content, replacing images with placeholder text - let mut tool_content = Vec::new(); - let mut image_messages = Vec::new(); - - for content in contents { - match 
content { - Content::Image(image) => { - // Add placeholder text in the tool response - tool_content.push(Content::text("This tool result included an image that is uploaded in the next message.")); - - // Create a separate image message - image_messages.push(json!({ - "role": "user", - "content": [convert_image(image, image_format)] - })); - } - _ => { - tool_content.push(content.clone()); - } - } - } - let tool_response_content: Value = json!(tool_content - .iter() - .map(|content| match content { - Content::Text(text) => text.text.clone(), - _ => String::new(), - }) - .collect::>() - .join(" ")); - - // First add the tool response with all content - output.push(json!({ - "role": "tool", - "content": tool_response_content, - "tool_call_id": response.id - })); - // Then add any image messages that need to follow - output.extend(image_messages); - } - Err(e) => { - // A tool result error is shown as output so the model can interpret the error message - output.push(json!({ - "role": "tool", - "content": format!("The tool call returned the following error:\n{}", e), - "tool_call_id": response.id - })); - } - } - } - } - } - - if converted.get("content").is_some() || converted.get("tool_calls").is_some() { - output.insert(0, converted); - } - messages_spec.extend(output); - } - - messages_spec -} - -/// Convert internal Tool format to OpenAI's API tool specification -pub fn format_tools(tools: &[Tool]) -> anyhow::Result> { - let mut tool_names = std::collections::HashSet::new(); - let mut result = Vec::new(); - - for tool in tools { - if !tool_names.insert(&tool.name) { - return Err(anyhow!("Duplicate tool name: {}", tool.name)); - } - - let mut description = tool.description.clone(); - description.truncate(1024); - - // OpenAI's tool description max str len is 1024 - result.push(json!({ - "type": "function", - "function": { - "name": tool.name, - "description": description, - "parameters": tool.input_schema, - } - })); - } - - Ok(result) -} - -/// Convert OpenAI's API 
response to internal Message format -pub fn response_to_message(response: Value) -> anyhow::Result { - let original = response["choices"][0]["message"].clone(); - let mut content = Vec::new(); - - if let Some(text) = original.get("content") { - if let Some(text_str) = text.as_str() { - content.push(MessageContent::text(text_str)); - } - } - - if let Some(tool_calls) = original.get("tool_calls") { - if let Some(tool_calls_array) = tool_calls.as_array() { - for tool_call in tool_calls_array { - let id = tool_call["id"].as_str().unwrap_or_default().to_string(); - let function_name = tool_call["function"]["name"] - .as_str() - .unwrap_or_default() - .to_string(); - let mut arguments = tool_call["function"]["arguments"] - .as_str() - .unwrap_or_default() - .to_string(); - // If arguments is empty, we will have invalid json parsing error later. - if arguments.is_empty() { - arguments = "{}".to_string(); - } - - if !is_valid_function_name(&function_name) { - let error = ToolError::NotFound(format!( - "The provided function name '{}' had invalid characters, it must match this regex [a-zA-Z0-9_-]+", - function_name - )); - content.push(MessageContent::tool_request(id, Err(error).into())); - } else { - match serde_json::from_str::(&arguments) { - Ok(params) => { - content.push(MessageContent::tool_request( - id, - Ok(ToolCall::new(&function_name, params)).into(), - )); - } - Err(e) => { - let error = ToolError::InvalidParameters(format!( - "Could not interpret tool use parameters for id {}: {}", - id, e - )); - content.push(MessageContent::tool_request(id, Err(error).into())); - } - } - } - } - } - } - - Ok(Message { - role: Role::Assistant, - created: chrono::Utc::now().timestamp_millis(), - content: content.into(), - }) -} - -pub fn get_usage(data: &Value) -> Result { - let usage = data - .get("usage") - .ok_or_else(|| ProviderError::UsageError("No usage data in response".to_string()))?; - - let input_tokens = usage - .get("prompt_tokens") - .and_then(|v| v.as_i64()) - 
.map(|v| v as i32); - - let output_tokens = usage - .get("completion_tokens") - .and_then(|v| v.as_i64()) - .map(|v| v as i32); - - let total_tokens = usage - .get("total_tokens") - .and_then(|v| v.as_i64()) - .map(|v| v as i32) - .or_else(|| match (input_tokens, output_tokens) { - (Some(input), Some(output)) => Some(input + output), - _ => None, - }); - - Ok(Usage::new(input_tokens, output_tokens, total_tokens)) -} - -/// Validates and fixes tool schemas to ensure they have proper parameter structure. -/// If parameters exist, ensures they have properties and required fields, or removes parameters entirely. -pub fn validate_tool_schemas(tools: &mut [Value]) { - for tool in tools.iter_mut() { - if let Some(function) = tool.get_mut("function") { - if let Some(parameters) = function.get_mut("parameters") { - if parameters.is_object() { - ensure_valid_json_schema(parameters); - } - } - } - } -} - -/// Ensures that the given JSON value follows the expected JSON Schema structure. -fn ensure_valid_json_schema(schema: &mut Value) { - if let Some(params_obj) = schema.as_object_mut() { - // Check if this is meant to be an object type schema - let is_object_type = params_obj - .get("type") - .and_then(|t| t.as_str()) - .is_none_or(|t| t == "object"); // Default to true if no type is specified - - // Only apply full schema validation to object types - if is_object_type { - // Ensure required fields exist with default values - params_obj.entry("properties").or_insert_with(|| json!({})); - params_obj.entry("required").or_insert_with(|| json!([])); - params_obj.entry("type").or_insert_with(|| json!("object")); - - // Recursively validate properties if it exists - if let Some(properties) = params_obj.get_mut("properties") { - if let Some(properties_obj) = properties.as_object_mut() { - for (_key, prop) in properties_obj.iter_mut() { - if prop.is_object() - && prop.get("type").and_then(|t| t.as_str()) == Some("object") - { - ensure_valid_json_schema(prop); - } - } - } - } - } - } 
-} - -pub fn create_request( - model_config: &ModelConfig, - system: &str, - messages: &[Message], - tools: &[Tool], - image_format: &ImageFormat, -) -> anyhow::Result { - if model_config.model_name.starts_with("o1-mini") { - return Err(anyhow!( - "o1-mini model is not currently supported since Goose uses tool calling and o1-mini does not support it. Please use o1 or o3 models instead." - )); - } - - let is_ox_model = model_config.model_name.starts_with("o"); - - // Only extract reasoning effort for O1/O3 models - let (model_name, reasoning_effort) = if is_ox_model { - let parts: Vec<&str> = model_config.model_name.split('-').collect(); - let last_part = parts.last().unwrap(); - - match *last_part { - "low" | "medium" | "high" => { - let base_name = parts[..parts.len() - 1].join("-"); - (base_name, Some(last_part.to_string())) - } - _ => ( - model_config.model_name.to_string(), - Some("medium".to_string()), - ), - } - } else { - // For non-O family models, use the model name as is and no reasoning effort - (model_config.model_name.to_string(), None) - }; - - let system_message = json!({ - "role": if is_ox_model { "developer" } else { "system" }, - "content": system - }); - - let messages_spec = format_messages(messages, image_format); - let mut tools_spec = if !tools.is_empty() { - format_tools(tools)? 
- } else { - vec![] - }; - - // Validate tool schemas - validate_tool_schemas(&mut tools_spec); - - let mut messages_array = vec![system_message]; - messages_array.extend(messages_spec); - - let mut payload = json!({ - "model": model_name, - "messages": messages_array - }); - - if let Some(effort) = reasoning_effort { - payload - .as_object_mut() - .unwrap() - .insert("reasoning_effort".to_string(), json!(effort)); - } - - if !tools_spec.is_empty() { - payload - .as_object_mut() - .unwrap() - .insert("tools".to_string(), json!(tools_spec)); - } - // o1, o3 models currently don't support temperature - if !is_ox_model { - if let Some(temp) = model_config.temperature { - payload - .as_object_mut() - .unwrap() - .insert("temperature".to_string(), json!(temp)); - } - } - - // o1 models use max_completion_tokens instead of max_tokens - if let Some(tokens) = model_config.max_tokens { - let key = if is_ox_model { - "max_completion_tokens" - } else { - "max_tokens" - }; - payload - .as_object_mut() - .unwrap() - .insert(key.to_string(), json!(tokens)); - } - Ok(payload) -} - -#[cfg(test)] -mod tests { - use serde_json::json; - - use super::*; - use crate::types::core::Content; - - #[test] - fn test_validate_tool_schemas() { - // Test case 1: Empty parameters object - // Input JSON with an incomplete parameters object - let mut actual = vec![json!({ - "type": "function", - "function": { - "name": "test_func", - "description": "test description", - "parameters": { - "type": "object" - } - } - })]; - - // Run the function to validate and update schemas - validate_tool_schemas(&mut actual); - - // Expected JSON after validation - let expected = vec![json!({ - "type": "function", - "function": { - "name": "test_func", - "description": "test description", - "parameters": { - "type": "object", - "properties": {}, - "required": [] - } - } - })]; - - // Compare entire JSON structures instead of individual fields - assert_eq!(actual, expected); - - // Test case 2: Missing type field 
- let mut tools = vec![json!({ - "type": "function", - "function": { - "name": "test_func", - "description": "test description", - "parameters": { - "properties": {} - } - } - })]; - - validate_tool_schemas(&mut tools); - - let params = tools[0]["function"]["parameters"].as_object().unwrap(); - assert_eq!(params["type"], "object"); - - // Test case 3: Complete valid schema should remain unchanged - let original_schema = json!({ - "type": "function", - "function": { - "name": "test_func", - "description": "test description", - "parameters": { - "type": "object", - "properties": { - "location": { - "type": "string", - "description": "City and country" - } - }, - "required": ["location"] - } - } - }); - - let mut tools = vec![original_schema.clone()]; - validate_tool_schemas(&mut tools); - assert_eq!(tools[0], original_schema); - } - - const OPENAI_TOOL_USE_RESPONSE: &str = r#"{ - "choices": [{ - "role": "assistant", - "message": { - "tool_calls": [{ - "id": "1", - "function": { - "name": "example_fn", - "arguments": "{\"param\": \"value\"}" - } - }] - } - }], - "usage": { - "input_tokens": 10, - "output_tokens": 25, - "total_tokens": 35 - } - }"#; - - #[test] - fn test_format_messages() -> anyhow::Result<()> { - let message = Message::user().with_text("Hello"); - let spec = format_messages(&[message], &ImageFormat::OpenAi); - - assert_eq!(spec.len(), 1); - assert_eq!(spec[0]["role"], "user"); - assert_eq!(spec[0]["content"], "Hello"); - Ok(()) - } - - #[test] - fn test_format_tools() -> anyhow::Result<()> { - let tool = Tool::new( - "test_tool", - "A test tool", - json!({ - "type": "object", - "properties": { - "input": { - "type": "string", - "description": "Test parameter" - } - }, - "required": ["input"] - }), - ); - - let spec = format_tools(&[tool])?; - - assert_eq!(spec.len(), 1); - assert_eq!(spec[0]["type"], "function"); - assert_eq!(spec[0]["function"]["name"], "test_tool"); - Ok(()) - } - - #[test] - fn test_format_messages_complex() -> anyhow::Result<()> { 
- let mut messages = vec![ - Message::assistant().with_text("Hello!"), - Message::user().with_text("How are you?"), - Message::assistant().with_tool_request( - "tool1", - Ok(ToolCall::new("example", json!({"param1": "value1"}))), - ), - ]; - - // Get the ID from the tool request to use in the response - let tool_id = if let MessageContent::ToolReq(request) = &messages[2].content[0] { - request.id.clone() - } else { - panic!("should be tool request"); - }; - - messages.push( - Message::user().with_tool_response(tool_id, Ok(vec![Content::text("Result")]).into()), - ); - - let spec = format_messages(&messages, &ImageFormat::OpenAi); - - assert_eq!(spec.len(), 4); - assert_eq!(spec[0]["role"], "assistant"); - assert_eq!(spec[0]["content"], "Hello!"); - assert_eq!(spec[1]["role"], "user"); - assert_eq!(spec[1]["content"], "How are you?"); - assert_eq!(spec[2]["role"], "assistant"); - assert!(spec[2]["tool_calls"].is_array()); - assert_eq!(spec[3]["role"], "tool"); - assert_eq!(spec[3]["content"], "Result"); - assert_eq!(spec[3]["tool_call_id"], spec[2]["tool_calls"][0]["id"]); - - Ok(()) - } - - #[test] - fn test_format_messages_multiple_content() -> anyhow::Result<()> { - let mut messages = vec![Message::assistant().with_tool_request( - "tool1", - Ok(ToolCall::new("example", json!({"param1": "value1"}))), - )]; - - // Get the ID from the tool request to use in the response - let tool_id = if let MessageContent::ToolReq(request) = &messages[0].content[0] { - request.id.clone() - } else { - panic!("should be tool request"); - }; - - messages.push( - Message::user().with_tool_response(tool_id, Ok(vec![Content::text("Result")]).into()), - ); - - let spec = format_messages(&messages, &ImageFormat::OpenAi); - - assert_eq!(spec.len(), 2); - assert_eq!(spec[0]["role"], "assistant"); - assert!(spec[0]["tool_calls"].is_array()); - assert_eq!(spec[1]["role"], "tool"); - assert_eq!(spec[1]["content"], "Result"); - assert_eq!(spec[1]["tool_call_id"], 
spec[0]["tool_calls"][0]["id"]); - - Ok(()) - } - - #[test] - fn test_format_tools_duplicate() -> anyhow::Result<()> { - let tool1 = Tool::new( - "test_tool", - "Test tool", - json!({ - "type": "object", - "properties": { - "input": { - "type": "string", - "description": "Test parameter" - } - }, - "required": ["input"] - }), - ); - - let tool2 = Tool::new( - "test_tool", - "Test tool", - json!({ - "type": "object", - "properties": { - "input": { - "type": "string", - "description": "Test parameter" - } - }, - "required": ["input"] - }), - ); - - let result = format_tools(&[tool1, tool2]); - assert!(result.is_err()); - assert!(result - .unwrap_err() - .to_string() - .contains("Duplicate tool name")); - - Ok(()) - } - - #[test] - fn test_format_tools_empty() -> anyhow::Result<()> { - let spec = format_tools(&[])?; - assert!(spec.is_empty()); - Ok(()) - } - - #[test] - fn test_response_to_message_text() -> anyhow::Result<()> { - let response = json!({ - "choices": [{ - "role": "assistant", - "message": { - "content": "Hello from John Cena!" 
- } - }], - "usage": { - "input_tokens": 10, - "output_tokens": 25, - "total_tokens": 35 - } - }); - - let message = response_to_message(response)?; - assert_eq!(message.content.len(), 1); - if let MessageContent::Text(text) = &message.content[0] { - assert_eq!(text.text, "Hello from John Cena!"); - } else { - panic!("Expected Text content"); - } - assert!(matches!(message.role, Role::Assistant)); - - Ok(()) - } - - #[test] - fn test_response_to_message_valid_toolrequest() -> anyhow::Result<()> { - let response: Value = serde_json::from_str(OPENAI_TOOL_USE_RESPONSE)?; - let message = response_to_message(response)?; - - assert_eq!(message.content.len(), 1); - if let MessageContent::ToolReq(request) = &message.content[0] { - let tool_call = request.tool_call.as_ref().unwrap(); - assert_eq!(tool_call.name, "example_fn"); - assert_eq!(tool_call.arguments, json!({"param": "value"})); - } else { - panic!("Expected ToolRequest content"); - } - - Ok(()) - } - - #[test] - fn test_response_to_message_invalid_func_name() -> anyhow::Result<()> { - let mut response: Value = serde_json::from_str(OPENAI_TOOL_USE_RESPONSE)?; - response["choices"][0]["message"]["tool_calls"][0]["function"]["name"] = - json!("invalid fn"); - - let message = response_to_message(response)?; - - if let MessageContent::ToolReq(request) = &message.content[0] { - match &request.tool_call.as_result() { - Err(ToolError::NotFound(msg)) => { - assert!(msg.starts_with("The provided function name")); - } - _ => panic!("Expected ToolNotFound error"), - } - } else { - panic!("Expected ToolRequest content"); - } - - Ok(()) - } - - #[test] - fn test_response_to_message_json_decode_error() -> anyhow::Result<()> { - let mut response: Value = serde_json::from_str(OPENAI_TOOL_USE_RESPONSE)?; - response["choices"][0]["message"]["tool_calls"][0]["function"]["arguments"] = - json!("invalid json {"); - - let message = response_to_message(response)?; - - if let MessageContent::ToolReq(request) = &message.content[0] { - 
match &request.tool_call.as_result() { - Err(ToolError::InvalidParameters(msg)) => { - assert!(msg.starts_with("Could not interpret tool use parameters")); - } - _ => panic!("Expected InvalidParameters error"), - } - } else { - panic!("Expected ToolRequest content"); - } - - Ok(()) - } - - #[test] - fn test_response_to_message_empty_argument() -> anyhow::Result<()> { - let mut response: Value = serde_json::from_str(OPENAI_TOOL_USE_RESPONSE)?; - response["choices"][0]["message"]["tool_calls"][0]["function"]["arguments"] = - serde_json::Value::String("".to_string()); - - let message = response_to_message(response)?; - - if let MessageContent::ToolReq(request) = &message.content[0] { - let tool_call = request.tool_call.as_ref().unwrap(); - assert_eq!(tool_call.name, "example_fn"); - assert_eq!(tool_call.arguments, json!({})); - } else { - panic!("Expected ToolRequest content"); - } - - Ok(()) - } - - #[test] - fn test_create_request_gpt_4o() -> anyhow::Result<()> { - // Test default medium reasoning effort for O3 model - let model_config = ModelConfig { - model_name: "gpt-4o".to_string(), - context_limit: Some(4096), - temperature: None, - max_tokens: Some(1024), - }; - let request = create_request(&model_config, "system", &[], &[], &ImageFormat::OpenAi)?; - let obj = request.as_object().unwrap(); - let expected = json!({ - "model": "gpt-4o", - "messages": [ - { - "role": "system", - "content": "system" - } - ], - "max_tokens": 1024 - }); - - for (key, value) in expected.as_object().unwrap() { - assert_eq!(obj.get(key).unwrap(), value); - } - - Ok(()) - } - - #[test] - fn test_create_request_o1_default() -> anyhow::Result<()> { - // Test default medium reasoning effort for O1 model - let model_config = ModelConfig { - model_name: "o1".to_string(), - context_limit: Some(4096), - temperature: None, - max_tokens: Some(1024), - }; - let request = create_request(&model_config, "system", &[], &[], &ImageFormat::OpenAi)?; - let obj = request.as_object().unwrap(); - let 
expected = json!({ - "model": "o1", - "messages": [ - { - "role": "developer", - "content": "system" - } - ], - "reasoning_effort": "medium", - "max_completion_tokens": 1024 - }); - - for (key, value) in expected.as_object().unwrap() { - assert_eq!(obj.get(key).unwrap(), value); - } - - Ok(()) - } - - #[test] - fn test_create_request_o3_custom_reasoning_effort() -> anyhow::Result<()> { - // Test custom reasoning effort for O3 model - let model_config = ModelConfig { - model_name: "o3-mini-high".to_string(), - context_limit: Some(4096), - temperature: None, - max_tokens: Some(1024), - }; - let request = create_request(&model_config, "system", &[], &[], &ImageFormat::OpenAi)?; - let obj = request.as_object().unwrap(); - let expected = json!({ - "model": "o3-mini", - "messages": [ - { - "role": "developer", - "content": "system" - } - ], - "reasoning_effort": "high", - "max_completion_tokens": 1024 - }); - - for (key, value) in expected.as_object().unwrap() { - assert_eq!(obj.get(key).unwrap(), value); - } - - Ok(()) - } -} diff --git a/crates/goose-llm/src/providers/mod.rs b/crates/goose-llm/src/providers/mod.rs deleted file mode 100644 index c808938048f9..000000000000 --- a/crates/goose-llm/src/providers/mod.rs +++ /dev/null @@ -1,10 +0,0 @@ -pub mod base; -pub mod databricks; -pub mod errors; -mod factory; -pub mod formats; -pub mod openai; -pub mod utils; - -pub use base::{Provider, ProviderCompleteResponse, ProviderExtractResponse, Usage}; -pub use factory::create; diff --git a/crates/goose-llm/src/providers/openai.rs b/crates/goose-llm/src/providers/openai.rs deleted file mode 100644 index 82d736f366cf..000000000000 --- a/crates/goose-llm/src/providers/openai.rs +++ /dev/null @@ -1,233 +0,0 @@ -use std::{collections::HashMap, time::Duration}; - -use anyhow::Result; -use async_trait::async_trait; -use reqwest::Client; -use serde::{Deserialize, Serialize}; -use serde_json::{json, Value}; - -use super::{ - errors::ProviderError, - formats::openai::{create_request, 
get_usage, response_to_message}, - utils::{emit_debug_trace, get_env, get_model, handle_response_openai_compat, ImageFormat}, -}; -use crate::{ - message::Message, - model::ModelConfig, - providers::{Provider, ProviderCompleteResponse, ProviderExtractResponse, Usage}, - types::core::Tool, -}; - -pub const OPEN_AI_DEFAULT_MODEL: &str = "gpt-4o"; -pub const _OPEN_AI_KNOWN_MODELS: &[&str] = &["gpt-4o", "gpt-4.1", "o1", "o3", "o4-mini"]; - -fn default_timeout() -> u64 { - 60 -} - -fn default_base_path() -> String { - "v1/chat/completions".to_string() -} - -fn default_host() -> String { - "https://api.openai.com".to_string() -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct OpenAiProviderConfig { - pub api_key: String, - #[serde(default = "default_host")] - pub host: String, - #[serde(default)] - pub organization: Option, - #[serde(default = "default_base_path")] - pub base_path: String, - #[serde(default)] - pub project: Option, - #[serde(default)] - pub custom_headers: Option>, - #[serde(default = "default_timeout")] - pub timeout: u64, // timeout in seconds -} - -impl OpenAiProviderConfig { - pub fn new(api_key: String) -> Self { - Self { - api_key, - host: default_host(), - organization: None, - base_path: default_base_path(), - project: None, - custom_headers: None, - timeout: 600, - } - } - - pub fn from_env() -> Self { - let api_key = get_env("OPENAI_API_KEY").expect("Missing OPENAI_API_KEY"); - Self::new(api_key) - } -} - -#[derive(Debug)] -pub struct OpenAiProvider { - config: OpenAiProviderConfig, - model: ModelConfig, - client: Client, -} - -impl OpenAiProvider { - pub fn from_env(model: ModelConfig) -> Self { - let config = OpenAiProviderConfig::from_env(); - OpenAiProvider::from_config(config, model).expect("Failed to initialize OpenAiProvider") - } -} - -impl Default for OpenAiProvider { - fn default() -> Self { - let config = OpenAiProviderConfig::from_env(); - let model = ModelConfig::new(OPEN_AI_DEFAULT_MODEL.to_string()); - 
OpenAiProvider::from_config(config, model).expect("Failed to initialize OpenAiProvider") - } -} - -impl OpenAiProvider { - pub fn from_config(config: OpenAiProviderConfig, model: ModelConfig) -> Result { - let client = Client::builder() - .timeout(Duration::from_secs(config.timeout)) - .build()?; - - Ok(Self { - config, - model, - client, - }) - } - - async fn post(&self, payload: Value) -> Result { - let base_url = url::Url::parse(&self.config.host) - .map_err(|e| ProviderError::RequestFailed(format!("Invalid base URL: {e}")))?; - let url = base_url.join(&self.config.base_path).map_err(|e| { - ProviderError::RequestFailed(format!("Failed to construct endpoint URL: {e}")) - })?; - - let mut request = self - .client - .post(url) - .header("Authorization", format!("Bearer {}", self.config.api_key)); - - // Add organization header if present - if let Some(org) = &self.config.organization { - request = request.header("OpenAI-Organization", org); - } - - // Add project header if present - if let Some(project) = &self.config.project { - request = request.header("OpenAI-Project", project); - } - - if let Some(custom_headers) = &self.config.custom_headers { - for (key, value) in custom_headers { - request = request.header(key, value); - } - } - - let response = request.json(&payload).send().await?; - - handle_response_openai_compat(response).await - } -} - -#[async_trait] -impl Provider for OpenAiProvider { - #[tracing::instrument( - skip(self, system, messages, tools), - fields(model_config, input, output, input_tokens, output_tokens, total_tokens) - )] - async fn complete( - &self, - system: &str, - messages: &[Message], - tools: &[Tool], - _request_id: Option<&str>, // OpenAI doesn't use request_id, so we ignore it - ) -> Result { - let payload = create_request(&self.model, system, messages, tools, &ImageFormat::OpenAi)?; - - // Make request - let response = self.post(payload.clone()).await?; - - // Parse response - let message = response_to_message(response.clone())?; 
- let usage = match get_usage(&response) { - Ok(usage) => usage, - Err(ProviderError::UsageError(e)) => { - tracing::debug!("Failed to get usage data: {}", e); - Usage::default() - } - Err(e) => return Err(e), - }; - let model = get_model(&response); - emit_debug_trace(&self.model, &payload, &response, &usage); - Ok(ProviderCompleteResponse::new(message, model, usage)) - } - - async fn extract( - &self, - system: &str, - messages: &[Message], - schema: &Value, - _request_id: Option<&str>, // OpenAI doesn't use request_id, so we ignore it - ) -> Result { - // 1. Build base payload (no tools) - let mut payload = create_request(&self.model, system, messages, &[], &ImageFormat::OpenAi)?; - - // 2. Inject strict JSON‐Schema wrapper - payload - .as_object_mut() - .expect("payload must be an object") - .insert( - "response_format".to_string(), - json!({ - "type": "json_schema", - "json_schema": { - "name": "extraction", - "schema": schema, - "strict": true - } - }), - ); - - // 3. Call OpenAI - let response = self.post(payload.clone()).await?; - - // 4. Extract the assistant’s `content` and parse it into JSON - let msg = &response["choices"][0]["message"]; - let raw = msg.get("content").cloned().ok_or_else(|| { - ProviderError::ResponseParseError("Missing content in extract response".into()) - })?; - let data = match raw { - Value::String(s) => serde_json::from_str(&s) - .map_err(|e| ProviderError::ResponseParseError(format!("Invalid JSON: {}", e)))?, - Value::Object(_) | Value::Array(_) => raw, - other => { - return Err(ProviderError::ResponseParseError(format!( - "Unexpected content type: {:?}", - other - ))) - } - }; - - // 5. 
Gather usage & model info - let usage = match get_usage(&response) { - Ok(u) => u, - Err(ProviderError::UsageError(e)) => { - tracing::debug!("Failed to get usage in extract: {}", e); - Usage::default() - } - Err(e) => return Err(e), - }; - let model = get_model(&response); - - Ok(ProviderExtractResponse::new(data, model, usage)) - } -} diff --git a/crates/goose-llm/src/providers/utils.rs b/crates/goose-llm/src/providers/utils.rs deleted file mode 100644 index b6c00e7bf237..000000000000 --- a/crates/goose-llm/src/providers/utils.rs +++ /dev/null @@ -1,260 +0,0 @@ -use std::{env, io::Read, path::Path}; - -use anyhow::Result; -use base64::Engine; -use regex::Regex; -use reqwest::{Response, StatusCode}; -use serde::{Deserialize, Serialize}; -use serde_json::{from_value, json, Value}; - -use super::base::Usage; -use crate::{ - model::ModelConfig, - providers::errors::{OpenAIError, ProviderError}, - types::core::ImageContent, -}; - -#[derive(serde::Deserialize)] -struct OpenAIErrorResponse { - error: OpenAIError, -} - -#[derive(Debug, Copy, Clone, Serialize, Deserialize, Default)] -pub enum ImageFormat { - #[default] - OpenAi, - Anthropic, -} - -/// Timeout in seconds. 
-#[derive(Debug, Clone, Copy, Serialize, Deserialize)] -pub struct Timeout(u32); -impl Default for Timeout { - fn default() -> Self { - Timeout(60) - } -} - -/// Convert an image content into an image json based on format -pub fn convert_image(image: &ImageContent, image_format: &ImageFormat) -> Value { - match image_format { - ImageFormat::OpenAi => json!({ - "type": "image_url", - "image_url": { - "url": format!("data:{};base64,{}", image.mime_type, image.data) - } - }), - ImageFormat::Anthropic => json!({ - "type": "image", - "source": { - "type": "base64", - "media_type": image.mime_type, - "data": image.data, - } - }), - } -} - -/// Handle response from OpenAI compatible endpoints -/// Error codes: https://platform.openai.com/docs/guides/error-codes -/// Context window exceeded: https://community.openai.com/t/help-needed-tackling-context-length-limits-in-openai-models/617543 -pub async fn handle_response_openai_compat(response: Response) -> Result { - let status = response.status(); - // Try to parse the response body as JSON (if applicable) - let payload = match response.json::().await { - Ok(json) => json, - Err(e) => return Err(ProviderError::RequestFailed(e.to_string())), - }; - - match status { - StatusCode::OK => Ok(payload), - StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => { - Err(ProviderError::Authentication(format!( - "Authentication failed. Please ensure your API keys are valid and have the required permissions. \ - Status: {}. Response: {:?}", - status, payload - ))) - } - StatusCode::BAD_REQUEST | StatusCode::NOT_FOUND => { - tracing::debug!( - "{}", - format!( - "Provider request failed with status: {}. 
Payload: {:?}", - status, payload - ) - ); - if let Ok(err_resp) = from_value::(payload) { - let err = err_resp.error; - if err.is_context_length_exceeded() { - return Err(ProviderError::ContextLengthExceeded( - err.message.unwrap_or("Unknown error".to_string()), - )); - } - return Err(ProviderError::RequestFailed(format!( - "{} (status {})", - err, - status.as_u16() - ))); - } - Err(ProviderError::RequestFailed(format!( - "Unknown error (status {})", - status - ))) - } - StatusCode::TOO_MANY_REQUESTS => { - Err(ProviderError::RateLimitExceeded(format!("{:?}", payload))) - } - StatusCode::INTERNAL_SERVER_ERROR | StatusCode::SERVICE_UNAVAILABLE => { - Err(ProviderError::ServerError(format!("{:?}", payload))) - } - _ => { - tracing::debug!( - "{}", - format!( - "Provider request failed with status: {}. Payload: {:?}", - status, payload - ) - ); - Err(ProviderError::RequestFailed(format!( - "Request failed with status: {}", - status - ))) - } - } -} - -/// Get a secret from environment variables. The secret is expected to be in JSON format. -pub fn get_env(key: &str) -> Result { - // check environment variables (convert to uppercase) - let env_key = key.to_uppercase(); - if let Ok(val) = env::var(&env_key) { - let value: Value = serde_json::from_str(&val).unwrap_or(Value::String(val)); - Ok(serde_json::from_value(value)?) - } else { - Err(anyhow::anyhow!( - "Environment variable {} not found", - env_key - )) - } -} - -pub fn sanitize_function_name(name: &str) -> String { - let re = Regex::new(r"[^a-zA-Z0-9_-]").unwrap(); - re.replace_all(name, "_").to_string() -} - -pub fn is_valid_function_name(name: &str) -> bool { - let re = Regex::new(r"^[a-zA-Z0-9_-]+$").unwrap(); - re.is_match(name) -} - -/// Extract the model name from a JSON object. Common with most providers to have this top level attribute. 
-pub fn get_model(data: &Value) -> String { - if let Some(model) = data.get("model") { - if let Some(model_str) = model.as_str() { - model_str.to_string() - } else { - "Unknown".to_string() - } - } else { - "Unknown".to_string() - } -} - -/// Check if a file is actually an image by examining its magic bytes -fn is_image_file(path: &Path) -> bool { - if let Ok(mut file) = std::fs::File::open(path) { - let mut buffer = [0u8; 8]; // Large enough for most image magic numbers - if file.read(&mut buffer).is_ok() { - // Check magic numbers for common image formats - return match &buffer[0..4] { - // PNG: 89 50 4E 47 - [0x89, 0x50, 0x4E, 0x47] => true, - // JPEG: FF D8 FF - [0xFF, 0xD8, 0xFF, _] => true, - // GIF: 47 49 46 38 - [0x47, 0x49, 0x46, 0x38] => true, - _ => false, - }; - } - } - false -} - -/// Convert a local image file to base64 encoded ImageContent -pub fn load_image_file(path: &str) -> Result { - let path = Path::new(path); - - // Verify it's an image before proceeding - if !is_image_file(path) { - return Err(ProviderError::RequestFailed( - "File is not a valid image".to_string(), - )); - } - - // Read the file - let bytes = std::fs::read(path) - .map_err(|e| ProviderError::RequestFailed(format!("Failed to read image file: {}", e)))?; - - // Detect mime type from extension - let mime_type = match path.extension().and_then(|e| e.to_str()) { - Some(ext) => match ext.to_lowercase().as_str() { - "png" => "image/png", - "jpg" | "jpeg" => "image/jpeg", - _ => { - return Err(ProviderError::RequestFailed( - "Unsupported image format".to_string(), - )); - } - }, - None => { - return Err(ProviderError::RequestFailed( - "Unknown image format".to_string(), - )); - } - }; - - // Convert to base64 - let data = base64::prelude::BASE64_STANDARD.encode(&bytes); - - Ok(ImageContent { - mime_type: mime_type.to_string(), - data, - }) -} - -pub fn emit_debug_trace( - model_config: &ModelConfig, - payload: &Value, - response: &Value, - usage: &Usage, -) { - tracing::debug!( - 
model_config = %serde_json::to_string_pretty(model_config).unwrap_or_default(), - input = %serde_json::to_string_pretty(payload).unwrap_or_default(), - output = %serde_json::to_string_pretty(response).unwrap_or_default(), - input_tokens = ?usage.input_tokens.unwrap_or_default(), - output_tokens = ?usage.output_tokens.unwrap_or_default(), - total_tokens = ?usage.total_tokens.unwrap_or_default(), - ); -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_sanitize_function_name() { - assert_eq!(sanitize_function_name("hello-world"), "hello-world"); - assert_eq!(sanitize_function_name("hello world"), "hello_world"); - assert_eq!(sanitize_function_name("hello@world"), "hello_world"); - } - - #[test] - fn test_is_valid_function_name() { - assert!(is_valid_function_name("hello-world")); - assert!(is_valid_function_name("hello_world")); - assert!(!is_valid_function_name("hello world")); - assert!(!is_valid_function_name("hello@world")); - } -} diff --git a/crates/goose-llm/src/structured_outputs.rs b/crates/goose-llm/src/structured_outputs.rs deleted file mode 100644 index b6690b641e74..000000000000 --- a/crates/goose-llm/src/structured_outputs.rs +++ /dev/null @@ -1,32 +0,0 @@ -use crate::{ - providers::{create, errors::ProviderError, ProviderExtractResponse}, - types::json_value_ffi::JsonValueFfi, - Message, ModelConfig, -}; - -/// Generates a structured output based on the provided schema, -/// system prompt and user messages. 
-#[uniffi::export(async_runtime = "tokio", default(request_id = None))] -pub async fn generate_structured_outputs( - provider_name: &str, - provider_config: JsonValueFfi, - system_prompt: &str, - messages: &[Message], - schema: JsonValueFfi, - request_id: Option, -) -> Result { - // Use OpenAI models specifically for this task - let model_name = if provider_name == "databricks" { - "goose-gpt-4-1" - } else { - "gpt-4.1" - }; - let model_cfg = ModelConfig::new(model_name.to_string()).with_temperature(Some(0.0)); - let provider = create(provider_name, provider_config, model_cfg)?; - - let resp = provider - .extract(system_prompt, messages, &schema, request_id.as_deref()) - .await?; - - Ok(resp) -} diff --git a/crates/goose-llm/src/types/completion.rs b/crates/goose-llm/src/types/completion.rs deleted file mode 100644 index ce54f6075ed2..000000000000 --- a/crates/goose-llm/src/types/completion.rs +++ /dev/null @@ -1,247 +0,0 @@ -// This file defines types for completion interfaces, including the request and response structures. 
-// Many of these are adapted based on the Goose Service API: -// https://docs.google.com/document/d/1r5vjSK3nBQU1cIRf0WKysDigqMlzzrzl_bxEE4msOiw/edit?tab=t.0 - -use std::collections::HashMap; -use thiserror::Error; - -use serde::{Deserialize, Serialize}; - -use crate::types::json_value_ffi::JsonValueFfi; -use crate::{message::Message, providers::Usage}; -use crate::{model::ModelConfig, providers::errors::ProviderError}; - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CompletionRequest { - pub provider_name: String, - pub provider_config: serde_json::Value, - pub model_config: ModelConfig, - pub system_preamble: Option, - pub system_prompt_override: Option, - pub messages: Vec, - pub extensions: Vec, - pub request_id: Option, -} - -impl CompletionRequest { - pub fn new( - provider_name: String, - provider_config: serde_json::Value, - model_config: ModelConfig, - system_preamble: Option, - system_prompt_override: Option, - messages: Vec, - extensions: Vec, - ) -> Self { - Self { - provider_name, - provider_config, - model_config, - system_prompt_override, - system_preamble, - messages, - extensions, - request_id: None, - } - } - - pub fn with_request_id(mut self, request_id: String) -> Self { - self.request_id = Some(request_id); - self - } -} - -#[allow(clippy::too_many_arguments)] -#[uniffi::export(default(system_preamble = None, system_prompt_override = None))] -pub fn create_completion_request( - provider_name: &str, - provider_config: JsonValueFfi, - model_config: ModelConfig, - system_preamble: Option, - system_prompt_override: Option, - messages: Vec, - extensions: Vec, - request_id: Option, -) -> CompletionRequest { - let mut request = CompletionRequest::new( - provider_name.to_string(), - provider_config, - model_config, - system_preamble, - system_prompt_override, - messages, - extensions, - ); - - if let Some(req_id) = request_id { - request = request.with_request_id(req_id); - } - - request -} - -uniffi::custom_type!(CompletionRequest, 
String, { - lower: |tc: &CompletionRequest| { - serde_json::to_string(&tc).unwrap() - }, - try_lift: |s: String| { - Ok(serde_json::from_str(&s).unwrap()) - }, -}); - -// https://mozilla.github.io/uniffi-rs/latest/proc_macro/errors.html -#[derive(Debug, Error, uniffi::Error)] -#[uniffi(flat_error)] -pub enum CompletionError { - #[error("failed to create provider: {0}")] - UnknownProvider(String), - - #[error("provider error: {0}")] - Provider(#[from] ProviderError), - - #[error("template rendering error: {0}")] - Template(#[from] minijinja::Error), - - #[error("json serialization error: {0}")] - Json(#[from] serde_json::Error), - - #[error("tool not found error: {0}")] - ToolNotFound(String), -} - -#[derive(Debug, Clone, Serialize, Deserialize, uniffi::Record)] -pub struct CompletionResponse { - pub message: Message, - pub model: String, - pub usage: Usage, - pub runtime_metrics: RuntimeMetrics, -} - -impl CompletionResponse { - pub fn new( - message: Message, - model: String, - usage: Usage, - runtime_metrics: RuntimeMetrics, - ) -> Self { - Self { - message, - model, - usage, - runtime_metrics, - } - } -} - -#[derive(Debug, Clone, Serialize, Deserialize, uniffi::Record)] -pub struct RuntimeMetrics { - pub total_time_sec: f32, - pub total_time_sec_provider: f32, - pub tokens_per_second: Option, -} - -impl RuntimeMetrics { - pub fn new( - total_time_sec: f32, - total_time_sec_provider: f32, - tokens_per_second: Option, - ) -> Self { - Self { - total_time_sec, - total_time_sec_provider, - tokens_per_second, - } - } -} - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, uniffi::Enum)] -pub enum ToolApprovalMode { - Auto, - Manual, - Smart, -} - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, uniffi::Record)] -pub struct ToolConfig { - pub name: String, - pub description: String, - pub input_schema: JsonValueFfi, - pub approval_mode: ToolApprovalMode, -} - -impl ToolConfig { - pub fn new( - name: &str, - description: &str, - input_schema: 
JsonValueFfi, - approval_mode: ToolApprovalMode, - ) -> Self { - Self { - name: name.to_string(), - description: description.to_string(), - input_schema, - approval_mode, - } - } - - /// Convert the tool config to a core tool - pub fn to_core_tool(&self, name: Option<&str>) -> super::core::Tool { - let tool_name = name.unwrap_or(&self.name); - super::core::Tool::new( - tool_name, - self.description.clone(), - self.input_schema.clone(), - ) - } -} - -#[uniffi::export] -pub fn create_tool_config( - name: &str, - description: &str, - input_schema: JsonValueFfi, - approval_mode: ToolApprovalMode, -) -> ToolConfig { - ToolConfig::new(name, description, input_schema, approval_mode) -} - -// — Register the newtypes with UniFFI, converting via JSON strings — - -#[derive(Debug, Clone, Serialize, Deserialize, uniffi::Record)] -pub struct ExtensionConfig { - name: String, - instructions: Option, - tools: Vec, -} - -impl ExtensionConfig { - pub fn new(name: String, instructions: Option, tools: Vec) -> Self { - Self { - name, - instructions, - tools, - } - } - - /// Convert the tools to core tools with the extension name as a prefix - pub fn get_prefixed_tools(&self) -> Vec { - self.tools - .iter() - .map(|tool| { - let name = format!("{}__{}", self.name, tool.name); - tool.to_core_tool(Some(&name)) - }) - .collect() - } - - /// Get a map of prefixed tool names to their approval modes - pub fn get_prefixed_tool_configs(&self) -> HashMap { - self.tools - .iter() - .map(|tool| { - let name = format!("{}__{}", self.name, tool.name); - (name, tool.clone()) - }) - .collect() - } -} diff --git a/crates/goose-llm/src/types/core.rs b/crates/goose-llm/src/types/core.rs deleted file mode 100644 index 3e45d276041d..000000000000 --- a/crates/goose-llm/src/types/core.rs +++ /dev/null @@ -1,131 +0,0 @@ -// This file defines core types that require serialization to -// construct payloads for LLM model providers and work with MCPs. 
- -use serde::{Deserialize, Serialize}; -use thiserror::Error; - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, uniffi::Enum)] -#[serde(rename_all = "lowercase")] -pub enum Role { - User, - Assistant, -} - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, uniffi::Enum)] -#[serde(tag = "type", rename_all = "camelCase")] -pub enum Content { - Text(TextContent), - Image(ImageContent), -} - -impl Content { - pub fn text>(text: S) -> Self { - Content::Text(TextContent { text: text.into() }) - } - - pub fn image, T: Into>(data: S, mime_type: T) -> Self { - Content::Image(ImageContent { - data: data.into(), - mime_type: mime_type.into(), - }) - } - - /// Get the text content if this is a TextContent variant - pub fn as_text(&self) -> Option<&str> { - match self { - Content::Text(text) => Some(&text.text), - _ => None, - } - } - - /// Get the image content if this is an ImageContent variant - pub fn as_image(&self) -> Option<(&str, &str)> { - match self { - Content::Image(image) => Some((&image.data, &image.mime_type)), - _ => None, - } - } -} - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, uniffi::Record)] -#[serde(rename_all = "camelCase")] -pub struct TextContent { - pub text: String, -} - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, uniffi::Record)] -#[serde(rename_all = "camelCase")] -pub struct ImageContent { - pub data: String, - pub mime_type: String, -} - -/// A tool that can be used by a model. 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct Tool { - /// The name of the tool - pub name: String, - /// A description of what the tool does - pub description: String, - /// A JSON Schema object defining the expected parameters for the tool - pub input_schema: serde_json::Value, -} - -impl Tool { - /// Create a new tool with the given name and description - pub fn new(name: N, description: D, input_schema: serde_json::Value) -> Self - where - N: Into, - D: Into, - { - Tool { - name: name.into(), - description: description.into(), - input_schema, - } - } -} - -/// A tool call request that an extension can execute -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ToolCall { - /// The name of the tool to execute - pub name: String, - /// The parameters for the execution - pub arguments: serde_json::Value, - /// Whether the tool call needs approval before execution. Default is false. 
- pub needs_approval: bool, -} - -impl ToolCall { - /// Create a new ToolUse with the given name and parameters - pub fn new>(name: S, arguments: serde_json::Value) -> Self { - Self { - name: name.into(), - arguments, - needs_approval: false, - } - } - - /// Set needs_approval field - pub fn set_needs_approval(&mut self, flag: bool) { - self.needs_approval = flag; - } -} - -#[non_exhaustive] -#[derive(Error, Debug, Clone, Deserialize, Serialize, PartialEq, uniffi::Error)] -pub enum ToolError { - #[error("Invalid parameters: {0}")] - InvalidParameters(String), - #[error("Execution failed: {0}")] - ExecutionError(String), - #[error("Schema error: {0}")] - SchemaError(String), - #[error("Tool not found: {0}")] - NotFound(String), -} - -pub type ToolResult = std::result::Result; diff --git a/crates/goose-llm/src/types/json_value_ffi.rs b/crates/goose-llm/src/types/json_value_ffi.rs deleted file mode 100644 index a2e44a34cfac..000000000000 --- a/crates/goose-llm/src/types/json_value_ffi.rs +++ /dev/null @@ -1,18 +0,0 @@ -use serde_json::Value; - -// `serde_json::Value` gets converted to a `String` to pass across the FFI. 
-// https://github.com/mozilla/uniffi-rs/blob/main/docs/manual/src/types/custom_types.md?plain=1 -// https://github.com/mozilla/uniffi-rs/blob/c7f6caa3d1bf20f934346cefd8e82b5093f0dc6f/examples/custom-types/src/lib.rs#L63-L69 - -uniffi::custom_type!(Value, String, { - // Remote is required since 'Value' is from a different crate - remote, - lower: |obj| { - serde_json::to_string(&obj).unwrap() - }, - try_lift: |val| { - Ok(serde_json::from_str(&val).unwrap() ) - }, -}); - -pub type JsonValueFfi = Value; diff --git a/crates/goose-llm/src/types/mod.rs b/crates/goose-llm/src/types/mod.rs deleted file mode 100644 index a2c2f35c598f..000000000000 --- a/crates/goose-llm/src/types/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -pub mod completion; -pub mod core; -pub mod json_value_ffi; diff --git a/crates/goose-llm/tests/extract_session_name.rs b/crates/goose-llm/tests/extract_session_name.rs deleted file mode 100644 index 7568fa640514..000000000000 --- a/crates/goose-llm/tests/extract_session_name.rs +++ /dev/null @@ -1,79 +0,0 @@ -use anyhow::Result; -use dotenvy::dotenv; -use goose_llm::extractors::generate_session_name; -use goose_llm::message::Message; -use goose_llm::providers::errors::ProviderError; - -fn should_run_test() -> Result<(), String> { - dotenv().ok(); - if std::env::var("DATABRICKS_HOST").is_err() { - return Err("Missing DATABRICKS_HOST".to_string()); - } - if std::env::var("DATABRICKS_TOKEN").is_err() { - return Err("Missing DATABRICKS_TOKEN".to_string()); - } - Ok(()) -} - -async fn _generate_session_name(messages: &[Message]) -> Result { - let provider_name = "databricks"; - let provider_config = serde_json::json!({ - "host": std::env::var("DATABRICKS_HOST").expect("Missing DATABRICKS_HOST"), - "token": std::env::var("DATABRICKS_TOKEN").expect("Missing DATABRICKS_TOKEN"), - }); - - generate_session_name(provider_name, provider_config, messages, None).await -} - -#[tokio::test] -async fn test_generate_session_name_success() { - if should_run_test().is_err() { - 
println!("Skipping..."); - return; - } - - // Build a few messages with at least two user messages - let messages = vec![ - Message::user().with_text("Hello, how are you?"), - Message::assistant().with_text("I'm fine, thanks!"), - Message::user().with_text("What's the weather in New York tomorrow?"), - ]; - - let name = _generate_session_name(&messages) - .await - .expect("Failed to generate session name"); - - println!("Generated session name: {:?}", name); - - // Should be non-empty and at most 4 words - let name = name.trim(); - assert!(!name.is_empty(), "Name must not be empty"); - let word_count = name.split_whitespace().count(); - assert!( - word_count <= 4, - "Name must be 4 words or less, got {}: {}", - word_count, - name - ) -} - -#[tokio::test] -async fn test_generate_session_name_no_user() { - if should_run_test().is_err() { - println!("Skipping 'test_generate_session_name_no_user'. Databricks creds not set"); - return; - } - - // No user messages → expect ExecutionError - let messages = vec![ - Message::assistant().with_text("System starting…"), - Message::assistant().with_text("All systems go."), - ]; - - let err = _generate_session_name(&messages).await; - assert!( - matches!(err, Err(ProviderError::ExecutionError(_))), - "Expected ExecutionError when there are no user messages, got: {:?}", - err - ); -} diff --git a/crates/goose-llm/tests/extract_tooltip.rs b/crates/goose-llm/tests/extract_tooltip.rs deleted file mode 100644 index c408c1b2c6a5..000000000000 --- a/crates/goose-llm/tests/extract_tooltip.rs +++ /dev/null @@ -1,88 +0,0 @@ -use anyhow::Result; -use dotenvy::dotenv; -use goose_llm::extractors::generate_tooltip; -use goose_llm::message::{Message, MessageContent, ToolRequest}; -use goose_llm::providers::errors::ProviderError; -use goose_llm::types::core::{Content, ToolCall}; -use serde_json::json; - -fn should_run_test() -> Result<(), String> { - dotenv().ok(); - if std::env::var("DATABRICKS_HOST").is_err() { - return Err("Missing 
DATABRICKS_HOST".to_string()); - } - if std::env::var("DATABRICKS_TOKEN").is_err() { - return Err("Missing DATABRICKS_TOKEN".to_string()); - } - Ok(()) -} - -async fn _generate_tooltip(messages: &[Message]) -> Result { - let provider_name = "databricks"; - let provider_config = serde_json::json!({ - "host": std::env::var("DATABRICKS_HOST").expect("Missing DATABRICKS_HOST"), - "token": std::env::var("DATABRICKS_TOKEN").expect("Missing DATABRICKS_TOKEN"), - }); - - generate_tooltip(provider_name, provider_config, messages, None).await -} - -#[tokio::test] -async fn test_generate_tooltip_simple() { - if should_run_test().is_err() { - println!("Skipping..."); - return; - } - - // Two plain-text messages - let messages = vec![ - Message::user().with_text("Hello, how are you?"), - Message::assistant().with_text("I'm fine, thanks! How can I help?"), - ]; - - let tooltip = _generate_tooltip(&messages) - .await - .expect("Failed to generate tooltip"); - println!("Generated tooltip: {:?}", tooltip); - - assert!(!tooltip.trim().is_empty(), "Tooltip must not be empty"); - assert!( - tooltip.len() < 100, - "Tooltip should be reasonably short (<100 chars)" - ); -} - -#[tokio::test] -async fn test_generate_tooltip_with_tools() { - if should_run_test().is_err() { - println!("Skipping..."); - return; - } - - // 1) Assistant message with a tool request - let mut tool_req_msg = Message::assistant(); - let req = ToolRequest { - id: "1".to_string(), - tool_call: Ok(ToolCall::new("get_time", json!({"timezone": "UTC"}))).into(), - }; - tool_req_msg.content.push(MessageContent::ToolReq(req)); - - // 2) User message with the tool response - let tool_resp_msg = Message::user().with_tool_response( - "1", - Ok(vec![Content::text("The current time is 12:00 UTC")]).into(), - ); - - let messages = vec![tool_req_msg, tool_resp_msg]; - - let tooltip = _generate_tooltip(&messages) - .await - .expect("Failed to generate tooltip"); - println!("Generated tooltip (tools): {:?}", tooltip); - - 
assert!(!tooltip.trim().is_empty(), "Tooltip must not be empty"); - assert!( - tooltip.len() < 100, - "Tooltip should be reasonably short (<100 chars)" - ); -} diff --git a/crates/goose-llm/tests/providers_complete.rs b/crates/goose-llm/tests/providers_complete.rs deleted file mode 100644 index 8d0bc3c243b6..000000000000 --- a/crates/goose-llm/tests/providers_complete.rs +++ /dev/null @@ -1,382 +0,0 @@ -use anyhow::Result; -use dotenvy::dotenv; -use goose_llm::message::{Message, MessageContent}; -use goose_llm::providers::base::Provider; -use goose_llm::providers::errors::ProviderError; -use goose_llm::providers::{databricks, openai}; -use goose_llm::types::core::{Content, Tool}; -use std::collections::HashMap; -use std::sync::Arc; -use std::sync::Mutex; - -#[derive(Debug, Clone, Copy)] -enum TestStatus { - Passed, - Skipped, - Failed, -} - -impl std::fmt::Display for TestStatus { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - TestStatus::Passed => write!(f, "✅"), - TestStatus::Skipped => write!(f, "⏭️"), - TestStatus::Failed => write!(f, "❌"), - } - } -} - -struct TestReport { - results: Mutex>, -} - -impl TestReport { - fn new() -> Arc { - Arc::new(Self { - results: Mutex::new(HashMap::new()), - }) - } - - fn record_status(&self, provider: &str, status: TestStatus) { - let mut results = self.results.lock().unwrap(); - results.insert(provider.to_string(), status); - } - - fn record_pass(&self, provider: &str) { - self.record_status(provider, TestStatus::Passed); - } - - fn record_skip(&self, provider: &str) { - self.record_status(provider, TestStatus::Skipped); - } - - fn record_fail(&self, provider: &str) { - self.record_status(provider, TestStatus::Failed); - } - - fn print_summary(&self) { - println!("\n============== Providers =============="); - let results = self.results.lock().unwrap(); - let mut providers: Vec<_> = results.iter().collect(); - providers.sort_by(|a, b| a.0.cmp(b.0)); - - for (provider, status) in 
providers { - println!("{} {}", status, provider); - } - println!("=======================================\n"); - } -} - -lazy_static::lazy_static! { - static ref TEST_REPORT: Arc = TestReport::new(); - static ref ENV_LOCK: Mutex<()> = Mutex::new(()); -} - -/// Generic test harness for any Provider implementation -struct ProviderTester { - provider: Arc, - name: String, -} - -impl ProviderTester { - fn new(provider: T, name: String) -> Self { - Self { - provider: Arc::new(provider), - name, - } - } - - async fn test_basic_response(&self) -> Result<()> { - let message = Message::user().with_text("Just say hello!"); - - let response = self - .provider - .complete("You are a helpful assistant.", &[message], &[], None) - .await?; - - // For a basic response, we expect a single text response - assert_eq!( - response.message.content.len(), - 1, - "Expected single content item in response" - ); - - // Verify we got a text response - assert!( - matches!(response.message.content[0], MessageContent::Text(_)), - "Expected text response" - ); - - Ok(()) - } - - async fn test_tool_usage(&self) -> Result<()> { - let weather_tool = Tool::new( - "get_weather", - "Get the weather for a location", - serde_json::json!({ - "type": "object", - "required": ["location"], - "properties": { - "location": { - "type": "string", - "description": "The city and state, e.g. 
San Francisco, CA" - } - } - }), - ); - - let message = Message::user().with_text("What's the weather like in San Francisco?"); - - let response1 = self - .provider - .complete( - "You are a helpful weather assistant.", - &[message.clone()], - &[weather_tool.clone()], - None, - ) - .await?; - - println!("=== {}::reponse1 ===", self.name); - dbg!(&response1); - println!("==================="); - - // Verify we got a tool request - assert!( - response1 - .message - .content - .iter() - .any(|content| matches!(content, MessageContent::ToolReq(_))), - "Expected tool request in response" - ); - - let id = &response1 - .message - .content - .iter() - .filter_map(|message| message.as_tool_request()) - .next_back() - .expect("got tool request") - .id; - - let weather = Message::user().with_tool_response( - id, - Ok(vec![Content::text( - " - 50°F°C - Precipitation: 0% - Humidity: 84% - Wind: 2 mph - Weather - Saturday 9:00 PM - Clear", - )]) - .into(), - ); - - // Verify we construct a valid payload including the request/response pair for the next inference - let response2 = self - .provider - .complete( - "You are a helpful weather assistant.", - &[message, response1.message, weather], - &[weather_tool], - None, - ) - .await?; - - println!("=== {}::reponse2 ===", self.name); - dbg!(&response2); - println!("==================="); - - assert!( - response2 - .message - .content - .iter() - .any(|content| matches!(content, MessageContent::Text(_))), - "Expected text for final response" - ); - - Ok(()) - } - - async fn test_context_length_exceeded_error(&self) -> Result<()> { - // Google Gemini has a really long context window - let large_message_content = if self.name.to_lowercase() == "google" { - "hello ".repeat(1_300_000) - } else { - "hello ".repeat(300_000) - }; - - let messages = vec![ - Message::user().with_text("hi there. what is 2 + 2?"), - Message::assistant().with_text("hey! 
I think it's 4."), - Message::user().with_text(&large_message_content), - Message::assistant().with_text("heyy!!"), - // Messages before this mark should be truncated - Message::user().with_text("what's the meaning of life?"), - Message::assistant().with_text("the meaning of life is 42"), - Message::user().with_text( - "did I ask you what's 2+2 in this message history? just respond with 'yes' or 'no'", - ), - ]; - - // Test that we get ProviderError::ContextLengthExceeded when the context window is exceeded - let result = self - .provider - .complete("You are a helpful assistant.", &messages, &[], None) - .await; - - // Print some debug info - println!("=== {}::context_length_exceeded_error ===", self.name); - dbg!(&result); - println!("==================="); - - // Ollama truncates by default even when the context window is exceeded - if self.name.to_lowercase() == "ollama" { - assert!( - result.is_ok(), - "Expected to succeed because of default truncation" - ); - return Ok(()); - } - - assert!( - result.is_err(), - "Expected error when context window is exceeded" - ); - assert!( - matches!(result.unwrap_err(), ProviderError::ContextLengthExceeded(_)), - "Expected error to be ContextLengthExceeded" - ); - - Ok(()) - } - - /// Run all provider tests - async fn run_test_suite(&self) -> Result<()> { - self.test_basic_response().await?; - self.test_tool_usage().await?; - self.test_context_length_exceeded_error().await?; - Ok(()) - } -} - -fn load_env() { - if let Ok(path) = dotenv() { - println!("Loaded environment from {:?}", path); - } -} - -/// Helper function to run a provider test with proper error handling and reporting -async fn test_provider( - name: &str, - required_vars: &[&str], - env_modifications: Option>>, - provider_fn: F, -) -> Result<()> -where - F: FnOnce() -> T, - T: Provider + Send + Sync + 'static, -{ - // We start off as failed, so that if the process panics it is seen as a failure - TEST_REPORT.record_fail(name); - - // Take exclusive access to 
environment modifications - let lock = ENV_LOCK.lock().unwrap(); - - load_env(); - - // Save current environment state for required vars and modified vars - let mut original_env = HashMap::new(); - for &var in required_vars { - if let Ok(val) = std::env::var(var) { - original_env.insert(var, val); - } - } - if let Some(mods) = &env_modifications { - for &var in mods.keys() { - if let Ok(val) = std::env::var(var) { - original_env.insert(var, val); - } - } - } - - // Apply any environment modifications - if let Some(mods) = &env_modifications { - for (&var, value) in mods.iter() { - match value { - Some(val) => std::env::set_var(var, val), - None => std::env::remove_var(var), - } - } - } - - // Setup the provider - let missing_vars = required_vars.iter().any(|var| std::env::var(var).is_err()); - if missing_vars { - println!("Skipping {} tests - credentials not configured", name); - TEST_REPORT.record_skip(name); - return Ok(()); - } - - let provider = provider_fn(); - - // Restore original environment - for (&var, value) in original_env.iter() { - std::env::set_var(var, value); - } - if let Some(mods) = env_modifications { - for &var in mods.keys() { - if !original_env.contains_key(var) { - std::env::remove_var(var); - } - } - } - - std::mem::drop(lock); - - let tester = ProviderTester::new(provider, name.to_string()); - match tester.run_test_suite().await { - Ok(_) => { - TEST_REPORT.record_pass(name); - Ok(()) - } - Err(e) => { - println!("{} test failed: {}", name, e); - TEST_REPORT.record_fail(name); - Err(e) - } - } -} - -#[tokio::test] -async fn openai_complete() -> Result<()> { - test_provider( - "OpenAI", - &["OPENAI_API_KEY"], - None, - openai::OpenAiProvider::default, - ) - .await -} - -#[tokio::test] -async fn databricks_complete() -> Result<()> { - test_provider( - "Databricks", - &["DATABRICKS_HOST", "DATABRICKS_TOKEN"], - None, - databricks::DatabricksProvider::default, - ) - .await -} - -// Print the final test report -#[ctor::dtor] -fn 
print_test_report() { - TEST_REPORT.print_summary(); -} diff --git a/crates/goose-llm/tests/providers_extract.rs b/crates/goose-llm/tests/providers_extract.rs deleted file mode 100644 index 60f75db86c4e..000000000000 --- a/crates/goose-llm/tests/providers_extract.rs +++ /dev/null @@ -1,195 +0,0 @@ -// tests/providers_extract.rs - -use anyhow::Result; -use dotenvy::dotenv; -use goose_llm::message::Message; -use goose_llm::providers::base::Provider; -use goose_llm::providers::{databricks::DatabricksProvider, openai::OpenAiProvider}; -use goose_llm::ModelConfig; -use serde_json::{json, Value}; -use std::sync::Arc; - -#[derive(Debug, PartialEq, Copy, Clone)] -enum ProviderType { - OpenAi, - Databricks, -} - -impl ProviderType { - fn required_env(&self) -> &'static [&'static str] { - match self { - ProviderType::OpenAi => &["OPENAI_API_KEY"], - ProviderType::Databricks => &["DATABRICKS_HOST", "DATABRICKS_TOKEN"], - } - } - - fn create_provider(&self, cfg: ModelConfig) -> Result> { - Ok(match self { - ProviderType::OpenAi => Arc::new(OpenAiProvider::from_env(cfg)), - ProviderType::Databricks => Arc::new(DatabricksProvider::from_env(cfg)), - }) - } -} - -fn check_required_env_vars(required: &[&str]) -> bool { - let missing: Vec<_> = required - .iter() - .filter(|&&v| std::env::var(v).is_err()) - .cloned() - .collect(); - if !missing.is_empty() { - println!("Skipping test; missing env vars: {:?}", missing); - false - } else { - true - } -} - -// --- Shared inputs for "paper" task --- -const PAPER_SYSTEM: &str = - "You are an expert at structured data extraction. Extract the metadata of a research paper into JSON."; -const PAPER_TEXT: &str = - "Application of Quantum Algorithms in Interstellar Navigation: A New Frontier \ - by Dr. Stella Voyager, Dr. Nova Star, Dr. Lyra Hunter. Abstract: This paper \ - investigates the utilization of quantum algorithms to improve interstellar \ - navigation systems. 
Keywords: Quantum algorithms, interstellar navigation, \ - space-time anomalies, quantum superposition, quantum entanglement, space travel."; - -fn paper_schema() -> Value { - json!({ - "type": "object", - "properties": { - "title": { "type": "string" }, - "authors": { "type": "array", "items": { "type": "string" } }, - "abstract": { "type": "string" }, - "keywords": { "type": "array", "items": { "type": "string" } } - }, - "required": ["title","authors","abstract","keywords"], - "additionalProperties": false - }) -} - -// --- Shared inputs for "UI" task --- -const UI_SYSTEM: &str = "You are a UI generator AI. Convert the user input into a JSON-driven UI."; -const UI_TEXT: &str = "Make a User Profile Form"; - -fn ui_schema() -> Value { - json!({ - "type": "object", - "properties": { - "type": { - "type": "string", - "enum": ["div","button","header","section","field","form"] - }, - "label": { "type": "string" }, - "children": { - "type": "array", - "items": { "$ref": "#" } - }, - "attributes": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": { "type": "string" }, - "value": { "type": "string" } - }, - "required": ["name","value"], - "additionalProperties": false - } - } - }, - "required": ["type","label","children","attributes"], - "additionalProperties": false - }) -} - -/// Generic runner for any extract task -async fn run_extract_test( - provider_type: ProviderType, - model: &str, - system: &'static str, - user_text: &'static str, - schema: Value, - validate: F, -) -> Result<()> -where - F: Fn(&Value) -> bool, -{ - dotenv().ok(); - if !check_required_env_vars(provider_type.required_env()) { - return Ok(()); - } - - let cfg = ModelConfig::new(model.to_string()).with_temperature(Some(0.0)); - let provider = provider_type.create_provider(cfg)?; - - let msg = Message::user().with_text(user_text); - let resp = provider.extract(system, &[msg], &schema, None).await?; - - println!("[{:?}] extract => {}", provider_type, resp.data); - - 
assert!( - validate(&resp.data), - "{:?} failed validation on {}", - provider_type, - resp.data - ); - Ok(()) -} - -/// Helper for the "paper" task -async fn run_extract_paper_test(provider: ProviderType, model: &str) -> Result<()> { - run_extract_test( - provider, - model, - PAPER_SYSTEM, - PAPER_TEXT, - paper_schema(), - |v| { - v.as_object() - .map(|o| { - ["title", "authors", "abstract", "keywords"] - .iter() - .all(|k| o.contains_key(*k)) - }) - .unwrap_or(false) - }, - ) - .await -} - -/// Helper for the "UI" task -async fn run_extract_ui_test(provider: ProviderType, model: &str) -> Result<()> { - run_extract_test(provider, model, UI_SYSTEM, UI_TEXT, ui_schema(), |v| { - v.as_object() - .and_then(|o| o.get("type").and_then(Value::as_str)) - == Some("form") - }) - .await -} - -#[cfg(test)] -mod tests { - use super::*; - - #[tokio::test] - async fn openai_extract_paper() -> Result<()> { - run_extract_paper_test(ProviderType::OpenAi, "gpt-4o").await - } - - #[tokio::test] - async fn openai_extract_ui() -> Result<()> { - run_extract_ui_test(ProviderType::OpenAi, "gpt-4o").await - } - - #[tokio::test] - async fn databricks_extract_paper() -> Result<()> { - run_extract_paper_test(ProviderType::Databricks, "goose-gpt-4-1").await - } - - #[tokio::test] - async fn databricks_extract_ui() -> Result<()> { - run_extract_ui_test(ProviderType::Databricks, "goose-gpt-4-1").await - } -} diff --git a/crates/goose-llm/uniffi-bindgen.rs b/crates/goose-llm/uniffi-bindgen.rs deleted file mode 100644 index f6cff6cf1d99..000000000000 --- a/crates/goose-llm/uniffi-bindgen.rs +++ /dev/null @@ -1,3 +0,0 @@ -fn main() { - uniffi::uniffi_bindgen_main() -}