Scala direct syntax DSL #66

Merged: 6 commits, May 17, 2023
@@ -60,8 +60,11 @@ inline fun <A> ai(noinline block: suspend AIScope.() -> A): AI<A> = block
  *
  * This operator is **terminal** meaning it runs and completes the _chain_ of `AI` actions.
  */
-@OptIn(ExperimentalTime::class)
-suspend inline fun <reified A> AI<A>.getOrElse(crossinline orElse: suspend (AIError) -> A): A =
+suspend inline fun <A> AI<A>.getOrElse(crossinline orElse: suspend (AIError) -> A): A =
+  AIScope(this) { orElse(it) }
+
+suspend fun <A> AIScope(block: suspend AIScope.() -> A, orElse: suspend (AIError) -> A): A =
   recover({
     resourceScope {
       val openAIConfig = OpenAIConfig()

Contributor Author (on the new `getOrElse`): We weren't using reified here.

Contributor Author (on the new `AIScope` function): We needed a way to construct AIScope from Scala, so it cannot be inline. This method gives us a convenient entry point from Scala.

@@ -70,7 +73,7 @@ suspend inline fun <reified A> AI<A>.getOrElse(crossinline orElse: suspend (AIError) -> A): A =
       val embeddings = OpenAIEmbeddings(openAIConfig, openAiClient, logger)
       val vectorStore = LocalVectorStore(embeddings)
       val scope = AIScope(openAiClient, vectorStore, embeddings, logger, this, this@recover)
-      invoke(scope)
+      block(scope)
     }
   }) {
     orElse(it)
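For readers of the comments above: a Kotlin `inline fun` with a `reified` type parameter has no ordinary method representation that non-Kotlin JVM code can call, whereas a plain `suspend fun` compiles to a regular method taking an extra `kotlin.coroutines.Continuation` argument. A minimal sketch of what that buys the Scala side, assuming the `LoomAdapter` helper from this PR's `kotlinLoom` module supplies that continuation and blocks until completion (the real wiring lives in `AI.scala` further down):

```scala
import com.xebia.functional.loom.LoomAdapter
import com.xebia.functional.xef.AIError
import com.xebia.functional.xef.auto.AIKt

// Calls the non-inline `AIScope` entry point directly from Scala.
// The second parameter of each lambda is the Continuation slot that
// the compiled `suspend` signatures add; `cont` is the outer
// continuation that LoomAdapter provides.
val greeting: String = LoomAdapter.apply { cont =>
  AIKt.AIScope[String](
    { (coreScope, _) => "hello from the core AIScope" },  // block
    { (e: AIError, _) => s"failed: ${e.getReason}" },     // orElse
    cont
  )
}
```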
@@ -1,6 +1,7 @@
 package com.xebia.functional.xef.llm.openai

 import kotlin.jvm.JvmInline
+import kotlin.jvm.JvmStatic
 import kotlinx.serialization.SerialName
 import kotlinx.serialization.Serializable

@@ -119,16 +120,26 @@ data class LLMModel(val name: String, val kind: Kind, val contextLength: Int) {
     Completion,
     Chat
   }

   companion object {
-    val GPT_4 = LLMModel("gpt-4", Kind.Chat, 8192)
-    val GPT_4_0314 = LLMModel("gpt-4-0314", Kind.Chat, 8192)
-    val GPT_4_32K = LLMModel("gpt-4-32k", Kind.Chat, 32768)
-    val GPT_3_5_TURBO = LLMModel("gpt-3.5-turbo", Kind.Chat, 4096)
-    val GPT_3_5_TURBO_0301 = LLMModel("gpt-3.5-turbo-0301", Kind.Chat, 4096)
-    val TEXT_DAVINCI_003 = LLMModel("text-davinci-003", Kind.Completion, 4097)
-    val TEXT_DAVINCI_002 = LLMModel("text-davinci-002", Kind.Completion, 4097)
-    val TEXT_CURIE_001 = LLMModel("text-curie-001", Kind.Completion, 2049)
-    val TEXT_BABBAGE_001 = LLMModel("text-babbage-001", Kind.Completion, 2049)
-    val TEXT_ADA_001 = LLMModel("text-ada-001", Kind.Completion, 2049)
+    @JvmStatic val GPT_4 = LLMModel("gpt-4", Kind.Chat, 8192)
+
+    @JvmStatic val GPT_4_0314 = LLMModel("gpt-4-0314", Kind.Chat, 8192)
+
+    @JvmStatic val GPT_4_32K = LLMModel("gpt-4-32k", Kind.Chat, 32768)
+
+    @JvmStatic val GPT_3_5_TURBO = LLMModel("gpt-3.5-turbo", Kind.Chat, 4096)
+
+    @JvmStatic val GPT_3_5_TURBO_0301 = LLMModel("gpt-3.5-turbo-0301", Kind.Chat, 4096)
+
+    @JvmStatic val TEXT_DAVINCI_003 = LLMModel("text-davinci-003", Kind.Completion, 4097)
+
+    @JvmStatic val TEXT_DAVINCI_002 = LLMModel("text-davinci-002", Kind.Completion, 4097)
+
+    @JvmStatic val TEXT_CURIE_001 = LLMModel("text-curie-001", Kind.Completion, 2049)
+
+    @JvmStatic val TEXT_BABBAGE_001 = LLMModel("text-babbage-001", Kind.Completion, 2049)
+
+    @JvmStatic val TEXT_ADA_001 = LLMModel("text-ada-001", Kind.Completion, 2049)
   }
 }
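Why `@JvmStatic` helps here: without it, a companion-object `val` is only reachable through the singleton `Companion` field, so Scala (and Java) callers must go through that instance; with it, the compiler also emits a static getter on `LLMModel` itself. A small sketch of the difference as seen from Scala (both forms should resolve after this change):

```scala
import com.xebia.functional.xef.llm.openai.LLMModel

// Without @JvmStatic: only reachable through the Companion instance.
val viaCompanion: LLMModel = LLMModel.Companion.getGPT_4()

// With @JvmStatic: a static getter is also generated on the class itself,
// which is what AI.scala relies on below (LLMModel.getGPT_3_5_TURBO).
val viaStatic: LLMModel = LLMModel.getGPT_4()
```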
scala/build.gradle.kts — 3 changes: 2 additions & 1 deletion
@@ -7,7 +7,8 @@ plugins {
 }

 dependencies {
-  //implementation(projects.xefCore)
+  implementation(projects.xefCore)
+  implementation(projects.kotlinLoom)
   implementation(libs.kotlinx.coroutines)
   implementation(libs.ciris.core)
   implementation(libs.ciris.refined)
scala/src/main/scala/com/xebia/functional/auto/AI.scala — 47 additions (new file)
@@ -0,0 +1,47 @@
package com.xebia.functional.auto

import com.xebia.functional.loom.LoomAdapter
import com.xebia.functional.xef.auto.AIScope as KtAIScope
import com.xebia.functional.xef.auto.AIException
import com.xebia.functional.xef.auto.AIKt
import com.xebia.functional.xef.AIError
import com.xebia.functional.xef.agents.Agent as KtAgent
import com.xebia.functional.xef.agents.ParameterlessAgent
import com.xebia.functional.xef.llm.openai.LLMModel

//def example(using AIScope): String =
// prompt[String]("What is your name?")

//val example: AIScope ?=> String =
// prompt[String]("What is your name?")

object AI:

  def apply[A](block: AIScope ?=> A): A =
    LoomAdapter.apply { (cont) =>
      AIKt.AIScope[A](
        { (coreAIScope, cont) =>
          given AIScope = AIScope.fromCore(coreAIScope)
          block
        },
        (e: AIError, cont) => throw AIException(e.getReason),
        cont
      )
    }

end AI

final case class AIScope(kt: KtAIScope):
  def agent[A](agent: ParameterlessAgent[List[String]], scope: AIScope ?=> A): A = ???

  def agent[A](agents: List[ParameterlessAgent[List[String]]], scope: AIScope ?=> A): A = ???

  // TODO: Design signature for Scala3 w/ Json parser (with support for generating Json Schema)?
  def prompt[A](
      prompt: String,
      maxAttempts: Int = 5,
      llmMode: LLMModel = LLMModel.getGPT_3_5_TURBO
  ): A = ???

private object AIScope:
  def fromCore(coreAIScope: KtAIScope): AIScope = new AIScope(coreAIScope)
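A hedged usage sketch of the DSL above, mirroring the commented-out examples at the top of the file. The top-level `prompt` helper is hypothetical (this commit only defines `AIScope.prompt`, still stubbed with `???`), so this shows the intended shape rather than working code:

```scala
// Hypothetical helper delegating to the context AIScope:
def prompt[A](question: String)(using scope: AIScope): A =
  scope.prompt[A](question)

@main def example(): Unit =
  // AI { ... } installs a given AIScope for the block and runs it
  // through the Kotlin core via LoomAdapter.
  val name = AI {
    prompt[String]("What is your name?")
  }
  println(name)
```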