Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Infer DSL and automatic input serialization #327

Merged
merged 6 commits into from
Aug 16, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ import com.xebia.functional.xef.llm.models.chat.Message
import com.xebia.functional.xef.llm.models.functions.CFunction
import com.xebia.functional.xef.llm.models.functions.encodeJsonSchema
import com.xebia.functional.xef.prompt.Prompt
import com.xebia.functional.xef.prompt.buildPrompt
import com.xebia.functional.xef.prompt.templates.user
import io.github.oshai.kotlinlogging.KotlinLogging
import kotlinx.serialization.ExperimentalSerializationApi
import kotlinx.serialization.KSerializer
Expand Down Expand Up @@ -62,6 +64,28 @@ interface ChatWithFunctions : Chat {
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
): A = prompt(prompt.toMessages(), scope, functions, serializer, promptConfiguration)

@OptIn(ExperimentalSerializationApi::class)
@AiDsl
suspend fun <A, B> prompt(
  input: A,
  scope: Conversation,
  inputSerializer: KSerializer<A>,
  outputSerializer: KSerializer<B>,
  functions: List<CFunction> = generateCFunction(outputSerializer.descriptor),
  promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
): B {
  // Render the input as a pseudo-constructor call, e.g. `Question({"question":"..."})`,
  // so the model sees both the type name and the serialized payload.
  val serializedInput = Json.encodeToString(inputSerializer, input)
  val inputPrompt = buildPrompt {
    +user("${inputSerializer.descriptor.serialName}($serializedInput)")
  }
  return prompt(
    inputPrompt,
    scope,
    functions,
    { json -> Json.decodeFromString(outputSerializer, json) },
    promptConfiguration
  )
}

@AiDsl
suspend fun <A> prompt(
prompt: Prompt,
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,121 @@
package com.xebia.functional.xef.prompt.lang

import com.xebia.functional.xef.auto.Conversation
import com.xebia.functional.xef.auto.PromptConfiguration
import com.xebia.functional.xef.llm.ChatWithFunctions
import com.xebia.functional.xef.prompt.Prompt
import com.xebia.functional.xef.prompt.buildPrompt
import com.xebia.functional.xef.prompt.templates.system
import com.xebia.functional.xef.prompt.templates.user
import kotlinx.serialization.json.Json
import kotlinx.serialization.serializer

/**
* Infer is a DSL for generating structured output from structured input. Infer allows the input
* values to be inferred with a minimal DSL which is then replaced with the actual values inside the
LLM thought process. Assumes an LLM with SudoLang-like capabilities or understanding of structured
* input.
*/
class Infer(
  val model: ChatWithFunctions,
  val conversation: Conversation,
) {

  /**
   * A stand-in recorded during [Scope] construction: [value] is the sentinel written into the
   * serialized input, [name] is the marker text it is swapped for before prompting, and [config]
   * carries optional modifier hints rendered alongside the marker.
   */
  class Replacement<out A>(val name: String, val value: A, val config: Config)

  /** Optional `key:value` modifier hints attached to a placeholder. */
  class Config(val modifiers: List<Pair<String, String>>) {
    companion object {
      operator fun invoke(vararg modifiers: Pair<String, String>): Config =
        Config(modifiers.toList())
    }
  }

  /** Receiver of the DSL block: records which fields the LLM should infer. */
  class Scope {

    // Every infer* call appends here; replacements are applied in order after serialization.
    val replacements: MutableList<Replacement<*>> = mutableListOf()

    val inferInt: Int
      get() = inferInt()

    val inferFloat: Float
      get() = inferFloat()

    val inferDouble: Double
      get() = inferDouble()

    val inferString: String
      get() = inferString()

    // Marker the model is told to replace; `${'$'}` escapes produce the literal `$generate$`.
    val placeholder
      get() = "${'$'}generate${'$'}"

    // MAX_VALUE sentinels are unlikely to collide with real values during text replacement.
    fun inferInt(config: Config = Config()): Int = infer(placeholder, Int.MAX_VALUE, config)

    fun inferFloat(config: Config = Config()): Float = infer(placeholder, Float.MAX_VALUE, config)

    fun inferDouble(config: Config = Config()): Double =
      infer(placeholder, Double.MAX_VALUE, config)

    fun inferString(config: Config = Config()): String = infer(placeholder, placeholder, config)

    // Records the replacement and returns the sentinel so it lands in the built value.
    private fun <A> infer(name: String, value: A, config: Config): A {
      replacements.add(Replacement(name, value, config))
      return value
    }
  }

  /**
   * Builds the request value inside [block], serializes it with [Json], swaps each recorded
   * sentinel for its placeholder marker, and asks [model] to produce a [B] via the SudoLang-style
   * `Process` directive, prefixed by the caller-supplied role [prompt].
   */
  suspend inline operator fun <reified A, reified B> invoke(
    prompt: Prompt,
    block: Scope.() -> A
  ): B {
    val scope = Scope()
    val request = block(scope)
    var input = Json.encodeToString(serializer<A>(), request)
    scope.replacements.forEach { replacement ->
      input = replaceInferPlaceholder(input, replacement)
    }
    return model.prompt(
      messages =
        buildPrompt {
          +system(prompt.message)
          +system("Stay in role and follow the directives of the function `Process`")
          +system(
            """
Process(input) {
STOP, Carefully consider all instructions in this function
STOP, first replace all placeholders ${scope.placeholder} in input
Consider in your output ALL properties in the input that are not placeholders
Reflect their information in your output
target
|> critique
|> fix(critique)
|> applyCritique(target)
produce output in valid json
}
"""
              .trimIndent()
          )
          +user("Process($input)")
        },
      scope = conversation,
      serializer = serializer<B>(),
      // Deterministic decoding keeps the structured output stable across runs.
      promptConfiguration = PromptConfiguration { temperature(0.0) }
    )
  }

  /**
   * Replaces the first occurrence of the replacement's sentinel value in [input] with its
   * placeholder marker, appending any modifier hints as comma-joined `key:value` pairs.
   */
  fun replaceInferPlaceholder(input: String, replacement: Replacement<*>): String =
    input.replaceFirst(
      oldValue = replacement.value.toString(),
      newValue =
        if (replacement.config.modifiers.isEmpty()) {
          replacement.name
        } else {
          replacement.name +
            ":" +
            replacement.config.modifiers.joinToString(",") { (k, v) -> "$k:$v" }
        }
    )

  companion object {}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
package com.xebia.functional.xef.prompt.lang

import com.xebia.functional.xef.auto.Conversation
import com.xebia.functional.xef.llm.ChatWithFunctions
import kotlinx.serialization.serializer

class Program(
  val model: ChatWithFunctions,
  val conversation: Conversation,
  val description: String,
) {
  /**
   * Runs the program: serializes [input] as an [A], prompts [model] within [conversation],
   * and decodes the model's function-call response into a [B].
   */
  suspend inline operator fun <reified A, reified B> invoke(input: A): B =
    model.prompt(
      input = input,
      scope = conversation,
      inputSerializer = serializer<A>(),
      outputSerializer = serializer<B>(),
    )
}

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
package com.xebia.functional.xef.auto

import com.xebia.functional.xef.auto.llm.openai.OpenAI
import com.xebia.functional.xef.auto.llm.openai.prompt
import kotlinx.serialization.Serializable

// Structured input: serialized to JSON and embedded in the prompt sent to the model.
@Serializable data class Question(val question: String)

// Structured output: decoded from the model's JSON function-call response.
@Serializable data class Answer(val answer: String)

/** Demonstrates how to use any structured serializable input as a prompt. */
suspend fun main() {
  OpenAI.conversation {
    // Any @Serializable value can be passed directly as the prompt input.
    val question = Question("What is your name?")
    println("question: $question")
    // The serialized Question is sent to the model; the reply is decoded into Answer.
    val answer: Answer = prompt(question)
    println("answer: $answer")
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
package com.xebia.functional.xef.auto.expressions

import com.xebia.functional.xef.auto.Description
import com.xebia.functional.xef.auto.llm.openai.OpenAI
import com.xebia.functional.xef.prompt.Prompt
import com.xebia.functional.xef.prompt.lang.Infer
import kotlinx.serialization.Serializable

// Categories of screen-time activity tracked in a DetoxState.
enum class DigitalActivity {
  SocialMedia,
  Work,
  Entertainment,
  Gaming,
  News,
  Browsing,
  // NOTE(review): `Infer` appears to be a sentinel meaning "let the model infer the
  // activity" — confirm it is intentional and not leftover from the Infer DSL.
  Infer
}

// One tracked activity together with the daily hours spent on it.
@Serializable data class DigitalHabits(val activity: DigitalActivity, val dailyHours: Float)

// Snapshot of the user's current digital usage, serialized as the model's input.
@Serializable
data class DetoxState(
  val currentScreenTime: Float, // in hours
  val primaryActivities: List<DigitalHabits>,
  val detoxGoal: String // e.g., "Reduce Social Media consumption by 50%"
)

// Input envelope built inside the Infer DSL block in main() below.
@Serializable data class GenerateDetoxPlan(val state: DetoxState)

// Expected structured output; the @Description text steers what the model generates.
@Serializable
data class DetoxRecommendationPrompt(
  @Description(
    [
      "Craft a digital detox plan based on the user's current habits and desired goals.",
      "Recommend actionable steps and alternative non-digital activities to aid the detox process."
    ]
  )
  val dayByDayActions:
    List<DetoxPlan>, // e.g., {"Day 1": ["1-hour nature walk", "Read a book for 30 minutes"]}
  val summary: String
)

// One day's entry in the plan; destructured as (day, actions) when printed.
@Serializable data class DetoxPlan(val day: String, val actions: List<String>)

suspend fun main() {
  OpenAI.conversation {
    val infer = Infer(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, conversation)
    // The role prompt and the DSL block are combined by Infer into one structured
    // request. No infer* placeholders are used here, so the DetoxState values are
    // passed to the model verbatim.
    val detoxPlan: DetoxRecommendationPrompt =
      infer(
        Prompt(
          """
Assume the role of a digital wellbeing coach. Based on the user's digital habits and detox goals, suggest a holistic detox plan.
"""
            .trimIndent()
        )
      ) {
        GenerateDetoxPlan(
          state =
            DetoxState(
              currentScreenTime = 6.0f,
              primaryActivities =
                listOf(
                  DigitalHabits(DigitalActivity.SocialMedia, 3.0f),
                  DigitalHabits(DigitalActivity.Work, 2.0f),
                  DigitalHabits(DigitalActivity.Entertainment, 1.0f)
                ),
              detoxGoal = "Reduce Social Media consumption by 50%"
            )
        )
      }

    println("Digital Detox Plan:")
    detoxPlan.dayByDayActions.forEach { (day, actions) ->
      println("\n$day:")
      actions.forEach { action -> println("- $action") }
    }
    println("\nSummary: ${detoxPlan.summary}")
  }
}
Loading