Skip to content

Commit 19243b0

Browse files
Infer DSL and automatic input serialization (#327)
* Infer DSL and automatic input serialization
* Clean up
* `get()` on Infer class
* Spotless apply
* Using `buildPrompt`

---------

Co-authored-by: Javi Pacheco <[email protected]>
1 parent d46e052 commit 19243b0

File tree

10 files changed

+669
-90
lines changed

10 files changed

+669
-90
lines changed

core/src/commonMain/kotlin/com/xebia/functional/xef/llm/ChatWithFunctions.kt

+24
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,8 @@ import com.xebia.functional.xef.llm.models.chat.Message
1212
import com.xebia.functional.xef.llm.models.functions.CFunction
1313
import com.xebia.functional.xef.llm.models.functions.encodeJsonSchema
1414
import com.xebia.functional.xef.prompt.Prompt
15+
import com.xebia.functional.xef.prompt.buildPrompt
16+
import com.xebia.functional.xef.prompt.templates.user
1517
import io.github.oshai.kotlinlogging.KotlinLogging
1618
import kotlinx.serialization.ExperimentalSerializationApi
1719
import kotlinx.serialization.KSerializer
@@ -62,6 +64,28 @@ interface ChatWithFunctions : Chat {
6264
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
6365
): A = prompt(prompt.toMessages(), scope, functions, serializer, promptConfiguration)
6466

67+
@OptIn(ExperimentalSerializationApi::class)
@AiDsl
suspend fun <A, B> prompt(
  input: A,
  scope: Conversation,
  inputSerializer: KSerializer<A>,
  outputSerializer: KSerializer<B>,
  functions: List<CFunction> = generateCFunction(outputSerializer.descriptor),
  promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
): B {
  // Render the structured input as `SerialName(jsonPayload)` so the model sees
  // both the type's name and its serialized fields in a single user message.
  val serializedInput =
    "${inputSerializer.descriptor.serialName}(${Json.encodeToString(inputSerializer, input)})"
  return prompt(
    buildPrompt { +user(serializedInput) },
    scope,
    functions,
    { json -> Json.decodeFromString(outputSerializer, json) },
    promptConfiguration
  )
}
88+
6589
@AiDsl
6690
suspend fun <A> prompt(
6791
prompt: Prompt,
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,121 @@
1+
package com.xebia.functional.xef.prompt.lang
2+
3+
import com.xebia.functional.xef.auto.Conversation
4+
import com.xebia.functional.xef.auto.PromptConfiguration
5+
import com.xebia.functional.xef.llm.ChatWithFunctions
6+
import com.xebia.functional.xef.prompt.Prompt
7+
import com.xebia.functional.xef.prompt.buildPrompt
8+
import com.xebia.functional.xef.prompt.templates.system
9+
import com.xebia.functional.xef.prompt.templates.user
10+
import kotlinx.serialization.json.Json
11+
import kotlinx.serialization.serializer
12+
13+
/**
14+
* Infer is a DSL for generating structured output from structured input. Infer allows the input
15+
* values to be inferred with a minimal DSL which is then replaced with the actual values inside the
16+
* LLM thought process. Assumes a LLM with Sudolang like capabilities or understanding of structured
17+
* input.
18+
*/
19+
/**
 * Infer is a DSL for generating structured output from structured input. Infer allows the input
 * values to be inferred with a minimal DSL which is then replaced with the actual values inside the
 * LLM thought process. Assumes a LLM with Sudolang like capabilities or understanding of structured
 * input.
 *
 * @property model the function-calling capable chat model used to produce the output.
 * @property conversation the conversation scope the prompt is executed in.
 */
class Infer(
  val model: ChatWithFunctions,
  val conversation: Conversation,
) {

  /**
   * Records one placeholder substitution: the sentinel [value] written into the serialized input
   * will later be swapped for [name] (plus any [config] modifiers).
   */
  class Replacement<out A>(val name: String, val value: A, val config: Config)

  /** Optional `key:value` modifiers appended to a placeholder to guide generation. */
  class Config(val modifiers: List<Pair<String, String>>) {
    companion object {
      operator fun invoke(vararg modifiers: Pair<String, String>): Config =
        Config(modifiers.toList())
    }
  }

  /** Receiver for the [invoke] block; collects which fields should be inferred by the model. */
  class Scope {

    val replacements: MutableList<Replacement<*>> = mutableListOf()

    val inferInt: Int
      get() = inferInt()

    val inferFloat: Float
      get() = inferFloat()

    val inferDouble: Double
      get() = inferDouble()

    val inferString: String
      get() = inferString()

    val placeholder
      get() = "${'$'}generate${'$'}"

    // Each infer* call returns a sentinel value (MAX_VALUE for numbers, the placeholder text for
    // strings) and records it, so the sentinel can be located in the serialized JSON afterwards
    // and replaced with the placeholder marker the LLM is instructed to fill in.
    fun inferInt(config: Config = Config()): Int = infer(placeholder, Int.MAX_VALUE, config)

    fun inferFloat(config: Config = Config()): Float = infer(placeholder, Float.MAX_VALUE, config)

    fun inferDouble(config: Config = Config()): Double =
      infer(placeholder, Double.MAX_VALUE, config)

    fun inferString(config: Config = Config()): String = infer(placeholder, placeholder, config)

    private fun <A> infer(name: String, value: A, config: Config): A {
      replacements.add(Replacement(name, value, config))
      return value
    }
  }

  /**
   * Serializes the value built by [block], replaces every recorded sentinel with its placeholder,
   * and asks [model] to run the `Process` directive over the resulting input, decoding the reply
   * into [B].
   */
  suspend inline operator fun <reified A, reified B> invoke(
    prompt: Prompt,
    block: Scope.() -> A
  ): B {
    val scope = Scope()
    val request = block(scope)
    // Fold the recorded replacements over the serialized request instead of mutating a var.
    val input =
      scope.replacements.fold(Json.encodeToString(serializer<A>(), request)) { acc, replacement ->
        replaceInferPlaceholder(acc, replacement)
      }
    return model.prompt(
      messages =
        buildPrompt {
          +system(prompt.message)
          +system("Stay in role and follow the directives of the function `Process`")
          +system(
            // NOTE(review): the relative indentation inside this raw string is reconstructed;
            // trimIndent() strips the common leading whitespace either way.
            """
            Process(input) {
              STOP, Carefully consider all instructions in this function
              STOP, first replace all placeholders ${scope.placeholder} in input
              Consider in your output ALL properties in the input that are not placeholders
              Reflect their information in your output
              target
               |> critique
               |> fix(critique)
               |> applyCritique(target)
              produce output in valid json
            }
            """
              .trimIndent()
          )
          +user("Process($input)")
        },
      scope = conversation,
      serializer = serializer<B>(),
      promptConfiguration = PromptConfiguration { temperature(0.0) }
    )
  }

  /**
   * Replaces the first occurrence of [replacement]'s sentinel value in [input] with its placeholder
   * name, appending `:key:value,...` modifiers when present.
   *
   * Fix: dropped the dead trailing `+ ""` concatenation from the modifier branch.
   */
  fun replaceInferPlaceholder(input: String, replacement: Replacement<*>): String =
    input.replaceFirst(
      oldValue = replacement.value.toString(),
      newValue =
        if (replacement.config.modifiers.isEmpty()) {
          replacement.name
        } else {
          replacement.name +
            ":" +
            replacement.config.modifiers.joinToString(",") { (k, v) -> "$k:$v" }
        }
    )

  companion object {}
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
package com.xebia.functional.xef.prompt.lang
2+
3+
import com.xebia.functional.xef.auto.Conversation
4+
import com.xebia.functional.xef.llm.ChatWithFunctions
5+
import kotlinx.serialization.serializer
6+
7+
/**
 * A [Program] forwards a serializable input value to [ChatWithFunctions.prompt], deriving both the
 * input and output serializers automatically from the reified type parameters.
 *
 * NOTE(review): [description] is not referenced in this visible code — presumably consumed
 * elsewhere or reserved for prompt construction; confirm before relying on it.
 */
class Program(
  val model: ChatWithFunctions,
  val conversation: Conversation,
  val description: String,
) {
  suspend inline operator fun <reified A, reified B> invoke(input: A): B =
    model.prompt(
      input = input,
      scope = conversation,
      inputSerializer = serializer<A>(),
      outputSerializer = serializer<B>(),
    )
}

core/src/commonMain/kotlin/com/xebia/functional/xef/prompt/lang/PromptLanguage.kt

-90
This file was deleted.
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
package com.xebia.functional.xef.auto
2+
3+
import com.xebia.functional.xef.auto.llm.openai.OpenAI
4+
import com.xebia.functional.xef.auto.llm.openai.prompt
5+
import kotlinx.serialization.Serializable
6+
7+
@Serializable data class Question(val question: String)

@Serializable data class Answer(val answer: String)

/** Demonstrates how to use any structured serializable input as a prompt. */
suspend fun main() {
  OpenAI.conversation {
    // The data class itself is the prompt: it is serialized and sent as structured input.
    val q = Question("What is your name?")
    println("question: $q")
    val answer: Answer = prompt(q)
    println("answer: $answer")
  }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,79 @@
1+
package com.xebia.functional.xef.auto.expressions
2+
3+
import com.xebia.functional.xef.auto.Description
4+
import com.xebia.functional.xef.auto.llm.openai.OpenAI
5+
import com.xebia.functional.xef.prompt.Prompt
6+
import com.xebia.functional.xef.prompt.lang.Infer
7+
import kotlinx.serialization.Serializable
8+
9+
enum class DigitalActivity {
  SocialMedia,
  Work,
  Entertainment,
  Gaming,
  News,
  Browsing,
  Infer
}

@Serializable data class DigitalHabits(val activity: DigitalActivity, val dailyHours: Float)

@Serializable
data class DetoxState(
  val currentScreenTime: Float, // in hours
  val primaryActivities: List<DigitalHabits>,
  val detoxGoal: String // e.g., "Reduce Social Media consumption by 50%"
)

@Serializable data class GenerateDetoxPlan(val state: DetoxState)

@Serializable
data class DetoxRecommendationPrompt(
  @Description(
    [
      "Craft a digital detox plan based on the user's current habits and desired goals.",
      "Recommend actionable steps and alternative non-digital activities to aid the detox process."
    ]
  )
  val dayByDayActions:
    List<DetoxPlan>, // e.g., {"Day 1": ["1-hour nature walk", "Read a book for 30 minutes"]}
  val summary: String
)

@Serializable data class DetoxPlan(val day: String, val actions: List<String>)

/** Example: infer a structured detox plan from the user's current digital habits. */
suspend fun main() {
  OpenAI.conversation {
    val infer = Infer(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, conversation)
    // Current usage snapshot fed to the model as structured input.
    val habits =
      listOf(
        DigitalHabits(DigitalActivity.SocialMedia, 3.0f),
        DigitalHabits(DigitalActivity.Work, 2.0f),
        DigitalHabits(DigitalActivity.Entertainment, 1.0f)
      )
    val plan: DetoxRecommendationPrompt =
      infer(
        Prompt(
          """
          Assume the role of a digital wellbeing coach. Based on the user's digital habits and detox goals, suggest a holistic detox plan.
          """
            .trimIndent()
        )
      ) {
        GenerateDetoxPlan(
          state =
            DetoxState(
              currentScreenTime = 6.0f,
              primaryActivities = habits,
              detoxGoal = "Reduce Social Media consumption by 50%"
            )
        )
      }

    println("Digital Detox Plan:")
    for ((day, actions) in plan.dayByDayActions) {
      println("\n$day:")
      for (action in actions) println("- $action")
    }
    println("\nSummary: ${plan.summary}")
  }
}

0 commit comments

Comments
 (0)