@@ -6,125 +6,17 @@ package com.xebia.functional.xef.auto
import arrow.core.nonFatalOrThrow
import arrow.core.raise.catch
import com.xebia.functional.xef.AIError
- import com.xebia.functional.xef.auto.serialization.buildJsonSchema
import com.xebia.functional.xef.llm.openai.LLMModel
import com.xebia.functional.xef.prompt.Prompt
import com.xebia.functional.xef.prompt.append
import kotlin.jvm.JvmMultifileClass
import kotlin.jvm.JvmName
- import kotlinx.serialization.KSerializer
- import kotlinx.serialization.SerializationException
- import kotlinx.serialization.descriptors.SerialDescriptor
- import kotlinx.serialization.json.Json
- import kotlinx.serialization.serializer
-
- /**
-  * Run a [question] describing the task you want to solve within the context of [AIScope]. Returns
-  * a value of [A] where [A] **has to be** annotated with [kotlinx.serialization.Serializable].
-  *
-  * @throws SerializationException if a serializer cannot be created (the provided [A] or its type
-  *   argument is not serializable).
-  * @throws IllegalArgumentException if any of [A]'s type arguments contains star projection.
-  */
- @AiDsl
- suspend inline fun <reified A> AIScope.prompt(
-   question: String,
-   json: Json = Json {
-     ignoreUnknownKeys = true
-     isLenient = true
-   },
-   maxDeserializationAttempts: Int = 5,
-   model: LLMModel = LLMModel.GPT_3_5_TURBO,
-   user: String = "testing",
-   echo: Boolean = false,
-   n: Int = 1,
-   temperature: Double = 0.0,
-   bringFromContext: Int = 10
- ): A =
-   prompt(
-     Prompt(question),
-     json,
-     maxDeserializationAttempts,
-     model,
-     user,
-     echo,
-     n,
-     temperature,
-     bringFromContext
-   )
-
- /**
-  * Run a [prompt] describing the task you want to solve within the context of [AIScope]. Returns a
-  * value of [A] where [A] **has to be** annotated with [kotlinx.serialization.Serializable].
-  *
-  * @throws SerializationException if a serializer cannot be created (the provided [A] or its type
-  *   argument is not serializable).
-  * @throws IllegalArgumentException if any of [A]'s type arguments contains star projection.
-  */
- @AiDsl
- suspend inline fun <reified A> AIScope.prompt(
-   prompt: Prompt,
-   json: Json = Json {
-     ignoreUnknownKeys = true
-     isLenient = true
-   },
-   maxDeserializationAttempts: Int = 5,
-   model: LLMModel = LLMModel.GPT_3_5_TURBO,
-   user: String = "testing",
-   echo: Boolean = false,
-   n: Int = 1,
-   temperature: Double = 0.0,
-   bringFromContext: Int = 10
- ): A =
-   prompt(
-     prompt,
-     serializer(),
-     json,
-     maxDeserializationAttempts,
-     model,
-     user,
-     echo,
-     n,
-     temperature,
-     bringFromContext
-   )
-
- @AiDsl
- suspend fun <A> AIScope.prompt(
-   prompt: Prompt,
-   serializer: KSerializer<A>,
-   json: Json = Json {
-     ignoreUnknownKeys = true
-     isLenient = true
-   },
-   maxDeserializationAttempts: Int = 5,
-   model: LLMModel = LLMModel.GPT_3_5_TURBO,
-   user: String = "testing",
-   echo: Boolean = false,
-   n: Int = 1,
-   temperature: Double = 0.0,
-   bringFromContext: Int = 10,
-   minResponseTokens: Int = 500,
- ): A =
-   prompt(
-     prompt,
-     serializer.descriptor,
-     { json.decodeFromString(serializer, it) },
-     maxDeserializationAttempts,
-     model,
-     user,
-     echo,
-     n,
-     temperature,
-     bringFromContext,
-     minResponseTokens
-   )

@AiDsl
@JvmName("promptWithSerializer")
suspend fun <A> AIScope.prompt(
  prompt: Prompt,
-   descriptor: SerialDescriptor,
+   jsonSchema: String,
  serializer: (json: String) -> A,
  maxDeserializationAttempts: Int = 5,
  model: LLMModel = LLMModel.GPT_3_5_TURBO,
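
For reference, the removed reified overloads resolved a `KSerializer` for `A` via `serializer()` and decoded the model's reply with the lenient `Json` configuration shown above. A minimal, self-contained sketch of that decoding step (the `Recipe` type and the raw reply are made up for illustration; this is not part of the diff):

```kotlin
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.Json
import kotlinx.serialization.serializer

@Serializable
data class Recipe(val title: String, val ingredients: List<String>)

fun main() {
  // The same lenient configuration the removed overloads defaulted to.
  val json = Json {
    ignoreUnknownKeys = true
    isLenient = true
  }

  // What the removed overloads did with the model's raw reply: resolve a
  // serializer for the reified type and decode the JSON, tolerating extra keys.
  val raw = """{"title":"Tortilla","ingredients":["eggs","potatoes"],"servings":4}"""
  val recipe: Recipe = json.decodeFromString(serializer<Recipe>(), raw)
  println(recipe)
}
```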
@@ -135,7 +27,6 @@ suspend fun <A> AIScope.prompt(
  bringFromContext: Int = 10,
  minResponseTokens: Int = 500,
): A {
-   val jsonSchema = buildJsonSchema(descriptor, false)
  val responseInstructions =
    """
      |
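With this change, callers of the remaining overload supply the JSON schema text and the deserialization function themselves instead of passing a `SerialDescriptor`. A sketch of what a call site might look like (the `Recipe` type, the schema literal, and the `recipeFor` helper are hypothetical; only the `prompt` signature comes from the diff, and the imports assume the package layout shown in the hunk header):

```kotlin
import com.xebia.functional.xef.auto.AIScope
import com.xebia.functional.xef.auto.prompt
import com.xebia.functional.xef.prompt.Prompt
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.Json

@Serializable data class Recipe(val title: String, val ingredients: List<String>)

// Hand-written schema for the expected reply; the caller now owns this,
// since the library no longer derives it from a SerialDescriptor here.
private val recipeSchema: String =
  """
  {
    "type": "object",
    "properties": {
      "title": { "type": "string" },
      "ingredients": { "type": "array", "items": { "type": "string" } }
    },
    "required": ["title", "ingredients"]
  }
  """.trimIndent()

private val lenientJson = Json {
  ignoreUnknownKeys = true
  isLenient = true
}

// Hypothetical helper: drives the retained overload with an explicit schema
// and a plain (String) -> Recipe deserializer; other parameters use defaults.
suspend fun AIScope.recipeFor(dish: String): Recipe =
  prompt(
    prompt = Prompt("Write a recipe for $dish."),
    jsonSchema = recipeSchema,
    serializer = { raw -> lenientJson.decodeFromString(Recipe.serializer(), raw) }
  )
```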