
Commit b46ba8b

Adding tests (#344)
1 parent b37945c commit b46ba8b

9 files changed: +291 -12 lines


core/src/commonMain/kotlin/com/xebia/functional/xef/prompt/PromptBuilder.kt (+5)

@@ -6,6 +6,8 @@ import com.xebia.functional.xef.prompt.templates.assistant
 import com.xebia.functional.xef.prompt.templates.system
 import com.xebia.functional.xef.prompt.templates.user
 import kotlin.jvm.JvmSynthetic
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.serializer
 
 interface PromptBuilder {
   val items: MutableList<Message>
@@ -48,3 +50,6 @@ interface PromptBuilder {
 }
 
 fun String.message(role: Role): Message = Message(role, this, role.name)
+
+inline fun <reified A> A.message(role: Role): Message =
+  Message(role, Json.encodeToString(serializer(), this), role.name)
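
The new reified extension turns any @Serializable value into a chat Message whose content is the value's JSON encoding. A minimal sketch of the behaviour, using a hypothetical Ticket type that is not part of this commit:

import com.xebia.functional.xef.llm.models.chat.Role
import com.xebia.functional.xef.prompt.message
import kotlinx.serialization.Serializable

// Hypothetical example type, for illustration only.
@Serializable data class Ticket(val id: Int, val summary: String)

fun main() {
  val msg = Ticket(42, "login fails").message(Role.USER)
  // The content is the JSON encoding of the value, not its toString():
  println(msg.content) // {"id":42,"summary":"login fails"}
}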

core/src/commonMain/kotlin/com/xebia/functional/xef/prompt/templates/templates.kt (+3 -6)

@@ -5,21 +5,18 @@ import com.xebia.functional.xef.llm.models.chat.Role
 import com.xebia.functional.xef.prompt.PlatformPromptBuilder
 import com.xebia.functional.xef.prompt.Prompt
 import com.xebia.functional.xef.prompt.message
-import kotlinx.serialization.json.Json
-import kotlinx.serialization.serializer
 
 fun system(context: String): Message = context.message(Role.SYSTEM)
 
 fun assistant(context: String): Message = context.message(Role.ASSISTANT)
 
 fun user(context: String): Message = context.message(Role.USER)
 
-inline fun <reified A> system(data: A): Message = system(Json.encodeToString(serializer(), data))
+inline fun <reified A> system(data: A): Message = data.message(Role.SYSTEM)
 
-inline fun <reified A> assistant(data: A): Message =
-  assistant(Json.encodeToString(serializer(), data))
+inline fun <reified A> assistant(data: A): Message = data.message(Role.ASSISTANT)
 
-inline fun <reified A> user(data: A): Message = user(Json.encodeToString(serializer(), data))
+inline fun <reified A> user(data: A): Message = data.message(Role.USER)
 
 class StepsMessageBuilder : PlatformPromptBuilder() {
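
With the duplication gone, all three reified overloads delegate to the same message extension, so the two forms below should build equal Message values (a sketch, using the Question type this commit adds for its tests):

// Both sides produce Message(Role.USER, """{"question":"hi"}""", "USER").
val same = user(Question("hi")) == Question("hi").message(Role.USER) // true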

core/src/commonTest/kotlin/com/xebia/functional/xef/conversation/ConversationSpec.kt (+34 -6)

@@ -1,19 +1,17 @@
 package com.xebia.functional.xef.conversation
 
 import com.xebia.functional.tokenizer.ModelType
-import com.xebia.functional.xef.data.TestEmbeddings
-import com.xebia.functional.xef.data.TestFunctionsModel
-import com.xebia.functional.xef.data.TestModel
+import com.xebia.functional.xef.data.*
 import com.xebia.functional.xef.llm.models.chat.Message
 import com.xebia.functional.xef.llm.models.chat.Role
 import com.xebia.functional.xef.prompt.Prompt
+import com.xebia.functional.xef.prompt.templates.user
 import com.xebia.functional.xef.store.ConversationId
 import com.xebia.functional.xef.store.LocalVectorStore
 import io.kotest.core.spec.style.StringSpec
 import io.kotest.matchers.comparables.shouldBeLessThan
 import io.kotest.matchers.shouldBe
 import io.kotest.matchers.shouldNotBe
-import kotlinx.serialization.Serializable
 import kotlinx.serialization.encodeToString
 import kotlinx.serialization.json.Json
 import kotlinx.uuid.UUID
@@ -149,8 +147,6 @@ class ConversationSpec :
     }
 
     "functionCall shouldn't be null when the model support functions and the prompt contain a function" {
-      @Serializable data class Answer(val bar: String)
-
       val question = "fake-question"
       val answer = Answer("fake-answer")
 
@@ -175,4 +171,36 @@ class ConversationSpec :
 
       lastRequest.functionCall shouldNotBe null
     }
+
+    "the message of the request should be the JSON string of the question when the prompt contains serializable object" {
+      val question = Question("fake-question")
+      val questionJsonString = Json.encodeToString(question)
+      val answer = Answer("fake-answer")
+      val answerJsonString = Json.encodeToString(answer)
+
+      val message = mapOf(questionJsonString to answerJsonString)
+
+      val conversationId = ConversationId(UUID.generateUUID().toString())
+      val scope = Conversation(LocalVectorStore(TestEmbeddings()), conversationId = conversationId)
+
+      val model =
+        TestFunctionsModel(
+          modelType = ModelType.GPT_3_5_TURBO_FUNCTIONS,
+          name = "fake-model",
+          responses = message
+        )
+
+      val response: Answer =
+        model.prompt(
+          prompt = Prompt { +user(question) },
+          scope = scope,
+          serializer = Answer.serializer()
+        )
+
+      val lastRequest = model.requests.last()
+
+      lastRequest.messages.last().content shouldBe questionJsonString
+
+      response shouldBe answer
+    }
   })
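
For reference, the JSON strings the new test keys on are just kotlinx.serialization's default encoding; a sketch of the round-trip the stub performs:

val questionJsonString = Json.encodeToString(Question("fake-question"))
// questionJsonString == """{"question":"fake-question"}"""
// TestFunctionsModel looks up its canned response by the last message's
// content, so this exact string maps to the serialized Answer, which
// model.prompt then decodes back into Answer("fake-answer").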

New file (+7), package com.xebia.functional.xef.data:

@@ -0,0 +1,7 @@
+package com.xebia.functional.xef.data
+
+import kotlinx.serialization.Serializable
+
+@Serializable data class Question(val question: String)
+
+@Serializable data class Answer(val bar: String)

core/src/commonTest/kotlin/com/xebia/functional/xef/prompt/PromptBuilderSpec.kt (+20)

@@ -1,5 +1,6 @@
 package com.xebia.functional.xef.prompt
 
+import com.xebia.functional.xef.data.Question
 import com.xebia.functional.xef.llm.models.chat.Role
 import com.xebia.functional.xef.prompt.templates.assistant
 import com.xebia.functional.xef.prompt.templates.steps
@@ -72,4 +73,23 @@ class PromptBuilderSpec :
 
       messages shouldBe messagesExpected
     }
+
+    "buildPrompt should return the expected messages when using serializable objects" {
+      val question = Question("Test Question")
+
+      val messages =
+        Prompt {
+            +system("Test System")
+            +user(question)
+          }
+          .messages
+
+      val messagesExpected =
+        listOf(
+          "Test System".message(Role.SYSTEM),
+          question.message(Role.USER),
+        )
+
+      messages shouldBe messagesExpected
+    }
   })

New file (+68), package openai, class OpenAISpec:

@@ -0,0 +1,68 @@
+package openai
+
+import com.xebia.functional.tokenizer.ModelType
+import com.xebia.functional.xef.conversation.llm.openai.OpenAI
+import com.xebia.functional.xef.conversation.llm.openai.prompt
+import com.xebia.functional.xef.conversation.llm.openai.promptMessage
+import com.xebia.functional.xef.prompt.Prompt
+import com.xebia.functional.xef.prompt.templates.user
+import io.kotest.core.spec.style.StringSpec
+import io.kotest.matchers.shouldBe
+import kotlinx.serialization.encodeToString
+import kotlinx.serialization.json.Json
+import openai.data.Answer
+import openai.data.Question
+import openai.data.TestFunctionsModel
+import openai.data.TestModel
+
+class OpenAISpec :
+  StringSpec({
+    """
+    | When we are using a OpenAI scope conversation
+    | the memories should have the correct size in the vector store
+    | for the conversationId generated inside the conversation
+    """ {
+      OpenAI.conversation {
+        val model = TestModel(modelType = ModelType.ADA, name = "fake-model")
+
+        promptMessage(prompt = Prompt("question 1"), model = model)
+
+        promptMessage(prompt = Prompt("question 2"), model = model)
+
+        val memories = store.memories(conversationId!!, 10000)
+
+        memories.size shouldBe 4
+      }
+    }
+
+    """
+    | When we are using a OpenAI scope conversation with functions
+    | the memories should have the correct size in the vector store
+    | for the conversationId generated inside the conversation
+    """ {
+      OpenAI.conversation {
+        val question = Question("fake-question")
+        val questionJsonString = Json.encodeToString(question)
+        val answer = Answer("fake-answer")
+        val answerJsonString = Json.encodeToString(answer)
+        val question2 = "question 2"
+
+        val message = mapOf(questionJsonString to answerJsonString, question2 to answerJsonString)
+
+        val model =
+          TestFunctionsModel(
+            modelType = ModelType.GPT_3_5_TURBO_FUNCTIONS,
+            name = "fake-model",
+            responses = message
+          )
+
+        val response1: Answer = prompt(Prompt { +user(question) }, model)
+
+        val response2: Answer = prompt(Prompt(question2), model)
+
+        val memories = store.memories(conversationId!!, 10000)
+
+        memories.size shouldBe 4
+      }
+    }
+  })
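
Both specs expect four memories because each prompt round-trip appears to persist a pair of entries (the user message and the model's reply): 2 exchanges × 2 memories = 4.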

New file (+88), package openai.data, class TestFunctionsModel:

@@ -0,0 +1,88 @@
+package openai.data
+
+import com.xebia.functional.tokenizer.ModelType
+import com.xebia.functional.xef.llm.ChatWithFunctions
+import com.xebia.functional.xef.llm.Embeddings
+import com.xebia.functional.xef.llm.models.chat.*
+import com.xebia.functional.xef.llm.models.embeddings.EmbeddingRequest
+import com.xebia.functional.xef.llm.models.embeddings.EmbeddingResult
+import com.xebia.functional.xef.llm.models.functions.FunctionCall
+import com.xebia.functional.xef.llm.models.usage.Usage
+import kotlinx.coroutines.flow.Flow
+
+class TestFunctionsModel(
+  override val modelType: ModelType,
+  override val name: String,
+  val responses: Map<String, String> = emptyMap(),
+) : ChatWithFunctions, Embeddings, AutoCloseable {
+
+  var requests: MutableList<ChatCompletionRequest> = mutableListOf()
+
+  override suspend fun createChatCompletion(
+    request: ChatCompletionRequest
+  ): ChatCompletionResponse {
+    requests.add(request)
+    return ChatCompletionResponse(
+      id = "fake-id",
+      `object` = "fake-object",
+      created = 0,
+      model = "fake-model",
+      choices =
+        listOf(
+          Choice(
+            message =
+              Message(
+                role = Role.USER,
+                content = responses[request.messages.last().content] ?: "fake-content",
+                name = Role.USER.name
+              ),
+            finishReason = "fake-finish-reason",
+            index = 0
+          )
+        ),
+      usage = Usage.ZERO
+    )
+  }
+
+  override suspend fun createChatCompletions(
+    request: ChatCompletionRequest
+  ): Flow<ChatCompletionChunk> {
+    throw NotImplementedError()
+  }
+
+  override fun tokensFromMessages(messages: List<Message>): Int {
+    return messages.sumOf { it.content.length }
+  }
+
+  override suspend fun createEmbeddings(request: EmbeddingRequest): EmbeddingResult {
+    return EmbeddingResult(data = emptyList(), usage = Usage.ZERO)
+  }
+
+  override suspend fun createChatCompletionWithFunctions(
+    request: ChatCompletionRequest
+  ): ChatCompletionResponseWithFunctions {
+    requests.add(request)
+    val response = responses[request.messages.last().content] ?: "fake-content"
+    return ChatCompletionResponseWithFunctions(
+      id = "fake-id",
+      `object` = "fake-object",
+      created = 0,
+      model = "fake-model",
+      choices =
+        listOf(
+          ChoiceWithFunctions(
+            message =
+              MessageWithFunctionCall(
+                role = Role.USER.name,
+                content = response,
+                functionCall = FunctionCall("fake-function-name", response),
+                name = Role.USER.name
+              ),
+            finishReason = "fake-finish-reason",
+            index = 0
+          )
+        ),
+      usage = Usage.ZERO
+    )
+  }
+}
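
The stub keys its canned replies on the content of the last message in the request and records every request it sees; a usage sketch:

// The map key must match the last message's content exactly; for
// serializable prompts that is the JSON-encoded question.
val model = TestFunctionsModel(
  modelType = ModelType.GPT_3_5_TURBO_FUNCTIONS,
  name = "fake-model",
  responses = mapOf("""{"question":"hi"}""" to """{"bar":"ok"}""")
)
// createChatCompletionWithFunctions returns the mapped JSON both as message
// content and as the FunctionCall arguments, so a function-call prompt can
// decode it into Answer("ok"); model.requests.last() exposes what was sent.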

New file (+59), package openai.data, class TestModel:

@@ -0,0 +1,59 @@
+package openai.data
+
+import com.xebia.functional.tokenizer.ModelType
+import com.xebia.functional.xef.llm.Chat
+import com.xebia.functional.xef.llm.Embeddings
+import com.xebia.functional.xef.llm.models.chat.*
+import com.xebia.functional.xef.llm.models.embeddings.EmbeddingRequest
+import com.xebia.functional.xef.llm.models.embeddings.EmbeddingResult
+import com.xebia.functional.xef.llm.models.usage.Usage
+import kotlinx.coroutines.flow.Flow
+
+class TestModel(
+  override val modelType: ModelType,
+  override val name: String,
+  val responses: Map<String, String> = emptyMap(),
+) : Chat, Embeddings, AutoCloseable {
+
+  var requests: MutableList<ChatCompletionRequest> = mutableListOf()
+
+  override suspend fun createChatCompletion(
+    request: ChatCompletionRequest
+  ): ChatCompletionResponse {
+    requests.add(request)
+    return ChatCompletionResponse(
+      id = "fake-id",
+      `object` = "fake-object",
+      created = 0,
+      model = "fake-model",
+      choices =
+        listOf(
+          Choice(
+            message =
+              Message(
+                role = Role.USER,
+                content = responses[request.messages.last().content] ?: "fake-content",
+                name = Role.USER.name
+              ),
+            finishReason = "fake-finish-reason",
+            index = 0
+          )
+        ),
+      usage = Usage.ZERO
+    )
+  }
+
+  override suspend fun createChatCompletions(
+    request: ChatCompletionRequest
+  ): Flow<ChatCompletionChunk> {
+    throw NotImplementedError()
+  }
+
+  override fun tokensFromMessages(messages: List<Message>): Int {
+    return messages.sumOf { it.content.length }
+  }
+
+  override suspend fun createEmbeddings(request: EmbeddingRequest): EmbeddingResult {
+    return EmbeddingResult(data = emptyList(), usage = Usage.ZERO)
+  }
+}
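
TestModel is the plain-Chat counterpart of TestFunctionsModel: the same request recording and response lookup, but without the function-call path, which is why the first OpenAISpec test uses it for simple promptMessage calls.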

New file (+7), package openai.data:

@@ -0,0 +1,7 @@
+package openai.data
+
+import kotlinx.serialization.Serializable
+
+@Serializable data class Question(val question: String)
+
+@Serializable data class Answer(val bar: String)
