From a2012c260a104ca9623fb43a73e9d394b6ad9dac Mon Sep 17 00:00:00 2001
From: Javi Pacheco
Date: Wed, 23 Aug 2023 13:27:03 +0200
Subject: [PATCH] Adding tests

---
 .../functional/xef/prompt/PromptBuilder.kt    |  5 ++
 .../xef/prompt/templates/templates.kt         |  9 +-
 .../xef/conversation/ConversationSpec.kt      | 40 +++++++--
 .../com/xebia/functional/xef/data/models.kt   |  7 ++
 .../xef/prompt/PromptBuilderSpec.kt           | 20 +++++
 .../src/jvmTest/kotlin/openai/OpenAISpec.kt   | 68 ++++++++++++++
 .../kotlin/openai/data/TestFunctionsModel.kt  | 88 +++++++++++++++++++
 .../jvmTest/kotlin/openai/data/TestModel.kt   | 59 +++++++++++++
 .../src/jvmTest/kotlin/openai/data/models.kt  |  7 ++
 9 files changed, 291 insertions(+), 12 deletions(-)
 create mode 100644 core/src/commonTest/kotlin/com/xebia/functional/xef/data/models.kt
 create mode 100644 openai/src/jvmTest/kotlin/openai/OpenAISpec.kt
 create mode 100644 openai/src/jvmTest/kotlin/openai/data/TestFunctionsModel.kt
 create mode 100644 openai/src/jvmTest/kotlin/openai/data/TestModel.kt
 create mode 100644 openai/src/jvmTest/kotlin/openai/data/models.kt

diff --git a/core/src/commonMain/kotlin/com/xebia/functional/xef/prompt/PromptBuilder.kt b/core/src/commonMain/kotlin/com/xebia/functional/xef/prompt/PromptBuilder.kt
index 30e052f07..00d712244 100644
--- a/core/src/commonMain/kotlin/com/xebia/functional/xef/prompt/PromptBuilder.kt
+++ b/core/src/commonMain/kotlin/com/xebia/functional/xef/prompt/PromptBuilder.kt
@@ -6,6 +6,8 @@ import com.xebia.functional.xef.prompt.templates.assistant
 import com.xebia.functional.xef.prompt.templates.system
 import com.xebia.functional.xef.prompt.templates.user
 import kotlin.jvm.JvmSynthetic
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.serializer
 
 interface PromptBuilder {
   val items: MutableList<Message>
@@ -48,3 +50,6 @@
 }
 
 fun String.message(role: Role): Message = Message(role, this, role.name)
+
+inline fun <reified A> A.message(role: Role): Message =
+  Message(role, Json.encodeToString(serializer(), this), role.name)
diff --git a/core/src/commonMain/kotlin/com/xebia/functional/xef/prompt/templates/templates.kt b/core/src/commonMain/kotlin/com/xebia/functional/xef/prompt/templates/templates.kt
index e05eb29a7..e7b8648ed 100644
--- a/core/src/commonMain/kotlin/com/xebia/functional/xef/prompt/templates/templates.kt
+++ b/core/src/commonMain/kotlin/com/xebia/functional/xef/prompt/templates/templates.kt
@@ -5,8 +5,6 @@ import com.xebia.functional.xef.llm.models.chat.Role
 import com.xebia.functional.xef.prompt.PlatformPromptBuilder
 import com.xebia.functional.xef.prompt.Prompt
 import com.xebia.functional.xef.prompt.message
-import kotlinx.serialization.json.Json
-import kotlinx.serialization.serializer
 
 fun system(context: String): Message = context.message(Role.SYSTEM)
 
@@ -14,12 +12,11 @@
 fun assistant(context: String): Message = context.message(Role.ASSISTANT)
 
 fun user(context: String): Message = context.message(Role.USER)
 
-inline fun <reified A> system(data: A): Message = system(Json.encodeToString(serializer(), data))
+inline fun <reified A> system(data: A): Message = data.message(Role.SYSTEM)
 
-inline fun <reified A> assistant(data: A): Message =
-  assistant(Json.encodeToString(serializer(), data))
+inline fun <reified A> assistant(data: A): Message = data.message(Role.ASSISTANT)
 
-inline fun <reified A> user(data: A): Message = user(Json.encodeToString(serializer(), data))
+inline fun <reified A> user(data: A): Message = data.message(Role.USER)
 
 class StepsMessageBuilder : PlatformPromptBuilder() {
diff --git a/core/src/commonTest/kotlin/com/xebia/functional/xef/conversation/ConversationSpec.kt b/core/src/commonTest/kotlin/com/xebia/functional/xef/conversation/ConversationSpec.kt
index 7e8a84ca8..7fe0ba246 100644
--- a/core/src/commonTest/kotlin/com/xebia/functional/xef/conversation/ConversationSpec.kt
+++ b/core/src/commonTest/kotlin/com/xebia/functional/xef/conversation/ConversationSpec.kt
@@ -1,19 +1,17 @@
 package com.xebia.functional.xef.conversation
 
 import com.xebia.functional.tokenizer.ModelType
-import com.xebia.functional.xef.data.TestEmbeddings
-import com.xebia.functional.xef.data.TestFunctionsModel
-import com.xebia.functional.xef.data.TestModel
+import com.xebia.functional.xef.data.*
 import com.xebia.functional.xef.llm.models.chat.Message
 import com.xebia.functional.xef.llm.models.chat.Role
 import com.xebia.functional.xef.prompt.Prompt
+import com.xebia.functional.xef.prompt.templates.user
 import com.xebia.functional.xef.store.ConversationId
 import com.xebia.functional.xef.store.LocalVectorStore
 import io.kotest.core.spec.style.StringSpec
 import io.kotest.matchers.comparables.shouldBeLessThan
 import io.kotest.matchers.shouldBe
 import io.kotest.matchers.shouldNotBe
-import kotlinx.serialization.Serializable
 import kotlinx.serialization.encodeToString
 import kotlinx.serialization.json.Json
 import kotlinx.uuid.UUID
@@ -149,8 +147,6 @@
     }
 
     "functionCall shouldn't be null when the model support functions and the prompt contain a function" {
-      @Serializable data class Answer(val bar: String)
-
       val question = "fake-question"
       val answer = Answer("fake-answer")
 
@@ -175,4 +171,36 @@
 
       lastRequest.functionCall shouldNotBe null
     }
+
+    "the message of the request should be the JSON string of the question when the prompt contains serializable object" {
+      val question = Question("fake-question")
+      val questionJsonString = Json.encodeToString(question)
+      val answer = Answer("fake-answer")
+      val answerJsonString = Json.encodeToString(answer)
+
+      val message = mapOf(questionJsonString to answerJsonString)
+
+      val conversationId = ConversationId(UUID.generateUUID().toString())
+      val scope = Conversation(LocalVectorStore(TestEmbeddings()), conversationId = conversationId)
+
+      val model =
+        TestFunctionsModel(
+          modelType = ModelType.GPT_3_5_TURBO_FUNCTIONS,
+          name = "fake-model",
+          responses = message
+        )
+
+      val response: Answer =
+        model.prompt(
+          prompt = Prompt { +user(question) },
+          scope = scope,
+          serializer = Answer.serializer()
+        )
+
+      val lastRequest = model.requests.last()
+
+      lastRequest.messages.last().content shouldBe questionJsonString
+
+      response shouldBe answer
+    }
   })
diff --git a/core/src/commonTest/kotlin/com/xebia/functional/xef/data/models.kt b/core/src/commonTest/kotlin/com/xebia/functional/xef/data/models.kt
new file mode 100644
index 000000000..4dade64f3
--- /dev/null
+++ b/core/src/commonTest/kotlin/com/xebia/functional/xef/data/models.kt
@@ -0,0 +1,7 @@
+package com.xebia.functional.xef.data
+
+import kotlinx.serialization.Serializable
+
+@Serializable data class Question(val question: String)
+
+@Serializable data class Answer(val bar: String)
diff --git a/core/src/commonTest/kotlin/com/xebia/functional/xef/prompt/PromptBuilderSpec.kt b/core/src/commonTest/kotlin/com/xebia/functional/xef/prompt/PromptBuilderSpec.kt
index a785f8b37..93cd30f72 100644
--- a/core/src/commonTest/kotlin/com/xebia/functional/xef/prompt/PromptBuilderSpec.kt
+++ b/core/src/commonTest/kotlin/com/xebia/functional/xef/prompt/PromptBuilderSpec.kt
@@ -1,5 +1,6 @@
 package com.xebia.functional.xef.prompt
 
+import com.xebia.functional.xef.data.Question
 import com.xebia.functional.xef.llm.models.chat.Role
 import com.xebia.functional.xef.prompt.templates.assistant
 import com.xebia.functional.xef.prompt.templates.steps
@@ -72,4 +73,23 @@
 
       messages shouldBe messagesExpected
     }
+
+    "buildPrompt should return the expected messages when using serializable objects" {
+      val question = Question("Test Question")
+
+      val messages =
+        Prompt {
+            +system("Test System")
+            +user(question)
+          }
+          .messages
+
+      val messagesExpected =
+        listOf(
+          "Test System".message(Role.SYSTEM),
+          question.message(Role.USER),
+        )
+
+      messages shouldBe messagesExpected
+    }
   })
diff --git a/openai/src/jvmTest/kotlin/openai/OpenAISpec.kt b/openai/src/jvmTest/kotlin/openai/OpenAISpec.kt
new file mode 100644
index 000000000..9d8cc65cb
--- /dev/null
+++ b/openai/src/jvmTest/kotlin/openai/OpenAISpec.kt
@@ -0,0 +1,68 @@
+package openai
+
+import com.xebia.functional.tokenizer.ModelType
+import com.xebia.functional.xef.conversation.llm.openai.OpenAI
+import com.xebia.functional.xef.conversation.llm.openai.prompt
+import com.xebia.functional.xef.conversation.llm.openai.promptMessage
+import com.xebia.functional.xef.prompt.Prompt
+import com.xebia.functional.xef.prompt.templates.user
+import io.kotest.core.spec.style.StringSpec
+import io.kotest.matchers.shouldBe
+import kotlinx.serialization.encodeToString
+import kotlinx.serialization.json.Json
+import openai.data.Answer
+import openai.data.Question
+import openai.data.TestFunctionsModel
+import openai.data.TestModel
+
+class OpenAISpec :
+  StringSpec({
+    """
+    | When we are using a OpenAI scope conversation
+    | the memories should have the correct size in the vector store
+    | for the conversationId generated inside the conversation
+    """ {
+      OpenAI.conversation {
+        val model = TestModel(modelType = ModelType.ADA, name = "fake-model")
+
+        promptMessage(prompt = Prompt("question 1"), model = model)
+
+        promptMessage(prompt = Prompt("question 2"), model = model)
+
+        val memories = store.memories(conversationId!!, 10000)
+
+        memories.size shouldBe 4
+      }
+    }
+
+    """
+    | When we are using a OpenAI scope conversation with functions
+    | the memories should have the correct size in the vector store
+    | for the conversationId generated inside the conversation
+    """ {
+      OpenAI.conversation {
+        val question = Question("fake-question")
+        val questionJsonString = Json.encodeToString(question)
+        val answer = Answer("fake-answer")
+        val answerJsonString = Json.encodeToString(answer)
+        val question2 = "question 2"
+
+        val message = mapOf(questionJsonString to answerJsonString, question2 to answerJsonString)
+
+        val model =
+          TestFunctionsModel(
+            modelType = ModelType.GPT_3_5_TURBO_FUNCTIONS,
+            name = "fake-model",
+            responses = message
+          )
+
+        val response1: Answer = prompt(Prompt { +user(question) }, model)
+
+        val response2: Answer = prompt(Prompt(question2), model)
+
+        val memories = store.memories(conversationId!!, 10000)
+
+        memories.size shouldBe 4
+      }
+    }
+  })
diff --git a/openai/src/jvmTest/kotlin/openai/data/TestFunctionsModel.kt b/openai/src/jvmTest/kotlin/openai/data/TestFunctionsModel.kt
new file mode 100644
index 000000000..29259168a
--- /dev/null
+++ b/openai/src/jvmTest/kotlin/openai/data/TestFunctionsModel.kt
@@ -0,0 +1,88 @@
+package openai.data
+
+import com.xebia.functional.tokenizer.ModelType
+import com.xebia.functional.xef.llm.ChatWithFunctions
+import com.xebia.functional.xef.llm.Embeddings
+import com.xebia.functional.xef.llm.models.chat.*
+import com.xebia.functional.xef.llm.models.embeddings.EmbeddingRequest
+import com.xebia.functional.xef.llm.models.embeddings.EmbeddingResult
+import com.xebia.functional.xef.llm.models.functions.FunctionCall
+import com.xebia.functional.xef.llm.models.usage.Usage
+import kotlinx.coroutines.flow.Flow
+
+class TestFunctionsModel(
+  override val modelType: ModelType,
+  override val name: String,
+  val responses: Map<String, String> = emptyMap(),
+) : ChatWithFunctions, Embeddings, AutoCloseable {
+
+  var requests: MutableList<ChatCompletionRequest> = mutableListOf()
+
+  override suspend fun createChatCompletion(
+    request: ChatCompletionRequest
+  ): ChatCompletionResponse {
+    requests.add(request)
+    return ChatCompletionResponse(
+      id = "fake-id",
+      `object` = "fake-object",
+      created = 0,
+      model = "fake-model",
+      choices =
+        listOf(
+          Choice(
+            message =
+              Message(
+                role = Role.USER,
+                content = responses[request.messages.last().content] ?: "fake-content",
+                name = Role.USER.name
+              ),
+            finishReason = "fake-finish-reason",
+            index = 0
+          )
+        ),
+      usage = Usage.ZERO
+    )
+  }
+
+  override suspend fun createChatCompletions(
+    request: ChatCompletionRequest
+  ): Flow<ChatCompletionChunk> {
+    throw NotImplementedError()
+  }
+
+  override fun tokensFromMessages(messages: List<Message>): Int {
+    return messages.sumOf { it.content.length }
+  }
+
+  override suspend fun createEmbeddings(request: EmbeddingRequest): EmbeddingResult {
+    return EmbeddingResult(data = emptyList(), usage = Usage.ZERO)
+  }
+
+  override suspend fun createChatCompletionWithFunctions(
+    request: ChatCompletionRequest
+  ): ChatCompletionResponseWithFunctions {
+    requests.add(request)
+    val response = responses[request.messages.last().content] ?: "fake-content"
+    return ChatCompletionResponseWithFunctions(
+      id = "fake-id",
+      `object` = "fake-object",
+      created = 0,
+      model = "fake-model",
+      choices =
+        listOf(
+          ChoiceWithFunctions(
+            message =
+              MessageWithFunctionCall(
+                role = Role.USER.name,
+                content = response,
+                functionCall = FunctionCall("fake-function-name", response),
+                name = Role.USER.name
+              ),
+            finishReason = "fake-finish-reason",
+            index = 0
+          )
+        ),
+      usage = Usage.ZERO
+    )
+  }
+}
diff --git a/openai/src/jvmTest/kotlin/openai/data/TestModel.kt b/openai/src/jvmTest/kotlin/openai/data/TestModel.kt
new file mode 100644
index 000000000..bc6ffd2a3
--- /dev/null
+++ b/openai/src/jvmTest/kotlin/openai/data/TestModel.kt
@@ -0,0 +1,59 @@
+package openai.data
+
+import com.xebia.functional.tokenizer.ModelType
+import com.xebia.functional.xef.llm.Chat
+import com.xebia.functional.xef.llm.Embeddings
+import com.xebia.functional.xef.llm.models.chat.*
+import com.xebia.functional.xef.llm.models.embeddings.EmbeddingRequest
+import com.xebia.functional.xef.llm.models.embeddings.EmbeddingResult
+import com.xebia.functional.xef.llm.models.usage.Usage
+import kotlinx.coroutines.flow.Flow
+
+class TestModel(
+  override val modelType: ModelType,
+  override val name: String,
+  val responses: Map<String, String> = emptyMap(),
+) : Chat, Embeddings, AutoCloseable {
+
+  var requests: MutableList<ChatCompletionRequest> = mutableListOf()
+
+  override suspend fun createChatCompletion(
+    request: ChatCompletionRequest
+  ): ChatCompletionResponse {
+    requests.add(request)
+    return ChatCompletionResponse(
+      id = "fake-id",
+      `object` = "fake-object",
+      created = 0,
+      model = "fake-model",
+      choices =
+        listOf(
+          Choice(
+            message =
+              Message(
+                role = Role.USER,
+                content = responses[request.messages.last().content] ?: "fake-content",
+                name = Role.USER.name
+              ),
+            finishReason = "fake-finish-reason",
+            index = 0
+          )
+        ),
+      usage = Usage.ZERO
+    )
+  }
+
+  override suspend fun createChatCompletions(
+    request: ChatCompletionRequest
+  ): Flow<ChatCompletionChunk> {
+    throw NotImplementedError()
+  }
+
+  override fun tokensFromMessages(messages: List<Message>): Int {
+    return messages.sumOf { it.content.length }
+  }
+
+  override suspend fun createEmbeddings(request: EmbeddingRequest): EmbeddingResult {
+    return EmbeddingResult(data = emptyList(), usage = Usage.ZERO)
+  }
+}
diff --git a/openai/src/jvmTest/kotlin/openai/data/models.kt b/openai/src/jvmTest/kotlin/openai/data/models.kt
new file mode 100644
index 000000000..188aa3ec1
--- /dev/null
+++ b/openai/src/jvmTest/kotlin/openai/data/models.kt
@@ -0,0 +1,7 @@
+package openai.data
+
+import kotlinx.serialization.Serializable
+
+@Serializable data class Question(val question: String)
+
+@Serializable data class Answer(val bar: String)
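
Two usage sketches follow for readers skimming the patch. First, the core behavior these tests pin down: the new inline fun <reified A> A.message(role) extension in PromptBuilder.kt, and the user/system/assistant template overloads that now delegate to it, encode any @Serializable value to JSON before wrapping it in a Message. This is a minimal sketch assuming the xef API exactly as shown in this patch; the sample data, the main wrapper, and the structural-equality check (Message comparing by value, as the specs' shouldBe assertions imply) are illustrative, not part of the change:

import com.xebia.functional.xef.llm.models.chat.Role
import com.xebia.functional.xef.prompt.Prompt
import com.xebia.functional.xef.prompt.message
import com.xebia.functional.xef.prompt.templates.system
import com.xebia.functional.xef.prompt.templates.user
import kotlinx.serialization.Serializable

@Serializable data class Question(val question: String)

fun main() {
  val question = Question("What is the capital of France?")

  // user(question) delegates to question.message(Role.USER), which encodes the
  // object with kotlinx.serialization instead of relying on toString().
  val prompt =
    Prompt {
      +system("You answer geography questions")
      +user(question)
    }

  // The last message carries the JSON encoding of the object.
  check(prompt.messages.last() == question.message(Role.USER))
  check(prompt.messages.last().content == """{"question":"What is the capital of France?"}""")
}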
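
Second, the lookup contract of the recording fakes (TestModel, TestFunctionsModel): they resolve canned replies with responses[request.messages.last().content], so for a serializable prompt the responses map must be keyed by the JSON encoding of the object, which is why the specs build their maps from Json.encodeToString(question). A self-contained sketch of just that contract (the data classes are copied from the patch; the main wrapper and the inlined lookup are illustrative stand-ins for the fakes' internals):

import kotlinx.serialization.Serializable
import kotlinx.serialization.encodeToString
import kotlinx.serialization.json.Json

@Serializable data class Question(val question: String)

@Serializable data class Answer(val bar: String)

fun main() {
  // Key the canned reply by the JSON form of the prompt object, exactly as the
  // specs do before handing the map to TestFunctionsModel(responses = ...).
  val question = Question("fake-question")
  val responses =
    mapOf(Json.encodeToString(question) to Json.encodeToString(Answer("fake-answer")))

  // What the fake does inside createChatCompletion / createChatCompletionWithFunctions:
  val incomingContent = Json.encodeToString(question) // the last message's content
  val reply = responses[incomingContent] ?: "fake-content" // same fallback as the fakes
  check(reply == """{"bar":"fake-answer"}""")
}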