Adding tests #344

Merged (1 commit, Aug 23, 2023)
@@ -6,6 +6,8 @@
 import com.xebia.functional.xef.prompt.templates.assistant
 import com.xebia.functional.xef.prompt.templates.system
 import com.xebia.functional.xef.prompt.templates.user
 import kotlin.jvm.JvmSynthetic
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.serializer

 interface PromptBuilder {
   val items: MutableList<Message>
@@ -48,3 +50,6 @@ interface PromptBuilder {
 }

 fun String.message(role: Role): Message = Message(role, this, role.name)
+
+inline fun <reified A> A.message(role: Role): Message =
+  Message(role, Json.encodeToString(serializer(), this), role.name)
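The new extension turns any kotlinx-serializable value into a Message by encoding it to JSON. A minimal usage sketch (the Question class here mirrors the test data added later in this PR):

import com.xebia.functional.xef.llm.models.chat.Role
import com.xebia.functional.xef.prompt.message
import kotlinx.serialization.Serializable

@Serializable data class Question(val question: String)

fun main() {
  // The reified overload serializes the receiver, so the message content
  // becomes the JSON string of the object rather than its toString().
  val msg = Question("What is Kotlin?").message(Role.USER)
  println(msg.content) // {"question":"What is Kotlin?"}
}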
@@ -5,21 +5,18 @@
 import com.xebia.functional.xef.llm.models.chat.Role
 import com.xebia.functional.xef.prompt.PlatformPromptBuilder
 import com.xebia.functional.xef.prompt.Prompt
 import com.xebia.functional.xef.prompt.message
-import kotlinx.serialization.json.Json
-import kotlinx.serialization.serializer

 fun system(context: String): Message = context.message(Role.SYSTEM)

 fun assistant(context: String): Message = context.message(Role.ASSISTANT)

 fun user(context: String): Message = context.message(Role.USER)

-inline fun <reified A> system(data: A): Message = system(Json.encodeToString(serializer(), data))
+inline fun <reified A> system(data: A): Message = data.message(Role.SYSTEM)

-inline fun <reified A> assistant(data: A): Message =
-  assistant(Json.encodeToString(serializer(), data))
+inline fun <reified A> assistant(data: A): Message = data.message(Role.ASSISTANT)

-inline fun <reified A> user(data: A): Message = user(Json.encodeToString(serializer(), data))
+inline fun <reified A> user(data: A): Message = data.message(Role.USER)

 class StepsMessageBuilder : PlatformPromptBuilder() {
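After this refactor all three reified template helpers delegate to the shared message extension. A sketch of how strings and serializable values now compose in a prompt (assuming the Question test class added in this PR):

import com.xebia.functional.xef.prompt.Prompt
import com.xebia.functional.xef.prompt.templates.system
import com.xebia.functional.xef.prompt.templates.user
import kotlinx.serialization.Serializable

@Serializable data class Question(val question: String)

// Plain strings and @Serializable values mix freely; the object variant
// contributes the JSON string of the value as the message content.
val prompt = Prompt {
  +system("You answer questions about Kotlin.")
  +user(Question("What does reified mean?"))
}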
@@ -1,19 +1,17 @@
 package com.xebia.functional.xef.conversation

 import com.xebia.functional.tokenizer.ModelType
-import com.xebia.functional.xef.data.TestEmbeddings
-import com.xebia.functional.xef.data.TestFunctionsModel
-import com.xebia.functional.xef.data.TestModel
+import com.xebia.functional.xef.data.*
 import com.xebia.functional.xef.llm.models.chat.Message
 import com.xebia.functional.xef.llm.models.chat.Role
 import com.xebia.functional.xef.prompt.Prompt
 import com.xebia.functional.xef.prompt.templates.user
 import com.xebia.functional.xef.store.ConversationId
 import com.xebia.functional.xef.store.LocalVectorStore
 import io.kotest.core.spec.style.StringSpec
 import io.kotest.matchers.comparables.shouldBeLessThan
 import io.kotest.matchers.shouldBe
 import io.kotest.matchers.shouldNotBe
-import kotlinx.serialization.Serializable
+import kotlinx.serialization.encodeToString
 import kotlinx.serialization.json.Json
 import kotlinx.uuid.UUID
@@ -149,8 +147,6 @@ class ConversationSpec :
     }

     "functionCall shouldn't be null when the model support functions and the prompt contain a function" {
-      @Serializable data class Answer(val bar: String)
-
       val question = "fake-question"
       val answer = Answer("fake-answer")

@@ -175,4 +171,36 @@ class ConversationSpec :

       lastRequest.functionCall shouldNotBe null
     }
+
+    "the message of the request should be the JSON string of the question when the prompt contains serializable object" {
+      val question = Question("fake-question")
+      val questionJsonString = Json.encodeToString(question)
+      val answer = Answer("fake-answer")
+      val answerJsonString = Json.encodeToString(answer)
+
+      val message = mapOf(questionJsonString to answerJsonString)
+
+      val conversationId = ConversationId(UUID.generateUUID().toString())
+      val scope = Conversation(LocalVectorStore(TestEmbeddings()), conversationId = conversationId)
+
+      val model =
+        TestFunctionsModel(
+          modelType = ModelType.GPT_3_5_TURBO_FUNCTIONS,
+          name = "fake-model",
+          responses = message
+        )
+
+      val response: Answer =
+        model.prompt(
+          prompt = Prompt { +user(question) },
+          scope = scope,
+          serializer = Answer.serializer()
+        )
+
+      val lastRequest = model.requests.last()
+
+      lastRequest.messages.last().content shouldBe questionJsonString
+
+      response shouldBe answer
+    }
   })
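Reading the new test end to end: +user(question) serializes the Question to JSON, TestFunctionsModel replays the canned answer JSON keyed by that exact string, and the serializer argument decodes the reply back into an Answer, so the two final assertions exercise the same serialization path from both ends.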
@@ -0,0 +1,7 @@
package com.xebia.functional.xef.data

import kotlinx.serialization.Serializable

@Serializable data class Question(val question: String)

@Serializable data class Answer(val bar: String)
@@ -1,5 +1,6 @@
 package com.xebia.functional.xef.prompt

+import com.xebia.functional.xef.data.Question
 import com.xebia.functional.xef.llm.models.chat.Role
 import com.xebia.functional.xef.prompt.templates.assistant
 import com.xebia.functional.xef.prompt.templates.steps
@@ -72,4 +73,23 @@ class PromptBuilderSpec :

       messages shouldBe messagesExpected
     }
+
+    "buildPrompt should return the expected messages when using serializable objects" {
+      val question = Question("Test Question")
+
+      val messages =
+        Prompt {
+            +system("Test System")
+            +user(question)
+          }
+          .messages
+
+      val messagesExpected =
+        listOf(
+          "Test System".message(Role.SYSTEM),
+          question.message(Role.USER),
+        )
+
+      messages shouldBe messagesExpected
+    }
   })
openai/src/jvmTest/kotlin/openai/OpenAISpec.kt (68 additions, 0 deletions)
@@ -0,0 +1,68 @@
package openai

import com.xebia.functional.tokenizer.ModelType
import com.xebia.functional.xef.conversation.llm.openai.OpenAI
import com.xebia.functional.xef.conversation.llm.openai.prompt
import com.xebia.functional.xef.conversation.llm.openai.promptMessage
import com.xebia.functional.xef.prompt.Prompt
import com.xebia.functional.xef.prompt.templates.user
import io.kotest.core.spec.style.StringSpec
import io.kotest.matchers.shouldBe
import kotlinx.serialization.encodeToString
import kotlinx.serialization.json.Json
import openai.data.Answer
import openai.data.Question
import openai.data.TestFunctionsModel
import openai.data.TestModel

class OpenAISpec :
  StringSpec({
    """
    | When we are using a OpenAI scope conversation
    | the memories should have the correct size in the vector store
    | for the conversationId generated inside the conversation
    """ {
      OpenAI.conversation {
        val model = TestModel(modelType = ModelType.ADA, name = "fake-model")

        promptMessage(prompt = Prompt("question 1"), model = model)

        promptMessage(prompt = Prompt("question 2"), model = model)

        val memories = store.memories(conversationId!!, 10000)

        // Each prompt exchange stores a request/response pair, so two
        // prompts leave four memories in the vector store.
        memories.size shouldBe 4
      }
    }

    """
    | When we are using a OpenAI scope conversation with functions
    | the memories should have the correct size in the vector store
    | for the conversationId generated inside the conversation
    """ {
      OpenAI.conversation {
        val question = Question("fake-question")
        val questionJsonString = Json.encodeToString(question)
        val answer = Answer("fake-answer")
        val answerJsonString = Json.encodeToString(answer)
        val question2 = "question 2"

        val message = mapOf(questionJsonString to answerJsonString, question2 to answerJsonString)

        val model =
          TestFunctionsModel(
            modelType = ModelType.GPT_3_5_TURBO_FUNCTIONS,
            name = "fake-model",
            responses = message
          )

        val response1: Answer = prompt(Prompt { +user(question) }, model)

        val response2: Answer = prompt(Prompt(question2), model)

        val memories = store.memories(conversationId!!, 10000)

        memories.size shouldBe 4
      }
    }
  })
openai/src/jvmTest/kotlin/openai/data/TestFunctionsModel.kt (88 additions, 0 deletions)
@@ -0,0 +1,88 @@
package openai.data

import com.xebia.functional.tokenizer.ModelType
import com.xebia.functional.xef.llm.ChatWithFunctions
import com.xebia.functional.xef.llm.Embeddings
import com.xebia.functional.xef.llm.models.chat.*
import com.xebia.functional.xef.llm.models.embeddings.EmbeddingRequest
import com.xebia.functional.xef.llm.models.embeddings.EmbeddingResult
import com.xebia.functional.xef.llm.models.functions.FunctionCall
import com.xebia.functional.xef.llm.models.usage.Usage
import kotlinx.coroutines.flow.Flow

/**
 * Test double for a function-calling chat model: records every request and
 * replays canned responses keyed by the content of the last message.
 */
class TestFunctionsModel(
  override val modelType: ModelType,
  override val name: String,
  val responses: Map<String, String> = emptyMap(),
) : ChatWithFunctions, Embeddings, AutoCloseable {

  var requests: MutableList<ChatCompletionRequest> = mutableListOf()

  override suspend fun createChatCompletion(
    request: ChatCompletionRequest
  ): ChatCompletionResponse {
    requests.add(request)
    return ChatCompletionResponse(
      id = "fake-id",
      `object` = "fake-object",
      created = 0,
      model = "fake-model",
      choices =
        listOf(
          Choice(
            message =
              Message(
                role = Role.USER,
                content = responses[request.messages.last().content] ?: "fake-content",
                name = Role.USER.name
              ),
            finishReason = "fake-finish-reason",
            index = 0
          )
        ),
      usage = Usage.ZERO
    )
  }

  override suspend fun createChatCompletions(
    request: ChatCompletionRequest
  ): Flow<ChatCompletionChunk> {
    throw NotImplementedError()
  }

  override fun tokensFromMessages(messages: List<Message>): Int {
    return messages.sumOf { it.content.length }
  }

  override suspend fun createEmbeddings(request: EmbeddingRequest): EmbeddingResult {
    return EmbeddingResult(data = emptyList(), usage = Usage.ZERO)
  }

  override suspend fun createChatCompletionWithFunctions(
    request: ChatCompletionRequest
  ): ChatCompletionResponseWithFunctions {
    requests.add(request)
    val response = responses[request.messages.last().content] ?: "fake-content"
    return ChatCompletionResponseWithFunctions(
      id = "fake-id",
      `object` = "fake-object",
      created = 0,
      model = "fake-model",
      choices =
        listOf(
          ChoiceWithFunctions(
            message =
              MessageWithFunctionCall(
                role = Role.USER.name,
                content = response,
                functionCall = FunctionCall("fake-function-name", response),
                name = Role.USER.name
              ),
            finishReason = "fake-finish-reason",
            index = 0
          )
        ),
      usage = Usage.ZERO
    )
  }
}
openai/src/jvmTest/kotlin/openai/data/TestModel.kt (59 additions, 0 deletions)
@@ -0,0 +1,59 @@
package openai.data

import com.xebia.functional.tokenizer.ModelType
import com.xebia.functional.xef.llm.Chat
import com.xebia.functional.xef.llm.Embeddings
import com.xebia.functional.xef.llm.models.chat.*
import com.xebia.functional.xef.llm.models.embeddings.EmbeddingRequest
import com.xebia.functional.xef.llm.models.embeddings.EmbeddingResult
import com.xebia.functional.xef.llm.models.usage.Usage
import kotlinx.coroutines.flow.Flow

/**
 * Test double for a plain chat model: records every request and replays
 * canned responses keyed by the content of the last message.
 */
class TestModel(
  override val modelType: ModelType,
  override val name: String,
  val responses: Map<String, String> = emptyMap(),
) : Chat, Embeddings, AutoCloseable {

  var requests: MutableList<ChatCompletionRequest> = mutableListOf()

  override suspend fun createChatCompletion(
    request: ChatCompletionRequest
  ): ChatCompletionResponse {
    requests.add(request)
    return ChatCompletionResponse(
      id = "fake-id",
      `object` = "fake-object",
      created = 0,
      model = "fake-model",
      choices =
        listOf(
          Choice(
            message =
              Message(
                role = Role.USER,
                content = responses[request.messages.last().content] ?: "fake-content",
                name = Role.USER.name
              ),
            finishReason = "fake-finish-reason",
            index = 0
          )
        ),
      usage = Usage.ZERO
    )
  }

  override suspend fun createChatCompletions(
    request: ChatCompletionRequest
  ): Flow<ChatCompletionChunk> {
    throw NotImplementedError()
  }

  override fun tokensFromMessages(messages: List<Message>): Int {
    return messages.sumOf { it.content.length }
  }

  override suspend fun createEmbeddings(request: EmbeddingRequest): EmbeddingResult {
    return EmbeddingResult(data = emptyList(), usage = Usage.ZERO)
  }
}
openai/src/jvmTest/kotlin/openai/data/models.kt (7 additions, 0 deletions)
@@ -0,0 +1,7 @@
package openai.data

import kotlinx.serialization.Serializable

@Serializable data class Question(val question: String)

@Serializable data class Answer(val bar: String)
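For reference, a small sketch of the JSON these test models serialize to, which is exactly what the questionJsonString and answerJsonString assertions above compare against (assuming it runs in the same openai.data package as the classes above):

package openai.data

import kotlinx.serialization.encodeToString
import kotlinx.serialization.json.Json

fun main() {
  // Default kotlinx.serialization encoding used throughout the new tests.
  println(Json.encodeToString(Question("fake-question"))) // {"question":"fake-question"}
  println(Json.encodeToString(Answer("fake-answer"))) // {"bar":"fake-answer"}
}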