Merge pull request #58 from jpmcb/openai-api-rs-2.0.0
feat: Upgrade to openai-api-rs 2.0.0
jpmcb committed Oct 19, 2023
2 parents d454958 + 2d8b8ce commit eed96f5
Showing 4 changed files with 11 additions and 23 deletions.
4 changes: 2 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -18,7 +18,7 @@ rayon = "1"
 reqwest = { version = "0.11", default-features = false, features = ["rustls-tls", "json"] }
 serde = "1"
 tokenizers = "0.14"
-openai-api-rs = "1.0"
+openai-api-rs = "2.0"
 zip = "0.6"
 rust-fuzzy-search = "0.1"
 text-splitter = "0.4"
3 changes: 2 additions & 1 deletion src/conversation/mod.rs
@@ -93,7 +93,8 @@ impl<D: RepositoryEmbeddingsDB, M: EmbeddingsModel> Conversation<D, M> {
         #[allow(unused_labels)]
         'conversation: loop {
             //Generate a request with the message history and functions
-            let request = generate_completion_request(self.messages.clone(), FunctionCallType::Auto);
+            let request =
+                generate_completion_request(self.messages.clone(), FunctionCallType::Auto);
 
             match self.send_request(request) {
                 Ok(response) => {
25 changes: 6 additions & 19 deletions src/conversation/prompts.rs
@@ -1,6 +1,6 @@
 use openai_api_rs::v1::chat_completion::{
-    ChatCompletionMessage, ChatCompletionRequest, Function as F, FunctionParameters,
-    JSONSchemaDefine, JSONSchemaType, FunctionCallType,
+    ChatCompletionMessage, ChatCompletionRequest, Function as F, FunctionCallType,
+    FunctionParameters, JSONSchemaDefine, JSONSchemaType,
 };
 use std::collections::HashMap;
 
@@ -17,23 +17,10 @@ pub fn generate_completion_request(
     messages: Vec<ChatCompletionMessage>,
     function_call: FunctionCallType,
 ) -> ChatCompletionRequest {
-
-    ChatCompletionRequest {
-        model: CHAT_COMPLETION_MODEL.into(),
-        messages,
-        functions: Some(functions()),
-        function_call: Some(function_call),
-        temperature: Some(CHAT_COMPLETION_TEMPERATURE),
-        top_p: None,
-        n: None,
-        stream: None,
-        stop: None,
-        max_tokens: None,
-        presence_penalty: None,
-        frequency_penalty: None,
-        logit_bias: None,
-        user: None,
-    }
+    ChatCompletionRequest::new(CHAT_COMPLETION_MODEL.to_string(), messages)
+        .functions(functions())
+        .function_call(function_call)
+        .temperature(CHAT_COMPLETION_TEMPERATURE)
 }
 
 pub fn functions() -> Vec<F> {
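
The substance of the upgrade is visible in the hunk above: openai-api-rs 2.0 builds a ChatCompletionRequest through a constructor plus chained setters instead of an exhaustive struct literal. A minimal standalone sketch of that builder style, assuming only the constructor and setter names shown in this diff, with placeholder values standing in for the crate's CHAT_COMPLETION_MODEL and CHAT_COMPLETION_TEMPERATURE constants:

    use openai_api_rs::v1::chat_completion::{
        ChatCompletionMessage, ChatCompletionRequest, FunctionCallType,
    };

    fn build_request(messages: Vec<ChatCompletionMessage>) -> ChatCompletionRequest {
        // Builder-style construction in openai-api-rs 2.0: set only the fields
        // this crate cares about; the remaining optional fields stay unset.
        ChatCompletionRequest::new("gpt-3.5-turbo".to_string(), messages) // placeholder model
            .function_call(FunctionCallType::Auto)
            .temperature(0.7) // placeholder temperature
    }

The same chain also accepts .functions(...) for the function definitions, as generate_completion_request does above.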
