feat(llms): basic chatgpt integration
efugier committed Nov 7, 2023
1 parent 5c5ccbf commit 5feb684
Showing 5 changed files with 122 additions and 5 deletions.
3 changes: 3 additions & 0 deletions Cargo.toml
@@ -6,3 +6,6 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
toml = "*"
ureq = { version="*", features = ["json"] }
serde = { version = "*", features = ["derive"] }
23 changes: 23 additions & 0 deletions src/config.rs
@@ -0,0 +1,23 @@
use std::fs;
use toml::Value;

pub fn get_api_key() -> String {
    let config_path = format!(
        "{}/.config/pipelm/.api_configs.toml",
        std::env::var("HOME").unwrap()
    );
    let content = fs::read_to_string(config_path).expect("Failed to read the TOML file");
    let value: Value = content.parse().expect("Failed to parse TOML");

    // Extract the API key from the TOML table.
    let api_key = value
        .get("openai")
        .and_then(|table| table.get("API_KEY"))
        .and_then(|api_key| api_key.as_str())
        .unwrap_or_else(|| {
            eprintln!("API_KEY not found in the TOML file.");
            std::process::exit(1);
        });

    api_key.to_string()
}
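
For reference, `get_api_key` reads `~/.config/pipelm/.api_configs.toml` and looks up `API_KEY` under an `[openai]` table. A minimal sketch of that file (the key value is a placeholder):

    [openai]
    API_KEY = "sk-..."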
35 changes: 33 additions & 2 deletions src/input_processing.rs
@@ -1,6 +1,7 @@
use crate::request::{make_authenticated_request, OpenAiResponse};
use std::io::{Read, Result, Write};

pub fn process_input<R: Read, W: Write>(
pub fn chunk_process_input<R: Read, W: Write>(
    input: &mut R,
    output: &mut W,
    prefix: &str,
@@ -31,6 +32,35 @@ pub fn process_input<R: Read, W: Write>(
    Ok(())
}

pub fn process_input_with_request<R: Read, W: Write>(
    input: &mut R,
    output: &mut W,
    prefix: &str,
    suffix: &str,
) -> Result<()> {
    let mut buffer = Vec::new();
    input.read_to_end(&mut buffer)?;

    // nothing to do if no input
    if buffer.is_empty() {
        return Ok(());
    }

    let input = String::from_utf8(buffer).unwrap();

    let mut result = String::from(prefix);
    result.push_str(&input);
    result.push_str(suffix);

    let response: OpenAiResponse = make_authenticated_request(&result).unwrap().into_json()?;

    println!("{}", response.choices.first().unwrap().message.content);

    output.write_all(suffix.as_bytes())?;

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;
@@ -43,7 +73,8 @@ mod tests {
            let input = $input.as_bytes();
            let mut output = std::io::Cursor::new(Vec::new());

            let result = process_input(&mut Cursor::new(input), &mut output, $prefix, $suffix);
            let result =
                chunk_process_input(&mut Cursor::new(input), &mut output, $prefix, $suffix);
            assert!(result.is_ok());

            let expected_output = if !input.is_empty() {
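Note that this commit adds `process_input_with_request` but does not yet call it from `main`, which still uses `chunk_process_input` (see the next file). A hypothetical wiring sketch, reusing the prefix and suffix from `main.rs` (not part of this commit):

    // Hypothetical wiring (not in this commit): send stdin through the OpenAI request path.
    fn run_with_request() -> std::io::Result<()> {
        let mut input = std::io::stdin();
        let mut output = std::io::stdout();
        input_processing::process_input_with_request(
            &mut input,
            &mut output,
            "Hello, World!\n```\n",
            "\n```\n",
        )
    }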
11 changes: 8 additions & 3 deletions src/main.rs
@@ -1,13 +1,18 @@
use std::io;
mod config;
mod input_processing;
mod request;

fn main() {
    let mut output = io::stdout();
    let mut input = io::stdin();

    if let Err(e) =
        input_processing::process_input(&mut input, &mut output, "Hello, World!\n```\n", "\n```\n")
    {
    if let Err(e) = input_processing::chunk_process_input(
        &mut input,
        &mut output,
        "Hello, World!\n```\n",
        "\n```\n",
    ) {
        eprintln!("Error: {}", e);
        std::process::exit(1);
    }
55 changes: 55 additions & 0 deletions src/request.rs
@@ -0,0 +1,55 @@
use crate::config::get_api_key;
use serde::Deserialize;

#[derive(Debug, Deserialize)]
pub struct Message {
    pub role: String,
    pub content: String,
}

#[derive(Debug, Deserialize)]
pub struct Choice {
    pub index: u32,
    pub message: Message,
    pub finish_reason: String,
}

#[derive(Debug, Deserialize)]
pub struct Usage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}

#[derive(Debug, Deserialize)]
pub struct OpenAiResponse {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub choices: Vec<Choice>,
    pub usage: Usage,
    pub system_fingerprint: String,
}

pub fn make_authenticated_request(text: &str) -> Result<ureq::Response, ureq::Error> {
    let api_key = get_api_key();
    println!("Trying to reach openai with {}", &api_key);
    ureq::post("https://api.openai.com/v1/chat/completions")
        .set("Content-Type", "application/json")
        .set("Authorization", &format!("Bearer {}", api_key))
        .send_json(ureq::json!({
            "model": "gpt-4-1106-preview",
            "messages": [
                {
                    "role": "system",
                    "content": "You are a poetic assistant, skilled in explaining complex programming concepts with creative flair."
                },
                {
                    "role": "user",
                    "content": text
                }
            ]
        })
    )
}
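
Mirroring how `process_input_with_request` consumes this helper, the HTTP response is deserialized straight into `OpenAiResponse` and the first choice is read. A minimal usage sketch (the surrounding function is illustrative, not part of the commit):

    // Illustrative only: send a prompt and print the first returned message.
    fn ask(prompt: &str) -> std::io::Result<()> {
        let response: OpenAiResponse = make_authenticated_request(prompt).unwrap().into_json()?;
        println!("{}", response.choices.first().unwrap().message.content);
        Ok(())
    }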
