Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update dependency curl to v8.8.0.bcr.1 #3

Open
wants to merge 12 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .bazelrc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
build --cxxopt=-std=c++17
1 change: 1 addition & 0 deletions .bazelversion
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
7.1.2
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,5 @@
.vscode
bazel-*
build
MODULE.bazel.lock
TestApp
8 changes: 8 additions & 0 deletions BUILD.bazel
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Top-level package: everything here is visible to all other packages.
package(
    default_visibility = ["//visibility:public"],
)

# Convenience alias so consumers can depend on "//:oai"
# instead of the full "//liboai:oai" label.
alias(
    name = "oai",
    actual = "//liboai:oai",
)
6 changes: 6 additions & 0 deletions MODULE.bazel
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Bazel module definition (bzlmod) for liboai.
module(
    name = "oai",
)

# Third-party dependencies resolved from the Bazel Central Registry.
bazel_dep(name = "curl", version = "8.8.0.bcr.1")
bazel_dep(name = "nlohmann_json", version = "3.11.3")
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
- [x] [ChatGPT](https://github.com/D7EAD/liboai/tree/main/documentation/chat)
- [X] [Audio](https://github.com/D7EAD/liboai/tree/main/documentation/audio)
- [X] [Azure](https://github.com/D7EAD/liboai/tree/main/documentation/azure)
- [X] [Functions](https://platform.openai.com/docs/api-reference/chat/create) ([v4.0.0-dev](https://github.com/D7EAD/liboai/tree/v4.0.0-dev))
- [X] [Functions](https://platform.openai.com/docs/api-reference/chat/create)
- [x] [Image DALL·E](https://github.com/D7EAD/liboai/tree/main/documentation/images)
- [x] [Models](https://github.com/D7EAD/liboai/tree/main/documentation/models)
- [x] [Completions](https://github.com/D7EAD/liboai/tree/main/documentation/completions)
Expand Down
15 changes: 15 additions & 0 deletions documentation/audio/examples/BUILD.bazel
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Audio examples: everything here is visible to all other packages.
package(
    default_visibility = ["//visibility:public"],
)

# Example: synchronous text-to-speech request.
cc_binary(
    name = "create_speech",
    srcs = ["create_speech.cpp"],
    deps = ["//liboai:oai"],
)

# Example: asynchronous text-to-speech request.
cc_binary(
    name = "create_speech_async",
    srcs = ["create_speech_async.cpp"],
    deps = ["//liboai:oai"],
)
25 changes: 25 additions & 0 deletions documentation/audio/examples/create_speech.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
#include "liboai.h"

using namespace liboai;

// Example: synchronous OpenAI text-to-speech request.
// Writes the generated audio to "demo.mp3" and prints the payload size.
int main() {
	OpenAI oai;

	// Resolve the API key before doing any work; previously the output
	// file was created (empty) even when authentication failed.
	if (!oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
		std::cerr << "OPENAI_API_KEY is not set." << std::endl;
		return 1;
	}

	std::ofstream ocout("demo.mp3", std::ios::binary);
	if (!ocout) {
		std::cerr << "Failed to open demo.mp3 for writing." << std::endl;
		return 1;
	}

	try {
		Response res = oai.Audio->speech(
			"tts-1",
			"alloy",
			"Today is a wonderful day to build something people love!"
		);
		ocout << res.content;
		ocout.close();
		std::cout << res.content.size() << std::endl;
	}
	catch (const std::exception& e) {
		std::cout << e.what() << std::endl;
		return 1;
	}
	return 0;
}
32 changes: 32 additions & 0 deletions documentation/audio/examples/create_speech_async.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
#include "liboai.h"

using namespace liboai;

// Example: asynchronous OpenAI text-to-speech request.
// Starts the request, does other work, then writes "demo.mp3".
int main() {
	OpenAI oai;

	if (!oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
		std::cerr << "OPENAI_API_KEY is not set." << std::endl;
		return 1;
	}

	try {
		std::ofstream ocout("demo.mp3", std::ios::binary);
		if (!ocout) {
			std::cerr << "Failed to open demo.mp3 for writing." << std::endl;
			return 1;
		}

		auto fut = oai.Audio->speech_async(
			"tts-1",
			"alloy",
			"Today is a wonderful day to build something people love!"
		);

		// do other work...

		// block until the response is available
		// (wait() blocks; use wait_for() to poll readiness instead)
		fut.wait();

		// retrieve the contained response
		auto res = fut.get();

		ocout << res.content;
		ocout.close();
		std::cout << res.content.size() << std::endl;
	}
	catch (const std::exception& e) {
		std::cout << e.what() << std::endl;
		return 1;
	}
	return 0;
}
75 changes: 74 additions & 1 deletion documentation/chat/conversation/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -286,6 +286,7 @@ bool PopSystemData() & noexcept(false);

```cpp
bool AddUserData(std::string_view data) & noexcept(false);
bool AddUserData(std::string_view data, std::string_view name) & noexcept(false);
```

<h3>Pop User Data</h3>
Expand All @@ -309,21 +310,93 @@ std::string GetLastResponse() const & noexcept;
bool PopLastResponse() & noexcept(false);
```

<h3>Check if Last Response is Function Call</h3>
<p>Returns whether the most recent response, following a call to <code>Update</code> or a complete <code>AppendStreamData</code>, contains a function_call. Returns a boolean indicating whether the last response is a function call.</p>

```cpp
bool LastResponseIsFunctionCall() const & noexcept;
```

<h3>Get the Name of the Last Response's Function Call</h3>
<p>Returns the name of the function_call in the most recent response. This should only be called if <code>LastResponseIsFunctionCall()</code> returns true. Returns a <code>std::string</code> containing the name of the last response's function call, empty if non-existent.</p>

```cpp
std::string GetLastFunctionCallName() const & noexcept(false);
```

<h3>Get the Arguments of the Last Response's Function Call</h3>
<p>Returns the arguments of the function_call in the most recent response in their raw JSON form. This should only be called if <code>LastResponseIsFunctionCall()</code> returns true. Returns a <code>std::string</code> containing the last response's function call arguments in JSON form, empty if non-existent.</p>

```cpp
std::string GetLastFunctionCallArguments() const & noexcept(false);
```

<h3>Update Conversation</h3>
<p>Updates the conversation with a response from a chat model. The conversation can be updated with a response from the chat model either via the returned <code>liboai::Response</code> object or raw JSON from a call to <code>liboai::ChatCompletion::create</code> or similar method. Returns a <code>bool</code> indicating success.</p>
<p>Updates the conversation given a Response object. This overload should only be used if <code>AppendStreamData</code> was NOT used immediately before it.

For instance, if we made a call to <code>create*()</code>, and provided a callback function to stream and, within this callback, we used <code>AppendStreamData</code> to update the conversation per message, we would NOT want to use this method. In this scenario, the <code>AppendStreamData</code> method would have already updated the conversation, so this method would be a bad idea to call afterwards. Returns a <code>bool</code> indicating success.</p>

```cpp
bool Update(std::string_view history) & noexcept(false);
bool Update(const Response& response) & noexcept(false);
```

<h3>Export Conversation</h3>
<p>Exports the entire conversation to a JSON string, which can be used to save the conversation to a file. The exported string contains both the conversation and included functions, if any. Returns the JSON string representing the conversation.</p>

```cpp
std::string Export() const & noexcept(false);
```

<h3>Import Conversation</h3>
<p>Imports a conversation from a JSON string. The JSON string should be one returned from a call to <code>Export()</code>. Returns a boolean indicating success.</p>

```cpp
bool Import(std::string_view conversation) & noexcept(false);
```

<h3>Append Stream Data</h3>
<p>Appends stream data (SSEs) from streamed methods. This method updates the conversation given a token from a streamed method. This method should be used when using streamed methods such as <code>ChatCompletion::create</code> or <code>create_async</code> with a callback supplied. This function should be called from within the stream's callback function receiving the SSEs. Returns a boolean indicating data appending success.</p>

```cpp
bool AppendStreamData(std::string data) & noexcept(false);
```

<h3>Set Function(s)</h3>
<p>Sets the functions to be used for the conversation. This method sets the functions to be used for the conversation. Returns a boolean indicating success.</p>

```cpp
bool SetFunctions(Functions functions) & noexcept(false);
```

<h3>Pop Function(s)</h3>
<p>Pops any previously set functions.</p>

```cpp
void PopFunctions() & noexcept(false);
```

<h3>Get Raw JSON Conversation</h3>
<p>Retrieves the raw JSON of the conversation; the same functionality can be achieved using the <code>operator<<(...)</code> overload. Returns a <code>std::string</code> containing the JSON of the conversation.</p>

```cpp
std::string GetRawConversation() const & noexcept;
```

<h3>Get Raw JSON Functions</h3>
<p>Returns the raw JSON dump of the internal functions object in string format - if one exists.</p>

```cpp
std::string GetRawFunctions() const & noexcept;
```

<h3>Get Functions JSON Object</h3>
<p>Returns the JSON object of the set functions.</p>

```cpp
const nlohmann::json& GetFunctionsJSON() const & noexcept;
```

<h3>Get Internal JSON </h3>
<p>Retrieves a <code>const</code>-ref of the internal JSON object containing the conversation. Returns a <code>const nlohmann::json&</code> object.</p>

Expand Down
47 changes: 47 additions & 0 deletions liboai/BUILD.bazel
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
# liboai library package: visible to all other packages.
package(
    default_visibility = ["//visibility:public"],
)

# Core liboai library: all components plus networking/auth/response core,
# built against curl and nlohmann_json from the Bazel Central Registry.
cc_library(
    name = "oai",
    srcs = [
        "components/audio.cpp",
        "components/azure.cpp",
        "components/chat.cpp",
        "components/completions.cpp",
        "components/edits.cpp",
        "components/embeddings.cpp",
        "components/files.cpp",
        "components/fine_tunes.cpp",
        "components/images.cpp",
        "components/models.cpp",
        "components/moderations.cpp",
        "core/authorization.cpp",
        "core/netimpl.cpp",
        "core/response.cpp",
    ],
    hdrs = [
        "include/components/audio.h",
        "include/components/azure.h",
        "include/components/chat.h",
        "include/components/completions.h",
        "include/components/edits.h",
        "include/components/embeddings.h",
        "include/components/files.h",
        "include/components/fine_tunes.h",
        "include/components/images.h",
        "include/components/models.h",
        "include/components/moderations.h",
        "include/core/authorization.h",
        "include/core/exception.h",
        "include/core/netimpl.h",
        "include/core/network.h",
        "include/core/response.h",
        "include/liboai.h",
    ],
    # Lets users write #include "liboai.h" rather than "include/liboai.h".
    strip_include_prefix = "include",
    deps = [
        "@curl//:curl",
        "@nlohmann_json//:json",
    ],
)
51 changes: 50 additions & 1 deletion liboai/components/audio.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -126,4 +126,53 @@ liboai::FutureResponse liboai::Audio::translate_async(const std::filesystem::pat
};

return std::async(std::launch::async, _fn, std::move(form));
}
}

/// @brief Generates audio from input text via the OpenAI /audio/speech endpoint.
/// @param model            TTS model to use (e.g. "tts-1").
/// @param voice            Voice preset (e.g. "alloy").
/// @param input            Text to synthesize.
/// @param response_format  Optional audio format; omitted from the body if not set.
/// @param speed            Optional speed multiplier; omitted from the body if not set.
/// @return liboai::Response whose 'content' holds the raw audio bytes.
/// @throws Propagates exceptions from the underlying HTTP request (noexcept(false)).
liboai::Response liboai::Audio::speech(const std::string& model, const std::string& voice, const std::string& input, std::optional<std::string> response_format, std::optional<float> speed) const& noexcept(false) {
	liboai::JsonConstructor jcon;
	jcon.push_back("model", model);
	jcon.push_back("voice", voice);
	jcon.push_back("input", input);

	// Only include optional fields when the caller supplied them.
	if (response_format) { jcon.push_back("response_format", std::move(response_format.value())); }
	if (speed) { jcon.push_back("speed", speed.value()); }

	// Return the Request result directly; the previous
	// default-construct-then-assign of a local Response was redundant.
	return this->Request(
		Method::HTTP_POST, this->openai_root_, "/audio/speech", "application/json",
		this->auth_.GetAuthorizationHeaders(),
		netimpl::components::Body {
			jcon.dump()
		},
		this->auth_.GetProxies(),
		this->auth_.GetProxyAuth(),
		this->auth_.GetMaxTimeout()
	);
}

/// @brief Asynchronously generates audio from input text via the
/// OpenAI /audio/speech endpoint; see Audio::speech for the synchronous form.
/// @return liboai::FutureResponse that yields the Response when ready.
liboai::FutureResponse liboai::Audio::speech_async(const std::string& model, const std::string& voice, const std::string& input, std::optional<std::string> response_format, std::optional<float> speed) const& noexcept(false) {
	// Assemble the JSON request body.
	liboai::JsonConstructor payload;
	payload.push_back("model", model);
	payload.push_back("voice", voice);
	payload.push_back("input", input);

	// Optional fields are emitted only when provided by the caller.
	if (response_format) { payload.push_back("response_format", std::move(response_format.value())); }
	if (speed) { payload.push_back("speed", speed.value()); }

	// Move the body into the task itself so the worker thread owns it.
	auto task = [this, payload = std::move(payload)]() mutable -> liboai::Response {
		return this->Request(
			Method::HTTP_POST, this->openai_root_, "/audio/speech", "application/json",
			this->auth_.GetAuthorizationHeaders(),
			netimpl::components::Body {
				payload.dump()
			},
			this->auth_.GetProxies(),
			this->auth_.GetProxyAuth(),
			this->auth_.GetMaxTimeout()
		);
	};

	return std::async(std::launch::async, std::move(task));
}
Loading