Skip to content

Commit

Permalink
[.Net] Add an example to show how to connect to third party OpenAI AP…
Browse files Browse the repository at this point in the history
…I endpoint + upgrade Azure.AI.OpenAI package (microsoft#2619)

* update

* update

* add blog
  • Loading branch information
LittleLittleCloud authored May 8, 2024
1 parent 3dbf4d7 commit 3ad1060
Show file tree
Hide file tree
Showing 9 changed files with 151 additions and 26 deletions.
2 changes: 1 addition & 1 deletion dotnet/eng/Version.props
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<AzureOpenAIVersion>1.0.0-beta.15</AzureOpenAIVersion>
<AzureOpenAIVersion>1.0.0-beta.17</AzureOpenAIVersion>
<SemanticKernelVersion>1.7.1</SemanticKernelVersion>
<SemanticKernelExperimentalVersion>1.7.1-alpha</SemanticKernelExperimentalVersion>
<SystemCodeDomVersion>5.0.0</SystemCodeDomVersion>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs
#region using_statement
using AutoGen.Core;
using AutoGen.OpenAI;
using AutoGen.OpenAI.Extension;
using Azure.AI.OpenAI;
using Azure.Core.Pipeline;
#endregion using_statement

namespace AutoGen.BasicSample;

#region CustomHttpClientHandler
/// <summary>
/// An <see cref="HttpClientHandler"/> that redirects every outgoing request to a
/// third-party OpenAI-compatible endpoint (e.g. a local Ollama or LM Studio server)
/// while preserving the original request path and query string.
/// </summary>
public sealed class CustomHttpClientHandler : HttpClientHandler
{
    // Base address of the OpenAI-compatible service, normalized to have no trailing slash.
    private readonly string _modelServiceUrl;

    /// <summary>
    /// Creates a handler that forwards all requests to <paramref name="modelServiceUrl"/>.
    /// </summary>
    /// <param name="modelServiceUrl">Base URL of the target service, e.g. "http://localhost:11434".</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="modelServiceUrl"/> is null.</exception>
    public CustomHttpClientHandler(string modelServiceUrl)
    {
        // Trim a trailing slash so concatenation with PathAndQuery (which always
        // starts with '/') never produces a malformed double-slash URL.
        _modelServiceUrl = modelServiceUrl?.TrimEnd('/')
            ?? throw new ArgumentNullException(nameof(modelServiceUrl));
    }

    /// <summary>
    /// Rewrites the request host to the configured service while keeping the
    /// OpenAI path (e.g. /v1/chat/completions), then sends the request.
    /// </summary>
    protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        request.RequestUri = new Uri($"{_modelServiceUrl}{request.RequestUri.PathAndQuery}");

        return base.SendAsync(request, cancellationToken);
    }
}
#endregion CustomHttpClientHandler

// Sample: connect OpenAIChatAgent to a third-party OpenAI-compatible backend
// (here: a local Ollama server at http://localhost:11434 serving the llama3 model).
// The #region markers are referenced by the docs article via ?name=... snippets — keep them.
public class Example16_OpenAIChatAgent_ConnectToThirdPartyBackend
{
// Entry point: builds the agent against the local server and sends one message.
public static async Task RunAsync()
{
#region create_agent
// Route all OpenAI HTTP traffic to the local server via the custom redirecting handler.
using var client = new HttpClient(new CustomHttpClientHandler("http://localhost:11434"));
var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_04_01_Preview)
{
// Replace the default transport so OpenAIClient uses the redirecting HttpClient above.
Transport = new HttpClientTransport(client),
};

// api-key is not required for local server
// so you can use any string here
var openAIClient = new OpenAIClient("api-key", option);
// Model name as registered with the local server (e.g. `ollama run llama3`).
var model = "llama3";

// NOTE(review): the system message mentions JSON output but the prompt below asks for
// C# code — possibly carried over from the JSON-mode example; confirm intent.
var agent = new OpenAIChatAgent(
openAIClient: openAIClient,
name: "assistant",
modelName: model,
systemMessage: "You are a helpful assistant designed to output JSON.",
seed: 0)
.RegisterMessageConnector() // bridge AutoGen message types and OpenAI chat messages
.RegisterPrintMessage(); // echo responses to the console
#endregion create_agent

#region send_message
await agent.SendAsync("Can you write a piece of C# code to calculate 100th of fibonacci?");
#endregion send_message
}
}
4 changes: 3 additions & 1 deletion dotnet/sample/AutoGen.BasicSamples/Program.cs
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Program.cs

await Example02_TwoAgent_MathChat.RunAsync();
using AutoGen.BasicSample;
Console.ReadLine();
await Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.RunAsync();
42 changes: 25 additions & 17 deletions dotnet/src/AutoGen.LMStudio/LMStudioAgent.cs
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,12 @@

using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using AutoGen.OpenAI;
using Azure.AI.OpenAI;
using Azure.Core.Pipeline;
using Azure.Core;

namespace AutoGen.LMStudio;

Expand Down Expand Up @@ -56,25 +57,32 @@ private OpenAIClient ConfigOpenAIClientForLMStudio(LMStudioConfig config)
{
// create uri from host and port
var uri = config.Uri;
var accessToken = new AccessToken(string.Empty, DateTimeOffset.Now.AddDays(180));
var tokenCredential = DelegatedTokenCredential.Create((_, _) => accessToken);
var openAIClient = new OpenAIClient(uri, tokenCredential);
var handler = new CustomHttpClientHandler(uri);
var httpClient = new HttpClient(handler);
var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2022_12_01)
{
Transport = new HttpClientTransport(httpClient),
};

// remove authenication header from pipeline
var pipeline = HttpPipelineBuilder.Build(
new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2022_12_01),
Array.Empty<HttpPipelinePolicy>(),
[],
new ResponseClassifier());
return new OpenAIClient("api-key", option);
}

// use reflection to override _pipeline field
var field = typeof(OpenAIClient).GetField("_pipeline", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
field.SetValue(openAIClient, pipeline);
private sealed class CustomHttpClientHandler : HttpClientHandler
{
private Uri _modelServiceUrl;

// use reflection to set _isConfiguredForAzureOpenAI to false
var isConfiguredForAzureOpenAIField = typeof(OpenAIClient).GetField("_isConfiguredForAzureOpenAI", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
isConfiguredForAzureOpenAIField.SetValue(openAIClient, false);
public CustomHttpClientHandler(Uri modelServiceUrl)
{
_modelServiceUrl = modelServiceUrl;
}

return openAIClient;
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
// request.RequestUri = new Uri($"{_modelServiceUrl}{request.RequestUri.PathAndQuery}");
var uriBuilder = new UriBuilder(_modelServiceUrl);
uriBuilder.Path = request.RequestUri.PathAndQuery;
request.RequestUri = uriBuilder.Uri;
return base.SendAsync(request, cancellationToken);
}
}
}
8 changes: 2 additions & 6 deletions dotnet/src/AutoGen.LMStudio/LMStudioConfig.cs
Original file line number Diff line number Diff line change
Expand Up @@ -8,27 +8,23 @@
/// </summary>
public class LMStudioConfig : ILLMConfig
{
public LMStudioConfig(string host, int port, int version = 1)
public LMStudioConfig(string host, int port)
{
this.Host = host;
this.Port = port;
this.Version = version;
this.Uri = new Uri($"http://{host}:{port}/v{version}");
this.Uri = new Uri($"http://{host}:{port}");
}

public LMStudioConfig(Uri uri)
{
this.Uri = uri;
this.Host = uri.Host;
this.Port = uri.Port;
this.Version = int.Parse(uri.Segments[1].TrimStart('v'));
}

public string Host { get; }

public int Port { get; }

public int Version { get; }

public Uri Uri { get; }
}
7 changes: 6 additions & 1 deletion dotnet/test/AutoGen.Tests/OpenAIMessageTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -294,7 +294,7 @@ private void VerifyOAIMessages(IEnumerable<(IMessage, IEnumerable<ChatRequestMes
ChatMessageImageContentItem imageContentItem => new
{
Type = "Image",
ImageUrl = imageContentItem.ImageUrl,
ImageUrl = GetImageUrlFromContent(imageContentItem),
} as object,
ChatMessageTextContentItem textContentItem => new
{
Expand Down Expand Up @@ -374,4 +374,9 @@ private void VerifyOAIMessages(IEnumerable<(IMessage, IEnumerable<ChatRequestMes
var json = JsonSerializer.Serialize(jsonObjects, this.jsonSerializerOptions);
Approvals.Verify(json);
}

/// <summary>
/// Reads the non-public "ImageUrl" property of a <see cref="ChatMessageImageContentItem"/>
/// via reflection, since the SDK does not expose it publicly. Returns null when the
/// property cannot be found.
/// </summary>
private object? GetImageUrlFromContent(ChatMessageImageContentItem content)
{
    const System.Reflection.BindingFlags flags =
        System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance;
    var imageUrlProperty = content.GetType().GetProperty("ImageUrl", flags);
    return imageUrlProperty?.GetValue(content);
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
The following example shows how to connect to a third-party OpenAI-compatible API endpoint using @AutoGen.OpenAI.OpenAIChatAgent.

> [!NOTE]
> You can find the complete code of this example in [Example16_OpenAIChatAgent_ConnectToThirdPartyBackend](https://github.com/microsoft/autogen/tree/dotnet/dotnet/sample/AutoGen.BasicSamples/Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs).
## Overview
Many LLM applications and platforms — such as LM Studio, Ollama, and Mistral — support spinning up a chat server that is compatible with the OpenAI API. This means you can connect to these servers using the @AutoGen.OpenAI.OpenAIChatAgent.

> [!NOTE]
> Some platforms might not support all the features of the OpenAI API. For example, Ollama does not support `function call` when using its OpenAI-compatible API, according to its [documentation](https://github.com/ollama/ollama/blob/main/docs/openai.md#v1chatcompletions) (as of 2024/05/07).
> That means some of the features of OpenAI API might not work as expected when using these platforms with the @AutoGen.OpenAI.OpenAIChatAgent.
> Please refer to the platform's documentation for more information.
## Prerequisites
- Install the following packages:
```bash
dotnet add package AutoGen.OpenAI --version AUTOGEN_VERSION
```

- Spin up a chat server that is compatible with OpenAI API.
The following example uses Ollama as the chat server and llama3 as the LLM model.
```bash
ollama serve
```

## Steps
- Import the required namespaces:
[!code-csharp[](../../sample/AutoGen.BasicSamples/Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs?name=using_statement)]

- Create a `CustomHttpClientHandler` class.

The `CustomHttpClientHandler` class is used to customize the `HttpClientHandler`. In this example, we override the `SendAsync` method to redirect the request to the local Ollama server, which is running on `http://localhost:11434`.

[!code-csharp[](../../sample/AutoGen.BasicSamples/Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs?name=CustomHttpClientHandler)]

- Create an `OpenAIChatAgent` instance and connect to the third-party API.

Then create an @AutoGen.OpenAI.OpenAIChatAgent instance and connect to the OpenAI-compatible API served by Ollama. You can customize the transport behavior of `OpenAIClient` by passing a customized `HttpClientTransport` instance. In that `HttpClientTransport` instance, we pass the `CustomHttpClientHandler` we just created, which redirects all OpenAI chat requests to the local Ollama server.

[!code-csharp[](../../sample/AutoGen.BasicSamples/Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs?name=create_agent)]

- Chat with the `OpenAIChatAgent`.
Finally, you can start chatting with the agent. In this example, we send a coding question to the agent and get the response.

[!code-csharp[](../../sample/AutoGen.BasicSamples/Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs?name=send_message)]

## Sample Output
The following is the sample output of the code snippet above:

![output](../images/articles/ConnectTo3PartyOpenAI/output.gif)
2 changes: 2 additions & 0 deletions dotnet/website/articles/toc.yml
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,8 @@
href: OpenAIChatAgent-use-function-call.md
- name: Use json mode in OpenAIChatAgent
href: OpenAIChatAgent-use-json-mode.md
- name: Connect to third-party OpenAI API endpoints.
href: OpenAIChatAgent-connect-to-third-party-api.md

- name: AutoGen.SemanticKernel
items:
Expand Down
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.

0 comments on commit 3ad1060

Please sign in to comment.