[.Net] fix #3014 by adding local model function call in dotnet website (#3044)

* add instruction in ollama-litellm function call example
* add tutorial
* fix tests
1 parent 14fea31 · commit c1aee51

Showing 11 changed files with 199 additions and 153 deletions.
67 changes: 1 addition & 66 deletions
dotnet/sample/AutoGen.BasicSamples/Example13_OpenAIAgent_JsonMode.cs
```diff
@@ -1,68 +1,3 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // Example13_OpenAIAgent_JsonMode.cs
-
-using System.Text.Json;
-using System.Text.Json.Serialization;
-using AutoGen.Core;
-using AutoGen.OpenAI;
-using AutoGen.OpenAI.Extension;
-using Azure.AI.OpenAI;
-using FluentAssertions;
-
-namespace AutoGen.BasicSample;
-
-public class Example13_OpenAIAgent_JsonMode
-{
-    public static async Task RunAsync()
-    {
-        #region create_agent
-        var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(deployName: "gpt-35-turbo"); // json mode only works with 0125 and later model.
-        var apiKey = config.ApiKey;
-        var endPoint = new Uri(config.Endpoint);
-
-        var openAIClient = new OpenAIClient(endPoint, new Azure.AzureKeyCredential(apiKey));
-        var openAIClientAgent = new OpenAIChatAgent(
-            openAIClient: openAIClient,
-            name: "assistant",
-            modelName: config.DeploymentName,
-            systemMessage: "You are a helpful assistant designed to output JSON.",
-            seed: 0, // explicitly set a seed to enable deterministic output
-            responseFormat: ChatCompletionsResponseFormat.JsonObject) // set response format to JSON object to enable JSON mode
-            .RegisterMessageConnector()
-            .RegisterPrintMessage();
-        #endregion create_agent
-
-        #region chat_with_agent
-        var reply = await openAIClientAgent.SendAsync("My name is John, I am 25 years old, and I live in Seattle.");
-
-        var person = JsonSerializer.Deserialize<Person>(reply.GetContent());
-        Console.WriteLine($"Name: {person.Name}");
-        Console.WriteLine($"Age: {person.Age}");
-
-        if (!string.IsNullOrEmpty(person.Address))
-        {
-            Console.WriteLine($"Address: {person.Address}");
-        }
-
-        Console.WriteLine("Done.");
-        #endregion chat_with_agent
-
-        person.Name.Should().Be("John");
-        person.Age.Should().Be(25);
-        person.Address.Should().BeNullOrEmpty();
-    }
-}
-
-#region person_class
-public class Person
-{
-    [JsonPropertyName("name")]
-    public string Name { get; set; }
-
-    [JsonPropertyName("age")]
-    public int Age { get; set; }
-
-    [JsonPropertyName("address")]
-    public string Address { get; set; }
-}
-#endregion person_class
+// this example has been moved to https://github.com/microsoft/autogen/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs
```
61 changes: 1 addition & 60 deletions
dotnet/sample/AutoGen.BasicSamples/Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs
```diff
@@ -1,62 +1,3 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs
-#region using_statement
-using AutoGen.Core;
-using AutoGen.OpenAI;
-using AutoGen.OpenAI.Extension;
-using Azure.AI.OpenAI;
-using Azure.Core.Pipeline;
-#endregion using_statement
-
-namespace AutoGen.BasicSample;
-
-#region CustomHttpClientHandler
-public sealed class CustomHttpClientHandler : HttpClientHandler
-{
-    private string _modelServiceUrl;
-
-    public CustomHttpClientHandler(string modelServiceUrl)
-    {
-        _modelServiceUrl = modelServiceUrl;
-    }
-
-    protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
-    {
-        request.RequestUri = new Uri($"{_modelServiceUrl}{request.RequestUri.PathAndQuery}");
-
-        return base.SendAsync(request, cancellationToken);
-    }
-}
-#endregion CustomHttpClientHandler
-
-public class Example16_OpenAIChatAgent_ConnectToThirdPartyBackend
-{
-    public static async Task RunAsync()
-    {
-        #region create_agent
-        using var client = new HttpClient(new CustomHttpClientHandler("http://localhost:11434"));
-        var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_04_01_Preview)
-        {
-            Transport = new HttpClientTransport(client),
-        };
-
-        // api-key is not required for local server
-        // so you can use any string here
-        var openAIClient = new OpenAIClient("api-key", option);
-        var model = "llama3";
-
-        var agent = new OpenAIChatAgent(
-            openAIClient: openAIClient,
-            name: "assistant",
-            modelName: model,
-            systemMessage: "You are a helpful assistant designed to output JSON.",
-            seed: 0)
-            .RegisterMessageConnector()
-            .RegisterPrintMessage();
-        #endregion create_agent
-
-        #region send_message
-        await agent.SendAsync("Can you write a piece of C# code to calculate 100th of fibonacci?");
-        #endregion send_message
-    }
-}
+// this example has been moved to https://github.com/microsoft/autogen/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs
```
67 changes: 67 additions & 0 deletions
dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs (new file)
```csharp
// Copyright (c) Microsoft Corporation. All rights reserved.
// Example13_OpenAIAgent_JsonMode.cs

using System.Text.Json;
using System.Text.Json.Serialization;
using AutoGen.Core;
using AutoGen.OpenAI;
using AutoGen.OpenAI.Extension;
using Azure.AI.OpenAI;
using FluentAssertions;

namespace AutoGen.BasicSample;

public class Use_Json_Mode
{
    public static async Task RunAsync()
    {
        #region create_agent
        var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable.");
        var model = "gpt-3.5-turbo";

        var openAIClient = new OpenAIClient(apiKey);
        var openAIClientAgent = new OpenAIChatAgent(
            openAIClient: openAIClient,
            name: "assistant",
            modelName: model,
            systemMessage: "You are a helpful assistant designed to output JSON.",
            seed: 0, // explicitly set a seed to enable deterministic output
            responseFormat: ChatCompletionsResponseFormat.JsonObject) // set response format to JSON object to enable JSON mode
            .RegisterMessageConnector()
            .RegisterPrintMessage();
        #endregion create_agent

        #region chat_with_agent
        var reply = await openAIClientAgent.SendAsync("My name is John, I am 25 years old, and I live in Seattle.");

        var person = JsonSerializer.Deserialize<Person>(reply.GetContent());
        Console.WriteLine($"Name: {person.Name}");
        Console.WriteLine($"Age: {person.Age}");

        if (!string.IsNullOrEmpty(person.Address))
        {
            Console.WriteLine($"Address: {person.Address}");
        }

        Console.WriteLine("Done.");
        #endregion chat_with_agent

        person.Name.Should().Be("John");
        person.Age.Should().Be(25);
        person.Address.Should().BeNullOrEmpty();
    }
}

#region person_class
public class Person
{
    [JsonPropertyName("name")]
    public string Name { get; set; }

    [JsonPropertyName("age")]
    public int Age { get; set; }

    [JsonPropertyName("address")]
    public string Address { get; set; }
}
#endregion person_class
```
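For reference, JSON mode constrains the model to emit a single JSON object, so the reply content that `JsonSerializer.Deserialize<Person>` consumes should be shaped like the literal in this minimal sketch. The payload is an illustration, not captured model output, and the snippet reuses the `Person` class defined in the sample above.

```csharp
using System.Text.Json;

// Illustrative payload only: the shape of a JSON-mode reply for the prompt above,
// round-tripped through the same Person class defined in the sample.
var json = "{\"name\": \"John\", \"age\": 25, \"address\": \"\"}";
var person = JsonSerializer.Deserialize<Person>(json);
Console.WriteLine($"{person.Name}, {person.Age}"); // John, 25
```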
93 changes: 93 additions & 0 deletions
dotnet/website/articles/Function-call-with-ollama-and-litellm.md (new file)
This example shows how to use function calls with local LLM models, using [Ollama](https://ollama.com/) as the local model provider and the [LiteLLM](https://docs.litellm.ai/docs/) proxy server, which provides an OpenAI-API-compatible interface.

[![](https://img.shields.io/badge/Open%20on%20Github-grey?logo=github)](https://github.com/microsoft/autogen/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs)

To run this example, the following prerequisites are required:
- Install [Ollama](https://ollama.com/) and [LiteLLM](https://docs.litellm.ai/docs/) on your local machine.
- A local model that supports function calls; this example uses `dolphincoder:latest`.

## Install Ollama and pull `dolphincoder:latest` model
First, install Ollama by following the instructions on the [Ollama website](https://ollama.com/).

After installing Ollama, pull the `dolphincoder:latest` model by running the following command:
```bash
ollama pull dolphincoder:latest
```

## Install LiteLLM and start the proxy server

You can install LiteLLM by following the instructions on the [LiteLLM website](https://docs.litellm.ai/docs/).
```bash
pip install 'litellm[proxy]'
```

Then, start the proxy server by running the following command:

```bash
litellm --model ollama_chat/dolphincoder --port 4000
```

This starts an OpenAI-API-compatible proxy server at `http://localhost:4000`. You can verify that the server is running by looking for the following output in the terminal:

```bash
#------------------------------------------------------------#
#                                                            #
#       'The worst thing about this product is...'           #
#        https://github.com/BerriAI/litellm/issues/new        #
#                                                            #
#------------------------------------------------------------#

INFO:     Application startup complete.
INFO:     Uvicorn running on http://0.0.0.0:4000 (Press CTRL+C to quit)
```

## Install AutoGen and AutoGen.SourceGenerator
In your project, install the AutoGen and AutoGen.SourceGenerator packages using the following commands:

```bash
dotnet add package AutoGen
dotnet add package AutoGen.SourceGenerator
```

The `AutoGen.SourceGenerator` package automatically generates type-safe `FunctionContract` definitions so you don't have to write them by hand. For more information, see [Create type-safe function](Create-type-safe-function-call.md).

Also, in your project file, enable structured XML documentation support by setting the `GenerateDocumentationFile` property to `true`:

```xml
<PropertyGroup>
    <!-- This enables structured XML documentation support -->
    <GenerateDocumentationFile>true</GenerateDocumentationFile>
</PropertyGroup>
```

## Define `WeatherReport` function and create @AutoGen.Core.FunctionCallMiddleware

Create a `public partial` class to host the methods you want to use in AutoGen agents. Each method must be a `public` instance method whose return type is `Task<string>`. After the methods are defined, mark them with the `AutoGen.Core.FunctionAttribute` attribute, as shown in the snippet below.

[!code-csharp[Define WeatherReport function](../../sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs?name=Function)]
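If the referenced snippet does not render here, the function class looks roughly like the sketch below. The `GetWeatherAsync` name matches the tool-call output shown at the end of this article, but the exact class name and body are assumptions, not the sample's verbatim code.

```csharp
using System.Threading.Tasks;
using AutoGen.Core;

// A minimal sketch: a public partial class (so the source generator can extend it)
// hosting a public instance method that returns Task<string>, marked with [Function].
public partial class Function
{
    /// <summary>
    /// Get the weather report for a given city.
    /// </summary>
    /// <param name="city">The city to query.</param>
    [Function]
    public async Task<string> GetWeatherAsync(string city)
    {
        // Stub implementation for the tutorial; a real function could call a weather API.
        return await Task.FromResult($"The weather in {city} is 72 degrees and sunny.");
    }
}
```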

Then create a @AutoGen.Core.FunctionCallMiddleware and add the `WeatherReport` function to it. The middleware passes the `FunctionContract` to the agent when generating a response and processes the tool call when a `ToolCallMessage` is received.
[!code-csharp[Create FunctionCallMiddleware](../../sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs?name=Create_tools)]
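A sketch of that wiring, assuming the `{MethodName}FunctionContract` property and `{MethodName}Wrapper` method that `AutoGen.SourceGenerator` emits for `[Function]`-attributed methods:

```csharp
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using AutoGen.Core;

// The source generator augments the partial Function class with a
// GetWeatherAsyncFunctionContract property and a GetWeatherAsyncWrapper method.
var function = new Function();
var functionCallMiddleware = new FunctionCallMiddleware(
    functions: new[] { function.GetWeatherAsyncFunctionContract },
    functionMap: new Dictionary<string, Func<string, Task<string>>>
    {
        { function.GetWeatherAsyncFunctionContract.Name!, function.GetWeatherAsyncWrapper },
    });
```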

## Create @AutoGen.OpenAI.OpenAIChatAgent with `GetWeatherReport` tool and chat with it

Because the LiteLLM proxy server is OpenAI-API-compatible, we can use @AutoGen.OpenAI.OpenAIChatAgent to connect to it as a third-party OpenAI-API provider. The agent is also registered with a @AutoGen.Core.FunctionCallMiddleware that contains the `WeatherReport` tool, so the agent can call the `WeatherReport` tool when generating a response.

[!code-csharp[Create an agent with tools](../../sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs?name=Create_Agent)]
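A sketch of that setup, reusing the `CustomHttpClientHandler` pattern from `Example16_OpenAIChatAgent_ConnectToThirdPartyBackend` above to point the client at the LiteLLM proxy; the model name, system message, and prompt are assumptions chosen to match this tutorial:

```csharp
using AutoGen.Core;
using AutoGen.OpenAI;
using AutoGen.OpenAI.Extension;
using Azure.AI.OpenAI;
using Azure.Core.Pipeline;

// Route requests to the local LiteLLM proxy instead of the default OpenAI endpoint.
using var httpClient = new HttpClient(new CustomHttpClientHandler("http://localhost:4000"));
var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_04_01_Preview)
{
    Transport = new HttpClientTransport(httpClient),
};

// The api key can be any non-empty string for a local proxy.
var openAIClient = new OpenAIClient("api-key", option);

var agent = new OpenAIChatAgent(
    openAIClient: openAIClient,
    name: "assistant",
    modelName: "dolphincoder",
    systemMessage: "You are a helpful AI assistant")
    .RegisterMessageConnector()
    .RegisterMiddleware(functionCallMiddleware) // enable the WeatherReport tool
    .RegisterPrintMessage();

var reply = await agent.SendAsync("what is the weather in new york?");
```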

The reply from the agent will be similar to the following:
```bash
AggregateMessage from assistant
--------------------
ToolCallMessage:
ToolCallMessage from assistant
--------------------
- GetWeatherAsync: {"city": "new york"}
--------------------

ToolCallResultMessage:
ToolCallResultMessage from assistant
--------------------
- GetWeatherAsync: The weather in new york is 72 degrees and sunny.
--------------------
```