Skip to content

Commit

Permalink
Merge branch 'microsoft:main' into notebook-for-vertexai
Browse files Browse the repository at this point in the history
  • Loading branch information
luxzoli authored Jul 1, 2024
2 parents b2d95b1 + 80ecbf9 commit 5da8ce3
Show file tree
Hide file tree
Showing 31 changed files with 917 additions and 185 deletions.
4 changes: 3 additions & 1 deletion autogen/agentchat/contrib/agent_eval/README.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
Agents for running the AgentEval pipeline.
Agents for running the [AgentEval](https://microsoft.github.io/autogen/blog/2023/11/20/AgentEval/) pipeline.

AgentEval is a process for evaluating an LLM-based system's performance on a given task.

When given a task to evaluate and a few example runs, the critic and subcritic agents create evaluation criteria for evaluating a system's solution. Once the criteria have been created, the quantifier agent can evaluate subsequent task solutions based on the generated criteria.

For more information see: [AgentEval Integration Roadmap](https://github.com/microsoft/autogen/issues/2162)

See our [blog post](https://microsoft.github.io/autogen/blog/2024/06/21/AgentEval) for usage examples and general explanations.
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
<ProjectReference Include="..\..\src\AutoGen.DotnetInteractive\AutoGen.DotnetInteractive.csproj" />
<ProjectReference Include="..\..\src\AutoGen.SourceGenerator\AutoGen.SourceGenerator.csproj" OutputItemType="Analyzer" ReferenceOutputAssembly="false" />
<ProjectReference Include="..\..\src\AutoGen\AutoGen.csproj" />
<PackageReference Include="FluentAssertions" Version="$(FluentAssertionVersion)" />
</ItemGroup>

</Project>
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

namespace AutoGen.Anthropic.Samples;

public static class AnthropicSamples
public static class Create_Anthropic_Agent
{
public static async Task RunAsync()
{
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Single_Anthropic_Tool.cs

using AutoGen.Anthropic.DTO;
using AutoGen.Anthropic.Extensions;
using AutoGen.Anthropic.Utils;
using AutoGen.Core;
using FluentAssertions;

namespace AutoGen.Anthropic.Samples;

#region WeatherFunction

public partial class WeatherFunction
{
    /// <summary>
    /// Gets the weather based on the location and the unit.
    /// </summary>
    /// <param name="location">The city and state, e.g. "San Francisco, CA".</param>
    /// <param name="unit">The temperature unit, either "celsius" or "fahrenheit".</param>
    /// <returns>A human-readable (dummy) weather report for <paramref name="location"/>.</returns>
    [Function]
    public Task<string> GetWeather(string location, string unit)
    {
        // Dummy implementation — a real version would call a weather service.
        // Dropped the no-op `async` (CS1998) in favor of Task.FromResult and
        // fixed the "tempature" typo in the returned message.
        return Task.FromResult($"The weather in {location} is currently sunny with a temperature of {unit} (s)");
    }
}
#endregion
/// <summary>
/// Sample: create an <c>AnthropicClientAgent</c> that can invoke a tool
/// (<c>GetWeather</c>) via <c>FunctionCallMiddleware</c>. Requires the
/// ANTHROPIC_API_KEY environment variable and makes live API calls.
/// </summary>
public class Create_Anthropic_Agent_With_Tool
{
    public static async Task RunAsync()
    {
        #region define_tool
        // Declare the tool schema the model sees: name, description, and a
        // JSON-schema-style input description. Only "location" is required.
        var tool = new Tool
        {
            Name = "GetWeather",
            Description = "Get the current weather in a given location",
            InputSchema = new InputSchema
            {
                Type = "object",
                Properties = new Dictionary<string, SchemaProperty>
                {
                    { "location", new SchemaProperty { Type = "string", Description = "The city and state, e.g. San Francisco, CA" } },
                    { "unit", new SchemaProperty { Type = "string", Description = "The unit of temperature, either \"celsius\" or \"fahrenheit\"" } }
                },
                Required = new List<string> { "location" }
            }
        };

        // Middleware that maps tool-call requests from the model onto the
        // generated wrapper for WeatherFunction.GetWeather.
        var weatherFunction = new WeatherFunction();
        var functionMiddleware = new FunctionCallMiddleware(
            functions: [
                weatherFunction.GetWeatherFunctionContract,
            ],
            functionMap: new Dictionary<string, Func<string, Task<string>>>
            {
                { weatherFunction.GetWeatherFunctionContract.Name!, weatherFunction.GetWeatherWrapper },
            });

        #endregion

        #region create_anthropic_agent

        // Fail fast with a clear message if the API key is not configured.
        var apiKey = Environment.GetEnvironmentVariable("ANTHROPIC_API_KEY") ??
            throw new Exception("Missing ANTHROPIC_API_KEY environment variable.");

        var anthropicClient = new AnthropicClient(new HttpClient(), AnthropicConstants.Endpoint, apiKey);
        var agent = new AnthropicClientAgent(anthropicClient, "assistant", AnthropicConstants.Claude3Haiku,
            tools: [tool]); // Define tools for AnthropicClientAgent
        #endregion

        #region register_middleware

        // Order matters: the message connector normalizes messages, then
        // replies are printed, then tool calls are intercepted and executed.
        var agentWithConnector = agent
            .RegisterMessageConnector()
            .RegisterPrintMessage()
            .RegisterStreamingMiddleware(functionMiddleware);
        #endregion register_middleware

        #region single_turn
        // First turn: a question that should trigger a GetWeather tool call.
        var question = new TextMessage(Role.Assistant,
            "What is the weather like in San Francisco?",
            from: "user");
        var functionCallReply = await agentWithConnector.SendAsync(question);
        #endregion

        #region Single_turn_verify_reply
        // The first reply should be the aggregated tool call + tool result.
        functionCallReply.Should().BeOfType<ToolCallAggregateMessage>();
        #endregion Single_turn_verify_reply

        #region Multi_turn
        // Second turn: feed the tool result back so the model can answer in text.
        var finalReply = await agentWithConnector.SendAsync(chatHistory: [question, functionCallReply]);
        #endregion Multi_turn

        #region Multi_turn_verify_reply
        finalReply.Should().BeOfType<TextMessage>();
        #endregion Multi_turn_verify_reply
    }
}
2 changes: 1 addition & 1 deletion dotnet/sample/AutoGen.Anthropic.Samples/Program.cs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,6 @@ internal static class Program
{
public static async Task Main(string[] args)
{
await AnthropicSamples.RunAsync();
await Create_Anthropic_Agent_With_Tool.RunAsync();
}
}
Original file line number Diff line number Diff line change
@@ -1,68 +1,3 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Example13_OpenAIAgent_JsonMode.cs

using System.Text.Json;
using System.Text.Json.Serialization;
using AutoGen.Core;
using AutoGen.OpenAI;
using AutoGen.OpenAI.Extension;
using Azure.AI.OpenAI;
using FluentAssertions;

namespace AutoGen.BasicSample;

/// <summary>
/// Sample: use an Azure OpenAI agent in JSON mode and deserialize the reply
/// into <see cref="Person"/>. Requires a configured Azure OpenAI deployment
/// and makes live API calls.
/// </summary>
public class Example13_OpenAIAgent_JsonMode
{
    public static async Task RunAsync()
    {
        #region create_agent
        var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(deployName: "gpt-35-turbo"); // json mode only works with 0125 and later model.
        var apiKey = config.ApiKey;
        var endPoint = new Uri(config.Endpoint);

        var openAIClient = new OpenAIClient(endPoint, new Azure.AzureKeyCredential(apiKey));
        var openAIClientAgent = new OpenAIChatAgent(
            openAIClient: openAIClient,
            name: "assistant",
            modelName: config.DeploymentName,
            systemMessage: "You are a helpful assistant designed to output JSON.",
            seed: 0, // explicitly set a seed to enable deterministic output
            responseFormat: ChatCompletionsResponseFormat.JsonObject) // set response format to JSON object to enable JSON mode
            .RegisterMessageConnector()
            .RegisterPrintMessage();
        #endregion create_agent

        #region chat_with_agent
        var reply = await openAIClientAgent.SendAsync("My name is John, I am 25 years old, and I live in Seattle.");

        // NOTE(review): Deserialize can return null (and GetContent may be null),
        // which would throw NullReferenceException below — acceptable for a
        // sample, but verify before reusing this pattern.
        var person = JsonSerializer.Deserialize<Person>(reply.GetContent());
        Console.WriteLine($"Name: {person.Name}");
        Console.WriteLine($"Age: {person.Age}");

        if (!string.IsNullOrEmpty(person.Address))
        {
            Console.WriteLine($"Address: {person.Address}");
        }

        Console.WriteLine("Done.");
        #endregion chat_with_agent

        // Sanity-check the model honored the JSON schema and input facts.
        person.Name.Should().Be("John");
        person.Age.Should().Be(25);
        person.Address.Should().BeNullOrEmpty();
    }
}

#region person_class
/// <summary>
/// DTO describing the JSON object the JSON-mode agent is asked to produce.
/// Property-to-key mapping is pinned by <see cref="JsonPropertyName"/> so the
/// wire format stays lowercase regardless of serializer naming policy.
/// </summary>
public class Person
{
    /// <summary>The person's name (JSON key: "name").</summary>
    [JsonPropertyName("name")] public string Name { get; set; }

    /// <summary>The person's age in years (JSON key: "age").</summary>
    [JsonPropertyName("age")] public int Age { get; set; }

    /// <summary>The person's address, if provided (JSON key: "address").</summary>
    [JsonPropertyName("address")] public string Address { get; set; }
}
#endregion person_class
// this example has been moved to https://github.com/microsoft/autogen/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs
Original file line number Diff line number Diff line change
@@ -1,62 +1,3 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs
#region using_statement
using AutoGen.Core;
using AutoGen.OpenAI;
using AutoGen.OpenAI.Extension;
using Azure.AI.OpenAI;
using Azure.Core.Pipeline;
#endregion using_statement

namespace AutoGen.BasicSample;

#region CustomHttpClientHandler
/// <summary>
/// An <see cref="HttpClientHandler"/> that redirects every outgoing request to
/// a local model server, preserving the original path and query string. Used to
/// point the OpenAI SDK at a third-party backend (e.g. Ollama).
/// </summary>
public sealed class CustomHttpClientHandler : HttpClientHandler
{
    // Base address of the local model server, e.g. "http://localhost:11434".
    // Made readonly: it is only assigned in the constructor.
    private readonly string _modelServiceUrl;

    /// <param name="modelServiceUrl">Base URL of the local model server; must not be null.</param>
    public CustomHttpClientHandler(string modelServiceUrl)
    {
        // Validate eagerly so a misconfiguration fails at construction,
        // not on the first request.
        _modelServiceUrl = modelServiceUrl ?? throw new ArgumentNullException(nameof(modelServiceUrl));
    }

    protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        // Rewrite the host to the local endpoint while keeping the SDK-built
        // path and query (e.g. "/v1/chat/completions?...").
        request.RequestUri = new Uri($"{_modelServiceUrl}{request.RequestUri.PathAndQuery}");

        return base.SendAsync(request, cancellationToken);
    }
}
#endregion CustomHttpClientHandler

/// <summary>
/// Sample: connect <c>OpenAIChatAgent</c> to a third-party, OpenAI-compatible
/// backend (here an Ollama server on localhost:11434) by rerouting requests
/// with <see cref="CustomHttpClientHandler"/>. Requires a running local server.
/// </summary>
public class Example16_OpenAIChatAgent_ConnectToThirdPartyBackend
{
    public static async Task RunAsync()
    {
        #region create_agent
        // Route all SDK traffic to the local Ollama endpoint instead of the
        // real OpenAI/Azure endpoint.
        using var client = new HttpClient(new CustomHttpClientHandler("http://localhost:11434"));
        var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_04_01_Preview)
        {
            Transport = new HttpClientTransport(client),
        };

        // api-key is not required for local server
        // so you can use any string here
        var openAIClient = new OpenAIClient("api-key", option);
        var model = "llama3"; // model name as known to the local server

        var agent = new OpenAIChatAgent(
            openAIClient: openAIClient,
            name: "assistant",
            modelName: model,
            systemMessage: "You are a helpful assistant designed to output JSON.",
            seed: 0)
            .RegisterMessageConnector()
            .RegisterPrintMessage();
        #endregion create_agent

        #region send_message
        // Single fire-and-forget turn; the reply is printed by RegisterPrintMessage.
        await agent.SendAsync("Can you write a piece of C# code to calculate 100th of fibonacci?");
        #endregion send_message
    }
}
// this example has been moved to https://github.com/microsoft/autogen/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@

namespace AutoGen.OpenAI.Sample;

#region Function
public partial class Function
{
[Function]
Expand All @@ -16,25 +17,37 @@ public async Task<string> GetWeatherAsync(string city)
return await Task.FromResult("The weather in " + city + " is 72 degrees and sunny.");
}
}
#endregion Function

public class Tool_Call_With_Ollama_And_LiteLLM
{
public static async Task RunAsync()
{
#region Create_Agent
var liteLLMUrl = "http://localhost:4000";
using var httpClient = new HttpClient(new CustomHttpClientHandler(liteLLMUrl));
var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_04_01_Preview)
{
Transport = new HttpClientTransport(httpClient),
};
// Before running this code, make sure you have
// - Ollama:
// - Install dolphincoder:latest in Ollama
// - Ollama running on http://localhost:11434
// - LiteLLM
// - Install LiteLLM
// - Start LiteLLM with the following command:
// - litellm --model ollama_chat/dolphincoder --port 4000

# region Create_tools
var functions = new Function();
var functionMiddleware = new FunctionCallMiddleware(
functions: [functions.GetWeatherAsyncFunctionContract],
functionMap: new Dictionary<string, Func<string, Task<string>>>
{
{ functions.GetWeatherAsyncFunctionContract.Name!, functions.GetWeatherAsyncWrapper },
});
#endregion Create_tools
#region Create_Agent
var liteLLMUrl = "http://localhost:4000";
using var httpClient = new HttpClient(new CustomHttpClientHandler(liteLLMUrl));
var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_04_01_Preview)
{
Transport = new HttpClientTransport(httpClient),
};

// api-key is not required for local server
// so you can use any string here
Expand All @@ -43,7 +56,7 @@ public static async Task RunAsync()
var agent = new OpenAIChatAgent(
openAIClient: openAIClient,
name: "assistant",
modelName: "placeholder",
modelName: "dolphincoder:latest",
systemMessage: "You are a helpful AI assistant")
.RegisterMessageConnector()
.RegisterMiddleware(functionMiddleware)
Expand Down
Loading

0 comments on commit 5da8ce3

Please sign in to comment.