[.Net] add ollama-sample and more tests #2776

Merged (3 commits) on May 24, 2024.
Changes from 1 commit
7 changes: 7 additions & 0 deletions dotnet/AutoGen.sln
@@ -43,6 +43,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Ollama", "src\AutoG
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Ollama.Tests", "test\AutoGen.Ollama.Tests\AutoGen.Ollama.Tests.csproj", "{03E31CAA-3728-48D3-B936-9F11CF6C18FE}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AutoGen.Ollama.Sample", "sample\AutoGen.Ollama.Sample\AutoGen.Ollama.Sample.csproj", "{93AA4D0D-6EE4-44D5-AD77-7F73A3934544}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -117,6 +119,10 @@ Global
{03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Release|Any CPU.Build.0 = Release|Any CPU
{93AA4D0D-6EE4-44D5-AD77-7F73A3934544}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{93AA4D0D-6EE4-44D5-AD77-7F73A3934544}.Debug|Any CPU.Build.0 = Debug|Any CPU
{93AA4D0D-6EE4-44D5-AD77-7F73A3934544}.Release|Any CPU.ActiveCfg = Release|Any CPU
{93AA4D0D-6EE4-44D5-AD77-7F73A3934544}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -139,6 +145,7 @@ Global
{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
{9F9E6DED-3D92-4970-909A-70FC11F1A665} = {18BF8DD7-0585-48BF-8F97-AD333080CE06}
{03E31CAA-3728-48D3-B936-9F11CF6C18FE} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
{93AA4D0D-6EE4-44D5-AD77-7F73A3934544} = {FBFEAD1F-29EB-4D99-A672-0CD8473E10B9}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {93384647-528D-46C8-922C-8DB36A382F0B}
25 changes: 25 additions & 0 deletions dotnet/sample/AutoGen.Ollama.Sample/AutoGen.Ollama.Sample.csproj
@@ -0,0 +1,25 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>$(TestTargetFramework)</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<GenerateDocumentationFile>True</GenerateDocumentationFile>
<NoWarn>$(NoWarn);CS8981;CS8600;CS8602;CS8604;CS8618;CS0219;SKEXP0054;SKEXP0050;SKEXP0110</NoWarn>
</PropertyGroup>

<ItemGroup>
<ProjectReference Include="..\..\src\AutoGen.DotnetInteractive\AutoGen.DotnetInteractive.csproj" />
<ProjectReference Include="..\..\src\AutoGen.Ollama\AutoGen.Ollama.csproj" />
<ProjectReference Include="..\..\src\AutoGen.SourceGenerator\AutoGen.SourceGenerator.csproj" OutputItemType="Analyzer" ReferenceOutputAssembly="false" />
<ProjectReference Include="..\..\src\AutoGen\AutoGen.csproj" />
<PackageReference Include="FluentAssertions" Version="$(FluentAssertionVersion)" />
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Web" Version="$(SemanticKernelExperimentalVersion)" />
</ItemGroup>

<ItemGroup>
<None Update="images\*.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>

</Project>
28 changes: 28 additions & 0 deletions dotnet/sample/AutoGen.Ollama.Sample/Chat_With_LLaMA.cs
@@ -0,0 +1,28 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Chat_With_LLaMA.cs

using AutoGen.Core;
using AutoGen.Ollama.Extension;

namespace AutoGen.Ollama.Sample;

public class Chat_With_LLaMA
{
public static async Task RunAsync()
{
using var httpClient = new HttpClient()
{
BaseAddress = new Uri("https://2xbvtxd1-11434.usw2.devtunnels.ms")
};

var ollamaAgent = new OllamaAgent(
httpClient: httpClient,
name: "ollama",
modelName: "llama3:latest",
systemMessage: "You are a helpful AI assistant")
.RegisterMessageConnector()
.RegisterPrintMessage();

        var reply = await ollamaAgent.SendAsync("Can you write a piece of C# code to calculate the 100th Fibonacci number?");
}
}
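
For local runs, the same agent can target a default local Ollama endpoint instead of the dev-tunnel URL used above. A minimal sketch, assuming Ollama is serving on its default port 11434 and llama3:latest has already been pulled:

using AutoGen.Core;
using AutoGen.Ollama;
using AutoGen.Ollama.Extension;

// Sketch: Chat_With_LLaMA wired to a local Ollama server (assumed at http://localhost:11434).
using var localClient = new HttpClient()
{
    BaseAddress = new Uri("http://localhost:11434")
};

var localAgent = new OllamaAgent(
    httpClient: localClient,
    name: "ollama",
    modelName: "llama3:latest",
    systemMessage: "You are a helpful AI assistant")
    .RegisterMessageConnector() // translate AutoGen messages to and from Ollama messages
    .RegisterPrintMessage();    // print every reply to the console

var reply = await localAgent.SendAsync("Can you write a piece of C# code to calculate the 100th Fibonacci number?");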
40 changes: 40 additions & 0 deletions dotnet/sample/AutoGen.Ollama.Sample/Chat_With_LLaVA.cs
@@ -0,0 +1,40 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Chat_With_LLaVA.cs

using AutoGen.Core;
using AutoGen.Ollama.Extension;

namespace AutoGen.Ollama.Sample;

public class Chat_With_LLaVA
{
public static async Task RunAsync()
{
using var httpClient = new HttpClient()
{
BaseAddress = new Uri("https://2xbvtxd1-11434.usw2.devtunnels.ms")
};

var ollamaAgent = new OllamaAgent(
httpClient: httpClient,
name: "ollama",
modelName: "llava:latest",
systemMessage: "You are a helpful AI assistant")
.RegisterMessageConnector()
.RegisterPrintMessage();

var image = Path.Combine("images", "background.png");
var binaryData = BinaryData.FromBytes(File.ReadAllBytes(image), "image/png");
var imageMessage = new ImageMessage(Role.User, binaryData);
var textMessage = new TextMessage(Role.User, "what's in this image?");
var reply = await ollamaAgent.SendAsync(chatHistory: [textMessage, imageMessage]);

        // You can also use MultiModalMessage to put text and images together in one message.
        // In this case, all parts of the multi-modal message are folded into a single message,
        // where the text is the concatenation of all text parts separated by \n
        // and the images are all the images contained in the multi-modal message.
var multiModalMessage = new MultiModalMessage(Role.User, [textMessage, imageMessage]);

reply = await ollamaAgent.SendAsync(chatHistory: [multiModalMessage]);
}
}
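
Because the reply returned by the connector is a regular AutoGen IMessage, it can be appended to the chat history for a follow-up turn. A small sketch continuing the conversation above (the follow-up prompt is illustrative):

// Sketch: follow-up question that reuses the multi-modal message and the model's reply as history.
var followUp = new TextMessage(Role.User, "Which colors stand out the most?");
reply = await ollamaAgent.SendAsync(chatHistory: [multiModalMessage, reply, followUp]);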
6 changes: 6 additions & 0 deletions dotnet/sample/AutoGen.Ollama.Sample/Program.cs
@@ -0,0 +1,6 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Program.cs

using AutoGen.Ollama.Sample;

await Chat_With_LLaVA.RunAsync();
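
Program.cs runs the LLaVA sample by default; pointing the entry point at the text-only sample from this PR is a one-line change, sketched below:

// Sketch: run the text-only LLaMA sample instead of the LLaVA one.
using AutoGen.Ollama.Sample;

await Chat_With_LLaMA.RunAsync();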
3 changes: 3 additions & 0 deletions dotnet/sample/AutoGen.Ollama.Sample/images/background.png
(Binary image file, preview not available.)
35 changes: 19 additions & 16 deletions dotnet/src/AutoGen.Ollama/Middlewares/OllamaMessageConnector.cs
@@ -64,13 +64,9 @@ public async IAsyncEnumerable<IStreamingMessage> InvokeAsync(MiddlewareContext c

// if the chunks are not empty, aggregate them into a single message
var messageContent = string.Join(string.Empty, chunks.Select(c => c.Message?.Value));
var message = new Message
{
Role = "assistant",
Value = messageContent,
};
var message = new TextMessage(Role.Assistant, messageContent, agent.Name);

yield return MessageEnvelope.Create(message, agent.Name);
yield return message;
}

private IEnumerable<IMessage> ProcessMessage(IEnumerable<IMessage> messages, IAgent agent)
@@ -96,18 +92,25 @@ private IEnumerable<IMessage> ProcessMessage(IEnumerable<IMessage> messages, IAg

private IEnumerable<IMessage> ProcessMultiModalMessage(MultiModalMessage multiModalMessage, IAgent agent)
{
var messages = new List<IMessage>();
foreach (var message in multiModalMessage.Content)
var textMessages = multiModalMessage.Content.Where(m => m is TextMessage textMessage && textMessage.GetContent() is not null);
var imageMessages = multiModalMessage.Content.Where(m => m is ImageMessage);

// aggregate the text messages into one message
// by concatenating the content using newline
var textContent = string.Join("\n", textMessages.Select(m => ((TextMessage)m).Content));

// collect all the images
var images = imageMessages.SelectMany(m => ProcessImageMessage((ImageMessage)m, agent)
.SelectMany(m => (m as IMessage<Message>)?.Content.Images));

var message = new Message()
{
messages.AddRange(message switch
{
TextMessage textMessage => ProcessTextMessage(textMessage, agent),
ImageMessage imageMessage => ProcessImageMessage(imageMessage, agent),
_ => throw new InvalidOperationException("Invalid message type"),
});
}
Role = "user",
Value = textContent,
Images = images.ToList(),
};

return messages;
return [MessageEnvelope.Create(message, agent.Name)];
}

private IEnumerable<IMessage> ProcessImageMessage(ImageMessage imageMessage, IAgent agent)
51 changes: 37 additions & 14 deletions dotnet/test/AutoGen.Ollama.Tests/OllamaMessageTests.cs
Expand Up @@ -2,13 +2,10 @@
// OllamaMessageTests.cs

using AutoGen.Core;
using AutoGen.Ollama;
using AutoGen.Tests;
using FluentAssertions;
using Xunit;
using Message = AutoGen.Ollama.Message;

namespace Autogen.Ollama.Tests;
namespace AutoGen.Ollama.Tests;

public class OllamaMessageTests
{
@@ -42,6 +39,36 @@ public async Task ItProcessUserTextMessageAsync()
await agent.SendAsync(userMessage);
}

[Fact]
public async Task ItProcessStreamingTextMessageAsync()
{
var messageConnector = new OllamaMessageConnector();
var agent = new EchoAgent("assistant")
.RegisterStreamingMiddleware(messageConnector);

var messageChunks = Enumerable.Range(0, 10)
.Select(i => new ChatResponseUpdate()
{
Message = new Message()
{
Value = i.ToString(),
Role = "assistant",
}
})
.Select(m => MessageEnvelope.Create(m));

IStreamingMessage? finalReply = null;
await foreach (var reply in agent.GenerateStreamingReplyAsync(messageChunks))
{
reply.Should().BeAssignableTo<IStreamingMessage>();
finalReply = reply;
}

finalReply.Should().BeOfType<TextMessage>();
var textMessage = (TextMessage)finalReply!;
textMessage.GetContent().Should().Be("0123456789");
}

[Fact]
public async Task ItProcessAssistantTextMessageAsync()
{
@@ -126,17 +153,13 @@ public async Task ItProcessMultiModalMessageAsync()
var agent = new EchoAgent("assistant")
.RegisterMiddleware(async (msgs, _, innerAgent, ct) =>
{
msgs.Count().Should().Be(2);
var textMessage = msgs.First();
textMessage.Should().BeOfType<MessageEnvelope<Message>>();
var message = (IMessage<Message>)textMessage;
message.Content.Role.Should().Be("user");
msgs.Count().Should().Be(1);
var message = msgs.First();
message.Should().BeOfType<MessageEnvelope<Message>>();

var imageMessage = msgs.Last();
imageMessage.Should().BeOfType<MessageEnvelope<Message>>();
message = (IMessage<Message>)imageMessage;
message.Content.Role.Should().Be("user");
message.Content.Images!.Count.Should().Be(1);
var multiModalMessage = (IMessage<Message>)message;
multiModalMessage.Content.Images!.Count.Should().Be(1);
multiModalMessage.Content.Value.Should().Be("Hello");

return await innerAgent.GenerateReplyAsync(msgs);
})