
Commit f149f95

Authored by: RogerBarreto, dependabot[bot], moonbox3, dmytrostruk, Ikko Eltociear Ashimine
.Net: Add Ollama Connector (#7362)
# Motivation and Context

This PR adds support for the Ollama Connector. The connector uses the `OllamaSharp` library client to allow usage of native Ollama endpoints.

---------

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Evan Mattson <[email protected]>
Co-authored-by: Dmytro Struk <[email protected]>
Co-authored-by: Ikko Eltociear Ashimine <[email protected]>
Co-authored-by: Chris <[email protected]>
Co-authored-by: ShuaiHua Du <[email protected]>
Co-authored-by: Krzysztof Kasprowicz <[email protected]>
Co-authored-by: Mark Wallace <[email protected]>
Co-authored-by: SergeyMenshykh <[email protected]>
Co-authored-by: Nico Möller <[email protected]>
Co-authored-by: Nico Möller <[email protected]>
Co-authored-by: westey <[email protected]>
Co-authored-by: Tao Chen <[email protected]>
Co-authored-by: Eduard van Valkenburg <[email protected]>
Co-authored-by: NEWTON MALLICK <[email protected]>
Co-authored-by: qowlsdn8007 <[email protected]>
Co-authored-by: Gil LaHaye <[email protected]>
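The samples added in this commit construct the connector either directly as an `OllamaChatCompletionService` or through `Kernel.CreateBuilder().AddOllamaChatCompletion(...)`. A minimal usage sketch based on those samples follows; the endpoint URL and model name are placeholder assumptions for a locally running Ollama instance, not values defined by this commit.

// Minimal usage sketch (not part of the commit). Assumptions: Ollama is running
// locally on the default port and the named model has already been pulled.
using System;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Ollama;

var kernel = Kernel.CreateBuilder()
    .AddOllamaChatCompletion(
        endpoint: new Uri("http://localhost:11434"), // assumed local Ollama endpoint
        modelId: "llama3.1")                         // assumed, already-pulled model
    .Build();

var chatService = kernel.GetRequiredService<IChatCompletionService>();

var chatHistory = new ChatHistory("You are a librarian, expert about books");
chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");

var reply = await chatService.GetChatMessageContentAsync(chatHistory);
Console.WriteLine($"{reply.Role}: {reply.Content}");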
1 parent 504d60c commit f149f95

40 files changed: +2767, -37 lines

dotnet/Directory.Packages.props (+3, -2)
@@ -39,6 +39,7 @@
     <PackageVersion Include="System.Text.Json" Version="8.0.4" />
     <PackageVersion Include="System.Threading.Tasks.Extensions" Version="4.5.4" />
     <PackageVersion Include="System.ValueTuple" Version="4.5.0" />
+    <PackageVersion Include="OllamaSharp" Version="3.0.1" />
     <!-- Tokenizers -->
     <PackageVersion Include="Microsoft.ML.Tokenizers" Version="0.22.0-preview.24378.1" />
     <PackageVersion Include="Microsoft.DeepDev.TokenizerLib" Version="1.3.3" />
@@ -135,8 +136,8 @@
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
     </PackageReference>
     <!-- OnnxRuntimeGenAI -->
-    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI" Version="0.3.0"/>
-    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.Cuda" Version="0.3.0"/>
+    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI" Version="0.3.0" />
+    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.Cuda" Version="0.3.0" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.DirectML" Version="0.4.0"/>
   </ItemGroup>
 </Project>

dotnet/SK-dotnet.sln (+19, -1)
@@ -314,12 +314,16 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Redis.UnitTests"
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Qdrant.UnitTests", "src\Connectors\Connectors.Qdrant.UnitTests\Connectors.Qdrant.UnitTests.csproj", "{E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}"
 EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "StepwisePlannerMigration", "samples\Demos\StepwisePlannerMigration\StepwisePlannerMigration.csproj", "{38374C62-0263-4FE8-A18C-70FC8132912B}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Ollama", "src\Connectors\Connectors.Ollama\Connectors.Ollama.csproj", "{E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}"
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AzureCosmosDBMongoDB.UnitTests", "src\Connectors\Connectors.AzureCosmosDBMongoDB.UnitTests\Connectors.AzureCosmosDBMongoDB.UnitTests.csproj", "{2918478E-BC86-4D53-9D01-9C318F80C14F}"
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AIModelRouter", "samples\Demos\AIModelRouter\AIModelRouter.csproj", "{E06818E3-00A5-41AC-97ED-9491070CDEA1}"
 EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Ollama.UnitTests", "src\Connectors\Connectors.Ollama.UnitTests\Connectors.Ollama.UnitTests.csproj", "{924DB138-1223-4C99-B6E6-0938A3FA14EF}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StepwisePlannerMigration", "samples\Demos\StepwisePlannerMigration\StepwisePlannerMigration.csproj", "{38374C62-0263-4FE8-A18C-70FC8132912B}"
+EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AzureCosmosDBNoSQL.UnitTests", "src\Connectors\Connectors.AzureCosmosDBNoSQL.UnitTests\Connectors.AzureCosmosDBNoSQL.UnitTests.csproj", "{385A8FE5-87E2-4458-AE09-35E10BD2E67F}"
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.Weaviate.UnitTests", "src\Connectors\Connectors.Weaviate.UnitTests\Connectors.Weaviate.UnitTests.csproj", "{AD9ECE32-088A-49D8-8ACB-890E79F1E7B8}"
@@ -787,6 +791,18 @@ Global
         {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}.Publish|Any CPU.Build.0 = Debug|Any CPU
         {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}.Release|Any CPU.ActiveCfg = Release|Any CPU
         {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}.Release|Any CPU.Build.0 = Release|Any CPU
+        {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Publish|Any CPU.ActiveCfg = Publish|Any CPU
+        {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Publish|Any CPU.Build.0 = Publish|Any CPU
+        {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Release|Any CPU.Build.0 = Release|Any CPU
+        {924DB138-1223-4C99-B6E6-0938A3FA14EF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {924DB138-1223-4C99-B6E6-0938A3FA14EF}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {924DB138-1223-4C99-B6E6-0938A3FA14EF}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
+        {924DB138-1223-4C99-B6E6-0938A3FA14EF}.Publish|Any CPU.Build.0 = Debug|Any CPU
+        {924DB138-1223-4C99-B6E6-0938A3FA14EF}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {924DB138-1223-4C99-B6E6-0938A3FA14EF}.Release|Any CPU.Build.0 = Release|Any CPU
         {38374C62-0263-4FE8-A18C-70FC8132912B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
         {38374C62-0263-4FE8-A18C-70FC8132912B}.Debug|Any CPU.Build.0 = Debug|Any CPU
         {38374C62-0263-4FE8-A18C-70FC8132912B}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
@@ -941,6 +957,8 @@ Global
         {B0B3901E-AF56-432B-8FAA-858468E5D0DF} = {24503383-A8C4-4255-9998-28D70FE8E99A}
         {1D4667B9-9381-4E32-895F-123B94253EE8} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C}
         {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C}
+        {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
+        {924DB138-1223-4C99-B6E6-0938A3FA14EF} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
         {38374C62-0263-4FE8-A18C-70FC8132912B} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
         {2918478E-BC86-4D53-9D01-9C318F80C14F} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C}
         {E06818E3-00A5-41AC-97ED-9491070CDEA1} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}

dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs (new file, +73)

@@ -0,0 +1,73 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Text;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Ollama;

namespace ChatCompletion;

// The following example shows how to use Semantic Kernel with Ollama Chat Completion API
public class Ollama_ChatCompletion(ITestOutputHelper output) : BaseTest(output)
{
    [Fact]
    public async Task ServicePromptAsync()
    {
        Assert.NotNull(TestConfiguration.Ollama.ModelId);

        Console.WriteLine("======== Ollama - Chat Completion ========");

        var chatService = new OllamaChatCompletionService(
            endpoint: new Uri(TestConfiguration.Ollama.Endpoint),
            modelId: TestConfiguration.Ollama.ModelId);

        Console.WriteLine("Chat content:");
        Console.WriteLine("------------------------");

        var chatHistory = new ChatHistory("You are a librarian, expert about books");

        // First user message
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
        this.OutputLastMessage(chatHistory);

        // First assistant message
        var reply = await chatService.GetChatMessageContentAsync(chatHistory);
        chatHistory.Add(reply);
        this.OutputLastMessage(chatHistory);

        // Second user message
        chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion");
        this.OutputLastMessage(chatHistory);

        // Second assistant message
        reply = await chatService.GetChatMessageContentAsync(chatHistory);
        chatHistory.Add(reply);
        this.OutputLastMessage(chatHistory);
    }

    [Fact]
    public async Task ChatPromptAsync()
    {
        Assert.NotNull(TestConfiguration.Ollama.ModelId);

        StringBuilder chatPrompt = new("""
            <message role="system">You are a librarian, expert about books</message>
            <message role="user">Hi, I'm looking for book suggestions</message>
            """);

        var kernel = Kernel.CreateBuilder()
            .AddOllamaChatCompletion(
                endpoint: new Uri(TestConfiguration.Ollama.Endpoint ?? "http://localhost:11434"),
                modelId: TestConfiguration.Ollama.ModelId)
            .Build();

        var reply = await kernel.InvokePromptAsync(chatPrompt.ToString());

        chatPrompt.AppendLine($"<message role=\"assistant\"><![CDATA[{reply}]]></message>");
        chatPrompt.AppendLine("<message role=\"user\">I love history and philosophy, I'd like to learn something new about Greece, any suggestion</message>");

        reply = await kernel.InvokePromptAsync(chatPrompt.ToString());

        Console.WriteLine(reply);
    }
}

dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs (new file, +161)

@@ -0,0 +1,161 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Text;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Ollama;

namespace ChatCompletion;

/// <summary>
/// These examples demonstrate the ways different content types are streamed by Ollama via the chat completion service.
/// </summary>
public class Ollama_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output)
{
    /// <summary>
    /// This example demonstrates chat completion streaming using Ollama.
    /// </summary>
    [Fact]
    public Task StreamChatAsync()
    {
        Assert.NotNull(TestConfiguration.Ollama.ModelId);

        Console.WriteLine("======== Ollama - Chat Completion Streaming ========");

        var chatService = new OllamaChatCompletionService(
            endpoint: new Uri(TestConfiguration.Ollama.Endpoint),
            modelId: TestConfiguration.Ollama.ModelId);

        return this.StartStreamingChatAsync(chatService);
    }

    [Fact]
    public async Task StreamChatPromptAsync()
    {
        Assert.NotNull(TestConfiguration.Ollama.ModelId);

        StringBuilder chatPrompt = new("""
            <message role="system">You are a librarian, expert about books</message>
            <message role="user">Hi, I'm looking for book suggestions</message>
            """);

        var kernel = Kernel.CreateBuilder()
            .AddOllamaChatCompletion(
                endpoint: new Uri(TestConfiguration.Ollama.Endpoint),
                modelId: TestConfiguration.Ollama.ModelId)
            .Build();

        var reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString());

        chatPrompt.AppendLine($"<message role=\"assistant\"><![CDATA[{reply}]]></message>");
        chatPrompt.AppendLine("<message role=\"user\">I love history and philosophy, I'd like to learn something new about Greece, any suggestion</message>");

        reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString());

        Console.WriteLine(reply);
    }

    /// <summary>
    /// This example demonstrates how the chat completion service streams text content.
    /// It shows how to access the response update via StreamingChatMessageContent.Content property
    /// and alternatively via the StreamingChatMessageContent.Items property.
    /// </summary>
    [Fact]
    public async Task StreamTextFromChatAsync()
    {
        Assert.NotNull(TestConfiguration.Ollama.ModelId);

        Console.WriteLine("======== Stream Text from Chat Content ========");

        // Create chat completion service
        var chatService = new OllamaChatCompletionService(
            endpoint: new Uri(TestConfiguration.Ollama.Endpoint),
            modelId: TestConfiguration.Ollama.ModelId);

        // Create chat history with initial system and user messages
        ChatHistory chatHistory = new("You are a librarian, an expert on books.");
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions.");
        chatHistory.AddUserMessage("I love history and philosophy. I'd like to learn something new about Greece, any suggestion?");

        // Start streaming chat based on the chat history
        await foreach (StreamingChatMessageContent chatUpdate in chatService.GetStreamingChatMessageContentsAsync(chatHistory))
        {
            // Access the response update via StreamingChatMessageContent.Content property
            Console.Write(chatUpdate.Content);

            // Alternatively, the response update can be accessed via the StreamingChatMessageContent.Items property
            Console.Write(chatUpdate.Items.OfType<StreamingTextContent>().FirstOrDefault());
        }
    }

    private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService)
    {
        Console.WriteLine("Chat content:");
        Console.WriteLine("------------------------");

        var chatHistory = new ChatHistory("You are a librarian, expert about books");
        this.OutputLastMessage(chatHistory);

        // First user message
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
        this.OutputLastMessage(chatHistory);

        // First assistant message
        await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);

        // Second user message
        chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?");
        this.OutputLastMessage(chatHistory);

        // Second assistant message
        await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);
    }

    private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole)
    {
        bool roleWritten = false;
        string fullMessage = string.Empty;

        await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory))
        {
            if (!roleWritten && chatUpdate.Role.HasValue)
            {
                Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}");
                roleWritten = true;
            }

            if (chatUpdate.Content is { Length: > 0 })
            {
                fullMessage += chatUpdate.Content;
                Console.Write(chatUpdate.Content);
            }
        }

        Console.WriteLine("\n------------------------");
        chatHistory.AddMessage(authorRole, fullMessage);
    }

    private async Task<string> StreamMessageOutputFromKernelAsync(Kernel kernel, string prompt)
    {
        bool roleWritten = false;
        string fullMessage = string.Empty;

        await foreach (var chatUpdate in kernel.InvokePromptStreamingAsync<StreamingChatMessageContent>(prompt))
        {
            if (!roleWritten && chatUpdate.Role.HasValue)
            {
                Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}");
                roleWritten = true;
            }

            if (chatUpdate.Content is { Length: > 0 })
            {
                fullMessage += chatUpdate.Content;
                Console.Write(chatUpdate.Content);
            }
        }

        Console.WriteLine("\n------------------------");
        return fullMessage;
    }
}
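For readers who want the streaming path outside the test harness above, here is a minimal standalone sketch; the endpoint URL and model name are placeholder assumptions, and the call shape mirrors `GetStreamingChatMessageContentsAsync` as used in the sample.

// Standalone streaming sketch (not part of the commit). Assumptions: a local
// Ollama endpoint on the default port and an already-pulled model.
using System;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Ollama;

var chatService = new OllamaChatCompletionService(
    endpoint: new Uri("http://localhost:11434"), // assumed local Ollama endpoint
    modelId: "llama3.1");                        // assumed, already-pulled model

var chatHistory = new ChatHistory("You are a librarian, expert about books");
chatHistory.AddUserMessage("Recommend one book about ancient Greece.");

// Print the reply incrementally as the updates stream in.
await foreach (var update in chatService.GetStreamingChatMessageContentsAsync(chatHistory))
{
    Console.Write(update.Content);
}

Console.WriteLine();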

dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs (+4, -17)
@@ -89,33 +89,20 @@ private async Task StartChatAsync(IChatCompletionService chatGPT)
 
         // First user message
         chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
-        await MessageOutputAsync(chatHistory);
+        OutputLastMessage(chatHistory);
 
         // First bot assistant message
         var reply = await chatGPT.GetChatMessageContentAsync(chatHistory);
         chatHistory.Add(reply);
-        await MessageOutputAsync(chatHistory);
+        OutputLastMessage(chatHistory);
 
         // Second user message
         chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion");
-        await MessageOutputAsync(chatHistory);
+        OutputLastMessage(chatHistory);
 
         // Second bot assistant message
         reply = await chatGPT.GetChatMessageContentAsync(chatHistory);
         chatHistory.Add(reply);
-        await MessageOutputAsync(chatHistory);
-    }
-
-    /// <summary>
-    /// Outputs the last message of the chat history
-    /// </summary>
-    private Task MessageOutputAsync(ChatHistory chatHistory)
-    {
-        var message = chatHistory.Last();
-
-        Console.WriteLine($"{message.Role}: {message.Content}");
-        Console.WriteLine("------------------------");
-
-        return Task.CompletedTask;
+        OutputLastMessage(chatHistory);
     }
 }
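The removed per-sample MessageOutputAsync helper is replaced by OutputLastMessage, which the samples inherit from the shared BaseTest class. That base implementation is not part of this diff; a plausible sketch, mirroring the helper removed above, would be:

// Hypothetical sketch of the shared helper; the real implementation lives on
// BaseTest and is not shown in this commit. SampleBase is a placeholder name.
using System;
using System.Linq;
using Microsoft.SemanticKernel.ChatCompletion;

public abstract class SampleBase
{
    /// <summary>
    /// Outputs the last message of the chat history.
    /// </summary>
    protected void OutputLastMessage(ChatHistory chatHistory)
    {
        var message = chatHistory.Last();

        Console.WriteLine($"{message.Role}: {message.Content}");
        Console.WriteLine("------------------------");
    }
}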

dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs (+1, -12)
@@ -99,7 +99,7 @@ public async Task StreamFunctionCallContentAsync()
         OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
 
         // Create chat history with initial user question
-        ChatHistory chatHistory = new();
+        ChatHistory chatHistory = [];
         chatHistory.AddUserMessage("Hi, what is the current time?");
 
         // Start streaming chat based on the chat history
@@ -162,15 +162,4 @@ private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletio
         Console.WriteLine("\n------------------------");
         chatHistory.AddMessage(authorRole, fullMessage);
     }
-
-    /// <summary>
-    /// Outputs the last message of the chat history
-    /// </summary>
-    private void OutputLastMessage(ChatHistory chatHistory)
-    {
-        var message = chatHistory.Last();
-
-        Console.WriteLine($"{message.Role}: {message.Content}");
-        Console.WriteLine("------------------------");
-    }
 }

dotnet/samples/Concepts/Concepts.csproj (+1)
@@ -67,6 +67,7 @@
     <ProjectReference Include="..\..\src\Connectors\Connectors.Memory.Redis\Connectors.Memory.Redis.csproj" />
     <ProjectReference Include="..\..\src\Connectors\Connectors.Memory.Sqlite\Connectors.Memory.Sqlite.csproj" />
     <ProjectReference Include="..\..\src\Connectors\Connectors.Memory.Weaviate\Connectors.Memory.Weaviate.csproj" />
+    <ProjectReference Include="..\..\src\Connectors\Connectors.Ollama\Connectors.Ollama.csproj" />
     <ProjectReference Include="..\..\src\Connectors\Connectors.OpenAI\Connectors.OpenAI.csproj" />
     <ProjectReference Include="..\..\src\Experimental\Orchestration.Flow\Experimental.Orchestration.Flow.csproj" />
     <ProjectReference Include="..\..\src\Extensions\PromptTemplates.Handlebars\PromptTemplates.Handlebars.csproj" />
