185 changes: 171 additions & 14 deletions shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs
@@ -1,10 +1,28 @@
using System.Diagnostics;
using System.Text;
using System.Text.Json;
using AIShell.Abstraction;

namespace AIShell.Ollama.Agent;

public sealed class OllamaAgent : ILLMAgent
{
private bool _reloadSettings;
private bool _isDisposed;
private string _configRoot;
private Settings _settings;
private FileSystemWatcher _watcher;

/// <summary>
/// The name of the setting file.
/// </summary>
private const string SettingFileName = "ollama.config.json";

/// <summary>
/// Gets the settings.
/// </summary>
internal Settings Settings => _settings;

/// <summary>
/// The name of the agent
/// </summary>
@@ -13,7 +31,7 @@ public sealed class OllamaAgent : ILLMAgent
/// <summary>
/// The description of the agent to be shown at start up
/// </summary>
public string Description => "This is an AI assistant to interact with a language model running locally by utilizing the Ollama CLI tool. Be sure to follow all prerequisites in aka.ms/aish/ollama";
public string Description => "This is an AI assistant to interact with a language model running locally by utilizing the Ollama CLI tool. Be sure to follow all prerequisites in https://github.com/PowerShell/AIShell/tree/main/shell/agents/AIShell.Ollama.Agent";

/// <summary>
/// The company name added to the /like and /dislike verbiage, indicating who the telemetry helps.
Expand Down Expand Up @@ -42,7 +60,15 @@ public sealed class OllamaAgent : ILLMAgent
/// </summary>
public void Dispose()
{
if (_isDisposed)
{
return;
}

GC.SuppressFinalize(this);
_chatService?.Dispose();
_watcher?.Dispose();
_isDisposed = true;
}

/// <summary>
@@ -51,12 +77,31 @@ public void Dispose()
/// <param name="config">Agent configuration for any configuration file and other settings</param>
public void Initialize(AgentConfig config)
{
_chatService = new OllamaChatService();
_configRoot = config.ConfigurationRoot;

SettingFile = Path.Combine(_configRoot, SettingFileName);
_settings = ReadSettings();

if (_settings is null)
{
// Create the setting file with examples to serve as a template for the user to update.
NewExampleSettingFile();
_settings = ReadSettings();
}

_chatService = new OllamaChatService(_settings);

_watcher = new FileSystemWatcher(_configRoot, SettingFileName)
{
NotifyFilter = NotifyFilters.LastWrite,
EnableRaisingEvents = true,
};
_watcher.Changed += OnSettingFileChange;

LegalLinks = new(StringComparer.OrdinalIgnoreCase)
{
["Ollama Docs"] = "https://github.com/ollama/ollama",
["Prerequisites"] = "https://aka.ms/ollama/readme"
["Prerequisites"] = "https://github.com/PowerShell/AIShell/tree/main/shell/agents/AIShell.Ollama.Agent"
};
}

@@ -68,7 +113,7 @@ public void Initialize(AgentConfig config)
/// <summary>
/// Gets the path to the setting file of the agent.
/// </summary>
public string SettingFile { private set; get; } = null;
public string SettingFile { private set; get; }

/// <summary>
/// Gets a value indicating whether the agent accepts a specific user action feedback.
@@ -87,7 +132,16 @@ public void OnUserAction(UserActionPayload actionPayload) {}
/// Refresh the current chat by starting a new chat session.
/// This method allows an agent to reset chat states, interact with user for authentication, print welcome message, and more.
/// </summary>
public Task RefreshChatAsync(IShell shell, bool force) => Task.CompletedTask;
public Task RefreshChatAsync(IShell shell, bool force)
{
if (force)
{
// Reload the setting file if needed.
ReloadSettings();
}

return Task.CompletedTask;
}

/// <summary>
/// Main chat function that takes the users input and passes it to the LLM and renders it.
@@ -103,23 +157,126 @@ public async Task<bool> ChatAsync(string input, IShell shell)
// get the cancellation token
CancellationToken token = shell.CancellationToken;

// Reload the setting file if needed.
ReloadSettings();

if (Process.GetProcessesByName("ollama").Length is 0)
{
host.RenderFullResponse("Please be sure the Ollama is installed and server is running. Check all the prerequisites in the README of this agent are met.");
host.MarkupWarningLine($"[[{Name}]]: Please be sure Ollama is installed and the server is running. Check that all the prerequisites in the README of this agent are met.");
return false;
}

if (!SelfCheck(host))
{
return false;
}

try
{
ResponseData ollamaResponse = await host.RunWithSpinnerAsync(
status: "Thinking ...",
func: async context => await _chatService.GetChatResponseAsync(context, input, token)
).ConfigureAwait(false);

if (ollamaResponse is not null)
{
// render the content
host.RenderFullResponse(ollamaResponse.response);
}
}
catch (HttpRequestException)
{
host.WriteErrorLine($"[{Name}]: Cannot serve the query because the Endpoint or Model is misconfigured. Please update the setting file accordingly.");
return false;
}

ResponseData ollamaResponse = await host.RunWithSpinnerAsync(
status: "Thinking ...",
func: async context => await _chatService.GetChatResponseAsync(context, input, token)
).ConfigureAwait(false);

if (ollamaResponse is not null)
{
// render the content
host.RenderFullResponse(ollamaResponse.response);
}

return true;
}

internal void ReloadSettings()
{
if (_reloadSettings)
{
_reloadSettings = false;
var settings = ReadSettings();
if (settings is null)
{
return;
}

_settings = settings;
_chatService.RefreshSettings(_settings);
}
}

private Settings ReadSettings()
{
Settings settings = null;
FileInfo file = new(SettingFile);

if (file.Exists)
{
try
{
using var stream = file.OpenRead();
var data = JsonSerializer.Deserialize(stream, SourceGenerationContext.Default.ConfigData);
settings = new Settings(data);
}
catch (Exception e)
{
throw new InvalidDataException($"Parsing settings from '{SettingFile}' failed with the following error: {e.Message}", e);
}
}

return settings;
}

private void OnSettingFileChange(object sender, FileSystemEventArgs e)
{
if (e.ChangeType is WatcherChangeTypes.Changed)
{
_reloadSettings = true;
}
}

internal bool SelfCheck(IHost host)
{
var settings = new (string settingValue, string settingName)[]
{
(_settings?.Model, "Model"),
(_settings?.Endpoint, "Endpoint")
};

foreach (var (settingValue, settingName) in settings)
{
if (string.IsNullOrWhiteSpace(settingValue))
{
host.WriteErrorLine($"[{Name}]: {settingName} is undefined. Please declare it in the setting file.");
return false;
}
}

return true;
}

private void NewExampleSettingFile()
{
string SampleContent = $$"""
{
// To use Ollama API service:
// 1. Install Ollama:
// winget install Ollama.Ollama
// 2. Start Ollama API server:
// ollama serve
// 3. Install Ollama model:
// ollama pull phi3

// Declare Ollama model
"Model": "phi3",
// Declare Ollama endpoint
"Endpoint": "http://localhost:11434"
}
""";
File.WriteAllText(SettingFile, sampleContent, Encoding.UTF8);
}
}
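The reload flow above is deliberately deferred: `OnSettingFileChange` only flips `_reloadSettings`, and the file is actually re-read by `ReloadSettings()` at the start of the next chat or forced refresh. Below is a minimal self-contained sketch of that pattern outside of this PR's types (`DeferredReloadDemo` and its members are illustrative names, not part of the agent):

```csharp
using System;
using System.IO;

// Minimal sketch of the deferred-reload pattern used above: the watcher
// callback only sets a flag; the file is re-read lazily on the next access.
internal sealed class DeferredReloadDemo : IDisposable
{
    private readonly string _path;
    private readonly FileSystemWatcher _watcher;
    private volatile bool _reloadPending;
    private string _cachedContent;

    public DeferredReloadDemo(string path)
    {
        _path = path;
        _cachedContent = File.ReadAllText(path);
        _watcher = new FileSystemWatcher(Path.GetDirectoryName(path), Path.GetFileName(path))
        {
            NotifyFilter = NotifyFilters.LastWrite,
            EnableRaisingEvents = true,
        };
        // Cheap handler: no I/O on the watcher thread, just mark the cache dirty.
        _watcher.Changed += (_, e) =>
        {
            if (e.ChangeType is WatcherChangeTypes.Changed)
            {
                _reloadPending = true;
            }
        };
    }

    // Called at the start of each operation, mirroring ReloadSettings().
    public string GetContent()
    {
        if (_reloadPending)
        {
            _reloadPending = false;
            _cachedContent = File.ReadAllText(_path);
        }

        return _cachedContent;
    }

    public void Dispose() => _watcher.Dispose();
}
```

Deferring the read this way also coalesces the multiple `Changed` events editors tend to fire for a single save into one reload.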
22 changes: 14 additions & 8 deletions shell/agents/AIShell.Ollama.Agent/OllamaChatService.cs
@@ -7,11 +7,7 @@ namespace AIShell.Ollama.Agent;

internal class OllamaChatService : IDisposable
{
/// <summary>
/// Ollama endpoint to call to generate a response
/// </summary>
internal const string Endpoint = "http://localhost:11434/api/generate";

private Settings _settings;
/// <summary>
/// Http client
/// </summary>
@@ -20,11 +16,21 @@ internal class OllamaChatService : IDisposable
/// <summary>
/// Initialization method to initialize the http client
/// </summary>
internal OllamaChatService()
internal OllamaChatService(Settings settings)
{
_settings = settings;
_client = new HttpClient();
}

/// <summary>
/// Refresh the settings used by the chat service.
/// </summary>
/// <param name="settings">The updated settings to apply.</param>
internal void RefreshSettings(Settings settings)
{
_settings = settings;
}

/// <summary>
/// Dispose of the http client
/// </summary>
@@ -43,15 +49,15 @@ private HttpRequestMessage PrepareForChat(string input)
// Main data to send to the endpoint
var requestData = new Query
{
model = "phi3",
model = _settings.Model,
prompt = input,
stream = false
};

var json = JsonSerializer.Serialize(requestData);

var data = new StringContent(json, Encoding.UTF8, "application/json");
var request = new HttpRequestMessage(HttpMethod.Post, Endpoint) { Content = data };
var request = new HttpRequestMessage(HttpMethod.Post, $"{_settings.Endpoint}/api/generate") { Content = data };

return request;
}
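With the endpoint now configurable, `PrepareForChat` targets `{Endpoint}/api/generate`. For illustration, here is a standalone sketch of the equivalent request/response round-trip against Ollama's generate API (the `OllamaProbe` helper is an illustrative name, not part of this PR; it assumes a local server with the model already pulled):

```csharp
using System.Net.Http;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;

// Standalone sketch of the request PrepareForChat builds: POST {Endpoint}/api/generate
// with "stream": false, then read the "response" field from the returned JSON.
internal static class OllamaProbe
{
    public static async Task<string> AskAsync(string endpoint, string model, string prompt)
    {
        using var client = new HttpClient();
        string json = JsonSerializer.Serialize(new { model, prompt, stream = false });
        using var content = new StringContent(json, Encoding.UTF8, "application/json");

        using HttpResponseMessage response = await client.PostAsync($"{endpoint.TrimEnd('/')}/api/generate", content);
        response.EnsureSuccessStatusCode();

        using JsonDocument doc = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        return doc.RootElement.GetProperty("response").GetString();
    }
}

// Usage (assumes `ollama serve` is running and `phi3` is pulled):
// string reply = await OllamaProbe.AskAsync("http://localhost:11434", "phi3", "Why is the sky blue?");
```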
30 changes: 20 additions & 10 deletions shell/agents/AIShell.Ollama.Agent/README.md
@@ -5,24 +5,34 @@ this agent you need to have Ollama installed and running.

## Pre-requisites to using the agent

- Install [Ollama](https://github.com/ollama/ollama)
- Install a [Ollama model](https://github.com/ollama/ollama?tab=readme-ov-file#model-library), we
suggest using the `phi3` model as it is set as the default model in the code
- Install [Ollama](https://github.com/ollama/ollama)
- Install an [Ollama model](https://github.com/ollama/ollama?tab=readme-ov-file#model-library); we suggest using the `phi3` model, as it is the default model in the example setting file
- [Start the Ollama API server](https://github.com/ollama/ollama?tab=readme-ov-file#start-ollama)

## Configuration

Currently to change the model you will need to modify the query in the code in the
`OllamaChatService` class. The default model is `phi3`.

The default endpoint is `http://localhost:11434/api/generate` with `11434` being the default port. This can be changed in the code
and eventually will be added to a configuration file.
To configure the agent, run `/agent config ollama` to open the setting file in your default editor, and then update the file based on the following example.

```json
{
// To use Ollama API service:
// 1. Install Ollama:
// winget install Ollama.Ollama
// 2. Start Ollama API server:
// ollama serve
// 3. Install Ollama model:
// ollama pull phi3

// Declare Ollama model
"Model": "phi3",
// Declare Ollama endpoint
"Endpoint": "http://localhost:11434"
}
```
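After saving changes to this file, the agent picks them up without a restart: a `FileSystemWatcher` on the setting file flags the change, and the settings are re-read before the next query is served.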

## Known Limitations

- There is no history shared across queries, so the model will not be able to remember previous
  queries
- Streaming is currently not supported; if you change the `stream` value to `true` in the data sent
  to the API, it will not work
- Configuration is currently hard coded in the code and will be moved to a configuration file in the
future
38 changes: 38 additions & 0 deletions shell/agents/AIShell.Ollama.Agent/Settings.cs
@@ -0,0 +1,38 @@
using System.Text.Json;
using System.Text.Json.Serialization;

namespace AIShell.Ollama.Agent;

internal class Settings
{
private string _model;
private string _endpoint;

public string Model => _model;
public string Endpoint => _endpoint;

public Settings(ConfigData configData)
{
_model = configData?.Model;
_endpoint = configData?.Endpoint?.TrimEnd('/');
}
}

internal class ConfigData
{
public string Model { get; set; }
public string Endpoint { get; set; }
}

/// <summary>
/// Use source generation to serialize and deserialize the setting file.
/// Both metadata-based and serialization-optimization modes are used to gain the best performance.
/// </summary>
[JsonSourceGenerationOptions(
WriteIndented = true,
AllowTrailingCommas = true,
PropertyNameCaseInsensitive = true,
ReadCommentHandling = JsonCommentHandling.Skip,
UseStringEnumConverter = true)]
[JsonSerializable(typeof(ConfigData))]
internal partial class SourceGenerationContext : JsonSerializerContext { }
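For reference, here is a small sketch of how this context consumes the commented example file that `NewExampleSettingFile` writes (assuming the snippet runs inside the same assembly, since these types are `internal`):

```csharp
using System;
using System.Text.Json;

// Sketch: parse the example setting file content with the source-generated
// context. Comments and trailing commas are tolerated because of the
// JsonSourceGenerationOptions declared on SourceGenerationContext.
string raw = """
{
    // Declare Ollama model
    "Model": "phi3",
    // Declare Ollama endpoint
    "Endpoint": "http://localhost:11434/",
}
""";

ConfigData data = JsonSerializer.Deserialize(raw, SourceGenerationContext.Default.ConfigData);
Settings settings = new(data);

// The trailing '/' on the endpoint is trimmed by the Settings constructor.
Console.WriteLine($"{settings.Model} @ {settings.Endpoint}"); // phi3 @ http://localhost:11434
```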