diff --git a/OllamaSharp.sln b/OllamaSharp.sln
index e6b1364..52976f6 100644
--- a/OllamaSharp.sln
+++ b/OllamaSharp.sln
@@ -12,6 +12,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution
.editorconfig = .editorconfig
EndProjectSection
EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "OllamaApiConsole", "demo\OllamaApiConsole.csproj", "{755670DB-33A4-441A-99C2-642A04D08953}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -26,6 +28,10 @@ Global
{1527F300-40C7-49EB-A6FD-D21B20BA5BC1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1527F300-40C7-49EB-A6FD-D21B20BA5BC1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{1527F300-40C7-49EB-A6FD-D21B20BA5BC1}.Release|Any CPU.Build.0 = Release|Any CPU
+ {755670DB-33A4-441A-99C2-642A04D08953}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {755670DB-33A4-441A-99C2-642A04D08953}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {755670DB-33A4-441A-99C2-642A04D08953}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {755670DB-33A4-441A-99C2-642A04D08953}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
diff --git a/demo/Demos/ChatConsole.cs b/demo/Demos/ChatConsole.cs
new file mode 100644
index 0000000..ab6ce7e
--- /dev/null
+++ b/demo/Demos/ChatConsole.cs
@@ -0,0 +1,55 @@
+using OllamaSharp;
+using Spectre.Console;
+
+namespace OllamaApiConsole.Demos;
+
+public class ChatConsole(IOllamaApiClient ollama) : OllamaConsole(ollama)
+{
+ public override async Task Run()
+ {
+ AnsiConsole.Write(new Rule("Chat").LeftJustified());
+ AnsiConsole.WriteLine();
+
+ Ollama.SelectedModel = await SelectModel("Select a model you want to chat with:");
+
+ if (!string.IsNullOrEmpty(Ollama.SelectedModel))
+ {
+ var keepChatting = true;
+ var systemPrompt = ReadInput($"Define a system prompt [{HintTextColor}](optional)[/]");
+
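+ // the outer loop restarts with a fresh chat whenever the user enters /new; the inner loop reads messages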
+ do
+ {
+ AnsiConsole.MarkupLine("");
+ AnsiConsole.MarkupLine($"You are talking to [{AccentTextColor}]{Ollama.SelectedModel}[/] now.");
+ WriteChatInstructionHint();
+
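+ // the Chat instance keeps the message history and sends it along with every new message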
+ var chat = new Chat(Ollama, systemPrompt);
+
+ string message;
+
+ do
+ {
+ AnsiConsole.WriteLine();
+ message = ReadInput();
+
+ if (message.Equals(EXIT_COMMAND, StringComparison.OrdinalIgnoreCase))
+ {
+ keepChatting = false;
+ break;
+ }
+
+ if (message.Equals(START_NEW_COMMAND, StringComparison.OrdinalIgnoreCase))
+ {
+ keepChatting = true;
+ break;
+ }
+
+ await foreach (var answerToken in chat.SendAsync(message))
+ AnsiConsole.MarkupInterpolated($"[{AiTextColor}]{answerToken}[/]");
+
+ AnsiConsole.WriteLine();
+ } while (!string.IsNullOrEmpty(message));
+ } while (keepChatting);
+ }
+ }
+}
\ No newline at end of file
diff --git a/demo/Demos/ImageChatConsole.cs b/demo/Demos/ImageChatConsole.cs
new file mode 100644
index 0000000..99e20e9
--- /dev/null
+++ b/demo/Demos/ImageChatConsole.cs
@@ -0,0 +1,117 @@
+using System.Text.RegularExpressions;
+using OllamaSharp;
+using Spectre.Console;
+
+namespace OllamaApiConsole.Demos;
+
+public partial class ImageChatConsole(IOllamaApiClient ollama) : OllamaConsole(ollama)
+{
+ public override async Task Run()
+ {
+ AnsiConsole.Write(new Rule("Image chat").LeftJustified());
+ AnsiConsole.WriteLine();
+
+ Ollama.SelectedModel = await SelectModel("Select a model you want to chat with:");
+
+ if (!string.IsNullOrEmpty(Ollama.SelectedModel))
+ {
+ var keepChatting = true;
+ var systemPrompt = ReadInput($"Define a system prompt [{HintTextColor}](optional)[/]");
+
+ do
+ {
+ AnsiConsole.MarkupLine("");
+ AnsiConsole.MarkupLine($"You are talking to [{AccentTextColor}]{Ollama.SelectedModel}[/] now.");
+ AnsiConsole.MarkupLine($"[{HintTextColor}]To send an image, simply enter its full filename like \"[{AccentTextColor}]c:/image.jpg[/]\"[/]");
+ WriteChatInstructionHint();
+
+ var chat = new Chat(Ollama, systemPrompt);
+
+ string message;
+
+ do
+ {
+ AnsiConsole.WriteLine();
+ message = ReadInput();
+
+ if (message.Equals(EXIT_COMMAND, StringComparison.OrdinalIgnoreCase))
+ {
+ keepChatting = false;
+ break;
+ }
+
+ if (message.Equals(START_NEW_COMMAND, StringComparison.OrdinalIgnoreCase))
+ {
+ keepChatting = true;
+ break;
+ }
+
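+ // collect everything in the message that looks like a Windows or Unix file path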
+ var imagePaths = WindowsFileRegex().Matches(message).Where(m => !string.IsNullOrEmpty(m.Value))
+ .Union(UnixFileRegex().Matches(message).Where(m => !string.IsNullOrEmpty(m.Value)))
+ .Select(m => m.Value)
+ .ToArray();
+
+ if (imagePaths.Length > 0)
+ {
+ byte[][] imageBytes;
+
+ try
+ {
+ imageBytes = imagePaths.Select(File.ReadAllBytes).ToArray();
+ }
+ catch (IOException ex)
+ {
+ AnsiConsole.MarkupLineInterpolated($"Could not load your {(imagePaths.Length == 1 ? "image" : "images")}:");
+ AnsiConsole.MarkupLineInterpolated($"[{ErrorTextColor}]{Markup.Escape(ex.Message)}[/]");
+ AnsiConsole.MarkupLine("Please try again");
+ continue;
+ }
+
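+ // the Ollama API expects attached images as base64 encoded strings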
+ var imagesBase64 = imageBytes.Select(Convert.ToBase64String);
+
+ // remove paths from the message
+ foreach (var path in imagePaths)
+ message = message.Replace(path, "");
+
+ message += Environment.NewLine + Environment.NewLine + $"(the user attached {imagePaths.Length} {(imagePaths.Length == 1 ? "image" : "images")})";
+
+ foreach (var consoleImage in imageBytes.Select(bytes => new CanvasImage(bytes)))
+ {
+ consoleImage.MaxWidth = 40;
+ AnsiConsole.Write(consoleImage);
+ }
+
+ AnsiConsole.WriteLine();
+ if (imagePaths.Length == 1)
+ AnsiConsole.MarkupLine($"[{HintTextColor}]The image was scaled down for the console only, the model gets the full version.[/]");
+ else
+ AnsiConsole.MarkupLine($"[{HintTextColor}]The images were scaled down for the console only, the model gets full versions.[/]");
+ AnsiConsole.WriteLine();
+
+ await foreach (var answerToken in chat.SendAsync(message, [], imagesBase64))
+ AnsiConsole.MarkupInterpolated($"[{AiTextColor}]{answerToken}[/]");
+ }
+ else
+ {
+ await foreach (var answerToken in chat.SendAsync(message))
+ AnsiConsole.MarkupInterpolated($"[{AiTextColor}]{answerToken}[/]");
+ }
+
+ AnsiConsole.WriteLine();
+ } while (!string.IsNullOrEmpty(message));
+ } while (keepChatting);
+ }
+ }
+
+ /// <summary>
+ /// https://stackoverflow.com/a/24703223/704281
+ /// </summary>
+ [GeneratedRegex("\\b[a-zA-Z]:[\\\\/](?:[^<>:\"/\\\\|?*\\n\\r]+[\\\\/])*[^<>:\"/\\\\|?*\\n\\r]+\\.\\w+\\b")]
+ private static partial Regex WindowsFileRegex();
+
+ /// <summary>
+ /// https://stackoverflow.com/a/169021/704281
+ /// </summary>
+ [GeneratedRegex("(.+)\\/([^\\/]+)")]
+ private static partial Regex UnixFileRegex();
+}
\ No newline at end of file
diff --git a/demo/Demos/ModelManagerConsole.cs b/demo/Demos/ModelManagerConsole.cs
new file mode 100644
index 0000000..93c3992
--- /dev/null
+++ b/demo/Demos/ModelManagerConsole.cs
@@ -0,0 +1,168 @@
+using OllamaSharp;
+using OllamaSharp.Models;
+using Spectre.Console;
+
+namespace OllamaApiConsole.Demos;
+
+public class ModelManagerConsole(IOllamaApiClient ollama) : OllamaConsole(ollama)
+{
+ public override async Task Run()
+ {
+ AnsiConsole.Write(new Rule("Model manager").LeftJustified());
+ AnsiConsole.WriteLine();
+
+ string command;
+ var exit = false;
+
+ do
+ {
+ command = AnsiConsole.Prompt(
+ new SelectionPrompt<string>()
+ .PageSize(10)
+ .Title("What do you want to do?")
+ .AddChoices("..", "Copy model", "Create model", "Delete model", "Generate embeddings", "Show model information", "List local models", "Pull model", "Push model"));
+
+ switch (command)
+ {
+ case "Copy model":
+ await CopyModel();
+ break;
+
+ case "Create model":
+ await CreateModel();
+ break;
+
+ case "Delete model":
+ await DeleteModel();
+ break;
+
+ case "Generate embeddings":
+ await GenerateEmbedding();
+ break;
+
+ case "Show model information":
+ await ShowModelInformation();
+ break;
+
+ case "List local models":
+ await ListLocalModels();
+ break;
+
+ case "Pull model":
+ await PullModel();
+ break;
+
+ case "Push model":
+ await PushModel();
+ break;
+
+ default:
+ exit = true;
+ break;
+ }
+
+ Console.WriteLine();
+ } while (!exit);
+ }
+
+ private async Task CopyModel()
+ {
+ var source = await SelectModel("Which model should be copied?");
+ if (!string.IsNullOrEmpty(source))
+ {
+ var destination = ReadInput($"Enter a name for the copy of [{AccentTextColor}]{source}[/]:");
+ await Ollama.CopyModelAsync(source, destination);
+ }
+ }
+
+ private async Task CreateModel()
+ {
+ var createName = ReadInput("Enter a name for your new model:");
+ var createModelFileContent = ReadInput("Enter the contents for the model file:", $"[{HintTextColor}]See [/][{AccentTextColor}][link]https://ollama.ai/library[/][/][{HintTextColor}] for available models[/]");
+ await foreach (var status in Ollama.CreateModelAsync(createName, createModelFileContent))
+ AnsiConsole.MarkupLineInterpolated($"{status?.Status ?? ""}");
+ }
+
+ private async Task DeleteModel()
+ {
+ var deleteModel = await SelectModel("Which model do you want to delete?");
+ if (!string.IsNullOrEmpty(deleteModel))
+ await Ollama.DeleteModelAsync(deleteModel);
+ }
+
+ private async Task GenerateEmbedding()
+ {
+ var embedModel = await SelectModel("Which model should be used to create embeddings?");
+ if (!string.IsNullOrEmpty(embedModel))
+ {
+ var embedContent = ReadInput("Enter a string to embed:");
+ Ollama.SelectedModel = embedModel;
+ var embedResponse = await Ollama.EmbedAsync(embedContent);
+ AnsiConsole.MarkupLineInterpolated($"[{AiTextColor}]{string.Join(", ", embedResponse.Embeddings[0])}[/]");
+ }
+ }
+
+ private async Task ShowModelInformation()
+ {
+ var infoModel = await SelectModel("Which model do you want to retrieve information for?");
+ if (!string.IsNullOrEmpty(infoModel))
+ {
+ var infoResponse = await Ollama.ShowModelAsync(infoModel);
+ PropertyConsoleRenderer.Render(infoResponse);
+ }
+ }
+
+ private async Task ListLocalModels()
+ {
+ var models = await Ollama.ListLocalModelsAsync();
+ foreach (var model in models.OrderBy(m => m.Name))
+ AnsiConsole.MarkupLineInterpolated($"[{AiTextColor}]{model.Name}[/]");
+ }
+
+ private async Task PullModel()
+ {
+ var pullModel = ReadInput("Enter the name of the model you want to pull:", $"[{HintTextColor}]See [/][{AccentTextColor}][link]https://ollama.ai/library[/][/][{HintTextColor}] for available models[/]");
+
+ await AnsiConsole.Progress().StartAsync(async context =>
+ {
+ ProgressTask? task = null;
+ await foreach (var status in Ollama.PullModelAsync(pullModel))
+ UpdateProgressTaskByStatus(context, ref task, status);
+ task?.StopTask();
+ });
+ }
+
+ private async Task PushModel()
+ {
+ var pushModel = ReadInput("Which model do you want to push?");
+ await foreach (var status in Ollama.PushModelAsync(pushModel))
+ AnsiConsole.MarkupLineInterpolated($"{status?.Status ?? ""}");
+ }
+
+ private static void UpdateProgressTaskByStatus(ProgressContext context, ref ProgressTask? task, PullModelResponse? modelResponse)
+ {
+ if (modelResponse is null)
+ return;
+
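+ // start a new progress task whenever the pull status text changes and finish the previous one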
+ if (modelResponse.Status != task?.Description)
+ {
+ task?.StopTask();
+ task = context.AddTask(modelResponse.Status);
+ }
+
+ task.Increment(modelResponse.Percent - task.Value);
+ }
+
+ public static class PropertyConsoleRenderer
+ {
+ public static void Render(object o)
+ {
+ foreach (var pi in o.GetType().GetProperties())
+ {
+ AnsiConsole.MarkupLineInterpolated($"[{OllamaConsole.AccentTextColor}][underline][bold]{pi.Name}:[/][/][/]");
+ AnsiConsole.MarkupLineInterpolated($"[{OllamaConsole.AccentTextColor}]{pi.GetValue(o)?.ToString() ?? ""}[/]");
+ AnsiConsole.WriteLine();
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/demo/Demos/ToolConsole.cs b/demo/Demos/ToolConsole.cs
new file mode 100644
index 0000000..6ee051f
--- /dev/null
+++ b/demo/Demos/ToolConsole.cs
@@ -0,0 +1,188 @@
+using System.Reflection;
+using OllamaSharp;
+using OllamaSharp.Models.Chat;
+using OllamaSharp.Models.Exceptions;
+using Spectre.Console;
+
+namespace OllamaApiConsole.Demos;
+
+public class ToolConsole(IOllamaApiClient ollama) : OllamaConsole(ollama)
+{
+ public override async Task Run()
+ {
+ AnsiConsole.Write(new Rule("Tool chat").LeftJustified());
+ AnsiConsole.WriteLine();
+
+ Ollama.SelectedModel = await SelectModel("Select a model you want to chat with:");
+
+ if (!string.IsNullOrEmpty(Ollama.SelectedModel))
+ {
+ var keepChatting = true;
+ var systemPrompt = ReadInput($"Define a system prompt [{HintTextColor}](optional)[/]");
+
+ do
+ {
+ AnsiConsole.MarkupLine("");
+ AnsiConsole.MarkupLineInterpolated($"You are talking to [{AccentTextColor}]{Ollama.SelectedModel}[/] now.");
+ AnsiConsole.MarkupLine("When asked for the weather or the news for a given location, it will try to use a predefined tool.");
+ AnsiConsole.MarkupLine("If any tool is used, the intended usage information is printed.");
+ WriteChatInstructionHint();
+
+ var chat = new Chat(Ollama, systemPrompt);
+
+ string message;
+
+ do
+ {
+ AnsiConsole.WriteLine();
+ message = ReadInput();
+
+ if (message.Equals(EXIT_COMMAND, StringComparison.OrdinalIgnoreCase))
+ {
+ keepChatting = false;
+ break;
+ }
+
+ if (message.Equals(START_NEW_COMMAND, StringComparison.OrdinalIgnoreCase))
+ {
+ keepChatting = true;
+ break;
+ }
+
+ try
+ {
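+ // hand the tool definitions to the model so it can request a tool call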
+ await foreach (var answerToken in chat.SendAsync(message, GetTools()))
+ AnsiConsole.MarkupInterpolated($"[{AiTextColor}]{answerToken}[/]");
+ }
+ catch (OllamaException ex)
+ {
+ AnsiConsole.MarkupLineInterpolated($"[{ErrorTextColor}]{ex.Message}[/]");
+ }
+
+ var toolCalls = chat.Messages.LastOrDefault()?.ToolCalls?.ToArray() ?? [];
+ if (toolCalls.Any())
+ {
+ AnsiConsole.MarkupLine("\n[purple]Tools used:[/]");
+
+ foreach (var function in toolCalls.Where(t => t.Function != null).Select(t => t.Function))
+ {
+ AnsiConsole.MarkupLineInterpolated($" - [purple]{function!.Name}[/]");
+
+ AnsiConsole.MarkupLineInterpolated($" - [purple]parameters[/]");
+
+ if (function?.Arguments is not null)
+ {
+ foreach (var argument in function.Arguments)
+ AnsiConsole.MarkupLineInterpolated($" - [purple]{argument.Key}[/]: [purple]{argument.Value}[/]");
+ }
+
+ if (function is not null)
+ {
+ var result = FunctionHelper.ExecuteFunction(function);
+ AnsiConsole.MarkupLineInterpolated($" - [purple]return value[/]: [purple]\"{result}\"[/]");
+
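+ // send the tool result back to the model as a tool message so it can phrase the final answer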
+ await foreach (var answerToken in chat.SendAsAsync(ChatRole.Tool, result, GetTools()))
+ AnsiConsole.MarkupInterpolated($"[{AiTextColor}]{answerToken}[/]");
+ }
+ }
+ }
+
+ AnsiConsole.WriteLine();
+ } while (!string.IsNullOrEmpty(message));
+ } while (keepChatting);
+ }
+ }
+
+ private static IEnumerable<Tool> GetTools() => [new WeatherTool(), new NewsTool()];
+
+ private sealed class WeatherTool : Tool
+ {
+ public WeatherTool()
+ {
+ Function = new Function
+ {
+ Description = "Get the current weather for a location",
+ Name = "get_current_weather",
+ Parameters = new Parameters
+ {
+ Properties = new Dictionary<string, Properties>
+ {
+ ["location"] = new() { Type = "string", Description = "The location to get the weather for, e.g. San Francisco, CA" },
+ ["format"] = new() { Type = "string", Description = "The format to return the weather in, e.g. 'celsius' or 'fahrenheit'", Enum = ["celsius", "fahrenheit"] },
+ },
+ Required = ["location", "format"],
+ }
+ };
+ Type = "function";
+ }
+ }
+
+ private sealed class NewsTool : Tool
+ {
+ public NewsTool()
+ {
+ Function = new Function
+ {
+ Description = "Get the current news for a location",
+ Name = "get_current_news",
+ Parameters = new Parameters
+ {
+ Properties = new Dictionary<string, Properties>
+ {
+ ["location"] = new() { Type = "string", Description = "The location to get the news for, e.g. San Francisco, CA" },
+ ["category"] = new() { Type = "string", Description = "The optional category to filter the news, can be left empty to return all.", Enum = ["politics", "economy", "sports", "entertainment", "health", "technology", "science"] },
+ },
+ Required = ["location"],
+ }
+ };
+ Type = "function";
+ }
+ }
+
+ private static class FunctionHelper
+ {
+ public static string ExecuteFunction(Message.Function function)
+ {
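+ // look up the delegate registered under the tool name and invoke it with the model's arguments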
+ var toolFunction = _availableFunctions[function.Name!];
+ var parameters = MapParameters(toolFunction.Method, function.Arguments!);
+ return toolFunction.DynamicInvoke(parameters)?.ToString()!;
+ }
+
+ private static readonly Dictionary<string, Func<string, string, string>> _availableFunctions = new()
+ {
+ ["get_current_weather"] = (location, format) =>
+ {
+ var (temperature, unit) = format switch
+ {
+ "fahrenheit" => (Random.Shared.Next(23, 104), "°F"),
+ _ => (Random.Shared.Next(-5, 40), "°C"),
+ };
+
+ return $"{temperature} {unit} in {location}";
+ },
+ ["get_current_news"] = (location, category) =>
+ {
+ category = string.IsNullOrEmpty(category) ? "all" : category;
+ return $"Could not find news for {location} (category: {category}).";
+ }
+ };
+
+ private static object[] MapParameters(MethodBase method, IDictionary<string, object?> namedParameters)
+ {
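+ // match the model's named arguments to the delegate's parameters by name;
+ // parameters the model did not supply stay Type.Missing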
+ var paramNames = method.GetParameters().Select(p => p.Name).ToArray();
+ var parameters = new object[paramNames.Length];
+
+ for (var i = 0; i < parameters.Length; ++i)
+ parameters[i] = Type.Missing;
+
+ foreach (var (paramName, value) in namedParameters)
+ {
+ var paramIndex = Array.IndexOf(paramNames, paramName);
+ if (paramIndex >= 0)
+ parameters[paramIndex] = value;
+ }
+
+ return parameters;
+ }
+ }
+}
\ No newline at end of file
diff --git a/demo/OllamaApiConsole.csproj b/demo/OllamaApiConsole.csproj
new file mode 100644
index 0000000..c336098
--- /dev/null
+++ b/demo/OllamaApiConsole.csproj
@@ -0,0 +1,19 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+	<PropertyGroup>
+		<OutputType>Exe</OutputType>
+		<TargetFramework>net8.0</TargetFramework>
+		<ImplicitUsings>enable</ImplicitUsings>
+		<Nullable>enable</Nullable>
+	</PropertyGroup>
+
+	<ItemGroup>
+		<PackageReference Include="Spectre.Console" />
+		<PackageReference Include="Spectre.Console.ImageSharp" />
+	</ItemGroup>
+
+	<ItemGroup>
+		<ProjectReference Include="..\src\OllamaSharp.csproj" />
+	</ItemGroup>
+
+</Project>
diff --git a/demo/OllamaConsole.cs b/demo/OllamaConsole.cs
new file mode 100644
index 0000000..0c0488e
--- /dev/null
+++ b/demo/OllamaConsole.cs
@@ -0,0 +1,98 @@
+using System.Text;
+using OllamaSharp;
+using Spectre.Console;
+
+namespace OllamaApiConsole;
+
+public abstract class OllamaConsole(IOllamaApiClient ollama)
+{
+ private const char MULTILINE_OPEN = '[';
+
+ private const char MULTILINE_CLOSE = ']';
+
+ public static string HintTextColor { get; } = "gray";
+
+ public static string AccentTextColor { get; } = "blue";
+
+ public static string WarningTextColor { get; } = "yellow";
+
+ public static string ErrorTextColor { get; } = "red";
+
+ public static string AiTextColor { get; } = "cyan";
+
+ public static string START_NEW_COMMAND { get; } = "/new";
+
+ public static string EXIT_COMMAND { get; } = "/exit";
+
+ public IOllamaApiClient Ollama { get; } = ollama ?? throw new ArgumentNullException(nameof(ollama));
+
+ public abstract Task Run();
+
+ public static string ReadInput(string prompt = "", string additionalInformation = "")
+ {
+ if (!string.IsNullOrEmpty(prompt))
+ AnsiConsole.MarkupLine(prompt);
+
+ if (!string.IsNullOrEmpty(additionalInformation))
+ AnsiConsole.MarkupLine(additionalInformation);
+
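+	// input that starts with [ is read as multiline until a line ends with ]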
+ var builder = new StringBuilder();
+ bool? isMultiLineActive = null;
+ var needsCleaning = false;
+
+ while (!isMultiLineActive.HasValue || isMultiLineActive.Value)
+ {
+ AnsiConsole.Markup($"[{AccentTextColor}]> [/]");
+ var input = Console.ReadLine() ?? "";
+
+ if (!isMultiLineActive.HasValue)
+ {
+ isMultiLineActive = input.TrimStart().StartsWith(MULTILINE_OPEN);
+ needsCleaning = isMultiLineActive.GetValueOrDefault();
+ }
+
+ builder.AppendLine(input);
+
+ if (input.TrimEnd().EndsWith(MULTILINE_CLOSE) && isMultiLineActive.GetValueOrDefault())
+ isMultiLineActive = false;
+ }
+
+ if (needsCleaning)
+ return builder.ToString().Trim().TrimStart(MULTILINE_OPEN).TrimEnd(MULTILINE_CLOSE);
+
+ return builder.ToString().TrimEnd();
+ }
+
+ protected void WriteChatInstructionHint()
+ {
+ AnsiConsole.MarkupLine($"[{HintTextColor}]Enter [{AccentTextColor}]{START_NEW_COMMAND}[/] to start over or [{AccentTextColor}]{EXIT_COMMAND}[/] to leave.[/]");
+ AnsiConsole.MarkupLine($"[{HintTextColor}]Begin with [{AccentTextColor}]{Markup.Escape(MULTILINE_OPEN.ToString())}[/] to start multiline input. Sumbmit it by ending with [{AccentTextColor}]{Markup.Escape(MULTILINE_CLOSE.ToString())}[/].[/]");
+ }
+
+ protected async Task<string> SelectModel(string prompt, string additionalInformation = "")
+ {
+ const string BACK = "..";
+
+ var models = await Ollama.ListLocalModelsAsync();
+ var modelsWithBackChoice = models.OrderBy(m => m.Name).Select(m => m.Name).ToList();
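+ // a single model is selected automatically; otherwise ".." lets the user go back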
+ if (modelsWithBackChoice.Count == 1)
+ {
+ return modelsWithBackChoice[0];
+ }
+ else
+ {
+ modelsWithBackChoice.Insert(0, BACK);
+
+ if (!string.IsNullOrEmpty(additionalInformation))
+ AnsiConsole.MarkupLine(additionalInformation);
+
+ var answer = AnsiConsole.Prompt(
+ new SelectionPrompt<string>()
+ .PageSize(10)
+ .Title(prompt)
+ .AddChoices(modelsWithBackChoice));
+
+ return answer == BACK ? "" : answer;
+ }
+ }
+}
diff --git a/demo/Program.cs b/demo/Program.cs
new file mode 100644
index 0000000..4268c2c
--- /dev/null
+++ b/demo/Program.cs
@@ -0,0 +1,89 @@
+using OllamaApiConsole;
+using OllamaApiConsole.Demos;
+using OllamaSharp;
+using Spectre.Console;
+
+Console.ResetColor();
+
+AnsiConsole.Write(new Rule("OllamaSharp Api Console").LeftJustified());
+AnsiConsole.WriteLine();
+
+OllamaApiClient? ollama = null;
+var connected = false;
+
+do
+{
+ AnsiConsole.MarkupLine($"Enter the Ollama [{OllamaConsole.AccentTextColor}]machine name[/] or [{OllamaConsole.AccentTextColor}]endpoint url[/]");
+
+ var url = OllamaConsole.ReadInput();
+
+ if (string.IsNullOrWhiteSpace(url))
+ url = "http://localhost:11434";
+
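+ // fall back to http and the default Ollama port 11434 if scheme or port are missing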
+ if (!url.StartsWith("http"))
+ url = "http://" + url;
+
+ if (url.IndexOf(':', 5) < 0)
+ url += ":11434";
+
+ var uri = new Uri(url);
+ Console.WriteLine($"Connecting to {uri} ...");
+
+ try
+ {
+ ollama = new OllamaApiClient(url);
+ connected = await ollama.IsRunningAsync();
+
+ var models = await ollama.ListLocalModelsAsync();
+ if (!models.Any())
+ AnsiConsole.MarkupLineInterpolated($"[{OllamaConsole.WarningTextColor}]Your Ollama instance does not provide any models :([/]");
+ }
+ catch (Exception ex)
+ {
+ AnsiConsole.MarkupLineInterpolated($"[{OllamaConsole.ErrorTextColor}]{Markup.Escape(ex.Message)}[/]");
+ AnsiConsole.WriteLine();
+ }
+} while (!connected);
+
+string demo;
+
+do
+{
+ AnsiConsole.Clear();
+
+ demo = AnsiConsole.Prompt(
+ new SelectionPrompt<string>()
+ .PageSize(10)
+ .Title("What demo do you want to run?")
+ .AddChoices("Chat", "Image chat", "Tool chat", "Model manager", "Exit"));
+
+ AnsiConsole.Clear();
+
+ try
+ {
+ switch (demo)
+ {
+ case "Chat":
+ await new ChatConsole(ollama!).Run();
+ break;
+
+ case "Image chat":
+ await new ImageChatConsole(ollama!).Run();
+ break;
+
+ case "Tool chat":
+ await new ToolConsole(ollama!).Run();
+ break;
+
+ case "Model manager":
+ await new ModelManagerConsole(ollama!).Run();
+ break;
+ }
+ }
+ catch (Exception ex)
+ {
+ AnsiConsole.MarkupLine($"An error occurred. Press [{OllamaConsole.AccentTextColor}]Return[/] to start over.");
+ AnsiConsole.MarkupLineInterpolated($"[{OllamaConsole.ErrorTextColor}]{Markup.Escape(ex.Message)}[/]");
+ Console.ReadLine();
+ }
+} while (demo != "Exit");
diff --git a/src/OllamaApiClientExtensions.cs b/src/OllamaApiClientExtensions.cs
index 26b606d..7d1dc13 100644
--- a/src/OllamaApiClientExtensions.cs
+++ b/src/OllamaApiClientExtensions.cs
@@ -17,7 +17,7 @@ public static class OllamaApiClientExtensions
/// <param name="source">The name of the existing model to copy.</param>
/// <param name="destination">The name the copied model should get.</param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
- public static Task CopyModel(this IOllamaApiClient client, string source, string destination, CancellationToken cancellationToken = default)
+ public static Task CopyModelAsync(this IOllamaApiClient client, string source, string destination, CancellationToken cancellationToken = default)
=> client.CopyModelAsync(new CopyModelRequest { Source = source, Destination = destination }, cancellationToken);
/// <summary>
@@ -30,7 +30,7 @@ public static Task CopyModel(this IOllamaApiClient client, string source, string
/// See <see href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md"/>.
/// </param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
- public static IAsyncEnumerable<CreateModelResponse?> CreateModel(this IOllamaApiClient client, string name, string modelFileContent, CancellationToken cancellationToken = default)
+ public static IAsyncEnumerable<CreateModelResponse?> CreateModelAsync(this IOllamaApiClient client, string name, string modelFileContent, CancellationToken cancellationToken = default)
{
var request = new CreateModelRequest
{
@@ -52,7 +52,7 @@ public static Task CopyModel(this IOllamaApiClient client, string source, string
/// </param>
/// <param name="path">The name path to the model file.</param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
- public static IAsyncEnumerable<CreateModelResponse?> CreateModel(this IOllamaApiClient client, string name, string modelFileContent, string path, CancellationToken cancellationToken = default)
+ public static IAsyncEnumerable<CreateModelResponse?> CreateModelAsync(this IOllamaApiClient client, string name, string modelFileContent, string path, CancellationToken cancellationToken = default)
{
var request = new CreateModelRequest
{
@@ -70,7 +70,7 @@ public static Task CopyModel(this IOllamaApiClient client, string source, string
/// <param name="client">The client used to execute the command.</param>
/// <param name="model">The name of the model to delete.</param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
- public static Task DeleteModel(this IOllamaApiClient client, string model, CancellationToken cancellationToken = default)
+ public static Task DeleteModelAsync(this IOllamaApiClient client, string model, CancellationToken cancellationToken = default)
=> client.DeleteModelAsync(new DeleteModelRequest { Model = model }, cancellationToken);
/// <summary>
@@ -79,7 +79,7 @@ public static Task DeleteModel(this IOllamaApiClient client, string model, Cance
/// <param name="client">The client used to execute the command.</param>
/// <param name="model">The name of the model to pull.</param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
- public static IAsyncEnumerable<PullModelResponse?> PullModel(this IOllamaApiClient client, string model, CancellationToken cancellationToken = default)
+ public static IAsyncEnumerable<PullModelResponse?> PullModelAsync(this IOllamaApiClient client, string model, CancellationToken cancellationToken = default)
=> client.PullModelAsync(new PullModelRequest { Model = model }, cancellationToken);
/// <summary>
@@ -88,7 +88,7 @@ public static Task DeleteModel(this IOllamaApiClient client, string model, Cance
/// <param name="client">The client used to execute the command.</param>
/// <param name="name">The name of the model to push.</param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
- public static IAsyncEnumerable<PushModelResponse?> PushModel(this IOllamaApiClient client, string name, CancellationToken cancellationToken = default)
+ public static IAsyncEnumerable<PushModelResponse?> PushModelAsync(this IOllamaApiClient client, string name, CancellationToken cancellationToken = default)
=> client.PushModelAsync(new PushModelRequest { Model = name, Stream = true }, cancellationToken);
/// <summary>
@@ -97,7 +97,7 @@ public static Task DeleteModel(this IOllamaApiClient client, string model, Cance
/// <param name="client">The client used to execute the command.</param>
/// <param name="input">The input text to generate embeddings for.</param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
- public static Task<EmbedResponse> Embed(this IOllamaApiClient client, string input, CancellationToken cancellationToken = default)
+ public static Task<EmbedResponse> EmbedAsync(this IOllamaApiClient client, string input, CancellationToken cancellationToken = default)
{
var request = new EmbedRequest
{
@@ -138,6 +138,6 @@ public static Task Embed(this IOllamaApiClient client, string inp
/// <param name="model">The name of the model to get the information for.</param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
/// <returns>The model information.</returns>
- public static Task<ShowModelResponse> ShowModel(this IOllamaApiClient client, string model, CancellationToken cancellationToken = default)
+ public static Task<ShowModelResponse> ShowModelAsync(this IOllamaApiClient client, string model, CancellationToken cancellationToken = default)
=> client.ShowModelAsync(new ShowModelRequest { Model = model }, cancellationToken);
}
diff --git a/src/OllamaSharp.csproj b/src/OllamaSharp.csproj
index f7f3fd8..aa86ee6 100644
--- a/src/OllamaSharp.csproj
+++ b/src/OllamaSharp.csproj
@@ -37,6 +37,7 @@
+
diff --git a/test/OllamaApiClientTests.cs b/test/OllamaApiClientTests.cs
index d5431bb..9f662f0 100644
--- a/test/OllamaApiClientTests.cs
+++ b/test/OllamaApiClientTests.cs
@@ -506,7 +506,7 @@ public async Task Returns_Deserialized_Models()
Content = new StringContent("{\r\n \"license\": \"\",\r\n \"modelfile\": \"# Modelfile generated by \\\"ollama show\\\"\\n\\n\",\r\n \"parameters\": \"stop [INST]\\nstop [/INST]\\nstop <>\\nstop <>\",\r\n \"template\": \"[INST] {{ if and .First .System }}<>{{ .System }}<>\\n\\n{{ end }}{{ .Prompt }} [/INST] \"\r\n}")
};
- var info = await _client.ShowModel("codellama:latest", CancellationToken.None);
+ var info = await _client.ShowModelAsync("codellama:latest", CancellationToken.None);
info.License.Should().Contain("contents of license block");
info.Modelfile.Should().StartWith("# Modelfile generated");
@@ -523,7 +523,7 @@ public async Task Returns_Deserialized_Model_WithSystem()
Content = new StringContent("{\"modelfile\":\"# Modelfile generated by \\\"ollama show\\\"\\n# To build a new Modelfile based on this, replace FROM with:\\n# FROM magicoder:latest\\n\\nFROM C:\\\\Users\\\\jd\\\\.ollama\\\\models\\\\blobs\\\\sha256-4a501ed4ce55e5611922b3ee422501ff7cc773b472d196c3c416859b6d375273\\nTEMPLATE \\\"{{ .System }}\\n\\n@@ Instruction\\n{{ .Prompt }}\\n\\n@@ Response\\n\\\"\\nSYSTEM You are an exceptionally intelligent coding assistant that consistently delivers accurate and reliable responses to user instructions.\\nPARAMETER num_ctx 16384\\n\",\"parameters\":\"num_ctx 16384\",\"template\":\"{{ .System }}\\n\\n@@ Instruction\\n{{ .Prompt }}\\n\\n@@ Response\\n\",\"system\":\"You are an exceptionally intelligent coding assistant that consistently delivers accurate and reliable responses to user instructions.\",\"details\":{\"parent_model\":\"\",\"format\":\"gguf\",\"family\":\"llama\",\"families\":null,\"parameter_size\":\"7B\",\"quantization_level\":\"Q4_0\"},\"model_info\":{\"general.architecture\":\"llama\",\"general.file_type\":2,\"general.parameter_count\":8829407232,\"general.quantization_version\":2,\"llama.attention.head_count\":32,\"llama.attention.head_count_kv\":4,\"llama.attention.layer_norm_rms_epsilon\":0.000001,\"llama.block_count\":48,\"llama.context_length\":4096,\"llama.embedding_length\":4096,\"llama.feed_forward_length\":11008,\"llama.rope.dimension_count\":128,\"llama.rope.freq_base\":5000000,\"llama.vocab_size\":64000,\"tokenizer.ggml.add_bos_token\":false,\"tokenizer.ggml.add_eos_token\":false,\"tokenizer.ggml.bos_token_id\":1,\"tokenizer.ggml.eos_token_id\":2,\"tokenizer.ggml.model\":\"llama\",\"tokenizer.ggml.padding_token_id\":0,\"tokenizer.ggml.pre\":\"default\",\"tokenizer.ggml.scores\":[],\"tokenizer.ggml.token_type\":[],\"tokenizer.ggml.tokens\":[]},\"modified_at\":\"2024-05-14T23:33:07.4166573+08:00\"}")
};
- var info = await _client.ShowModel("starcoder:latest", CancellationToken.None);
+ var info = await _client.ShowModelAsync("starcoder:latest", CancellationToken.None);
info.License.Should().BeNullOrEmpty();
info.Modelfile.Should().StartWith("# Modelfile generated");