Showing 11 changed files with 751 additions and 10 deletions.
@@ -0,0 +1,55 @@
using OllamaSharp;
using Spectre.Console;

namespace OllamaApiConsole.Demos;

public class ChatConsole(IOllamaApiClient ollama) : OllamaConsole(ollama)
{
    public override async Task Run()
    {
        AnsiConsole.Write(new Rule("Chat").LeftJustified());
        AnsiConsole.WriteLine();

        Ollama.SelectedModel = await SelectModel("Select a model you want to chat with:");

        if (!string.IsNullOrEmpty(Ollama.SelectedModel))
        {
            var keepChatting = true;
            var systemPrompt = ReadInput($"Define a system prompt [{HintTextColor}](optional)[/]");

            do
            {
                AnsiConsole.MarkupLine("");
                AnsiConsole.MarkupLine($"You are talking to [{AccentTextColor}]{Ollama.SelectedModel}[/] now.");
                WriteChatInstructionHint();

                var chat = new Chat(Ollama, systemPrompt);

                string message;

                do
                {
                    AnsiConsole.WriteLine();
                    message = ReadInput();

                    if (message.Equals(EXIT_COMMAND, StringComparison.OrdinalIgnoreCase))
                    {
                        keepChatting = false;
                        break;
                    }

                    if (message.Equals(START_NEW_COMMAND, StringComparison.OrdinalIgnoreCase))
                    {
                        keepChatting = true;
                        break;
                    }

                    await foreach (var answerToken in chat.SendAsync(message))
                        AnsiConsole.MarkupInterpolated($"[{AiTextColor}]{answerToken}[/]");

                    AnsiConsole.WriteLine();
                } while (!string.IsNullOrEmpty(message));
            } while (keepChatting);
        }
    }
}
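The demo classes in this commit take an IOllamaApiClient. A minimal sketch of how ChatConsole might be wired up, assuming an OllamaApiClient pointed at a local Ollama endpoint (the URI and the top-level program are assumptions, not part of this commit):

// Hypothetical wiring, not part of this commit: connect OllamaSharp's
// OllamaApiClient to a local Ollama instance and start the chat demo above.
using OllamaApiConsole.Demos;
using OllamaSharp;

var ollama = new OllamaApiClient(new Uri("http://localhost:11434")); // assumed default Ollama endpoint
await new ChatConsole(ollama).Run();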
@@ -0,0 +1,117 @@
using System.Text.RegularExpressions;
using OllamaSharp;
using Spectre.Console;

namespace OllamaApiConsole.Demos;

public partial class ImageChatConsole(IOllamaApiClient ollama) : OllamaConsole(ollama)
{
    public override async Task Run()
    {
        AnsiConsole.Write(new Rule("Image chat").LeftJustified());
        AnsiConsole.WriteLine();

        Ollama.SelectedModel = await SelectModel("Select a model you want to chat with:");

        if (!string.IsNullOrEmpty(Ollama.SelectedModel))
        {
            var keepChatting = true;
            var systemPrompt = ReadInput($"Define a system prompt [{HintTextColor}](optional)[/]");

            do
            {
                AnsiConsole.MarkupLine("");
                AnsiConsole.MarkupLine($"You are talking to [{AccentTextColor}]{Ollama.SelectedModel}[/] now.");
                AnsiConsole.MarkupLine($"[{HintTextColor}]To send an image, simply enter its full filename like \"[{AccentTextColor}]c:/image.jpg[/]\"[/]");
                WriteChatInstructionHint();

                var chat = new Chat(Ollama, systemPrompt);

                string message;

                do
                {
                    AnsiConsole.WriteLine();
                    message = ReadInput();

                    if (message.Equals(EXIT_COMMAND, StringComparison.OrdinalIgnoreCase))
                    {
                        keepChatting = false;
                        break;
                    }

                    if (message.Equals(START_NEW_COMMAND, StringComparison.OrdinalIgnoreCase))
                    {
                        keepChatting = true;
                        break;
                    }

                    var imagePaths = WindowsFileRegex().Matches(message).Where(m => !string.IsNullOrEmpty(m.Value))
                        .Union(UnixFileRegex().Matches(message).Where(m => !string.IsNullOrEmpty(m.Value)))
                        .Select(m => m.Value)
                        .ToArray();

                    if (imagePaths.Length > 0)
                    {
                        byte[][] imageBytes;

                        try
                        {
                            imageBytes = imagePaths.Select(File.ReadAllBytes).ToArray();
                        }
                        catch (IOException ex)
                        {
                            AnsiConsole.MarkupLineInterpolated($"Could not load your {(imagePaths.Length == 1 ? "image" : "images")}:");
                            AnsiConsole.MarkupLineInterpolated($"[{ErrorTextColor}]{Markup.Escape(ex.Message)}[/]");
                            AnsiConsole.MarkupLine("Please try again");
                            continue;
                        }

                        var imagesBase64 = imageBytes.Select(Convert.ToBase64String);

                        // remove paths from the message
                        foreach (var path in imagePaths)
                            message = message.Replace(path, "");

                        message += Environment.NewLine + Environment.NewLine + $"(the user attached {imagePaths.Length} {(imagePaths.Length == 1 ? "image" : "images")})";

                        foreach (var consoleImage in imageBytes.Select(bytes => new CanvasImage(bytes)))
                        {
                            consoleImage.MaxWidth = 40;
                            AnsiConsole.Write(consoleImage);
                        }

                        AnsiConsole.WriteLine();
                        if (imagePaths.Length == 1)
                            AnsiConsole.MarkupLine($"[{HintTextColor}]The image was scaled down for the console only, the model gets the full version.[/]");
                        else
                            AnsiConsole.MarkupLine($"[{HintTextColor}]The images were scaled down for the console only, the model gets full versions.[/]");
                        AnsiConsole.WriteLine();

                        await foreach (var answerToken in chat.SendAsync(message, [], imagesBase64))
                            AnsiConsole.MarkupInterpolated($"[{AiTextColor}]{answerToken}[/]");
                    }
                    else
                    {
                        await foreach (var answerToken in chat.SendAsync(message))
                            AnsiConsole.MarkupInterpolated($"[{AiTextColor}]{answerToken}[/]");
                    }

                    AnsiConsole.WriteLine();
                } while (!string.IsNullOrEmpty(message));
            } while (keepChatting);
        }
    }

    /// <summary>
    /// https://stackoverflow.com/a/24703223/704281
    /// </summary>
    [GeneratedRegex("\\b[a-zA-Z]:[\\\\/](?:[^<>:\"/\\\\|?*\\n\\r]+[\\\\/])*[^<>:\"/\\\\|?*\\n\\r]+\\.\\w+\\b")]
    private static partial Regex WindowsFileRegex();

    /// <summary>
    /// https://stackoverflow.com/a/169021/704281
    /// </summary>
    [GeneratedRegex("(.+)\\/([^\\/]+)")]
    private static partial Regex UnixFileRegex();
}
@@ -0,0 +1,168 @@
using OllamaSharp;
using OllamaSharp.Models;
using Spectre.Console;

namespace OllamaApiConsole.Demos;

public class ModelManagerConsole(IOllamaApiClient ollama) : OllamaConsole(ollama)
{
    public override async Task Run()
    {
        AnsiConsole.Write(new Rule("Model manager").LeftJustified());
        AnsiConsole.WriteLine();

        string command;
        var exit = false;

        do
        {
            command = AnsiConsole.Prompt(
                new SelectionPrompt<string>()
                    .PageSize(10)
                    .Title("What do you want to do?")
                    .AddChoices("..", "Copy model", "Create model", "Delete model", "Generate embeddings", "Show model information", "List local models", "Pull model", "Push model"));

            switch (command)
            {
                case "Copy model":
                    await CopyModel();
                    break;

                case "Create model":
                    await CreateModel();
                    break;

                case "Delete model":
                    await DeleteModel();
                    break;

                case "Generate embeddings":
                    await GenerateEmbedding();
                    break;

                case "Show model information":
                    await ShowModelInformation();
                    break;

                case "List local models":
                    await ListLocalModels();
                    break;

                case "Pull model":
                    await PullModel();
                    break;

                case "Push model":
                    await PushModel();
                    break;

                default:
                    exit = true;
                    break;
            }

            Console.WriteLine();
        } while (!exit);
    }

    private async Task CopyModel()
    {
        var source = await SelectModel("Which model should be copied?");
        if (!string.IsNullOrEmpty(source))
        {
            var destination = ReadInput($"Enter a name for the copy of [{AccentTextColor}]{source}[/]:");
            await Ollama.CopyModelAsync(source, destination);
        }
    }

    private async Task CreateModel()
    {
        var createName = ReadInput("Enter a name for your new model:");
        var createModelFileContent = ReadInput("Enter the contents for the model file:", $"[{HintTextColor}]See [/][{AccentTextColor}][link]https://ollama.ai/library[/][/][{HintTextColor}] for available models[/]");
        await foreach (var status in Ollama.CreateModelAsync(createName, createModelFileContent))
            AnsiConsole.MarkupLineInterpolated($"{status?.Status ?? ""}");
    }

    private async Task DeleteModel()
    {
        var deleteModel = await SelectModel("Which model do you want to delete?");
        if (!string.IsNullOrEmpty(deleteModel))
            await Ollama.DeleteModelAsync(deleteModel);
    }

    private async Task GenerateEmbedding()
    {
        var embedModel = await SelectModel("Which model should be used to create embeddings?");
        if (!string.IsNullOrEmpty(embedModel))
        {
            var embedContent = ReadInput("Enter a string to embed:");
            Ollama.SelectedModel = embedModel;
            var embedResponse = await Ollama.EmbedAsync(embedContent);
            AnsiConsole.MarkupLineInterpolated($"[{AiTextColor}]{string.Join(", ", embedResponse.Embeddings[0])}[/]");
        }
    }

    private async Task ShowModelInformation()
    {
        var infoModel = await SelectModel("Which model do you want to retrieve information for?");
        if (!string.IsNullOrEmpty(infoModel))
        {
            var infoResponse = await Ollama.ShowModelAsync(infoModel);
            PropertyConsoleRenderer.Render(infoResponse);
        }
    }

    private async Task ListLocalModels()
    {
        var models = await Ollama.ListLocalModelsAsync();
        foreach (var model in models.OrderBy(m => m.Name))
            AnsiConsole.MarkupLineInterpolated($"[{AiTextColor}]{model.Name}[/]");
    }

    private async Task PullModel()
    {
        var pullModel = ReadInput("Enter the name of the model you want to pull:", $"[{HintTextColor}]See [/][{AccentTextColor}][link]https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md[/][/][{HintTextColor}] for reference[/]");

        await AnsiConsole.Progress().StartAsync(async context =>
        {
            ProgressTask? task = null;
            await foreach (var status in Ollama.PullModelAsync(pullModel))
                UpdateProgressTaskByStatus(context, ref task, status);
            task?.StopTask();
        });
    }

    private async Task PushModel()
    {
        var pushModel = ReadInput("Which model do you want to push?");
        await foreach (var status in Ollama.PushModelAsync(pushModel))
            AnsiConsole.MarkupLineInterpolated($"{status?.Status ?? ""}");
    }

    private static void UpdateProgressTaskByStatus(ProgressContext context, ref ProgressTask? task, PullModelResponse? modelResponse)
    {
        if (modelResponse is null)
            return;

        if (modelResponse.Status != task?.Description)
        {
            task?.StopTask();
            task = context.AddTask(modelResponse.Status);
        }

        task.Increment(modelResponse.Percent - task.Value);
    }

    public static class PropertyConsoleRenderer
    {
        public static void Render(object o)
        {
            foreach (var pi in o.GetType().GetProperties())
            {
                AnsiConsole.MarkupLineInterpolated($"[{OllamaConsole.AccentTextColor}][underline][bold]{pi.Name}:[/][/][/]");
                AnsiConsole.MarkupLineInterpolated($"[{OllamaConsole.AccentTextColor}]{pi.GetValue(o)?.ToString() ?? ""}[/]");
                AnsiConsole.WriteLine();
            }
        }
    }
}