diff --git a/LLama.Examples/ExampleRunner.cs b/LLama.Examples/ExampleRunner.cs
index b3a52d4c0..9ebbf5b8c 100644
--- a/LLama.Examples/ExampleRunner.cs
+++ b/LLama.Examples/ExampleRunner.cs
@@ -46,7 +46,8 @@ public static async Task Run()
             await example();
         }
 
-        Console.WriteLine("Press ENTER to go to the main menu...");
+        AnsiConsole.Reset();
+        AnsiConsole.MarkupLine("Press ENTER to go to the main menu...");
         Console.ReadLine();
 
         AnsiConsole.Clear();
diff --git a/LLama.Examples/Examples/BatchedExecutorFork.cs b/LLama.Examples/Examples/BatchedExecutorFork.cs
index b5cf6c43e..b42f436bb 100644
--- a/LLama.Examples/Examples/BatchedExecutorFork.cs
+++ b/LLama.Examples/Examples/BatchedExecutorFork.cs
@@ -2,6 +2,7 @@
 using LLama.Common;
 using LLama.Native;
 using LLama.Sampling;
+using Spectre.Console;
 
 namespace LLama.Examples.Examples;
 
@@ -20,10 +21,7 @@ public static async Task Run()
         var parameters = new ModelParams(modelPath);
         using var model = LLamaWeights.LoadFromFile(parameters);
 
-        Console.WriteLine("Prompt (leave blank to select automatically):");
-        var prompt = Console.ReadLine();
-        if (string.IsNullOrWhiteSpace(prompt))
-            prompt = "Not many people know that";
+        var prompt = AnsiConsole.Ask("Prompt (or ENTER for default):", "Not many people know that");
 
         // Create an executor that can evaluate a batch of conversations together
         var executor = new BatchedExecutor(model, parameters);
diff --git a/LLama.Examples/Examples/BatchedExecutorRewind.cs b/LLama.Examples/Examples/BatchedExecutorRewind.cs
index 9a25b6e5f..4a8c3ab29 100644
--- a/LLama.Examples/Examples/BatchedExecutorRewind.cs
+++ b/LLama.Examples/Examples/BatchedExecutorRewind.cs
@@ -2,6 +2,7 @@
 using LLama.Common;
 using LLama.Native;
 using LLama.Sampling;
+using Spectre.Console;
 
 namespace LLama.Examples.Examples;
 
@@ -21,10 +22,7 @@ public static async Task Run()
         var parameters = new ModelParams(modelPath);
         using var model = LLamaWeights.LoadFromFile(parameters);
 
-        Console.WriteLine("Prompt (leave blank to select automatically):");
-        var prompt = Console.ReadLine();
-        if (string.IsNullOrWhiteSpace(prompt))
-            prompt = "Not many people know that";
+        var prompt = AnsiConsole.Ask("Prompt (or ENTER for default):", "Not many people know that");
 
         // Create an executor that can evaluate a batch of conversations together
         var executor = new BatchedExecutor(model, parameters);
diff --git a/LLama.Examples/Program.cs b/LLama.Examples/Program.cs
index 3b4320a4e..54d40fb54 100644
--- a/LLama.Examples/Program.cs
+++ b/LLama.Examples/Program.cs
@@ -1,18 +1,18 @@
 using LLama.Native;
+using Spectre.Console;
 
-Console.WriteLine(
-    """
-    ======================================================================================================
+AnsiConsole.MarkupLineInterpolated(
+    $"""
+    [purple]======================================================================================================[/]
      __       __                                  ____   __
     /\ \     /\ \                                /\  _`\ /\ \
     \ \ \    \ \ \      __     ___ ___      __   \ \,\L\_\\ \ \___      __   _ __   _____
-    \ \ \  __\ \ \  __ /'__`\ /' __` __`\  /'__`\ \/_\__ \ \ \ _ `\ /'__`\ /\`'__\/\ '__`\
+    \ \ \  __\ \ \  __ /'__`\ /' __` __`\  /'__`\ \/_\__ \ \ \ _ `\ /'__`\ /\` __\/\  __`\
      \ \ \L\ \\ \ \L\ \/\ \L\.\_ /\ \/\ \/\ \ /\ \L\.\_  /\ \L\ \\ \ \ \ \ /\ \L\.\_\ \ \/ \ \ \L\ \
       \ \____/ \ \____/\ \__/.\_\\ \_\ \_\ \_\\ \__/.\_\\ `\____\\ \_\ \_\\ \__/.\_\\ \_\ \ \ ,__/
        \/___/   \/___/ \/__/\/_/ \/_/\/_/\/_/ \/__/\/_/ \/_____/ \/_/\/_/ \/__/\/_/ \/_/ \ \ \/
-                                                                                          \ \_\
+    [purple]=========================================================================================[/] \ \_\ [purple]======[/]
                                                                                           \/_/
-    ======================================================================================================
     """);
diff --git a/LLama.Examples/UserSettings.cs b/LLama.Examples/UserSettings.cs
index 1a0bb36b8..088a628e7 100644
--- a/LLama.Examples/UserSettings.cs
+++ b/LLama.Examples/UserSettings.cs
@@ -1,4 +1,6 @@
-namespace LLama.Examples;
+using Spectre.Console;
+
+namespace LLama.Examples;
 
 internal static class UserSettings
 {
@@ -23,51 +25,33 @@ private static void WriteDefaultModelPath(string path)
 
     public static string GetModelPath(bool alwaysPrompt = false)
     {
-        string? defaultPath = ReadDefaultModelPath();
-        return defaultPath is null || alwaysPrompt
+        var defaultPath = ReadDefaultModelPath();
+        var path = defaultPath is null || alwaysPrompt
             ? PromptUserForPath()
             : PromptUserForPathWithDefault(defaultPath);
+
+        if (File.Exists(path))
+            WriteDefaultModelPath(path);
+
+        return path;
     }
 
     private static string PromptUserForPath()
     {
-        while (true)
-        {
-            Console.ForegroundColor = ConsoleColor.White;
-            Console.Write("Please input your model path: ");
-            string? path = Console.ReadLine();
-
-            if (File.Exists(path))
-            {
-                WriteDefaultModelPath(path);
-                return path;
-            }
-
-            Console.WriteLine("ERROR: invalid model file path\n");
-        }
+        return AnsiConsole.Prompt(
+            new TextPrompt<string>("Please input your model path:")
+                .PromptStyle("white")
+                .Validate(File.Exists, "[red]ERROR: invalid model file path - file does not exist[/]")
+        );
    }
 
     private static string PromptUserForPathWithDefault(string defaultPath)
     {
-        while (true)
-        {
-            Console.ForegroundColor = ConsoleColor.White;
-            Console.WriteLine($"Default model: {defaultPath}");
-            Console.Write($"Please input a model path (or ENTER for default): ");
-            string? path = Console.ReadLine();
-
-            if (string.IsNullOrWhiteSpace(path))
-            {
-                return defaultPath;
-            }
-
-            if (File.Exists(path))
-            {
-                WriteDefaultModelPath(path);
-                return path;
-            }
-
-            Console.WriteLine("ERROR: invalid model file path\n");
-        }
+        return AnsiConsole.Prompt(
+            new TextPrompt<string>("Please input your model path (or ENTER for default):")
+                .DefaultValue(defaultPath)
+                .PromptStyle("white")
+                .Validate(File.Exists, "[red]ERROR: invalid model file path - file does not exist[/]")
+        );
     }
 }
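
Note (not part of the patch): the sketch below is a minimal, self-contained illustration of the Spectre.Console prompting pattern this diff switches to, combining AnsiConsole.Ask with a default value (as in the BatchedExecutor examples) and a validated TextPrompt (as in UserSettings). The PromptSketch class name and the console strings are placeholders chosen for the example.

using System.IO;
using Spectre.Console;

internal static class PromptSketch
{
    private static void Main()
    {
        // Free-form question with a default: pressing ENTER returns the default value.
        var prompt = AnsiConsole.Ask("Prompt (or ENTER for default):", "Not many people know that");

        // Validated prompt: Spectre re-asks until the validator passes,
        // printing the red markup error message on each failed attempt.
        var modelPath = AnsiConsole.Prompt(
            new TextPrompt<string>("Please input your model path:")
                .PromptStyle("white")
                .Validate(File.Exists, "[red]ERROR: invalid model file path - file does not exist[/]"));

        AnsiConsole.MarkupLineInterpolated($"Using prompt '{prompt}' with model at {modelPath}");
    }
}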