Used AnsiConsole in a few more places: (#534)
- UserSettings, simplifying the validation/re-ask loop down to one call
- Program, adding colour to figlet title
- Batched examples, showing default prompt
- ExampleRunner, resetting state after running an example
martindevans authored Feb 24, 2024
1 parent 91ca9d2 commit 74a3918
Showing 5 changed files with 33 additions and 52 deletions.
3 changes: 2 additions & 1 deletion LLama.Examples/ExampleRunner.cs
@@ -46,7 +46,8 @@ public static async Task Run()
await example();
}

Console.WriteLine("Press ENTER to go to the main menu...");
AnsiConsole.Reset();
AnsiConsole.MarkupLine("Press ENTER to go to the main menu...");
Console.ReadLine();

AnsiConsole.Clear();
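
For readers unfamiliar with Spectre.Console: AnsiConsole.Reset() restores the console's colours and text decorations to their defaults, so styling left behind by an example does not bleed into the menu prompt, and AnsiConsole.MarkupLine() writes a line after parsing any [..] markup tags (this plain string has none). A minimal sketch of the pattern, with a hypothetical helper name standing in for the real ExampleRunner code:

    using System;
    using Spectre.Console;

    internal static class MenuPauseSketch
    {
        public static void PauseBeforeMenu()
        {
            // Undo any colour or decoration an example may have left active.
            AnsiConsole.Reset();

            // Parses [..] markup; this plain string has none, so it just prints.
            AnsiConsole.MarkupLine("Press ENTER to go to the main menu...");
            Console.ReadLine();

            // Clear the screen before the menu is redrawn.
            AnsiConsole.Clear();
        }
    }
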
6 changes: 2 additions & 4 deletions LLama.Examples/Examples/BatchedExecutorFork.cs
@@ -2,6 +2,7 @@
using LLama.Common;
using LLama.Native;
using LLama.Sampling;
+using Spectre.Console;

namespace LLama.Examples.Examples;

@@ -20,10 +21,7 @@ public static async Task Run()
var parameters = new ModelParams(modelPath);
using var model = LLamaWeights.LoadFromFile(parameters);

Console.WriteLine("Prompt (leave blank to select automatically):");
var prompt = Console.ReadLine();
if (string.IsNullOrWhiteSpace(prompt))
prompt = "Not many people know that";
var prompt = AnsiConsole.Ask("Prompt (or ENTER for default):", "Not many people know that");

// Create an executor that can evaluate a batch of conversations together
var executor = new BatchedExecutor(model, parameters);
6 changes: 2 additions & 4 deletions LLama.Examples/Examples/BatchedExecutorRewind.cs
@@ -2,6 +2,7 @@
using LLama.Common;
using LLama.Native;
using LLama.Sampling;
+using Spectre.Console;

namespace LLama.Examples.Examples;

@@ -21,10 +22,7 @@ public static async Task Run()
var parameters = new ModelParams(modelPath);
using var model = LLamaWeights.LoadFromFile(parameters);

Console.WriteLine("Prompt (leave blank to select automatically):");
var prompt = Console.ReadLine();
if (string.IsNullOrWhiteSpace(prompt))
prompt = "Not many people know that";
var prompt = AnsiConsole.Ask("Prompt (or ENTER for default):", "Not many people know that");

// Create an executor that can evaluate a batch of conversations together
var executor = new BatchedExecutor(model, parameters);
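
Both batched examples now get their prompt from AnsiConsole.Ask<T> with a default value: Spectre.Console shows the default alongside the question and simply returns it when the user presses ENTER, which is what replaces the old ReadLine/IsNullOrWhiteSpace check. A small standalone sketch of that behaviour (the yellow highlight is illustrative, not from the commit):

    using Spectre.Console;

    // Pressing ENTER accepts the default value shown next to the prompt.
    var prompt = AnsiConsole.Ask("Prompt (or ENTER for default):", "Not many people know that");

    // Interpolated values are escaped; the [yellow]..[/] tags in the literal are markup.
    AnsiConsole.MarkupLineInterpolated($"Using prompt: [yellow]{prompt}[/]");
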
12 changes: 6 additions & 6 deletions LLama.Examples/Program.cs
@@ -1,18 +1,18 @@
using LLama.Native;
using Spectre.Console;

-Console.WriteLine(
-    """
-    ======================================================================================================
+AnsiConsole.MarkupLineInterpolated(
+    $"""
+    [purple]======================================================================================================[/]
    (ASCII-art figlet title banner: whitespace was lost in extraction, so the art is not reproduced here)
-    ======================================================================================================
+    [purple]=========================================================================================[/] \ \_\ [purple]======[/]
    \/_/

    """);

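
The banner change relies on AnsiConsole.MarkupLineInterpolated: markup tags such as [purple]...[/] written directly in the interpolated string literal are rendered as styling, while interpolated values are escaped, so runtime text cannot accidentally open or close a tag. A brief illustrative sketch (the version string is made up for the example, not part of the commit):

    using Spectre.Console;

    var version = "v0.0.0 [local build]"; // hypothetical value, brackets included on purpose

    // [purple] in the literal is parsed as markup; {version} is escaped, so its
    // square brackets print literally instead of being treated as a tag.
    AnsiConsole.MarkupLineInterpolated($"[purple]====[/] LLamaSharp examples {version} [purple]====[/]");
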
58 changes: 21 additions & 37 deletions LLama.Examples/UserSettings.cs
@@ -1,4 +1,6 @@
-namespace LLama.Examples;
+using Spectre.Console;
+
+namespace LLama.Examples;

internal static class UserSettings
{
@@ -23,51 +25,33 @@ private static void WriteDefaultModelPath(string path)

public static string GetModelPath(bool alwaysPrompt = false)
{
-string? defaultPath = ReadDefaultModelPath();
-return defaultPath is null || alwaysPrompt
+var defaultPath = ReadDefaultModelPath();
+var path = defaultPath is null || alwaysPrompt
? PromptUserForPath()
: PromptUserForPathWithDefault(defaultPath);

+if (File.Exists(path))
+    WriteDefaultModelPath(path);
+
+return path;
}

private static string PromptUserForPath()
{
-while (true)
-{
-    Console.ForegroundColor = ConsoleColor.White;
-    Console.Write("Please input your model path: ");
-    string? path = Console.ReadLine();
-
-    if (File.Exists(path))
-    {
-        WriteDefaultModelPath(path);
-        return path;
-    }
-
-    Console.WriteLine("ERROR: invalid model file path\n");
-}
+return AnsiConsole.Prompt(
+    new TextPrompt<string>("Please input your model path:")
+        .PromptStyle("white")
+        .Validate(File.Exists, "[red]ERROR: invalid model file path - file does not exist[/]")
+);
}

private static string PromptUserForPathWithDefault(string defaultPath)
{
-while (true)
-{
-    Console.ForegroundColor = ConsoleColor.White;
-    Console.WriteLine($"Default model: {defaultPath}");
-    Console.Write($"Please input a model path (or ENTER for default): ");
-    string? path = Console.ReadLine();
-
-    if (string.IsNullOrWhiteSpace(path))
-    {
-        return defaultPath;
-    }
-
-    if (File.Exists(path))
-    {
-        WriteDefaultModelPath(path);
-        return path;
-    }
-
-    Console.WriteLine("ERROR: invalid model file path\n");
-}
+return AnsiConsole.Prompt(
+    new TextPrompt<string>("Please input your model path (or ENTER for default):")
+        .DefaultValue(defaultPath)
+        .PromptStyle("white")
+        .Validate(File.Exists, "[red]ERROR: invalid model file path - file does not exist[/]")
+);
}
}
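
The TextPrompt shown above is what collapses the old validate/re-ask while loop into a single call: Spectre.Console keeps re-prompting until the Validate callback returns true, rendering the red error message after each failed attempt, and DefaultValue supplies the fallback when the user just presses ENTER. A minimal sketch of the same pattern outside the UserSettings class (the helper name is illustrative):

    using System.IO;
    using Spectre.Console;

    internal static class PathPromptSketch
    {
        public static string AskForModelPath(string defaultPath)
        {
            // One call replaces the manual while(true)/ReadLine/File.Exists loop:
            // the prompt re-asks until Validate passes, showing the red error
            // markup after each invalid entry.
            return AnsiConsole.Prompt(
                new TextPrompt<string>("Please input your model path (or ENTER for default):")
                    .DefaultValue(defaultPath)
                    .PromptStyle("white")
                    .Validate(File.Exists, "[red]ERROR: invalid model file path - file does not exist[/]"));
        }
    }
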
