Skip to content

Commit

Permalink
🔧 Add Semantic Memory SDK to LLama.Examples
Browse files Browse the repository at this point in the history
Added a project reference to LLama.KernelMemory in the LLama.Examples.csproj file.

🔧 Add KernelMemory class and update TestRunner

Added a new class KernelMemory to the LLama.Examples.NewVersion namespace, which includes a Run method that demonstrates the usage of Semantic Kernel Memory. The class imports a sample PDF document and asks a question to the memory, displaying the answer and relevant sources.

Updated the TestRunner class in the same namespace to include an option (choice 15) to run the KernelMemory example.

🔧 Fix typo in LLamaSharpTextEmbeddingGeneration class

Fixed a typo in the LLamaSharpTextEmbeddingGeneration class, where the configuration field used the misspelled type name LlamaSharpConfig instead of LLamaSharpConfig.

Added XML documentation for the LLamaSharpTextEmbeddingGeneration constructor, Dispose method, and GenerateEmbeddingsAsync method.

Summary:
- Added project reference to LLama.KernelMemory and LLama.SemanticKernel in LLama.Examples.csproj
- Added KernelMemory class to demonstrate Semantic Kernel Memory usage
- Updated TestRunner class to include option for running KernelMemory example
- Fixed typo in LLamaSharpTextEmbeddingGeneration class
- Added XML documentation for constructor, Dispose method, and GenerateEmbeddingsAsync method in LLamaSharpTextEmbeddingGeneration class
  • Loading branch information
xbotter committed Oct 30, 2023
1 parent 26cded7 commit a49438e
Show file tree
Hide file tree
Showing 8 changed files with 163 additions and 15 deletions.
Binary file added LLama.Examples/Assets/sample-SK-Readme.pdf
Binary file not shown.
4 changes: 4 additions & 0 deletions LLama.Examples/LLama.Examples.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\LLama.KernelMemory\LLamaSharp.KernelMemory.csproj" />
<ProjectReference Include="..\LLama.SemanticKernel\LLamaSharp.SemanticKernel.csproj" />
<ProjectReference Include="..\LLama\LLamaSharp.csproj" />
</ItemGroup>
Expand Down Expand Up @@ -61,6 +62,9 @@
<None Update="Assets\reason-act.txt">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="Assets\sample-SK-Readme.pdf">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>

</Project>
47 changes: 47 additions & 0 deletions LLama.Examples/NewVersion/KernelMemory.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
using Microsoft.SemanticMemory.Handlers;
using Microsoft.SemanticMemory;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using LLamaSharp.KernelMemory;

namespace LLama.Examples.NewVersion
{
public class KernelMemory
{
    /// <summary>
    /// Demonstrates Microsoft Kernel Memory backed by LLamaSharp: builds a
    /// serverless memory client, imports a sample PDF, asks a question about
    /// it, then prints the answer together with the sources it was grounded on.
    /// </summary>
    public static async Task Run()
    {
        Console.WriteLine("Example from: https://github.com/microsoft/kernel-memory/blob/main/examples/101-using-core-nuget/Program.cs");

        // Console.ReadLine() returns null on end-of-stream and may return an
        // empty string; re-prompt so a bad path fails here, not at model load.
        string? modelPath;
        do
        {
            Console.Write("Please input your model path: ");
            modelPath = Console.ReadLine();
        } while (string.IsNullOrWhiteSpace(modelPath));

        var memory = new MemoryClientBuilder()
            .WithLLamaSharpDefaults(new LLamaSharpConfig(modelPath))
            .With(new TextPartitioningOptions
            {
                MaxTokensPerParagraph = 300,
                MaxTokensPerLine = 100,
                OverlappingTokens = 50
            })
            .BuildServerlessClient();

        // Fix: the sample PDF is copied to "Assets" (see LLama.Examples.csproj),
        // not "Asserts" — the original path made the import fail at runtime.
        await memory.ImportDocumentAsync(@"./Assets/sample-SK-Readme.pdf");

        var question = "What's Semantic Kernel?";

        Console.WriteLine($"\n\nQuestion: {question}");

        var answer = await memory.AskAsync(question);

        Console.WriteLine($"\nAnswer: {answer.Result}");

        Console.WriteLine("\n\n Sources:\n");

        foreach (var x in answer.RelevantSources)
        {
            Console.WriteLine($" - {x.SourceName} - {x.Link} [{x.Partitions.First().LastUpdate:D}]");
        }
    }
}
}
23 changes: 14 additions & 9 deletions LLama.Examples/NewVersion/TestRunner.cs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ public static async Task Run()
Console.WriteLine("12: Semantic Kernel Chat.");
Console.WriteLine("13: Semantic Kernel Memory.");
Console.WriteLine("14: Coding Assistant.");
Console.WriteLine("15: Semantic Memory.");

while (true)
{
Expand All @@ -36,31 +37,31 @@ public static async Task Run()
{
await ChatSessionStripRoleName.Run();
}
else if(choice == 2)
else if (choice == 2)
{
await InteractiveModeExecute.Run();
}
else if(choice == 3)
else if (choice == 3)
{
await InstructModeExecute.Run();
}
else if(choice == 4)
else if (choice == 4)
{
await StatelessModeExecute.Run();
}
else if(choice == 5)
else if (choice == 5)
{
await SaveAndLoadSession.Run();
}
else if(choice == 6)
else if (choice == 6)
{
await LoadAndSaveState.Run();
}
else if(choice == 7)
else if (choice == 7)
{
GetEmbeddings.Run();
}
else if(choice == 8)
else if (choice == 8)
{
QuantizeModel.Run();
}
Expand All @@ -84,10 +85,14 @@ public static async Task Run()
{
await SemanticKernelMemory.Run();
}
else if(choice == 14)
else if (choice == 14)
{
await CodingAssistant.Run();
}
else if (choice == 15)
{
await KernelMemory.Run();
}
else
{
Console.WriteLine("Cannot parse your choice. Please select again.");
Expand All @@ -98,5 +103,5 @@ public static async Task Run()
}
}


}
52 changes: 52 additions & 0 deletions LLama.KernelMemory/BuilderExtensions.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
using Microsoft.SemanticMemory;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace LLamaSharp.KernelMemory
{
/// <summary>
/// Provides extension methods for the MemoryClientBuilder class.
/// </summary>
/// <summary>
/// Extension methods that wire LLamaSharp components into a MemoryClientBuilder.
/// </summary>
public static class BuilderExtensions
{
    /// <summary>
    /// Registers a LLamaSharp-backed text embedding generator on the builder.
    /// </summary>
    /// <param name="builder">The builder to configure.</param>
    /// <param name="config">Settings used to construct the embedding generator.</param>
    /// <returns>The same builder instance, to allow chaining.</returns>
    public static MemoryClientBuilder WithLLamaSharpTextEmbeddingGeneration(this MemoryClientBuilder builder, LLamaSharpConfig config)
    {
        var embeddingGeneration = new LLamaSharpTextEmbeddingGeneration(config);
        builder.WithCustomEmbeddingGeneration(embeddingGeneration);
        return builder;
    }

    /// <summary>
    /// Registers a LLamaSharp-backed text generator on the builder.
    /// </summary>
    /// <param name="builder">The builder to configure.</param>
    /// <param name="config">Settings used to construct the text generator.</param>
    /// <returns>The same builder instance, to allow chaining.</returns>
    public static MemoryClientBuilder WithLLamaSharpTextGeneration(this MemoryClientBuilder builder, LLamaSharpConfig config)
    {
        var textGeneration = new LlamaSharpTextGeneration(config);
        builder.WithCustomTextGeneration(textGeneration);
        return builder;
    }

    /// <summary>
    /// Registers both the LLamaSharp embedding generator and text generator
    /// with a single call, sharing one configuration.
    /// </summary>
    /// <param name="builder">The builder to configure.</param>
    /// <param name="config">Settings shared by both generators.</param>
    /// <returns>The same builder instance, to allow chaining.</returns>
    public static MemoryClientBuilder WithLLamaSharpDefaults(this MemoryClientBuilder builder, LLamaSharpConfig config)
        => builder
            .WithLLamaSharpTextEmbeddingGeneration(config)
            .WithLLamaSharpTextGeneration(config);
}
}
13 changes: 11 additions & 2 deletions LLama.KernelMemory/LLamaSharpTextEmbeddingGeneration.cs
Original file line number Diff line number Diff line change
Expand Up @@ -9,26 +9,35 @@

namespace LLamaSharp.KernelMemory
{
/// <summary>
/// Provides text embedding generation for LLamaSharp.
/// </summary>
public class LLamaSharpTextEmbeddingGeneration : ITextEmbeddingGeneration, IDisposable
{
private readonly LlamaSharpConfig _config;
private readonly LLamaSharpConfig _config;
private readonly LLamaEmbedder _embedder;
private readonly LLamaWeights _weights;

public LLamaSharpTextEmbeddingGeneration(LlamaSharpConfig config)
/// <summary>
/// Initializes a new instance of the <see cref="LLamaSharpTextEmbeddingGeneration"/> class.
/// </summary>
/// <param name="config">The configuration for LLamaSharp.</param>
public LLamaSharpTextEmbeddingGeneration(LLamaSharpConfig config)
{
this._config = config;
var @params = new ModelParams(_config.ModelPath);
_weights = LLamaWeights.LoadFromFile(@params);
_embedder = new LLamaEmbedder(_weights, @params);
}

/// <inheritdoc/>
public void Dispose()
{
_embedder.Dispose();
_weights.Dispose();
}

/// <inheritdoc/>
public Task<IList<ReadOnlyMemory<float>>> GenerateEmbeddingsAsync(IList<string> data, CancellationToken cancellationToken = default)
{
IList<ReadOnlyMemory<float>> results = new List<ReadOnlyMemory<float>>();
Expand Down
26 changes: 24 additions & 2 deletions LLama.KernelMemory/LlamaSharpConfig.cs
Original file line number Diff line number Diff line change
Expand Up @@ -6,16 +6,38 @@

namespace LLamaSharp.KernelMemory
{
public class LlamaSharpConfig
/// <summary>
/// Represents the configuration for LLamaSharp.
/// </summary>
public class LLamaSharpConfig
{
public LlamaSharpConfig(string modelPath)
/// <summary>
/// Initializes a new instance of the <see cref="LLamaSharpConfig"/> class.
/// </summary>
/// <param name="modelPath">The path to the model file.</param>
public LLamaSharpConfig(string modelPath)
{
ModelPath = modelPath;
}

/// <summary>
/// Gets or sets the path to the model file.
/// </summary>
public string ModelPath { get; set; }

/// <summary>
/// Gets or sets the size of the context.
/// </summary>
public uint? ContextSize { get; set; }

/// <summary>
/// Gets or sets the seed value.
/// </summary>
public uint? Seed { get; set; }

/// <summary>
/// Gets or sets the number of GPU layers.
/// </summary>
public int? GpuLayerCount { get; set; }
}
}
13 changes: 11 additions & 2 deletions LLama.KernelMemory/LlamaSharpTextGeneration.cs
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,21 @@

namespace LLamaSharp.KernelMemory
{
/// <summary>
/// Provides text generation for LLamaSharp.
/// </summary>
public class LlamaSharpTextGeneration : ITextGeneration, IDisposable
{
private readonly LlamaSharpConfig _config;
private readonly LLamaSharpConfig _config;
private readonly LLamaWeights _weights;
private readonly InstructExecutor _executor;
private readonly LLamaContext _context;

public LlamaSharpTextGeneration(LlamaSharpConfig config)
/// <summary>
/// Initializes a new instance of the <see cref="LlamaSharpTextGeneration"/> class.
/// </summary>
/// <param name="config">The configuration for LLamaSharp.</param>
public LlamaSharpTextGeneration(LLamaSharpConfig config)
{
this._config = config;
var parameters = new ModelParams(config.ModelPath)
Expand All @@ -31,12 +38,14 @@ public LlamaSharpTextGeneration(LlamaSharpConfig config)

}

/// <inheritdoc/>
public void Dispose()
{
_context.Dispose();
_weights.Dispose();
}

/// <inheritdoc/>
public IAsyncEnumerable<string> GenerateTextAsync(string prompt, TextGenerationOptions options, CancellationToken cancellationToken = default)
{
return _executor.InferAsync(prompt, OptionsToParams(options), cancellationToken: cancellationToken);
Expand Down

0 comments on commit a49438e

Please sign in to comment.