diff --git a/LLama.Examples/Examples/BatchedExecutorGuidance.cs b/LLama.Examples/Examples/BatchedExecutorGuidance.cs
index a901e0a58..d69552472 100644
--- a/LLama.Examples/Examples/BatchedExecutorGuidance.cs
+++ b/LLama.Examples/Examples/BatchedExecutorGuidance.cs
@@ -74,6 +74,7 @@ await AnsiConsole
                     // Sample from the "guided" conversation. This sampler will internally use the "guidance" conversation
                     // to steer the conversation. See how this is done in GuidedSampler.ProcessLogits (bottom of this file).
                     var g = guidedSampler.Sample(executor.Context.NativeHandle, guided.Sample(), []);
+                    guidedSampler.Accept(executor.Context.NativeHandle, g);
                     guidedDecoder.Add(g);

                     // Use this token to advance both guided _and_ guidance. Keeping them in sync (except for the initial prompt).
@@ -110,10 +111,6 @@ protected override LLamaToken ProcessTokenDataArray(SafeLLamaContextHandle ctx,
            return candidates.SampleToken(ctx);
        }

-        public override void Accept(SafeLLamaContextHandle ctx, LLamaToken token)
-        {
-        }
-
        public override ISamplingPipeline Clone()
        {
            throw new NotSupportedException();