
Commit

Added checks in Decode to skip doing anything if the batch is empty.
martindevans committed Apr 24, 2024
1 parent ccc49eb commit 2581276
Showing 1 changed file with 6 additions and 0 deletions.
6 changes: 6 additions & 0 deletions LLama/Native/SafeLLamaContextHandle.cs
@@ -368,6 +368,9 @@ public uint TokenToSpan(LLamaToken token, Span<byte> dest)
     /// </returns>
     public DecodeResult Decode(LLamaBatch batch)
     {
+        if (batch.TokenCount == 0)
+            return DecodeResult.Ok;
+
         lock (GlobalInferenceLock)
         using (batch.ToNativeBatch(out var nb))
             return (DecodeResult)llama_decode(this, nb);
@@ -383,6 +386,9 @@ public DecodeResult Decode(LLamaBatch batch)
     /// <returns>A tuple, containing the decode result and the number of tokens that have <b>not</b> been decoded yet.</returns>
     internal (DecodeResult, int) Decode(List<LLamaToken> tokens, LLamaSeqId id, LLamaBatch batch, ref int n_past)
     {
+        if (tokens.Count == 0)
+            return (DecodeResult.Ok, 0);
+
         var batchSize = checked((int)BatchSize);
 
         // Evaluate the prompt, in chunks smaller than the max batch size
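Viewed from the caller's side, the effect of both guards is that flushing an empty batch becomes a harmless no-op that reports success, rather than handing a zero-token batch to the native llama_decode call. A minimal caller-side sketch in C#, assuming an existing SafeLLamaContextHandle named context and that a LLamaBatch can be constructed empty (the method and type names come from the diff above; the surrounding setup is illustrative, not part of this commit):

    // Illustrative only: `context` is an already-created SafeLLamaContextHandle.
    var batch = new LLamaBatch();

    // ... tokens may or may not be added here, depending on runtime state ...

    // With this commit, an empty batch short-circuits inside Decode and
    // returns DecodeResult.Ok without touching the native llama_decode.
    var result = context.Decode(batch);
    System.Diagnostics.Debug.Assert(result == DecodeResult.Ok);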
