
Commit

Removed (marked as obsolete) prompting with a string for `Conversation`. Tokenization requires extra parameters (e.g. addBos, special) which require special consideration. For now it's better to tokenize using other tools and pass the tokens directly.

martindevans committed May 6, 2024
1 parent 6bf010d commit 3ba4975
Showing 6 changed files with 8 additions and 24 deletions.
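
To make the migration concrete, here is a minimal sketch (not taken from this commit's diff; the prompt text is a placeholder and `executor` is assumed to be an existing BatchedExecutor inside an async method) showing the tokenize-then-prompt pattern the example files below switch to:

// Hypothetical migration sketch; the prompt text is a placeholder.
using var conversation = executor.Create();

// Previously (now obsolete): conversation.Prompt("The cat sat on the mat");
// Now: tokenize with the executor's context, then pass the tokens.
var tokens = executor.Context.Tokenize("The cat sat on the mat");
conversation.Prompt(tokens);

// Run an inference step over the batch, as in the examples below.
await executor.Infer();
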
2 changes: 1 addition & 1 deletion LLama.Examples/Examples/BatchedExecutorFork.cs
@@ -32,7 +32,7 @@ public static async Task Run()

// Evaluate the initial prompt to create one conversation
using var start = executor.Create();
- start.Prompt(prompt);
+ start.Prompt(executor.Context.Tokenize(prompt));
await executor.Infer();

// Create the root node of the tree
4 changes: 2 additions & 2 deletions LLama.Examples/Examples/BatchedExecutorGuidance.cs
@@ -34,9 +34,9 @@ public static async Task Run()

// Load the two prompts into two conversations
using var guided = executor.Create();
- guided.Prompt(positivePrompt);
+ guided.Prompt(executor.Context.Tokenize(positivePrompt));
using var guidance = executor.Create();
- guidance.Prompt(negativePrompt);
+ guidance.Prompt(executor.Context.Tokenize(negativePrompt));

// Run inference to evaluate prompts
await AnsiConsole
2 changes: 1 addition & 1 deletion LLama.Examples/Examples/BatchedExecutorRewind.cs
@@ -33,7 +33,7 @@ public static async Task Run()

// Evaluate the initial prompt to create one conversation
using var conversation = executor.Create();
- conversation.Prompt(prompt);
+ conversation.Prompt(executor.Context.Tokenize(prompt));

// Create the start node wrapping the conversation
var node = new Node(executor.Context);
2 changes: 1 addition & 1 deletion LLama.Examples/Examples/BatchedExecutorSaveAndLoad.cs
@@ -31,7 +31,7 @@ public static async Task Run()

// Create a conversation
var conversation = executor.Create();
- conversation.Prompt(prompt);
+ conversation.Prompt(executor.Context.Tokenize(prompt));

// Run inference loop
var decoder = new StreamingTokenDecoder(executor.Context);
17 changes: 0 additions & 17 deletions LLama/Batched/BatchedExecutor.cs
@@ -55,23 +55,6 @@ public BatchedExecutor(LLamaWeights model, IContextParams contextParams)
Epoch = 1;
}

- /// <summary>
- /// Start a new <see cref="Conversation"/> with the given prompt
- /// </summary>
- /// <param name="prompt"></param>
- /// <returns></returns>
- [Obsolete("Use BatchedExecutor.Create instead")]
- public Conversation Prompt(string prompt)
- {
-     if (IsDisposed)
-         throw new ObjectDisposedException(nameof(BatchedExecutor));
-
-     var conversation = Create();
-     conversation.Prompt(prompt);
-
-     return conversation;
- }

/// <summary>
/// Start a new <see cref="Conversation"/>
/// </summary>
5 changes: 3 additions & 2 deletions LLama/Batched/Conversation.cs
@@ -166,11 +166,12 @@ private void AssertCanBePrompted()
/// </summary>
/// <param name="input"></param>
/// <returns></returns>
- public void Prompt(string input)
+ [Obsolete("Tokenize the text and pass the tokens instead")]
+ public void Prompt(string input, bool addBos, bool special)
{
AssertCanBePrompted();

- Prompt(Executor.Context.Tokenize(input));
+ Prompt(Executor.Context.Tokenize(input, addBos, special));
}

/// <summary>
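
As a hedged illustration of the rationale above (not part of the diff; `userText` and the argument values are placeholders, and the parameter names follow the addBos/special naming used in the commit message and the hunk above): tokenizing outside the conversation lets the caller decide how BOS and special tokens are handled before the tokens are passed to Prompt.

// Sketch only: tokenize with explicit control over BOS and special-token
// handling, then prompt the conversation with the resulting tokens.
var tokens = executor.Context.Tokenize(userText, addBos: true, special: false);
conversation.Prompt(tokens);
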

0 comments on commit 3ba4975
