-
Notifications
You must be signed in to change notification settings - Fork 345
/
ExampleRunner.cs
67 lines (61 loc) · 3.12 KB
/
ExampleRunner.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
using Spectre.Console;
using LLama.Examples.Examples;
/// <summary>
/// Interactive console menu that lets the user pick and run one of the
/// LLamaSharp example programs in a loop.
/// </summary>
public class ExampleRunner
{
    // Maps the menu label shown to the user onto the example's entry point.
    // The final "Exit" entry terminates the process, which is the only way
    // out of the loop in Run().
    private static readonly Dictionary<string, Func<Task>> Examples = new()
    {
        { "Chat Session: LLama3", LLama3ChatSession.Run },
        { "Chat Session: LLama2", LLama2ChatSession.Run },
        { "Chat Session: History", ChatSessionWithHistory.Run },
        { "Chat Session: Role names", ChatSessionWithRoleName.Run },
        { "Chat Session: Role names stripped", ChatSessionStripRoleName.Run },
        { "Chat Session: Pre-processing and reset", ChatSessionWithRestart.Run },
        { "Chat Session: Coding Assistant", CodingAssistant.Run },
        { "Chat Session: Automatic conversation", TalkToYourself.Run },
        { "Chat Session: Chinese characters", ChatChineseGB2312.Run },
        { "Executor: Interactive mode chat", InteractiveModeExecute.Run },
        { "Executor: Llava Interactive mode chat", LlavaInteractiveModeExecute.Run },
        { "Executor: Instruct mode chat", InstructModeExecute.Run },
        { "Executor: Stateless mode chat", StatelessModeExecute.Run },
        { "Save and Load: chat session", SaveAndLoadSession.Run },
        { "Save and Load: state of model and executor", LoadAndSaveState.Run },
        { "LLama Model: Get embeddings", GetEmbeddings.Run },
        { "LLama Model: Quantize", QuantizeModel.Run },
        { "Grammar: Constrain response to json format", GrammarJsonResponse.Run },
        { "Kernel Memory: Document Q&A", KernelMemory.Run },
        { "Kernel Memory: Save and Load", KernelMemorySaveAndLoad.Run },
        { "Semantic Kernel: HomeAutomation", SemanticKernelHomeAutomation.Run },
        { "Semantic Kernel: Prompt", SemanticKernelPrompt.Run },
        { "Semantic Kernel: Chat", SemanticKernelChat.Run },
        { "Semantic Kernel: Store", SemanticKernelMemory.Run },
        { "Batched Executor: Save/Load", BatchedExecutorSaveAndLoad.Run },
        { "Batched Executor: Fork", BatchedExecutorFork.Run },
        { "Batched Executor: Rewind", BatchedExecutorRewind.Run },
        { "Batched Executor: LLava", BatchedExecutorLLava.Run },
        { "Batched Executor: BoolQ Benchmark", BatchedExecutorBoolQ.Run },
        { "Batched Executor: Beam Search", BatchedExecutorBeamSearch.Run },
        { "Custom Sampling Pipeline", CustomSampler.Run },
        { "Speech Chat: Integration with Whisper.net", SpeechChat.Run },
        { "Exit", () => { Environment.Exit(0); return Task.CompletedTask; } }
    };

    /// <summary>
    /// Shows the example-selection menu, runs the chosen example, and then
    /// waits for ENTER before returning to the menu. Loops forever; the
    /// process exits only via the "Exit" menu entry.
    /// </summary>
    public static async Task Run()
    {
        AnsiConsole.Write(new Rule("LLamaSharp Examples"));

        while (true)
        {
            var choice = AnsiConsole.Prompt(
                new SelectionPrompt<string>()
                    .Title("Please choose[green] an example[/] to run: ")
                    .AddChoices(Examples.Keys));

            if (Examples.TryGetValue(choice, out var example))
            {
                AnsiConsole.Write(new Rule(choice));
                try
                {
                    await example();
                }
                catch (Exception ex)
                {
                    // A faulty example (missing model file, native library load
                    // failure, etc.) must not crash the whole runner — report it
                    // and fall through to the "press ENTER" prompt so the user
                    // gets back to the menu.
                    AnsiConsole.WriteException(ex);
                }
            }

            AnsiConsole.Reset();
            AnsiConsole.MarkupLine("Press ENTER to go to the main menu...");
            Console.ReadLine();
            AnsiConsole.Clear();
        }
    }
}