From d92c5fcc03db22ec2f6da31e915f0f9509e6ceec Mon Sep 17 00:00:00 2001 From: isidorn Date: Wed, 31 Jul 2024 15:45:34 +0200 Subject: [PATCH] use gpt-4o in sample --- chat-sample/README.md | 2 +- chat-sample/src/extension.ts | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/chat-sample/README.md b/chat-sample/README.md index e1b9543da..ba8b43732 100644 --- a/chat-sample/README.md +++ b/chat-sample/README.md @@ -9,7 +9,7 @@ When an extension uses the Chat or the Language Model API, we call it a GitHub C This GitHub Copilot Extension sample shows: - How to contribute a chat participant to the GitHub Copilot Chat view. -- How to use the Language Model API to request access to the Language Model (gpt-3.5-turbo, gpt-4). +- How to use the Language Model API to request access to the Language Model (gpt-4o, gpt-3.5-turbo, gpt-4). ![demo](./demo.png) diff --git a/chat-sample/src/extension.ts b/chat-sample/src/extension.ts index d5823f70d..7caeb5753 100644 --- a/chat-sample/src/extension.ts +++ b/chat-sample/src/extension.ts @@ -11,7 +11,8 @@ interface ICatChatResult extends vscode.ChatResult { } } -const MODEL_SELECTOR: vscode.LanguageModelChatSelector = { vendor: 'copilot', family: 'gpt-3.5-turbo' }; +// Use gpt-4o since it is fast and high quality. gpt-3.5-turbo and gpt-4 are also available. +const MODEL_SELECTOR: vscode.LanguageModelChatSelector = { vendor: 'copilot', family: 'gpt-4o' }; export function activate(context: vscode.ExtensionContext) { @@ -24,6 +25,7 @@ export function activate(context: vscode.ExtensionContext) { stream.progress('Picking the right topic to teach...'); const topic = getTopic(context.history); try { + // To get a list of all available models, do not pass any selector to selectChatModels. 
const [model] = await vscode.lm.selectChatModels(MODEL_SELECTOR); if (model) { const messages = [ @@ -118,7 +120,7 @@ export function activate(context: vscode.ExtensionContext) { let chatResponse: vscode.LanguageModelChatResponse | undefined; try { - const [model] = await vscode.lm.selectChatModels({ vendor: 'copilot', family: 'gpt-3.5-turbo' }); + const [model] = await vscode.lm.selectChatModels(MODEL_SELECTOR); if (!model) { console.log('Model not found. Please make sure the GitHub Copilot Chat extension is installed and enabled.'); return;