Commit 6d1f7e2
Add Microsoft.ML.GenAI.Phi, test package and sample project. (#7184)
* add genai.phi and tests
* formatter
* refactor Phi3Tokenizer
* update
* add configuration for phi-series
* add semantic kernel and autogen integration
* update
* add Microsoft.ML.GenAI.Sample
* use tokenizer model from testTokenizer package
* use defaults
* add quantize linear
* use version string
* remove special token from CreatePhi2 API
* set up quantize sample
* initialize linear with zeros
* update sample
* add 6.0 to targetframework
* fix tests
* update
* remove Phi3Tokenizer and use LlamaTokenizer instead
* revert change in tokenizer package
* run test on x64
* fix tests
* check in approved file
* run test in net6.0
* use meta device
* copy approval tests to output folder
* set up approval test file location
* fix comment
* rename to AddGenAITextGeneration and AddGenAIChatCompletion
* Update job-template.yml
* add mit license
* add reference
* bump code coverage version
* add <PreserveCompilationContext>true</PreserveCompilationContext>
* add runtime package
* remove flag
* add flag
* fix build error
* update
* update
1 parent f72c9d2 commit 6d1f7e2

File tree: 68 files changed, +6803 additions, −190 deletions

Microsoft.ML.sln

Lines changed: 45 additions & 1 deletion
@@ -176,7 +176,15 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.TorchSharp.Tes
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.TensorFlow.Tests", "test\Microsoft.ML.TensorFlow.Tests\Microsoft.ML.TensorFlow.Tests.csproj", "{763FF013-8309-4680-A769-B54E7BB99612}"
 EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.GenAI.Core", "src\Microsoft.ML.GenAI.Core\Microsoft.ML.GenAI.Core.csproj", "{DB2CA055-8ABD-4E3E-8089-5B64C3415E85}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.GenAI.Core", "src\Microsoft.ML.GenAI.Core\Microsoft.ML.GenAI.Core.csproj", "{DB2CA055-8ABD-4E3E-8089-5B64C3415E85}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.GenAI.Phi", "src\Microsoft.ML.GenAI.Phi\Microsoft.ML.GenAI.Phi.csproj", "{694BF884-B2E4-4E1C-9342-0564BAAC4575}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.GenAI.Phi.Tests", "test\Microsoft.ML.GenAI.Phi.Tests\Microsoft.ML.GenAI.Phi.Tests.csproj", "{867FFC34-DFA7-400F-B9BB-85158326CE08}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.GenAI.Samples", "docs\samples\Microsoft.ML.GenAI.Samples\Microsoft.ML.GenAI.Samples.csproj", "{1D4AD9A3-19AF-432B-889D-A63FE6D7BD47}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.GenAI.Core.Tests", "test\Microsoft.ML.GenAI.Core.Tests\Microsoft.ML.GenAI.Core.Tests.csproj", "{14AB0804-D4CE-4634-B544-5A8587620783}"
 EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -838,6 +846,38 @@ Global
 		{DB2CA055-8ABD-4E3E-8089-5B64C3415E85}.Release|Any CPU.Build.0 = Release|Any CPU
 		{DB2CA055-8ABD-4E3E-8089-5B64C3415E85}.Release|x64.ActiveCfg = Release|Any CPU
 		{DB2CA055-8ABD-4E3E-8089-5B64C3415E85}.Release|x64.Build.0 = Release|Any CPU
+		{694BF884-B2E4-4E1C-9342-0564BAAC4575}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{694BF884-B2E4-4E1C-9342-0564BAAC4575}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{694BF884-B2E4-4E1C-9342-0564BAAC4575}.Debug|x64.ActiveCfg = Debug|Any CPU
+		{694BF884-B2E4-4E1C-9342-0564BAAC4575}.Debug|x64.Build.0 = Debug|Any CPU
+		{694BF884-B2E4-4E1C-9342-0564BAAC4575}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{694BF884-B2E4-4E1C-9342-0564BAAC4575}.Release|Any CPU.Build.0 = Release|Any CPU
+		{694BF884-B2E4-4E1C-9342-0564BAAC4575}.Release|x64.ActiveCfg = Release|Any CPU
+		{694BF884-B2E4-4E1C-9342-0564BAAC4575}.Release|x64.Build.0 = Release|Any CPU
+		{867FFC34-DFA7-400F-B9BB-85158326CE08}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{867FFC34-DFA7-400F-B9BB-85158326CE08}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{867FFC34-DFA7-400F-B9BB-85158326CE08}.Debug|x64.ActiveCfg = Debug|Any CPU
+		{867FFC34-DFA7-400F-B9BB-85158326CE08}.Debug|x64.Build.0 = Debug|Any CPU
+		{867FFC34-DFA7-400F-B9BB-85158326CE08}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{867FFC34-DFA7-400F-B9BB-85158326CE08}.Release|Any CPU.Build.0 = Release|Any CPU
+		{867FFC34-DFA7-400F-B9BB-85158326CE08}.Release|x64.ActiveCfg = Release|Any CPU
+		{867FFC34-DFA7-400F-B9BB-85158326CE08}.Release|x64.Build.0 = Release|Any CPU
+		{1D4AD9A3-19AF-432B-889D-A63FE6D7BD47}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{1D4AD9A3-19AF-432B-889D-A63FE6D7BD47}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{1D4AD9A3-19AF-432B-889D-A63FE6D7BD47}.Debug|x64.ActiveCfg = Debug|Any CPU
+		{1D4AD9A3-19AF-432B-889D-A63FE6D7BD47}.Debug|x64.Build.0 = Debug|Any CPU
+		{1D4AD9A3-19AF-432B-889D-A63FE6D7BD47}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{1D4AD9A3-19AF-432B-889D-A63FE6D7BD47}.Release|Any CPU.Build.0 = Release|Any CPU
+		{1D4AD9A3-19AF-432B-889D-A63FE6D7BD47}.Release|x64.ActiveCfg = Release|Any CPU
+		{1D4AD9A3-19AF-432B-889D-A63FE6D7BD47}.Release|x64.Build.0 = Release|Any CPU
+		{14AB0804-D4CE-4634-B544-5A8587620783}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{14AB0804-D4CE-4634-B544-5A8587620783}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{14AB0804-D4CE-4634-B544-5A8587620783}.Debug|x64.ActiveCfg = Debug|Any CPU
+		{14AB0804-D4CE-4634-B544-5A8587620783}.Debug|x64.Build.0 = Debug|Any CPU
+		{14AB0804-D4CE-4634-B544-5A8587620783}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{14AB0804-D4CE-4634-B544-5A8587620783}.Release|Any CPU.Build.0 = Release|Any CPU
+		{14AB0804-D4CE-4634-B544-5A8587620783}.Release|x64.ActiveCfg = Release|Any CPU
+		{14AB0804-D4CE-4634-B544-5A8587620783}.Release|x64.Build.0 = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE
@@ -925,6 +965,10 @@ Global
 		{AB8D68F1-6C3E-41FD-B0EC-A093E009341D} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
 		{763FF013-8309-4680-A769-B54E7BB99612} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
 		{DB2CA055-8ABD-4E3E-8089-5B64C3415E85} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
+		{694BF884-B2E4-4E1C-9342-0564BAAC4575} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
+		{867FFC34-DFA7-400F-B9BB-85158326CE08} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
+		{1D4AD9A3-19AF-432B-889D-A63FE6D7BD47} = {DA452A53-2E94-4433-B08C-041EDEC729E6}
+		{14AB0804-D4CE-4634-B544-5A8587620783} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
 	EndGlobalSection
 	GlobalSection(ExtensibilityGlobals) = postSolution
 		SolutionGuid = {41165AF1-35BB-4832-A189-73060F82B01D}

THIRD-PARTY-NOTICES.TXT

Lines changed: 26 additions & 0 deletions
@@ -171,3 +171,29 @@ distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
+
+License notice for TorchSharp.PyBridge
+------------------------------------------
+https://github.com/shaltielshmid/TorchSharp.PyBridge/blob/main/LICENSE
+
+MIT License
+
+Copyright (c) 2023 shaltielshmid
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

build/ci/job-template.yml

Lines changed: 1 addition & 1 deletion
@@ -121,7 +121,7 @@ jobs:
 - ${{ if eq(parameters.nightlyBuild, 'false') }}:
   - ${{ if eq(parameters.innerLoop, 'false') }}:
     - ${{ if and(eq(parameters.runSpecific, 'false'), eq(parameters.useVSTestTask, 'false')) }}:
-      - script: set PATH=%PATH%;%USERPROFILE%\.nuget\packages\libtorch-cpu-win-x64\2.2.0.1\runtimes\win-x64\native;%USERPROFILE%\.nuget\packages\torchsharp\0.102.5\runtimes\win-x64\native & ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest /p:Coverage=${{ parameters.codeCoverage }} $(testTargetFramework)
+      - script: set PATH=%PATH%;%USERPROFILE%\.nuget\packages\libtorch-cpu-win-x64\2.2.1.1\runtimes\win-x64\native;%USERPROFILE%\.nuget\packages\torchsharp\0.102.7\runtimes\win-x64\native & ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest /p:Coverage=${{ parameters.codeCoverage }} $(testTargetFramework)
        displayName: Run All Tests.
   - ${{ if and(eq(parameters.runSpecific, 'true'), eq(parameters.useVSTestTask, 'false')) }}:
     - script: ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest /p:TestRunnerAdditionalArguments='-trait$(spaceValue)Category=RunSpecificTest' /p:Coverage=${{ parameters.codeCoverage }} $(testTargetFramework)
docs/samples/Microsoft.ML.GenAI.Samples/Microsoft.ML.GenAI.Samples.csproj

Lines changed: 20 additions & 0 deletions (new file)

<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\..\src\Microsoft.ML.GenAI.Core\Microsoft.ML.GenAI.Core.csproj" />
    <ProjectReference Include="..\..\..\src\Microsoft.ML.GenAI.Phi\Microsoft.ML.GenAI.Phi.csproj" />
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="TorchSharp-cuda-windows" Version="0.102.5" Condition="$([MSBuild]::IsOSPlatform('Windows'))" />
    <PackageReference Include="Microsoft.SemanticKernel" Version="$(SemanticKernelVersion)" />
  </ItemGroup>

</Project>
docs/samples/Microsoft.ML.GenAI.Samples/Phi3Mini/AutoGenSample.cs

Lines changed: 39 additions & 0 deletions (new file)

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using AutoGen.Core;
using Microsoft.ML.GenAI.Phi;
using static TorchSharp.torch;
using TorchSharp;
using Microsoft.ML.GenAI.Core;
using Microsoft.ML.GenAI.Core.Extension;

namespace Microsoft.ML.GenAI.Samples.Phi3Mini;

public class AutoGenSample
{
    public static async Task RunAsync()
    {
        var device = "cuda";
        if (device == "cuda")
        {
            torch.InitializeDeviceType(DeviceType.CUDA);
        }

        var defaultType = ScalarType.Float16;
        torch.manual_seed(1);
        torch.set_default_dtype(defaultType);
        var weightFolder = @"C:\Users\xiaoyuz\source\repos\Phi-3-mini-4k-instruct";
        var pipeline = Utils.LoadPhi3Mini4KFromFolder(weightFolder, device: device);

        // agent
        var agent = new Phi3Agent(pipeline, "assistant")
            .RegisterPrintMessage();
        var question = @"write a C# program to calculate the factorial of a number";

        // chat with the assistant
        await agent.SendAsync(question);
    }
}
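The sample above issues a single turn through AutoGen's `SendAsync` convenience extension. A rough multi-turn sketch, assuming AutoGen.Core's lower-level `IAgent.GenerateReplyAsync` and `TextMessage`/`Role` types (which come from the referenced AutoGen package, not from this commit), could continue the conversation like this:

// Hypothetical multi-turn follow-up (not part of this commit); assumes
// AutoGen.Core's TextMessage/Role types and IAgent.GenerateReplyAsync.
var history = new List<IMessage>
{
    new TextMessage(Role.User, question),
};
var reply = await agent.GenerateReplyAsync(history);        // first answer
history.Add(reply);
history.Add(new TextMessage(Role.User, "Now rewrite it using recursion."));
var secondReply = await agent.GenerateReplyAsync(history);  // follow-up turn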
docs/samples/Microsoft.ML.GenAI.Samples/Phi3Mini/SemanticKernelSample.cs

Lines changed: 62 additions & 0 deletions (new file)

using Microsoft.ML.GenAI.Phi.Extension;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using TorchSharp;
using static TorchSharp.torch;

namespace Microsoft.ML.GenAI.Samples.Phi3Mini;

public class SemanticKernelSample
{
    public static async Task RunChatCompletionSample()
    {
        var device = "cuda";
        if (device == "cuda")
        {
            torch.InitializeDeviceType(DeviceType.CUDA);
        }

        var defaultType = ScalarType.Float16;
        torch.manual_seed(1);
        torch.set_default_dtype(defaultType);
        var weightFolder = @"C:\Users\xiaoyuz\source\repos\Phi-3-mini-4k-instruct";
        var pipeline = Utils.LoadPhi3Mini4KFromFolder(weightFolder, device: device);

        var kernel = Kernel.CreateBuilder()
            .AddGenAIChatCompletion(pipeline)
            .Build();
        var chatService = kernel.GetRequiredService<IChatCompletionService>();
        var chatHistory = new ChatHistory();
        chatHistory.AddSystemMessage("you are a helpful assistant");
        chatHistory.AddUserMessage("write a C# program to calculate the factorial of a number");

        await foreach (var response in chatService.GetStreamingChatMessageContentsAsync(chatHistory))
        {
            Console.Write(response);
        }
    }

    public static async Task RunTextGenerationSample()
    {
        var device = "cuda";
        if (device == "cuda")
        {
            torch.InitializeDeviceType(DeviceType.CUDA);
        }

        var defaultType = ScalarType.Float16;
        torch.manual_seed(1);
        torch.set_default_dtype(defaultType);
        var weightFolder = @"C:\Users\xiaoyuz\source\repos\Phi-3-mini-4k-instruct";
        var pipeline = Utils.LoadPhi3Mini4KFromFolder(weightFolder, device);

        var kernel = Kernel.CreateBuilder()
            .AddGenAITextGeneration(pipeline)
            .Build();

        var response = await kernel.InvokePromptAsync("Tell a joke");
        Console.WriteLine(response);
    }
}
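The chat sample streams tokens as they are generated. If a single complete reply is enough, the same chat service exposes a non-streaming call; a minimal sketch using the standard Semantic Kernel API (not code from this commit):

// Non-streaming variant of the loop in RunChatCompletionSample (sketch only).
var reply = await chatService.GetChatMessageContentAsync(chatHistory);
Console.WriteLine(reply.Content);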
docs/samples/Microsoft.ML.GenAI.Samples/Phi3Mini/Utils.cs

Lines changed: 103 additions & 0 deletions (new file)

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.ML.GenAI.Core;
using Microsoft.ML.GenAI.Phi;
using Tensorboard;
using static TorchSharp.torch;
using TorchSharp;
using Microsoft.ML.GenAI.Core.Extension;
using System.Text.Json;
using Microsoft.ML.Tokenizers;

namespace Microsoft.ML.GenAI.Samples.Phi3Mini;

internal static class Utils
{
    public static ICausalLMPipeline<Tokenizer, Phi3ForCasualLM> LoadPhi3Mini4KFromFolder(
        string weightFolder,
        string configName = "config.json",
        string device = "cuda",
        int modelSizeOnCudaInGB = 16,
        int modelSizeOnMemoryInGB = 64,
        int modelSizeOnDiskInGB = 200,
        bool quantizeToInt8 = false,
        bool quantizeToInt4 = false)
    {
        Console.WriteLine("Loading Phi3 from huggingface model weight folder");
        // Create the model on the meta device first, so no real memory is
        // allocated while the per-layer device map is inferred.
        torch.set_default_device("meta");
        var configPath = System.IO.Path.Combine(weightFolder, configName);
        var config = JsonSerializer.Deserialize<Phi3Config>(System.IO.File.ReadAllText(configPath)) ?? throw new ArgumentNullException(nameof(configPath));
        var timer = System.Diagnostics.Stopwatch.StartNew();
        var model = new Phi3ForCasualLM(config);
        var tokenizerPath = System.IO.Path.Combine(weightFolder, "tokenizer.model");
        var tokenizer = Phi3TokenizerHelper.FromPretrained(tokenizerPath);

        if (quantizeToInt8)
        {
            model.ToInt8QuantizeModule();
        }
        else if (quantizeToInt4)
        {
            model.ToInt4QuantizeModule();
        }

        var deviceSizeMap = new Dictionary<string, long>
        {
            ["cuda"] = modelSizeOnCudaInGB * 1L * 1024 * 1024 * 1024,
            ["cpu"] = modelSizeOnMemoryInGB * 1L * 1024 * 1024 * 1024,
            ["disk"] = modelSizeOnDiskInGB * 1L * 1024 * 1024 * 1024,
        };

        var deviceMap = model.InferDeviceMapForEachLayer(
            devices: ["cuda", "cpu", "disk"],
            deviceSizeMapInByte: deviceSizeMap);

        var deviceMapJson = JsonSerializer.Serialize(deviceMap, new JsonSerializerOptions { WriteIndented = true });
        Console.WriteLine("Device map:");
        Console.WriteLine(deviceMapJson);

        // load weight: recreate the model on the CPU so the safetensors can
        // actually be materialized, then dispatch layers per the device map.
        torch.set_default_device("cpu");

        Console.WriteLine("Start loading");
        timer = System.Diagnostics.Stopwatch.StartNew();
        model = new Phi3ForCasualLM(config);
        timer.Stop();
        Console.WriteLine($"Phi3 model created in {timer.ElapsedMilliseconds / 1000} s");

        timer = System.Diagnostics.Stopwatch.StartNew();
        model.LoadSafeTensors(weightFolder);
        timer.Stop();
        Console.WriteLine($"Phi3 weight loaded in {timer.ElapsedMilliseconds / 1000} s");

        if (quantizeToInt8 || quantizeToInt4)
        {
            timer = System.Diagnostics.Stopwatch.StartNew();
            Console.WriteLine("Start quantizing if needed");
            if (quantizeToInt8)
            {
                model.ToInt8QuantizeModule();
            }
            else if (quantizeToInt4)
            {
                model.ToInt4QuantizeModule();
            }
            Console.WriteLine("Quantizing done");
            timer.Stop();
            Console.WriteLine($"Quantizing done in {timer.ElapsedMilliseconds / 1000} s");
        }

        timer = System.Diagnostics.Stopwatch.StartNew();
        Console.WriteLine($"Start loading to device: {device}");
        model = model.ToDynamicLoadingModel(deviceMap, "cuda");
        timer.Stop();
        Console.WriteLine($"Phi3 loaded to device: {device} in {timer.ElapsedMilliseconds / 1000} s");
        var pipeline = new CausalLMPipeline<Tokenizer, Phi3ForCasualLM>(tokenizer, model, device);
        torch.set_default_device(device);

        return pipeline;
    }
}
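The helper constructs the model twice on purpose: once on the meta device, where parameters are shape-only, to infer a per-layer device map from the cuda/cpu/disk size budgets, and once on the CPU to receive the safetensors weights before layers are dispatched. A minimal usage sketch against the signature above (the weight-folder path is a placeholder for a local Phi-3-mini-4k-instruct download):

// Sketch: load Phi-3 mini with int8 weight quantization via the helper above.
// The folder path is hypothetical; point it at a local model download.
var pipeline = Utils.LoadPhi3Mini4KFromFolder(
    weightFolder: @"C:\models\Phi-3-mini-4k-instruct",
    device: "cuda",
    quantizeToInt8: true);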
docs/samples/Microsoft.ML.GenAI.Samples/Program.cs

Lines changed: 4 additions & 0 deletions (new file)

// See https://aka.ms/new-console-template for more information
using Microsoft.ML.GenAI.Samples.Phi3Mini;

await SemanticKernelSample.RunChatCompletionSample();
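Program.cs wires up only one entry point; the other samples added in this commit can be run by swapping the awaited call:

// Alternative entry points defined elsewhere in this commit:
// await SemanticKernelSample.RunTextGenerationSample();
// await AutoGenSample.RunAsync();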

eng/Versions.props

Lines changed: 6 additions & 2 deletions
@@ -63,18 +63,22 @@
   <TensorflowDotNETVersion>0.20.1</TensorflowDotNETVersion>
   <TensorFlowMajorVersion>2</TensorFlowMajorVersion>
   <TensorFlowVersion>2.3.1</TensorFlowVersion>
+  <TorchSharpPyBridgeVersion>1.4.1</TorchSharpPyBridgeVersion>
+  <AutoGenVersion>0.0.15</AutoGenVersion>
+  <SemanticKernelVersion>1.15.0</SemanticKernelVersion>
   <TorchSharpVersion>0.102.7</TorchSharpVersion>
   <LibTorchVersion>2.2.1.1</LibTorchVersion>
   <!-- Build/infrastructure Dependencies -->
   <CodecovVersion>1.12.4</CodecovVersion>
-  <CoverletCollectorVersion>3.1.2</CoverletCollectorVersion>
-  <CoverletMsbuildVersion>3.1.2</CoverletMsbuildVersion>
+  <CoverletCollectorVersion>6.0.2</CoverletCollectorVersion>
+  <CoverletMsbuildVersion>6.0.0</CoverletMsbuildVersion>
   <MicrosoftCodeAnalysisCSharpInternalAnalyzerVersion>3.3.1</MicrosoftCodeAnalysisCSharpInternalAnalyzerVersion>
   <MicrosoftCSharpVersion>4.5.0</MicrosoftCSharpVersion>
   <ReportGeneratorVersion>4.3.6</ReportGeneratorVersion>
   <SystemCompositionVersion>1.2.0</SystemCompositionVersion>
   <!-- Test-only Dependencies -->
   <ApprovalTestsVersion>5.4.7</ApprovalTestsVersion>
+  <MoqVersion>4.20.70</MoqVersion>
   <BenchmarkDotNetVersion>0.13.1</BenchmarkDotNetVersion>
   <DotNetRuntime60Version>6.0.26</DotNetRuntime60Version>
   <DotNetRuntime80Version>8.0.1</DotNetRuntime80Version>
