diff --git a/SemanticKernelStudy.Test/GlobalUsing.cs b/SemanticKernelStudy.Test/GlobalUsing.cs
index 9a3082f..fbe5d57 100644
--- a/SemanticKernelStudy.Test/GlobalUsing.cs
+++ b/SemanticKernelStudy.Test/GlobalUsing.cs
@@ -22,6 +22,8 @@
 global using Microsoft.Extensions.Options;
 global using Microsoft.Extensions.DependencyInjection;
 global using Microsoft.Extensions.Hosting;
+global using Microsoft.Extensions.AI;
+
 global using Microsoft.SemanticKernel;
 global using Microsoft.SemanticKernel.AudioToText;
 global using Microsoft.SemanticKernel.ChatCompletion;
@@ -46,3 +48,5 @@ global using Microsoft.SemanticKernel.Agents.Extensions;
 global using Microsoft.SemanticKernel.Agents.Chat;
 global using Microsoft.SemanticKernel.Agents.Serialization;
 global using Microsoft.SemanticKernel.Agents.OpenAI;
+
+global using Xunit.Abstractions;
diff --git a/SemanticKernelStudy.Test/KernelTest.cs b/SemanticKernelStudy.Test/KernelTest.cs
new file mode 100644
index 0000000..4ea7f03
--- /dev/null
+++ b/SemanticKernelStudy.Test/KernelTest.cs
@@ -0,0 +1,43 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+using Microsoft.Extensions.AI;
+
+namespace SemanticKernelStudy.Test
+{
+    public class SemanticKernelTest
+    {
+        private readonly ITestOutputHelper _output;
+        public SemanticKernelTest(ITestOutputHelper output)
+        {
+            _output = output;
+        }
+
+        [Fact]
+        public async Task Use_SemanticKernel_Test()
+        {
+            var kernelBuilder = Kernel.CreateBuilder();
+
+            // Add the AI service
+            kernelBuilder.AddOllamaChatClient("qwen3:0.6b", new Uri("http://localhost:11434/v1"), "ollamaService");
+
+            kernelBuilder.Plugins.AddFromType();
+
+            // Add enterprise-grade services (logging, filters, etc.)
+            kernelBuilder.Services.AddLogging(services => services.AddConsole().SetMinimumLevel(LogLevel.Trace));
+
+            Kernel kernel = kernelBuilder.Build();
+
+            FunctionResult d = await kernel.InvokePromptAsync("Hello");
+
+            ChatResponse? dd = d.GetValue<ChatResponse>();
+
+            var t = dd?.Messages.First().Text;
+
+            _output.WriteLine(t);
+        }
+    }
+}
diff --git a/SemanticKernelStudy.Test/UseXunit.cs b/SemanticKernelStudy.Test/UseXunit.cs
index 2d275a1..9d6b873 100644
--- a/SemanticKernelStudy.Test/UseXunit.cs
+++ b/SemanticKernelStudy.Test/UseXunit.cs
@@ -1,37 +1,20 @@
-using System.Threading.Tasks;
+namespace SemanticKernelStudy.Test;
 
-using Microsoft.Extensions.AI;
-
-namespace SemanticKernelStudy.Test
+public class UseXunit
 {
-    public class UseXunit
+    private readonly ITestOutputHelper _output;
+    public UseXunit(ITestOutputHelper output)
     {
-        [Fact]
-        public void Test1()
-        {
-            Assert.True(true, "Using the xUnit 2 framework!");
-        }
-
-        [Fact]
-        public async Task Test2()
-        {
-            var kernelBuilder = Kernel.CreateBuilder();
-
-            // Add the AI service
-            kernelBuilder.AddOllamaChatClient("qwen3:0.6b", new Uri("http://localhost:11434/v1"), "ollamaService");
-
-            kernelBuilder.Plugins.AddFromType();
-
-            // Add enterprise-grade services (logging, filters, etc.)
-            kernelBuilder.Services.AddLogging(services => services.AddConsole().SetMinimumLevel(LogLevel.Trace));
-
-            Kernel kernel = kernelBuilder.Build();
+        _output = output;
+    }
 
-            FunctionResult d = await kernel.InvokePromptAsync("Hello");
+    [Fact]
+    public void UseXunitFramework_Test()
+    {
+        var msg = "Using the xUnit 2 unit testing framework!";
 
-            ChatResponse? dd = d.GetValue<ChatResponse>();
+        _output.WriteLine(msg);
 
-            var t = dd?.Messages.First().Text;
-        }
+        Assert.True(true, msg);
     }
 }
diff --git a/SemanticKernelStudy.lutconfig b/SemanticKernelStudy.lutconfig
new file mode 100644
index 0000000..596a860
--- /dev/null
+++ b/SemanticKernelStudy.lutconfig
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LUTConfig Version="1.0">
+  <Repository>true</Repository>
+  <ParallelBuilds>true</ParallelBuilds>
+  <TestCaseTimeout>180000</TestCaseTimeout>
+</LUTConfig>
\ No newline at end of file
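
For reference, the sketch below (not part of the diff) illustrates the pattern KernelTest.cs exercises: a plugin class of the shape that kernelBuilder.Plugins.AddFromType expects (the concrete type argument does not appear in the diff), plus reading the reply through GetValue and the Microsoft.Extensions.AI ChatResponse type, as the test already does. The names TimePlugin, GetCurrentTime, KernelUsageSketch, and AskAsync are hypothetical and used only for illustration.

// Minimal sketch, assuming the packages the test project already references
// (Microsoft.SemanticKernel, the Ollama connector, Microsoft.Extensions.AI).
// TimePlugin, GetCurrentTime, KernelUsageSketch, and AskAsync are invented names.
using System;
using System.ComponentModel;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.AI;
using Microsoft.SemanticKernel;

// A plugin type AddFromType<T>() could register: public methods marked
// [KernelFunction] become functions the kernel can expose to the model.
public sealed class TimePlugin
{
    [KernelFunction, Description("Returns the current local time.")]
    public string GetCurrentTime() => DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss");
}

public static class KernelUsageSketch
{
    public static async Task<string?> AskAsync(string model, Uri endpoint, string prompt)
    {
        var builder = Kernel.CreateBuilder();

        // Same registration style as KernelTest.cs: an Ollama-backed chat client.
        builder.AddOllamaChatClient(model, endpoint, "ollamaService");
        builder.Plugins.AddFromType<TimePlugin>();
        Kernel kernel = builder.Build();

        // InvokePromptAsync wraps the prompt in an inline function and runs it.
        FunctionResult result = await kernel.InvokePromptAsync(prompt);

        // With a chat-client-based service the raw value can be read back as a
        // Microsoft.Extensions.AI ChatResponse, mirroring the test's GetValue call.
        ChatResponse? response = result.GetValue<ChatResponse>();
        return response?.Messages.FirstOrDefault()?.Text;
    }
}

Calling AskAsync("qwen3:0.6b", new Uri("http://localhost:11434/v1"), "Hello") would mirror the flow of Use_SemanticKernel_Test against a local Ollama instance.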