@@ -20,6 +20,7 @@ namespace OllamaStudy.UseExtensionsAI
    private readonly HttpClient _uiUiAPIHttpClient;
    private readonly HttpClient _bailianHttpClient;
    private readonly HttpClient _zipuHttpClient;
    private readonly HttpClient _siliconflowHttpClient;

    public OpenAIAPITest
    (
@@ -38,37 +39,51 @@ namespace OllamaStudy.UseExtensionsAI
        _uiUiAPIHttpClient = _httpClientFactory.CreateClient("UiUiAPIHttpClient");
        _bailianHttpClient = _httpClientFactory.CreateClient("BailianHttpClient");
        _zipuHttpClient = _httpClientFactory.CreateClient("ZiPuHttpClient");
        _siliconflowHttpClient = _httpClientFactory.CreateClient("SiliconflowHttpClient");
    }
-    #region Various business clients
+    #region Getting the various HttpClients

    /// <summary>
    /// Gets the various business clients from the OpenAIClient
    /// </summary>
    [Fact]
-    public void GetClients_Test()
+    public void Get_Clients_Test()
    {
        Assert.NotNull(_defaultHttpClient);
        Assert.NotNull(_ollamaHttpClient);
        Assert.NotNull(_uiUiAPIHttpClient);
        Assert.NotNull(_bailianHttpClient);
        Assert.NotNull(_zipuHttpClient);
        Assert.NotNull(_siliconflowHttpClient);
    }
    #endregion

    #region Audio
    /// <summary>
    /// Create speech
    /// </summary>
    /// <returns></returns>
    [Fact]
-    public async Task Audio_Test()
+    public async Task Audio_CreateSpeech_Test()
    {
        var requestData = new
        {
            //Speech model
-            model = "gpt-4o-mini-tts",
+            model = "tts-1-1106",

            //The text to generate audio for. Maximum length: 4096 characters.
-            input = "你好,上海今天的天气非常好,很适合户外游玩!",
+            input = """
+                断章
+                作者:卞之琳
+                你站在桥上看风景,
+                看风景人在楼上看你。
+                明月装饰了你的窗子,
+                你装饰了别人的梦。
+                """,

            //The voice to use when generating the audio. Supported voices: alloy, echo, fable, onyx, nova and shimmer.
-            voice = "alloy",
+            voice = "nova",

            //The audio format, defaults to mp3. Supported formats: mp3, opus, aac and flac.
            response_format = "mp3",
@@ -77,7 +92,7 @@ namespace OllamaStudy.UseExtensionsAI
            speed = 1.0f,
        };

-        using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "https://sg.uiuiapi.com/v1/audio/speech")
+        using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "v1/audio/speech")
        {
            Content = JsonContent.Create(requestData)
        };
@@ -86,17 +101,280 @@ namespace OllamaStudy.UseExtensionsAI
        responseMessage.EnsureSuccessStatusCode();

        //Handle the response
        //var responseText = await responseMessage.Content.ReadAsStringAsync();
        var responseObject = await responseMessage.Content.ReadAsByteArrayAsync();

        using FileStream stream = File.OpenWrite($"{Guid.NewGuid()}.mp3");
        stream.Write(responseObject);
    }
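    //A minimal alternative sketch for persisting the returned audio: the speech endpoint returns raw
    //audio bytes, so they can also be written in a single call. The helper name and usage are illustrative only.
    private static async Task<string> SaveSpeechAsync(byte[] audioBytes)
    {
        //Write the bytes and return the generated file name, mirroring the FileStream block above
        var fileName = $"{Guid.NewGuid()}.mp3";
        await File.WriteAllBytesAsync(fileName, audioBytes);
        return fileName;
    }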
    /// <summary>
    /// Create transcription
    /// </summary>
    /// <returns></returns>
    [Fact]
    public async Task Audio_CreateTranscriptions_Test()
    {
        var formData = new MultipartFormDataContent();

        //Open the file stream
        using var fileStream = File.OpenRead("Assets/dongdong.mp3");

        //Create the file content and add it to the form
        var fileContent = new StreamContent(fileStream);

        //(Required) The audio file object (not the file name) to transcribe, in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav or webm
        formData.Add(fileContent, "file", "dongdong.mp3");

        //(Required) ID of the model to use. Only whisper-1 is currently available
        formData.Add(new StringContent("whisper-1"), "model");

        //(Optional) The language of the input audio. Supplying it in ISO-639-1 format improves accuracy and latency
        formData.Add(new StringContent("zh"), "language");

        //(Optional) An optional text to guide the model's style or to continue a previous audio segment. The prompt should match the audio language.
        formData.Add(new StringContent("请使用严肃风格"), "prompt");

        //(Optional) The format of the transcript output, defaults to json; options: json, text, srt, verbose_json or vtt.
        formData.Add(new StringContent("json"), "response_format");

        //(Optional) Sampling temperature, between 0 and 1. Higher values like 0.8 make the output more random, while lower values like 0.2 make it more focused and deterministic.
        //If set to 0, the model uses log probabilities to automatically raise the temperature until certain thresholds are hit.
        //Defaults to 0
        formData.Add(new StringContent("0"), "temperature");

        using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "v1/audio/transcriptions")
        {
            Content = formData,
        };

        var responseMessage = await _uiUiAPIHttpClient.SendAsync(requestMessage);
        responseMessage.EnsureSuccessStatusCode();

        //Handle the response
        var responseText = await responseMessage.Content.ReadAsStringAsync();
        _output.WriteLine(responseText);

        Assert.NotNull(responseText);
        Assert.Contains("杀戮", responseText);
    }
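    //With response_format = "json" the transcription endpoint is expected to return an object with a
    //"text" field. A minimal sketch of reading that field with System.Text.Json; the payload shape is
    //an assumption and should be verified against the actual response of the endpoint in use.
    private static string? ReadTranscriptionText(string responseJson)
    {
        using var doc = System.Text.Json.JsonDocument.Parse(responseJson);
        return doc.RootElement.TryGetProperty("text", out var text) ? text.GetString() : null;
    }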
    /// <summary>
    /// Create translation
    /// </summary>
    /// <returns></returns>
    [Fact]
    public async Task Audio_CreateTranslations_Test()
    {
        var formData = new MultipartFormDataContent();

        //Open the file stream
        using var fileStream = File.OpenRead("Assets/dongdong.mp3");

        //Create the file content and add it to the form
        var fileContent = new StreamContent(fileStream);

        //(Required) The audio file object (not the file name) to translate, in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav or webm
        formData.Add(fileContent, "file", "dongdong.mp3");

        //(Required) ID of the model to use. Only whisper-1 is currently available
        formData.Add(new StringContent("whisper-1"), "model");

        //(Optional) An optional text to guide the model's style or to continue a previous audio segment. The prompt should be in English.
        formData.Add(new StringContent("Please use a serious style"), "prompt");

        //(Optional) The format of the transcript output, defaults to json; options: json, text, srt, verbose_json or vtt.
        formData.Add(new StringContent("json"), "response_format");

        //(Optional) Sampling temperature, between 0 and 1. Higher values like 0.8 make the output more random, while lower values like 0.2 make it more focused and deterministic.
        //If set to 0, the model uses log probabilities to automatically raise the temperature until certain thresholds are hit.
        //Defaults to 0
        formData.Add(new StringContent("0"), "temperature");

        using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/v1/audio/translations")
        {
            Content = formData,
        };

        var responseMessage = await _uiUiAPIHttpClient.SendAsync(requestMessage);
        responseMessage.EnsureSuccessStatusCode();

        //Handle the response
        var responseText = await responseMessage.Content.ReadAsStringAsync();
        _output.WriteLine(responseText);

        Assert.NotNull(responseText);
        Assert.Contains("killed", responseText);
    }
    #endregion

    #region Chat
    #endregion

    #region Completions
    /// <summary>
    /// Streaming completion request test
    /// </summary>
    [Fact]
    public async Task Completion_Request_Streaming_Test()
    {
        var requestData = new
        {
            model = _ollamaOptionsMonitor.CurrentValue.Model,
            prompt = """天空为什么是蓝色的?""",
        };

        using var request = new HttpRequestMessage(HttpMethod.Post, "/v1/completions")
        {
            Content = JsonContent.Create(requestData)
        };

        //Send the request as a streamed response: HttpCompletionOption.ResponseHeadersRead is the key, and only the SendAsync method accepts this parameter
        var response = await _ollamaHttpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
        response.EnsureSuccessStatusCode();

        //Handle the response
        using var responseStream = await response.Content.ReadAsStreamAsync();
        using var reader = new StreamReader(responseStream);

        //For a streamed response, read it line by line
        while (!reader.EndOfStream)
        {
            var line = await reader.ReadLineAsync();
            _output.WriteLine(line);
        }
    }
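    //The loop above only prints the raw lines. A minimal sketch of pulling the generated text out of each
    //streamed chunk, assuming the usual OpenAI-style SSE framing ("data: {json}" lines terminated by
    //"data: [DONE]") and a chunk payload shaped like { "choices": [ { "text": "..." } ] }; both are
    //assumptions to verify against the actual /v1/completions stream.
    private static string? ExtractStreamedText(string? line)
    {
        const string prefix = "data: ";
        if (string.IsNullOrWhiteSpace(line) || !line.StartsWith(prefix))
        {
            return null;
        }

        var payload = line.Substring(prefix.Length);
        if (payload == "[DONE]")
        {
            return null;
        }

        using var doc = System.Text.Json.JsonDocument.Parse(payload);
        return doc.RootElement.GetProperty("choices")[0].GetProperty("text").GetString();
    }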
    /// <summary>
    /// Non-streaming completion request test
    /// </summary>
    /// <returns></returns>
    [Fact]
    public async Task Completion_Request_NoStreaming_Test()
    {
        var requestData = new
        {
            model = _ollamaOptionsMonitor.CurrentValue.Model,
            prompt = """天空为什么是蓝色的?""",

            //Streaming: set to false to disable streaming
            stream = false,
        };

        using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/v1/completions")
        {
            Content = JsonContent.Create(requestData)
        };

        var responseMessage = await _ollamaHttpClient.SendAsync(requestMessage);
        responseMessage.EnsureSuccessStatusCode();

        //Handle the response
        var responseText = await responseMessage.Content.ReadAsStringAsync();

        Assert.NotNull(responseText);
        _output.WriteLine(responseText);
    }
    /// <summary>
    /// Completion request with all parameters test
    /// </summary>
    /// <returns></returns>
    [Fact]
    public async Task Completion_Request_FullParameters_Test()
    {
        var requestData = new
        {
            //(Required) Model name (identifier) in model:tag format; model may have an optional namespace, and tag is optional (it identifies the variant and defaults to latest)
            model = ModelSelecter.ModelWithSuffixAndImage,

            //(Required) The prompt to generate a response for: the key input
            prompt = """ 天空为什么是蓝色的?""",

            //(Optional) Defaults to 1. Generates best_of completions server-side and returns the "best" one (the one with the highest log probability per token).
            //Results cannot be streamed. When used with n, best_of controls the number of candidate completions and n specifies how many to return; best_of must be greater than n.
            //Note: because this parameter generates many completions, it can quickly consume your token quota. Use carefully and make sure you have reasonable settings for max_tokens and stop.
            best_of = 1,

            //(Optional) Defaults to false. Echo back the prompt in addition to the completion
            echo = false,

            //(Optional) Defaults to 0. Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood of repeating the same line verbatim.
            frequency_penalty = 0f,

            //(Optional) Defaults to null. Modifies the likelihood of specified tokens appearing in the completion.
            //Accepts a JSON object that maps tokens (specified by their token ID in the GPT tokenizer) to an associated bias value from -100 to 100.
            //You can use the tokenizer tool (works for GPT-2 and GPT-3) to convert text to token IDs. Mathematically, the bias is added to the logits generated by the model before sampling.
            //The exact effect varies per model, but values between -1 and 1 should decrease or increase the likelihood of selection; values like -100 or 100 should result in a ban or an exclusive selection of the relevant token.
            //For example, you can pass {"50256": -100} to prevent the <|endoftext|> token from being generated.
            logit_bias = default(object),

            //(Optional) Defaults to null.
            //Includes the log probabilities of the logprobs most likely tokens, as well as the chosen token. For example,
            //if logprobs is 5, the API returns a list of the 5 most likely tokens. The API always returns the logprob of the sampled token, so there may be up to logprobs + 1 elements in the response. The maximum value for logprobs is 5.
            logprobs = default(object),

            //(Optional) Defaults to 16.
            //The maximum number of tokens to generate in the completion. The prompt's token count plus max_tokens cannot exceed the model's context length (see the Python example for counting tokens).
            max_tokens = 16,

            //(Optional) Defaults to 1.
            //How many completions to generate for each prompt. Note: because this parameter generates many completions, it can quickly consume your token quota.
            //Use carefully and make sure you have reasonable settings for max_tokens and stop.
            n = 1,

            //(Optional) Defaults to 0.
            //Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they already appear in the text so far, increasing the model's likelihood of talking about new topics. See the documentation on frequency and presence penalties for more information.
            presence_penalty = 0f,

            //(Optional)
            //If specified, the system makes a best effort to sample deterministically, so that repeated requests with the same seed and parameters should return the same result.
            //Determinism is not guaranteed; refer to the system_fingerprint response parameter to monitor backend changes.
            seed = 654321,

            //(Optional) Defaults to null.
            //Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequences.
            stop = default(string),

            //(Optional) Streaming: set to false to disable streaming
            stream = true,

            //(Optional) Defaults to null.
            //The suffix that comes after a completion of inserted text.
            //Text appended after the model response: only supported by dedicated models (qwen2.5-coder:3b, etc.); models without support throw an error
            //suffix = " return result",

            //(Optional) Defaults to 1. The sampling temperature to use, between 0 and 2. Higher values (like 0.8) make the output more random, while lower values (like 0.2) make it more focused and deterministic.
            //It is generally recommended to change this or top_p, but not both.
            temperature = 1f,

            //A unique identifier representing the end user, which can help OpenAI monitor and detect abuse.
            user = "mynameisok",

            //Higher values (e.g. 0.95) produce more diverse text, while lower values (e.g. 0.5) produce more focused and conservative text. (Default: 0.9)
            top_p = 0.9f,
        };
using var request = new HttpRequestMessage(HttpMethod.Post, "/v1/completions")
|
|
|
|
|
{
|
|
|
|
|
Content = JsonContent.Create(requetData)
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
//发送请求:流式处理响应,HttpCompletionOption.ResponseHeadersRead 是关键,只在SendAsync方法中有此参数
|
|
|
|
|
var response = await _ollamaHttpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
|
|
|
|
|
response.EnsureSuccessStatusCode();
|
|
|
|
|
|
|
|
|
|
//处理响应
|
|
|
|
|
using var responseStream = await response.Content.ReadAsStreamAsync();
|
|
|
|
|
using var reader = new StreamReader(responseStream);
|
|
|
|
|
|
|
|
|
|
//如果是流式响应,则逐行读取
|
|
|
|
|
while (!reader.EndOfStream)
|
|
|
|
|
{
|
|
|
|
|
var line = await reader.ReadLineAsync();
|
|
|
|
|
_output.WriteLine(line);
|
|
|
|
|
}
|
|
|
|
|
}
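    //The streamed lines printed above use the same framing as Completion_Request_Streaming_Test,
    //so the ExtractStreamedText sketch shown after that test applies here as well.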
    #endregion

    #region Embeddings
@@ -117,8 +395,51 @@ namespace OllamaStudy.UseExtensionsAI
    /// List models test
    /// </summary>
    [Fact]
-    public void List_Models_Test()
+    public async Task List_Models_Test()
    {
var responseMessage = await _ollamaHttpClient.GetAsync("/v1/models");
|
|
|
|
|
responseMessage.EnsureSuccessStatusCode();
|
|
|
|
|
|
|
|
|
|
var resultObj = new
|
|
|
|
|
{
|
|
|
|
|
@object = "list",
|
|
|
|
|
data = new[] { new { id="model_id", @object = "model", created = 1754931997, owned_by = "library" } }
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
//处理响应
|
|
|
|
|
var responseObject = await responseMessage.Content.ReadAsStringAsync();
|
|
|
|
|
|
|
|
|
|
var modelObj = Newtonsoft.Json.JsonConvert.DeserializeAnonymousType(responseObject,resultObj);
|
|
|
|
|
|
|
|
|
|
Assert.NotNull(modelObj);
|
|
|
|
|
|
|
|
|
|
foreach (var model in modelObj.data)
|
|
|
|
|
{
|
|
|
|
|
_output.WriteLine($"模型ID:{model.id}, 模型名称:{model.@object}, 创建:{model.created}, 创建者:{model.owned_by}");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
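    //An equivalent typed sketch using System.Text.Json instead of Newtonsoft's anonymous template.
    //The property names mirror the fields used above (object, data[].id, created, owned_by); the record
    //names themselves are illustrative only.
    private sealed record ModelListDto(string? @object, ModelDto[]? data);
    private sealed record ModelDto(string? id, string? @object, long created, string? owned_by);

    //Usage sketch: var list = System.Text.Json.JsonSerializer.Deserialize<ModelListDto>(responseObject);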
    /// <summary>
    /// Retrieve model test
    /// </summary>
    [Theory]
    [InlineData(ModelSelecter.ModelWithTool)]
    public async Task Query_Model_Test(string modelName)
    {
        var responseMessage = await _ollamaHttpClient.GetAsync($"/v1/models/{modelName}");
        responseMessage.EnsureSuccessStatusCode();

        var resultObj = new { id = "model_id", @object = "model", created = 1754931997, owned_by = "library" };

        //Handle the response
        var responseObject = await responseMessage.Content.ReadAsStringAsync();

        var model = Newtonsoft.Json.JsonConvert.DeserializeAnonymousType(responseObject, resultObj);

        Assert.NotNull(model);

        _output.WriteLine($"Model ID: {model.id}, object: {model.@object}, created: {model.created}, owned by: {model.owned_by}");
    }
    #endregion