
.NET Core使用Semantic Kernel:接入本地deepseek-r1
安装Ollama
1.下载Ollama
- 访问Ollama官网(https://ollama.com),选择适合你操作系统的版本进行下载。
- 对于Linux用户,可以通过以下命令安装:
curl -fsSL https://ollama.com/install.sh | sudo bash
sudo systemctl start ollama
2.验证安装
- 安装完成后,在终端输入 `ollama -v`,若显示版本号(如 ollama version is 0.5.7),则说明安装成功。
- 你还可以在浏览器中访问 http://localhost:11434/ ,确认服务已启动。
部署DeepSeek模型
- 选择模型版本
- 访问Ollama官网的模型库(https://ollama.com/library/deepseek-r1),选择适合你硬件配置的DeepSeek模型版本。例如:
- `deepseek-r1:1.5b`:适合低端设备。
- `deepseek-r1:7b`:适合中端设备。
- `deepseek-r1:14b`:适合高端设备。
- `deepseek-r1:32b`:适合科研级设备。
- 下载并运行模型
ollama run deepseek-r1:1.5b
- 验证模型
curl http://localhost:11434/api/chat -d '{"model": "deepseek-r1:1.5b", "messages": [{ "role": "user", "content": "你好" }]}'
安装 SK 及 ollama connector
首先在本地创建一个 Console 项目,然后安装以下包:
dotnet add package Microsoft.SemanticKernel --version 1.21.1
dotnet add package Microsoft.SemanticKernel.Connectors.Ollama --version 1.21.1-alpha
配置 Ollama 服务
接下来,我们需要配置 Ollama 服务的端点和模型 ID ,并添加 Ollama 的聊天服务:
// Endpoint of the local Ollama service and the model to run against.
var endpoint = new Uri("http://localhost:11434");
var modelId = "deepseek-r1:1.5b";
var builder = Kernel.CreateBuilder();
// SKEXP0070: Ollama connector APIs are experimental in this SK version.
#pragma warning disable SKEXP0070
// Register the custom chat-completion service that talks to Ollama.
builder.Services.AddScoped<IChatCompletionService>(_ => new CustomChatCompletionService(modelId, endpoint));
// BUG FIX: the kernel was never built, but later code calls
// kernel.GetRequiredService<IChatCompletionService>() — build it here.
var kernel = builder.Build();
/// <summary>
/// 搜索结果
/// </summary>
/// <summary>
/// Match state for one plugin function while scanning the model's JSON
/// reply: the function's name, whether that name has been located, and
/// the argument values collected so far.
/// </summary>
public class SearchResult
{
    /// <summary>Name of the plugin function being searched for.</summary>
    public string FunctionName { get; set; }

    /// <summary>True once the function name has been found in the model output.</summary>
    public bool SearchFunctionNameSucc { get; set; }

    /// <summary>Arguments harvested from the model output; values start out null.</summary>
    public KernelArguments FunctionParams { get; set; } = new();

    /// <summary>The kernel function to invoke once name and arguments are resolved.</summary>
    public KernelFunction KernelFunction { get; set; }
}
自定义聊天服务
/// <summary>
/// 自定义聊天服务
/// </summary>
/// <summary>
/// Custom <see cref="IChatCompletionService"/> that talks to a local Ollama model
/// and emulates function calling: it parses the model's JSON reply looking for a
/// registered plugin function name plus argument values, invokes the function,
/// feeds the result back into the history, and asks the model again.
/// </summary>
public class CustomChatCompletionService : IChatCompletionService
{
    /// <summary>Per-function match state keyed by function name, rebuilt from kernel metadata.</summary>
    public static Dictionary<string, SearchResult> DicSearchResult = new Dictionary<string, SearchResult>();

    /// <summary>
    /// Rebuilds <see cref="DicSearchResult"/> from the kernel's plugin function metadata.
    /// </summary>
    /// <param name="kernel">Kernel whose plugins are scanned.</param>
    public static void GetDicSearchResult(Kernel kernel)
    {
        DicSearchResult = new Dictionary<string, SearchResult>();
        foreach (var functionMetaData in kernel.Plugins.GetFunctionsMetadata())
        {
            string functionName = functionMetaData.Name;
            if (DicSearchResult.ContainsKey(functionName))
                continue;
            var searchResult = new SearchResult
            {
                FunctionName = functionName,
                // null plugin name: search every plugin for the function.
                KernelFunction = kernel.Plugins.GetFunction(null, functionName)
            };
            // Seed every declared parameter with null so TryFindValues can fill it in.
            foreach (var parameter in functionMetaData.Parameters)
                searchResult.FunctionParams.Add(parameter.Name, null);
            DicSearchResult.Add(functionName, searchResult);
        }
    }

    // OllamaSharp chat session created once in the constructor.
    private Chat chat = null;

    // BUG FIX: previously threw NotImplementedException on read; a simple
    // empty map is the correct answer for "no extra attributes".
    public IReadOnlyDictionary<string, object?> Attributes { get; } = new Dictionary<string, object?>();

    /// <summary>
    /// Creates the service against a local Ollama endpoint.
    /// </summary>
    /// <param name="modelId">Ollama model id, e.g. "deepseek-r1:1.5b".</param>
    /// <param name="endpoint">Ollama base address, e.g. http://localhost:11434.</param>
    public CustomChatCompletionService(string modelId, Uri endpoint)
    {
        chat = new(new OllamaApiClient(endpoint, modelId));
    }

    /// <summary>
    /// Sends the flattened chat history to the model. If the reply parses as JSON
    /// naming a known plugin function with all of its arguments, that function is
    /// invoked and the model is queried again with the tool result appended;
    /// otherwise the raw reply is returned as assistant text.
    /// </summary>
    /// <param name="chatHistory">Conversation so far; mutated when a tool call happens.</param>
    /// <param name="executionSettings">Unused by this implementation.</param>
    /// <param name="kernel">Kernel supplying plugins; tool calling is skipped when null.</param>
    /// <param name="cancellationToken">Cancels the streaming request.</param>
    public async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default)
    {
        // BUG FIX: kernel is nullable; the original dereferenced it unconditionally.
        if (kernel is not null)
            GetDicSearchResult(kernel);

        var prompt = HistoryToText(chatHistory);
        StringBuilder response = new();
        // BUG FIX: forward the caller's token (was CancellationToken.None).
        await foreach (var item in chat.SendAsync(prompt, cancellationToken))
        {
            response.Append(item);
        }
        var result = response.ToString();
        try
        {
            JToken jToken = JToken.Parse(result);
            jToken = ConvertStringToJson(jToken);
            var searches = DicSearchResult.Values.ToList();
            if (TryFindValues(jToken, ref searches))
            {
                var func = searches.First();
                var funcCallResult = await func.KernelFunction.InvokeAsync(kernel, func.FunctionParams, cancellationToken);
                chatHistory.AddMessage(AuthorRole.Assistant, result);
                chatHistory.AddMessage(AuthorRole.Tool, funcCallResult.ToString());
                // BUG FIX: the recursive call dropped executionSettings and the token.
                // NOTE(review): there is no recursion-depth limit; a model that keeps
                // emitting tool calls loops forever — consider capping retries.
                return await GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken);
            }
        }
        catch (Exception)
        {
            // Deliberate best-effort: a non-JSON reply (the normal case for plain
            // chat) or a failed tool invocation falls through to plain text below.
        }
        return new List<ChatMessageContent> { new ChatMessageContent(AuthorRole.Assistant, result) };
    }

    /// <summary>Streaming is not supported by this sample implementation.</summary>
    public IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = new CancellationToken())
    {
        throw new NotImplementedException();
    }

    /// <summary>
    /// Flattens the chat history into a "Role: content" transcript, one line per
    /// message. Messages with unrecognized roles are skipped.
    /// </summary>
    private string HistoryToText(ChatHistory history)
    {
        StringBuilder sb = new();
        foreach (var message in history)
        {
            string? label =
                message.Role == AuthorRole.User ? "User" :
                message.Role == AuthorRole.System ? "System" :
                message.Role == AuthorRole.Assistant ? "Assistant" :
                message.Role == AuthorRole.Tool ? "Tool" : null;
            if (label is not null)
                sb.AppendLine($"{label}: {message.Content}");
        }
        return sb.ToString();
    }

    /// <summary>
    /// Recursively walks a JToken and re-parses any string value that itself
    /// contains JSON (models often return nested JSON encoded as strings).
    /// Strings that fail to parse are kept as-is.
    /// </summary>
    private JToken ConvertStringToJson(JToken token)
    {
        switch (token.Type)
        {
            case JTokenType.Object:
                var obj = new JObject();
                foreach (JProperty prop in token.Children<JProperty>())
                    obj.Add(prop.Name, ConvertStringToJson(prop.Value));
                return obj;

            case JTokenType.Array:
                var array = new JArray();
                foreach (JToken item in token.Children())
                    array.Add(ConvertStringToJson(item));
                return array;

            case JTokenType.String:
                try
                {
                    return JToken.Parse(token.ToString());
                }
                catch (Exception)
                {
                    // Not embedded JSON — keep the original string token.
                    return token;
                }

            default:
                // Numbers, booleans, null, etc. pass through unchanged.
                return token;
        }
    }

    /// <summary>
    /// Scans the model's JSON reply for a known function name and for values of
    /// that function's parameters, filling them into <paramref name="searches"/>.
    /// </summary>
    /// <returns>True when at least one function has its name and all parameters resolved.</returns>
    private bool TryFindValues(JToken token, ref List<SearchResult> searches)
    {
        if (token.Type == JTokenType.Object)
        {
            foreach (var child in token.Children<JProperty>())
            {
                foreach (var search in searches)
                {
                    // BUG FIX: culture-safe comparison instead of ToLower() allocation.
                    if (!search.SearchFunctionNameSucc &&
                        string.Equals(child.Value.ToString(), search.FunctionName, StringComparison.OrdinalIgnoreCase))
                        search.SearchFunctionNameSucc = true;
                    foreach (var par in search.FunctionParams)
                    {
                        if (par.Value == null &&
                            string.Equals(child.Name, par.Key, StringComparison.OrdinalIgnoreCase))
                            // NOTE(review): original lowercases the argument value; kept
                            // for compatibility, but this mangles case-sensitive data.
                            search.FunctionParams[par.Key] = child.Value.ToString().ToLower();
                    }
                }
                // Recurse only while something is still unresolved.
                // BUG FIX: inner lambda parameters renamed — the original reused
                // 'x' in nested lambdas, which does not compile (CS0136).
                if (searches.Any(s => !s.SearchFunctionNameSucc || s.FunctionParams.Any(p => p.Value == null)))
                    TryFindValues(child.Value, ref searches);
            }
        }
        else if (token.Type == JTokenType.Array)
        {
            foreach (var item in token.Children())
            {
                if (searches.Any(s => !s.SearchFunctionNameSucc || s.FunctionParams.Any(p => p.Value == null)))
                    TryFindValues(item, ref searches);
            }
        }
        return searches.Any(s => s.SearchFunctionNameSucc && s.FunctionParams.All(p => p.Value != null));
    }
}
获取聊天服务
// Resolve the chat service registered on the kernel and seed the
// conversation with a system prompt.
var chatService = kernel.GetRequiredService<IChatCompletionService>();
ChatHistory history = new();
history.AddSystemMessage("你是一个乐于助人的助手。");
聊天循环
最后,我们实现一个简单的聊天循环,读取用户输入并获取 Ollama 的回复:
// Simple REPL: read a line from the user, send the whole history to the
// model, then print and record every reply. An empty line exits.
for (;;)
{
    Console.Write("You:");
    var userText = Console.ReadLine();
    if (string.IsNullOrWhiteSpace(userText))
    {
        break;
    }
    history.AddUserMessage(userText);
    foreach (var reply in await chatService.GetChatMessageContentsAsync(history))
    {
        var text = reply.Content;
        Console.WriteLine($"Ollama: {text}");
        history.AddMessage(reply.Role, text ?? "");
    }
}
更多推荐
所有评论(0)