• 列出模型: 获取 DeepSeek 提供的所有可用模型。 • 对话补全(包含流式): 实现与 DeepSeek 的对话交互,支持实时流式响应。 • FIM补全(包含流式): 支持 Fill-In-the-Middle(FIM,中间填充)补全的调用,同样支持流式响应。 • 查询余额: 检查账户中的余额,确保有足够的资金进行 API 调用。 • 支持调用本地模型: 可以通过自定义 HttpClient 调用本地部署的 DeepSeek 模型。 • 对 ASP.NET Core 的集成支持: 提供了与 ASP.NET Core 的无缝集成,方便在 Web 应用中使用。
dotnet add package Ater.DeepSeek.AspNetCore
在 ASP.NET Core 中注册并通过依赖注入使用 DeepSeekClient:
using DeepSeek.AspNetCore;
using DeepSeek.Core;
using DeepSeek.Core.Models;
using Microsoft.AspNetCore.Mvc;
var builder = WebApplication.CreateBuilder(args);

// Read the DeepSeek API key from configuration (appsettings / user-secrets / env vars).
var apiKey = builder.Configuration["DeepSeekApiKey"];

// Register DeepSeekClient against the official endpoint with a generous timeout
// and a bearer-token Authorization header.
builder.Services.AddDeepSeek(opt =>
{
    opt.BaseAddress = new Uri("https://api.deepseek.com");
    opt.Timeout = TimeSpan.FromSeconds(300);
    opt.DefaultRequestHeaders.TryAddWithoutValidation("Authorization", "Bearer " + apiKey);
});

var app = builder.Build();

// Minimal endpoint demonstrating a one-shot (non-streaming) chat completion.
app.MapGet("/test", async ([FromServices] DeepSeekClient client) =>
{
    var request = new ChatRequest
    {
        Messages = new List<Message> { Message.NewUserMessage("Why dotnet is good?") },
        MaxTokens = 200
    };
    var response = await client.ChatAsync(request, CancellationToken.None);
    return response?.Choices.First().Message?.Content;
});

app.Run();
流式返回
// Streaming endpoint: forwards each delta chunk to the HTTP response as it arrives.
app.MapGet("/chat", async (HttpContext context, [FromServices] DeepSeekClient client, CancellationToken token) =>
{
    context.Response.ContentType = "text/plain;charset=utf-8";
    try
    {
        var choices = client.ChatStreamAsync(new ChatRequest
        {
            Messages = new List<Message>
            {
                Message.NewUserMessage("Why dotnet is good?")
            },
            MaxTokens = 200
        }, token);
        if (choices is not null)
        {
            // FIX: original read "awaitforeach" (missing space), which does not compile.
            await foreach (var choice in choices)
            {
                // Delta/Content can be null on some stream chunks; WriteAsync(null)
                // would throw, so skip empty chunks instead of using the ! operator.
                var content = choice.Delta?.Content;
                if (!string.IsNullOrEmpty(content))
                {
                    await context.Response.WriteAsync(content);
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Best-effort error message back to the caller (original wording preserved).
        await context.Response.WriteAsync("暂时无法提供服务" + ex.Message);
    }
    await context.Response.CompleteAsync();
});
自定义 DeepSeekClient(直接使用核心包 Ater.DeepSeek.Core)
dotnet add package Ater.DeepSeek.Core
// Option 1: API key only — the client creates and owns its HttpClient.
var client = new DeepSeekClient(apiKey);

// Option 2: bring your own HttpClient (proxy, custom timeout, handler chain, ...).
// FIX: the original declared `var client` twice in the same scope (CS0128);
// the second instance now uses a distinct name.
var httpClient = new HttpClient();
var customClient = new DeepSeekClient(httpClient, apiKey);
列出模型
// Enumerate every model the API exposes; on failure the client records ErrorMsg.
var modelResponse = await client.ListModelsAsync(CancellationToken.None);
if (modelResponse is null)
{
    Console.WriteLine(client.ErrorMsg);
}
else
{
    foreach (var model in modelResponse.Data)
    {
        Console.WriteLine(model);
    }
}
获取对话
// Compose a request: a system prompt fixing the assistant's role, then the user turn.
var request = new ChatRequest
{
    Messages =
    [
        Message.NewSystemMessage("你是一个语言翻译家"),
        Message.NewUserMessage("请翻译'它们害怕极了!'为英语!")
    ],
    Model = Constant.Model.ChatModel
};

// Non-streaming completion: the whole answer arrives in a single response.
var completion = await client.ChatAsync(request, CancellationToken.None);
Console.WriteLine(completion?.Choices.First().Message?.Content);
获取对话(流式)
// Streaming completion. FIX: the original coalesced the (nullable) async stream with
// Enumerable.Empty<Choice>() — an IEnumerable<T> has no implicit conversion to
// IAsyncEnumerable<T> (CS0019), and `await foreach` cannot consume it. Null-check instead.
var choices = client.ChatStreamAsync(request, CancellationToken.None);
if (choices is not null)
{
    await foreach (var choice in choices)
    {
        Console.Write(choice.Delta?.Content);
    }
}
Console.WriteLine();
调用本地模型
如果需要调用本地部署的模型,可以自定义 HttpClient 并设置 BaseAddress 为本地地址。
// Point a plain HttpClient at the locally hosted model.
var httpClient = new HttpClient
{
    BaseAddress = new Uri("http://localhost:5000"),
    Timeout = TimeSpan.FromSeconds(300),
};

var localClient = new DeepSeekClient(httpClient);

// Route chat and completion calls to the local server's endpoints
// instead of the DeepSeek defaults.
localClient.SetChatEndpoint("/chat");
localClient.SetCompletionEndpoint("/completions");

var reply = await localClient.ChatAsync(new ChatRequest
{
    Messages = [Message.NewUserMessage("hello")]
}, CancellationToken.None);
Console.WriteLine(reply?.Choices.First().Message?.Content);