Microsoft.Extensions.AI is a .NET library of abstractions and middleware for integrating AI capabilities into .NET applications. This guide walks through its main features and shows how to use them with Ollama as the local model host, so you can get up and running quickly.
Before you begin, make sure the following prerequisites are in place:
PowerShell
NuGet\Install-Package Microsoft.Extensions.AI -Version 9.3.0-preview.1.25114.11
NuGet\Install-Package Microsoft.Extensions.AI.Ollama -Version 9.3.0-preview.1.25114.11
First, pull the required models with Ollama:
Bash
ollama pull deepseek-r1:1.5b   # chat model
ollama pull all-minilm:latest  # embedding model
With the models in place, a minimal console application can send a single prompt to the chat model:
C#
using Microsoft.Extensions.AI;

namespace AppMicrosoftAI
{
    internal class Program
    {
        static async Task Main(string[] args)
        {
            // Local Ollama endpoint and the chat model pulled earlier
            var endpoint = "http://localhost:11434/";
            var modelId = "deepseek-r1:1.5b";
            IChatClient client = new OllamaChatClient(endpoint, modelId: modelId);

            // Send a single prompt and print the reply
            var result = await client.GetResponseAsync("What is AI?");
            Console.WriteLine(result.Message);
            Console.ReadLine();
        }
    }
}
The ChatResponse class exposes the following members:
- Choices (message choices): IList<ChatMessage>; the Message property gives direct access to the first choice.
- Message (preferred message): the first message in the Choices list (throws InvalidOperationException if there are no choices); marked with [JsonIgnore], so it is skipped during JSON serialization.
- ResponseId (response identifier): string
- ChatThreadId (chat thread identifier): string
- ModelId (model identifier): string
- CreatedAt (creation time): DateTimeOffset
- FinishReason (finish reason): ChatFinishReason
- Usage (usage details): UsageDetails
- RawRepresentation (raw provider representation): object; marked with [JsonIgnore].
- AdditionalProperties (additional properties): AdditionalPropertiesDictionary
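As a quick illustration of these members, the sketch below inspects the metadata on a response. It is a minimal example that reuses the client from the previous snippet; the property names are assumed from the list above.
C#
// Minimal sketch: inspect ChatResponse metadata (assumes the properties listed above).
var response = await client.GetResponseAsync("What is AI?");
Console.WriteLine(response.Message.Text);            // text of the first choice
Console.WriteLine(response.ModelId);                 // model that produced the reply
Console.WriteLine(response.FinishReason);            // e.g. Stop
Console.WriteLine(response.Usage?.TotalTokenCount);  // token usage, if reported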
You can also pass a full conversation, including a system prompt:
C#
using Microsoft.Extensions.AI;

namespace AppMicrosoftAI
{
    internal class Program
    {
        static async Task Main(string[] args)
        {
            var endpoint = "http://localhost:11434/";
            var modelId = "deepseek-r1:1.5b";
            IChatClient client = new OllamaChatClient(endpoint, modelId: modelId);

            // A conversation is just a list of ChatMessage values with roles
            List<ChatMessage> conversation =
            [
                new(ChatRole.System, "You are a helpful AI assistant"),
                new(ChatRole.User, "What is AI?")
            ];
            Console.WriteLine(await client.GetResponseAsync(conversation));
            Console.ReadLine();
        }
    }
}
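To keep the dialogue going, the reply can be appended to the same list before the next call. A small sketch, continuing the example above and relying on the Message property described earlier:
C#
// Sketch: multi-turn chat by appending the model's reply to the conversation.
var response = await client.GetResponseAsync(conversation);
conversation.Add(response.Message);                                   // assistant turn
conversation.Add(new(ChatRole.User, "Give a one-sentence example.")); // next user turn
Console.WriteLine(await client.GetResponseAsync(conversation));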
For long replies, the response can be streamed and printed as it is generated:
C#
using Microsoft.Extensions.AI;

namespace AppMicrosoftAI
{
    internal class Program
    {
        static async Task Main(string[] args)
        {
            var endpoint = "http://localhost:11434/";
            var modelId = "deepseek-r1:1.5b";
            IChatClient client = new OllamaChatClient(endpoint, modelId: modelId);

            // Print each update as it arrives instead of waiting for the full reply
            await foreach (var update in client.GetStreamingResponseAsync("What is AI?"))
            {
                Console.Write(update);
            }
            Console.WriteLine();
            Console.ReadKey();
        }
    }
}
Tool calling (function invocation) lets the model call back into your .NET code. Wrap the client with UseFunctionInvocation and register the tools in ChatOptions:
C#
using System.ComponentModel;
using Microsoft.Extensions.AI;

namespace AppMicrosoftAI
{
    internal class Program
    {
        static async Task Main(string[] args)
        {
            // A local function the model is allowed to call
            [Description("Gets the weather")]
            string GetWeather() => Random.Shared.NextDouble() > 0.5 ? "It's sunny" : "It's raining";

            var chatOptions = new ChatOptions
            {
                Tools = [AIFunctionFactory.Create(GetWeather)]
            };

            var endpoint = "http://localhost:11434/";
            var modelId = "qwen2.5:3b"; // a model with tool-calling support
            IChatClient client = new OllamaChatClient(endpoint, modelId: modelId)
                .AsBuilder()
                .UseFunctionInvocation() // automatically executes requested tool calls
                .Build();

            Console.WriteLine(await client.GetResponseAsync("Do I need an umbrella?", chatOptions));
            Console.ReadKey();
        }
    }
}
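Tools can also take parameters that the model fills in from the conversation. The sketch below is illustrative only; the GetWeatherFor function and its city parameter are hypothetical and not part of the original example, and it reuses the client built above:
C#
// Hypothetical tool with a parameter; AIFunctionFactory.Create exposes it to the model.
[Description("Gets the weather for a city")]
string GetWeatherFor([Description("The city name")] string city) =>
    Random.Shared.NextDouble() > 0.5 ? $"It's sunny in {city}" : $"It's raining in {city}";

var weatherOptions = new ChatOptions { Tools = [AIFunctionFactory.Create(GetWeatherFor)] };
Console.WriteLine(await client.GetResponseAsync("Do I need an umbrella in Shanghai?", weatherOptions));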
UseDistributedCache caches responses, so a repeated prompt can be answered from the cache instead of hitting the model again:
C#
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Caching.Distributed;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Options;

namespace AppMicrosoftAI
{
    internal class Program
    {
        static async Task Main(string[] args)
        {
            var endpoint = "http://localhost:11434/";
            var modelId = "deepseek-r1:1.5b";

            // In-memory IDistributedCache; in production this could be Redis, SQL Server, etc.
            var options = Options.Create(new MemoryDistributedCacheOptions());
            IDistributedCache cache = new MemoryDistributedCache(options);

            IChatClient client = new OllamaChatClient(endpoint, modelId: modelId)
                .AsBuilder()
                .UseDistributedCache(cache)
                .Build();

            // The third prompt repeats the first, so it should be served from the cache
            string[] prompts = ["What is AI?", "What is .NET?", "What is AI?"];
            foreach (var prompt in prompts)
            {
                await foreach (var message in client.GetStreamingResponseAsync(prompt))
                {
                    Console.Write(message);
                }
                Console.WriteLine();
            }
            Console.ReadKey();
        }
    }
}
The middleware can be chained. The example below combines function invocation, OpenTelemetry tracing, and distributed caching in a single pipeline:
C#
using System.ComponentModel;
using System.Diagnostics;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Caching.Distributed;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Options;
using OpenTelemetry.Trace;

namespace AppMicrosoftAI
{
    internal class Program
    {
        static async Task Main(string[] args)
        {
            // Collect traces in memory so they can be inspected after the call
            var sourceName = Guid.NewGuid().ToString();
            var activities = new List<Activity>();
            var tracerProvider = OpenTelemetry.Sdk.CreateTracerProviderBuilder()
                .AddInMemoryExporter(activities)
                .AddSource(sourceName)
                .Build();

            [Description("Gets the weather")]
            string GetWeather() => Random.Shared.NextDouble() > 0.5 ? "It's sunny" : "It's raining";

            var chatOptions = new ChatOptions
            {
                Tools = [AIFunctionFactory.Create(GetWeather)]
            };

            var endpoint = "http://localhost:11434/";
            var modelId = "qwen2.5:3b";
            var options = Options.Create(new MemoryDistributedCacheOptions());
            IDistributedCache cache = new MemoryDistributedCache(options);

            // Chain the middleware: tool calling, tracing, then caching
            IChatClient client = new OllamaChatClient(endpoint, modelId: modelId)
                .AsBuilder()
                .UseFunctionInvocation()
                .UseOpenTelemetry(sourceName: sourceName, configure: o => o.EnableSensitiveData = true)
                .UseDistributedCache(cache)
                .Build();

            List<ChatMessage> conversation =
            [
                new(ChatRole.System, "You are a helpful AI assistant"),
                new(ChatRole.User, "Do I need to bring an umbrella?")
            ];
            Console.WriteLine(await client.GetResponseAsync(conversation, chatOptions));
            Console.ReadKey();
        }
    }
}
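Because the spans are exported to the in-memory activities list, they can be dumped after the call to see what the pipeline recorded. A minimal sketch, assuming the tracerProvider and activities variables from the example above:
C#
// Sketch: flush the tracer and print the captured spans.
tracerProvider?.ForceFlush();
foreach (var activity in activities)
{
    Console.WriteLine($"{activity.DisplayName} took {activity.Duration.TotalMilliseconds} ms");
}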
Finally, Microsoft.Extensions.AI also abstracts embedding generation through IEmbeddingGenerator:
C#
using Microsoft.Extensions.AI;

namespace AppMicrosoftAI
{
    internal class Program
    {
        static async Task Main(string[] args)
        {
            var endpoint = "http://localhost:11434/";
            var modelId = "all-minilm:latest"; // the embedding model pulled earlier
            IEmbeddingGenerator<string, Embedding<float>> generator = new OllamaEmbeddingGenerator(endpoint, modelId: modelId);

            // Generate an embedding vector for a single string and print its values
            var embedding = await generator.GenerateEmbeddingVectorAsync("What is AI?");
            Console.WriteLine(string.Join(", ", embedding.ToArray()));
            Console.ReadKey();
        }
    }
}
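Several inputs can also be embedded in one round trip. A small sketch, assuming the same generator as above and using the interface's batch method GenerateAsync:
C#
// Sketch: embed multiple strings in a single call.
var embeddings = await generator.GenerateAsync(["What is AI?", "What is .NET?"]);
foreach (var e in embeddings)
{
    Console.WriteLine($"Dimensions: {e.Vector.Length}");
}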
Microsoft.Extensions.AI together with Ollama provides a powerful, flexible AI integration story that lets .NET developers build intelligent applications with ease.