// NOTE(review): removed web-page scrape artifacts (repository UI text and
// line/size metadata) that were accidentally captured above the source file;
// they are not valid C#.
using System.Threading.Tasks;
using OllamaSharp;
using OllamaSharp.Models;
using OllamaSharp.Models.Chat;
using Xunit.Abstractions;
namespace OllamaStudy.UseOllamaSharp
{
public class OllamaApiTest
{
// Captures test output so it shows up in the xUnit runner per test.
private readonly ITestOutputHelper _output;
// Live view over the bound OllamaServer options (reflects configuration reloads).
private readonly IOptionsMonitor<OllamaServerOption> _optionsMonitor;
// Raw application configuration; used by Config_Test to re-bind options manually.
private readonly IConfiguration _configuration;
// Shared OllamaSharp client supplied by the DI container / test fixture.
private readonly IOllamaApiClient _ollamaApiClient;
/// <summary>
/// All dependencies are provided via constructor injection by the test host.
/// </summary>
public OllamaApiTest
(
ITestOutputHelper outputHelper,
IOptionsMonitor<OllamaServerOption> optionsMonitor,
IConfiguration configuration,
IOllamaApiClient ollamaApiClient
)
{
_output = outputHelper;
_optionsMonitor = optionsMonitor;
_configuration = configuration;
_ollamaApiClient = ollamaApiClient;
}
#region Configuration & options
/// <summary>
/// The "OllamaServer" configuration section must bind into a fully populated
/// <see cref="OllamaServerOption"/> (server URL and model name present).
/// </summary>
[Fact]
public void Config_Test()
{
    var boundOption = new OllamaServerOption();
    _configuration.GetSection("OllamaServer").Bind(boundOption);

    Assert.NotNull(boundOption);
    Assert.NotNull(boundOption.OllamaServerUrl);
    Assert.NotNull(boundOption.Model);
}
/// <summary>
/// The options-monitor snapshot must expose non-null model and server URL values.
/// </summary>
[Fact]
public void OllamaOption_Test()
{
    OllamaServerOption current = _optionsMonitor.CurrentValue;

    Assert.NotNull(current);
    Assert.NotNull(current.Model);
    Assert.NotNull(current.OllamaServerUrl);
}
#endregion
#region Generate completion
/// <summary>
/// Streaming generate request: the response chunks are concatenated into the
/// full answer and written to the test output.
/// </summary>
[Fact]
public async Task Completion_Request_Streaming_Test()
{
    var generateRequest = new GenerateRequest()
    {
        Prompt = """天空为什么是蓝色的?""",
        // Stream defaults to true, so chunks arrive incrementally.
    };

    var answer = new StringBuilder();
    await foreach (var chunk in _ollamaApiClient.GenerateAsync(generateRequest))
    {
        answer.Append(chunk?.Response);
    }

    _output.WriteLine(answer.ToString());
}
/// <summary>
/// Non-streaming generate request (Stream = false): the complete answer
/// arrives as a single final response.
/// </summary>
[Fact]
public async Task Completion_Request_NoStreaming_Test()
{
    var generateRequest = new GenerateRequest()
    {
        Prompt = """天空为什么是蓝色的?""",
        Stream = false,
    };

    // StreamToEndAsync collapses the (single-element) stream into the final response.
    var doneResponse = await _ollamaApiClient.GenerateAsync(generateRequest).StreamToEndAsync();
    var answer = doneResponse?.Response;

    Assert.NotNull(answer);
    _output.WriteLine(answer);
}
/// <summary>
/// Generate request with a suffix (fill-in-the-middle completion); only
/// dedicated models such as qwen2.5-coder support the Suffix parameter.
/// </summary>
/// <returns></returns>
[Fact]
public async Task Completion_Request_WithSuffix_Test()
{
    var generateRequest = new GenerateRequest()
    {
        Model = ModelSelecter.ModelWithSuffixAndImage,
        Prompt = """def compute_gcd(a, b):""",
        // (optional) text the model's completion should lead into
        Suffix = " return result",
        // Temperature 0 keeps the completion deterministic.
        Options = new RequestOptions()
        {
            Temperature = 0,
        },
        Stream = false,
    };

    var doneResponse = await _ollamaApiClient.GenerateAsync(generateRequest).StreamToEndAsync();

    Assert.NotNull(doneResponse);
    Assert.NotNull(doneResponse.Response);
    _output.WriteLine(doneResponse.Response);
}
/// <summary>
/// Structured-output generate request: Format carries an inline JSON schema
/// the model's answer must conform to.
/// NOTE(review): method name violates PascalCase; kept because renaming a
/// [Fact] changes the public test identifier.
/// </summary>
[Fact]
public async Task completion_request_structuredoutputs_test()
{
    // Schema: { "age": integer, "available": boolean }, both required.
    var schema = new
    {
        type = "object",
        properties = new { age = new { type = "integer" }, available = new { type = "boolean" } },
        required = new string[] { "age", "available" }
    };

    var generateRequest = new GenerateRequest()
    {
        Prompt = "22岁的 ollama 正忙于拯救世界。使用json响应。",
        Format = schema,
        Options = new RequestOptions()
        {
            Temperature = 0,
        },
        Stream = false,
    };

    GenerateDoneResponseStream? doneResponse = await _ollamaApiClient.GenerateAsync(generateRequest).StreamToEndAsync();

    Assert.NotNull(doneResponse);
    Assert.NotNull(doneResponse.Response);
    Assert.True(doneResponse.Done);
    _output.WriteLine(doneResponse.Response);
}
/// <summary>
/// JSON-mode generate request: Format = "json" makes the model emit valid JSON.
/// The response is asserted to be present and parseable.
/// </summary>
[Fact]
public async Task Completion_Request_JsonMode_Test()
{
    var generateRequest = new GenerateRequest()
    {
        Prompt = "一天中不同时间的天空是什么颜色的使用JSON进行响应!",
        Format = "json",
        // Temperature 0 keeps the output deterministic.
        Options = new RequestOptions()
        {
            Temperature = 0,
        },
        Stream = false,
    };

    GenerateDoneResponseStream? responseStream = await _ollamaApiClient.GenerateAsync(generateRequest).StreamToEndAsync();

    // Assert non-null BEFORE dereferencing (the original deserialized first).
    Assert.NotNull(responseStream);
    Assert.True(responseStream.Done);
    Assert.NotNull(responseStream.Response);

    // BUGFIX: the original deserialized the response but never asserted on the
    // result — JSON-mode output must actually parse as JSON.
    var responseObject = JsonSerializer.Deserialize<object>(responseStream.Response);
    Assert.NotNull(responseObject);

    _output.WriteLine(responseStream.Response);
}
/// <summary>
/// Generate request carrying Base64-encoded images (for multimodal models such
/// as LLaVA). NOTE(review): method name contains a typo ("Whith"); kept as-is
/// because renaming a [Fact] changes the public test identifier.
/// </summary>
[Fact]
public async Task Completion_Request_WhithImages_Test()
{
var generateRequest = new GenerateRequest()
{
// (required) model name — must support image input
Model = ModelSelecter.ModelWithSuffixAndImage,
// (required) prompt for the response
Prompt = "这张照片里有什么东西?",
// (optional) Base64-encoded image list (for multimodal models such as LLaVA)
Images = new string[]
{
"iVBORw0KGgoAAAANSUhEUgAAAG0AAABmCAYAAADBPx+VAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA3VSURBVHgB7Z27r0zdG8fX743i1bi1ikMoFMQloXRpKFFIqI7LH4BEQ+NWIkjQuSWCRIEoULk0gsK1kCBI0IhrQVT7tz/7zZo888yz1r7MnDl7z5xvsjkzs2fP3uu71nNfa7lkAsm7d++Sffv2JbNmzUqcc8m0adOSzZs3Z+/XES4ZckAWJEGWPiCxjsQNLWmQsWjRIpMseaxcuTKpG/7HP27I8P79e7dq1ars/yL4/v27S0ejqwv+cUOGEGGpKHR37tzJCEpHV9tnT58+dXXCJDdECBE2Ojrqjh071hpNECjx4cMHVycM1Uhbv359B2F79+51586daxN/+pyRkRFXKyRDAqxEp4yMlDDzXG1NPnnyJKkThoK0VFd1ELZu3TrzXKxKfW7dMBQ6bcuWLW2v0VlHjx41z717927ba22U9APcw7Nnz1oGEPeL3m3p2mTAYYnFmMOMXybPPXv2bNIPpFZr1NHn4HMw0KRBjg9NuRw95s8PEcz/6DZELQd/09C9QGq5RsmSRybqkwHGjh07OsJSsYYm3ijPpyHzoiacg35MLdDSIS/O1yM778jOTwYUkKNHWUzUWaOsylE00MyI0fcnOwIdjvtNdW/HZwNLGg+sR1kMepSNJXmIwxBZiG8tDTpEZzKg0GItNsosY8USkxDhD0Rinuiko2gfL/RbiD2LZAjU9zKQJj8RDR0vJBR1/Phx9+PHj9Z7REF4nTZkxzX4LCXHrV271qXkBAPGfP/atWvu/PnzHe4C97F48eIsRLZ9+3a3f/9+87dwP1JxaF7/3r17ba+5l4EcaVo0lj3SBq5kGTJSQmLWMjgYNei2GPT1MuMqGTDEFHzeQSP2wi/jGnkmPJ/nhccs44jvDAxpVcxnq0F6eT8h4ni/iIWpR5lPyA6ETkNXoSukvpJAD3AsXLiwpZs49+fPn5ke4j10TqYvegSfn0OnafC+Tv9ooA/JPkgQysqQNBzagXY55nO/oa1F7qvIPWkRL12WRpMWUvpVDYmxAPehxWSe8ZEXL20sadYIozfmNch4QJPAfeJgW3rNsnzphBKNJM2KKODo1rVOMRYik5ETy3ix4qWNI81qAAirizgMIc+yhTytx0JWZuNI03qsrgWlGtwjoS9XwgUhWGyhUaRZZQNNIEwCiXD16tXcAHUs79co0vSD8rrJCIW98pzvxpAWyyo3HYwqS0+H0BjStClcZJT5coMm6D2LOF8TolGJtK9fvyZpyiC5ePFi9nc/oJU4eiEP0jVoAnHa9wyJycITMP78+eMeP37sXrx44d6+fdt6f82aNdkx1pg9e3Zb5W+RSRE+n+VjksQWifvVaTKFhn5O8my63K8Qabdv33b379/PiAP//vuvW7BggZszZ072/+TJk91YgkafPn166zXB1rQHFvouAWHq9z3SEevSUerqCn2/dDCeta2jxYbr69evk4MHDyY7d+7MjhMnTiTPnz9Pfv/+nfQT2ggpO2dMF8cghuoM7Ygj5iWCqRlGFml0QC/ftGmTmzt3rmsaKDsgBSPh0/8yPeLLBihLkOKJc0jp8H8vUzcxIA1k6QJ/c78tWEyj5P3o4u9+jywNPdJi5rAH9x0KHcl4Hg570eQp3+vHXGyrmEeigzQsQsjavXt38ujRo44LQuDDhw+TW7duRS1HGgMxhNXHgflaNTOsHyKvHK5Ijo2jbFjJBQK9YwFd6RVMzfgRBmEfP37suBBm/p49e1qjEP2mwTViNRo0VJWH1deMXcNK08uUjVUu7s/zRaL+oLNxz1bpANco4npUgX4G2eFbpDFyQoQxojBCpEGSytmOH8qrH5Q9vuzD6ofQylkCUmh8DBAr+q8JCyVNtWQIidKQE9wNtLSQnS4jDSsxNHogzFuQBw4
cyM61UKVsjfr3ooBkPSqqQHesUPWVtzi9/vQi1T+rJj7WiTz4Pt/l3LxUkr5P2VYZaZ4URpsE+st/dujQoaBBYokbrz/8TJNQYLSonrPS9kUaSkPeZyj1AWSj+d+VBoy1pIWVNed8P0Ll/ee5HdGRhrHhR5GGN0r4LGZBaj8oFDJitBTJzIZgFcmU0Y8ytWMZMzJOaXUSrUs5RxKnrxmbb5YXO9VGUhtpXldhEUogFr3IzIsvlpmdosVcGVGXFWp2oU9kLFL3dEkSz6NHEY1sjSRdIuDFWEhd8KxFqsRi1uM/nz9/zpxnwlESONdg6dKlbsaMGS4EHFHtjFIDHwKOo46l4TxSuxgDzi+rE2jg+BaFruOX4HXa0Nnf1lwAPufZeF8/r6zD97WK2qFnGjBxTw5qNGPxT+5T/r7/7RawFC3j4vTp09koCxkeHjqbHJqArmH5UrFKKksnxrK7FuRIs8STfBZv+luugXZ2pR/pP9Ois4z+TiMzUUkUjD0iEi1fzX8GmXyuxUBRcaUfykV0YZnlJGKQpOiGB76x5GeWkWWJc3mOrK6S7xdND+W5N6XyaRgtWJFe13GkaZnKOsYqGdOVVVbGupsyA/l7emTLHi7vwTdirNEt0qxnzAvBFcnQF16xh/TMpUuXHDowhlA9vQVraQhkudRdzOnK+04ZSP3DUhVSP61YsaLtd/ks7ZgtPcXqPqEafHkdqa84X6aCeL7YWlv6edGFHb+ZFICPlljHhg0bKuk0CSvVznWsotRu433alNdFrqG45ejoaPCaUkWERpLXjzFL2Rpllp7PJU2a/v7Ab8N05/9t27Z16KUqoFGsxnI9EosS2niSYg9SpU6B4JgTrvVW1flt1sT+0ADIJU2maXzcUTraGCRaL1Wp9rUMk16PMom8QhruxzvZIegJjFU7LLCePfS8uaQdPny4jTTL0dbee5mYokQsXTIWNY46kuMbnt8Kmec+LGWtOVIl9cT1rCB0V8WqkjAsRwta93TbwNYoGKsUSChN44lgBNCoHLHzquYKrU6qZ8lolCIN0Rh6cP0Q3U6I6IXILYOQI513hJaSKAorFpuHXJNfVlpRtmYBk1Su1obZr5dnKAO+L10Hrj3WZW+E3qh6IszE37F6EB+68mGpvKm4eb9bFrlzrok7fvr0Kfv727dvWRmdVTJHw0qiiCUSZ6wCK+7XL/AcsgNyL74DQQ730sv78Su7+t/A36MdY0sW5o40ahslXr58aZ5HtZB8GH64m9EmMZ7FpYw4T6QnrZfgenrhFxaSiSGXtPnz57e9TkNZLvTjeqhr734CNtrK41L40sUQckmj1lGKQ0rC37x544r8eNXRpnVE3ZZY7zXo8NomiO0ZUCj2uHz58rbXoZ6gc0uA+F6ZeKS/jhRDUq8MKrTho9fEkihMmhxtBI1DxKFY9XLpVcSkfoi8JGnToZO5sU5aiDQIW716ddt7ZLYtMQlhECdBGXZZMWldY5BHm5xgAroWj4C0hbYkSc/jBmggIrXJWlZM6pSETsEPGqZOndr2uuuR5rF169a2HoHPdurUKZM4CO1WTPqaDaAd+GFGKdIQkxAn9RuEWcTRyN2KSUgiSgF5aWzPTeA/lN5rZubMmR2bE4SIC4nJoltgAV/dVefZm72AtctUCJU2CMJ327hxY9t7EHbkyJFseq+EJSY16RPo3Dkq1kkr7+q0bNmyDuLQcZBEPYmHVdOBiJyIlrRDq41YPWfXOxUysi5fvtyaj+2BpcnsUV/oSoEMOk2CQGlr4ckhBwaetBhjCwH0ZHtJROPJkyc7UjcYLDjmrH7ADTEBXFfOYmB0k9oYBOjJ8b4aOYSe7QkKcYhFlq3QYLQhSidNmtS2RATwy8YOM3EQJsUjKiaWZ+vZToUQgzhkHXudb/PW5YMHD9yZM2faPsMwoc7RciYJXbGuBqJ1UIGKKLv915jsvgtJxCZDubdXr165mzdvtr1Hz5LONA8jrUwKPqsmVesKa49S3Q4
WxmRPUEYdTjgiUcfUwLx589ySJUva3oMkP6IYddq6HMS4o55xBJBUeRjzfa4Zdeg56QZ43Lhxoy
},
// (optional) streaming: set false to receive a single final response
Stream = false,
// (optional) how long the model stays loaded after the request (default 5m)
KeepAlive = "5m",
};
// Collapse the (non-streamed) response and assert the model produced text.
var response = await _ollamaApiClient.GenerateAsync(generateRequest).StreamToEndAsync();
_output.WriteLine(response?.Response);
Assert.NotNull(response);
Assert.NotEmpty(response.Response);
}
/// <summary>
/// Raw-mode generate request: Raw = true bypasses the template system, so the
/// prompt must carry the full chat markup itself. Raw mode returns no context.
/// </summary>
/// <returns></returns>
[Fact]
public async Task Completion_Request_RawMode_Test()
{
    var generateRequest = new GenerateRequest()
    {
        Model = ModelSelecter.ModelWithRawmodel,
        Prompt = """[INST] 天空为什么是蓝色的? [/INST]""",
        Stream = false,
        Raw = true,
    };

    var doneResponse = await _ollamaApiClient.GenerateAsync(generateRequest).StreamToEndAsync();
    _output.WriteLine(doneResponse?.Response);

    Assert.NotNull(doneResponse);
    Assert.NotEmpty(doneResponse.Response);
}
/// <summary>
/// Reproducible output: with a fixed Seed and Temperature = 0, two identical
/// generate requests must produce identical answers.
/// </summary>
/// <returns></returns>
[Fact]
public async Task Completion_Request_ReproducibleOutputs_Test()
{
    // Both calls use a fresh but identical request (fixed seed, zero temperature),
    // mirroring the original's two separately constructed requests.
    static GenerateRequest BuildRequest() => new GenerateRequest()
    {
        Prompt = """天空为什么是蓝色的? """,
        Stream = false,
        Options = new RequestOptions()
        {
            Seed = 999999999,
            Temperature = 0,
        }
    };

    // First request.
    var first = await _ollamaApiClient.GenerateAsync(BuildRequest()).StreamToEndAsync();
    _output.WriteLine(first?.Response);
    var firstText = first?.Response;

    // Second request.
    var second = await _ollamaApiClient.GenerateAsync(BuildRequest()).StreamToEndAsync();
    _output.WriteLine(second?.Response);
    var secondText = second?.Response;

    Assert.Equal(firstText, secondText);
}
/// <summary>
/// Exercises the full set of generate-request parameters and model options
/// (streaming variant); the streamed chunks are accumulated and asserted on.
/// </summary>
[Fact]
public async Task Completion_Request_Options_Test()
{
    // Generates a response for the given prompt. This is a streaming endpoint, so a
    // series of partial responses is returned; the final response object includes
    // statistics and additional data about the request.
    var generateRequest = new GenerateRequest()
    {
        /* Basic options */
        // (required) model name
        Model = ModelSelecter.ModelWithSuffixAndImage,
        // (required) prompt to generate a response for
        Prompt = "天空为什么是蓝色的?",
        // (optional) text appended after the model response; only dedicated models
        // (e.g. qwen2.5-coder:3b) support it — unsupported models throw.
        //Suffix for Fill-In-the-Middle generate
        //Suffix = " return result",
        // (optional) Base64-encoded image list for multimodal models (LLaVA etc.);
        // omitted here — see Completion_Request_WhithImages_Test for an example.
        /* Advanced options */
        // (optional) response format: "json" or a JSON schema
        //Format = "json",
        // Model option parameters:
        Options = new RequestOptions()
        {
            // how far back the model looks to prevent repetition (0 = disabled, -1 = num_ctx)
            RepeatLastN = 60,
            // repetition penalty; higher (e.g. 1.5) is stricter, lower (e.g. 0.9) more lenient (default 1.1)
            RepeatPenalty = 1.1f,
            // model temperature; higher values give more creative answers (default 0.8)
            Temperature = 0.8f,
            // random seed; a fixed value reproduces the same text for the same prompt (default 0)
            Seed = 0,
            // stop sequences: generation halts and returns when one is produced
            Stop = new string[] { "exc", "stop" },
            // maximum tokens to predict (default -1 = unlimited)
            NumPredict = -1,
            // reduces nonsense; higher (e.g. 100) = more diverse, lower (e.g. 10) = more conservative (default 40)
            TopK = 40,
            // used with top-k; higher (e.g. 0.95) = more diverse, lower (e.g. 0.5) = more focused (default 0.9)
            TopP = 0.9f,
            // enable F16 key/value cache (default false)
            F16kv = false,
            // penalize tokens by their frequency in the prompt
            FrequencyPenalty = 0.0f,
            // return logits for all tokens, not just the last (default false)
            LogitsAll = false,
            // enable low-VRAM mode (default false)
            LowVram = false,
            // which GPU handles small tensors; null keeps the default (GPU 0)
            MainGpu = null,
            // alternative to top_p: minimum token probability relative to the most
            // likely token (e.g. min_p=0.05 with a 0.9 top token filters < 0.045); default 0.0
            MinP = 0.0f,
            // Mirostat sampling for perplexity control (0 = off, 1 = Mirostat, 2 = Mirostat 2.0; default 0)
            MiroStat = 0,
            // Mirostat learning rate: lower = slower adjustment, higher = more responsive (default 0.1)
            MiroStatEta = 0.1f,
            // Mirostat target: balances coherence vs. diversity; lower = more focused text (default 5.0)
            MiroStatTau = 5.0f,
            // context window size used to generate the next token
            NumCtx = 2048,
            // enable NUMA support (default false)
            Numa = false,
            // maximum batch size for prompt processing (default 512)
            NumBatch = 512,
            // layers sent to the GPU (macOS default 1 enables Metal; 0 disables)
            NumGpu = 0,
            // number of GQA groups in the transformer layers; some models require it (e.g. llama2:70b = 8)
            NumGqa = null,
            // tokens kept from the initial prompt (default 4, -1 = all)
            NumKeep = 4,
            // threads used during computation; Ollama auto-detects — if set, use the physical core count
            //NumThread = null,
            // penalize newlines (default true)
            PenalizeNewline = true,
            // penalize tokens by their presence in the prompt (default 0.0)
            PresencePenalty = 0.0f,
            // tail-free sampling: higher (e.g. 2.0) reduces unlikely tokens more, 1.0 disables (default 1)
            TfsZ = 1,
            // typical-p sampling (https://arxiv.org/abs/2202.00666; default 1.0)
            TypicalP = 1.0f,
            // lock the model in memory to prevent swapping; uses more RAM, may slow loading (default false)
            UseMlock = false,
            // memory-map the model; disabling slows loading but can reduce paging without mlock (default true)
            UseMmap = true,
            // load only the vocabulary, not the weights (default false)
            VocabOnly = false,
        },
        // (optional) streaming: set false to disable streamed responses
        Stream = true,
        // (optional) system message (overrides the Modelfile)
        //System = "Modelfile 文件格式",
        // (optional) prompt template (overrides the Modelfile); Modelfile-format string
        Template = """
<system>
You are a helpful assistant.
</system>
<user>
{prompt}
</user>
<assistant>
""",
        // (optional) Raw = true skips all prompt formatting — use when supplying a
        // fully templated prompt yourself
        //Raw = false,
        // (optional) how long the model stays loaded after the request (default 5m)
        KeepAlive = "5m",
        // (optional, deprecated) context from a previous /generate call, for short conversational memory
        //Context = new long[] { 25897,46546546,546458},
    };

    // Accumulate the streamed chunks into the full answer.
    IAsyncEnumerable<GenerateResponseStream?>? responses = _ollamaApiClient.GenerateAsync(generateRequest);
    StringBuilder stringBuilder = new StringBuilder();
    await foreach (GenerateResponseStream? stream in responses)
    {
        stringBuilder.Append(stream?.Response);
    }
    var fullText = stringBuilder.ToString();
    _output.WriteLine(fullText);

    // BUGFIX: the original asserted Assert.NotNull/NotEmpty on the IAsyncEnumerable
    // itself — it is never null after the call, and IAsyncEnumerable is not an
    // IEnumerable, so NotEmpty could not inspect it. Assert on the accumulated text.
    Assert.NotEmpty(fullText);
}
/// <summary>
/// A generate request without a prompt loads the (default) model into memory.
/// </summary>
[Fact]
public async Task Completion_Request_LoadModel_Test()
{
    // No prompt and no explicit model: the server simply loads the default model.
    var loadRequest = new GenerateRequest();

    GenerateDoneResponseStream? doneResponse = await _ollamaApiClient.GenerateAsync(loadRequest).StreamToEndAsync();

    Assert.NotNull(doneResponse);
    Assert.NotNull(doneResponse.Response);
    Assert.True(doneResponse.Done);
}
/// <summary>
/// KeepAlive = "0" makes the server unload the model immediately after the request.
/// </summary>
[Fact]
public async Task Completion_Request_UnLoadModel_Test()
{
    var unloadRequest = new GenerateRequest()
    {
        // Zero keep-alive → unload right away (default model is used).
        KeepAlive = "0",
    };

    GenerateDoneResponseStream? doneResponse = await _ollamaApiClient.GenerateAsync(unloadRequest).StreamToEndAsync();

    Assert.NotNull(doneResponse);
    Assert.NotNull(doneResponse.Response);
    Assert.True(doneResponse.Done);
}
#endregion
#region Chat completion (OllamaApiClient)
/// <summary>
/// Streaming chat request with thinking enabled; chunks are concatenated into
/// the full reply.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ClientRequest_Streaming_Test()
{
    var chatRequest = new ChatRequest()
    {
        Messages = new[]
        {
            new Message()
            {
                Role = ChatRole.User,
                Content = "天空为什么是蓝的?",
            }
        },
        Think = true,
    };

    var answer = new StringBuilder();
    await foreach (var chunk in _ollamaApiClient.ChatAsync(chatRequest))
    {
        answer.Append(chunk?.Message.Content);
    }

    _output.WriteLine(answer.ToString());
}
/// <summary>
/// Non-streaming chat request (Stream = false, thinking disabled); the reply
/// arrives as a single done-response.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ClientRequest_NoStreaming_Test()
{
    var chatRequest = new ChatRequest()
    {
        Messages = new[]
        {
            new Message()
            {
                Role = ChatRole.User,
                Content = "天空为什么是蓝的?",
            }
        },
        Think = false,
        // (optional) disable streamed responses
        Stream = false,
    };

    var done = await _ollamaApiClient.ChatAsync(chatRequest).StreamToEndAsync();
    _output.WriteLine(done?.Message.Content);

    Assert.NotNull(done);
    Assert.NotNull(done.Message?.Content);
    Assert.True(done.Done);
}
/// <summary>
/// Structured-output chat request: Format carries a JSON schema and the reply
/// is deserialized back into a matching anonymous shape.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ClientRequest_StructuredOutputs_Test()
{
    var chatRequest = new ChatRequest()
    {
        Messages = new[]
        {
            new Message()
            {
                Role = ChatRole.User,
                Content = "Ollama is 22 years old and busy saving the world. Return a JSON object with the age and availability.",
            }
        },
        Think = false,
        // (optional) disable streamed responses
        Stream = false,
        // Schema the model's JSON reply must conform to.
        Format = new
        {
            type = "object",
            properties = new { age = new { type = "integer" }, available = new { type = "boolean" } },
            required = new string[] { "age", "available" }
        },
    };

    var done = await _ollamaApiClient.ChatAsync(chatRequest).StreamToEndAsync();
    _output.WriteLine(done?.Message.Content);

    // Anonymous template mirrors the schema above.
    var template = new { age = 0, available = false };
    var parsed = Newtonsoft.Json.JsonConvert.DeserializeAnonymousType(done?.Message.Content ?? "{}", template);

    Assert.NotNull(parsed);
    Assert.Equal(22, parsed.age);
    Assert.True(parsed.available);
}
/// <summary>
/// Multi-turn chat: the request's Messages list grows each round so the model
/// sees the full history; every round's answer is collected and printed.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ClientRequest_WithHistory_Test()
{
    var chatRequest = new ChatRequest()
    {
        Messages = new List<Message>(),
        Think = false,
        Stream = false,
    };

    var turns = new List<Message>()
    {
        new Message { Role="user", Content = "为什么天空是蓝色的?"},
        new Message { Role="assistant", Content = "due to rayleigh scattering."},
        new Message { Role="user", Content = "how is that different than mie scattering?使用中文回答"}
    };

    var answers = new List<string>();
    foreach (var turn in turns)
    {
        // Append the next turn to the running conversation before each call.
        List<Message> conversation = chatRequest.Messages.ToList();
        conversation.Add(turn);
        chatRequest.Messages = conversation;

        var done = await _ollamaApiClient.ChatAsync(chatRequest).StreamToEndAsync();
        answers.Add(done?.Message.Content ?? "");
    }

    _output.WriteLine(string.Join(Environment.NewLine + "++++++++++++++++++++++++++++++++++++++" + Environment.NewLine, answers));
}
/// <summary>
/// Chat request with a Base64-encoded image attached; requires a vision-capable model.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ClientRequest_WithImages_Test()
{
var chatRequest = new ChatRequest()
{
// Vision-capable model is required for image input.
Model = ModelSelecter.ModelWithVision,
Messages = new[]
{
new Message()
{
Role = ChatRole.User,
Content = "这张图片的内容是什么?用中文回答!",
// Base64-encoded image payload (no data: URI prefix).
Images = new string[]
{ "iVBORw0KGgoAAAANSUhEUgAAAFoAAABaCAYAAAA4qEECAAAACXBIWXMAABnWAAAZ1gEY0crtAAAAEXRFWHRTb2Z0d2FyZQBTbmlwYXN0ZV0Xzt0AAAAXdEVYdFVzZXIgQ29tbWVudABTY3JlZW5zaG9093UNRwAAESNJREFUeJztnXl8G+WZx78zkmxZli/ZTuwczmkTExznIHFCcEkgCeSglKv9sLAsXaBdSAMLS8t2d4GyXWDDUmjDsRQS2C0FtsuWEiAJxzYQIBxxICchN4lz+D5kSbYla+btH+NDjmSNRiM7huCfP/LHmndm3pnfvO/zPs/zPu8rwSnw5oo9Y6WSPwXGA/2AlJOPK6VOPaVTSEIk7dqOdKe3tYd7ugYmIT2hUB8imv5m0fwrdp10rPW/t1fuGWhI8YSExiqlegA2M/dXSrX7fm33aqk/nSD+9Wq2VqJgCaX+7a5rz9/ZWq718saKvVOU0J5TCSzWxKq7AUHyvHquc0RLksS27z/pRUT3CEQLfvebIkne+9O+VM9/Vp9/gUhQ/V54a/XeQqHUR8rAXAWpCaXSDQiS59VznSNakiSmff9JLyK6RyBa8LvfFEnyRylRVjirIFfJSqmC1tJCCEVLuhX/nntOrfwN6ZKk5cr/kq6lSKnEN0UQPx6qJDGh1BWKRC73xuGAUmpdPKIFUfLx+YtjIjpOxKXvH9YDV/66Y/3uRzfuKD0SKdXndZSmKKWOAR8qpT5VSm1VSu0HmoFmIUSzUuqYUqqklFoH/BkoATIBF3BatFIjFUSjlIjFnnn16tXuzMzMjKKiosKysrKN8+fPPwf4HPgceFcpFRJZrE/79OnjqK+v1wKBQBjF6enpbN682bp166awv7EeL1aN+mXp+fTC4GLgjkBVVcNrNmvRb79tLD/7nKvP69Jz5vTpKnZbFWPmfEBqWrrtxCW8rkl5pSk9hcXy5bX/cCIBR09FrZTKBY4Cwr4XQpimzVFmuJF9/fXX99XW1mbb7faM6urqzt27d+/VarXa/Pz8yF0j9lVbtmxJNsFSqZQGTp48aQcOHJDq6moEQcBisbB8+XJSUlIoLCzknnvuMXQsGejbty/a3wfy/P8FtBC7ujcRbQUAAAAASUVORK5CYII=" }
},
}
},
Think = false,
Stream = false,
};
// Non-streaming call: collapse to the final response and print the image description.
var chatDoneResponse = await _ollamaApiClient.ChatAsync(chatRequest).StreamToEndAsync();
_output.WriteLine(chatDoneResponse?.Message.Content);
}
/// <summary>
/// Reproducible chat output: a fixed Seed with Temperature = 0 must yield the
/// same answer for two identical requests.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ClientRequest_ReproducibleOutputs_Test()
{
    var chatRequest = new ChatRequest()
    {
        Messages = new[]
        {
            new Message()
            {
                Role = ChatRole.User,
                Content = "为什么天空是蓝的?",
            }
        },
        Think = false,
        Stream = false,
        Options = new RequestOptions()
        {
            Seed = 19491001,
            Temperature = 0f,
        },
    };

    // First request.
    var doneResponse = await _ollamaApiClient.ChatAsync(chatRequest).StreamToEndAsync();
    var responseText1 = doneResponse?.Message.Content;

    // Second request.
    var doneResponse2 = await _ollamaApiClient.ChatAsync(chatRequest).StreamToEndAsync();
    // BUGFIX: the second text was previously read from the FIRST response
    // (doneResponse), which made the equality assertion always pass.
    var responseText2 = doneResponse2?.Message.Content;

    Assert.Equal(responseText1, responseText2);
    _output.WriteLine(responseText1);
    _output.WriteLine(responseText2);
}
/// <summary>
/// Tool-calling chat round trip:
/// 1. send the request with tool definitions;
/// 2. parse the tool call out of the model's reply;
/// 3. execute the local tool function;
/// 4. feed the tool result back so the model can produce the final reply.
/// </summary>
/// <remarks>
/// The locally executed tool result is sent back to the model in a second request.
/// </remarks>
/// <returns></returns>
[Fact]
public async Task ClientRequest_WithTools_Test()
{
    List<Message> Messages = new List<Message>();
    Messages.Add(new Message() { Role = ChatRole.User, Content = "获取北京市当前的时间?", });
    var chatRequest = new ChatRequest()
    {
        Model = ModelSelecter.ModelWithRawmodel,
        Messages = Messages,
        Stream = false,
        Tools = new[]
        {
            new
            {
                type = "function",
                function = new
                {
                    name = "GetCurrentTime",
                    description = "获取指定城市的当前时间",
                    parameters = new
                    {
                        type = "object",
                        properties = new
                        {
                            city = new
                            {
                                type = "string",
                                description = "城市名称"
                            }
                        }
                    },
                    // NOTE(review): the Ollama/OpenAI tool schema normally nests
                    // "required" inside "parameters" — verify the server accepts it here.
                    required = new string[] { "city" }
                },
            }
        },
    };

    // Step 1: send the request carrying the tool definitions.
    var doneResponse = await _ollamaApiClient.ChatAsync(chatRequest).StreamToEndAsync();

    // Step 2: the reply carries the tool-call information.
    Message toolResponseMessage = doneResponse!.Message;
    // Keep the assistant's tool-call message in the conversation history.
    Messages.Add(doneResponse.Message);
    // Function to invoke...
    var currentFunction = toolResponseMessage.ToolCalls!.First().Function;
    // ...and its "city" argument.
    var city = currentFunction?.Arguments!["city"]?.ToString() ?? string.Empty;

    // Step 3: run the local tool with the model-supplied argument.
    // (A real project would dispatch dynamically via attributes/reflection;
    // this demo calls the single known function directly.)
    var callCurrentDate = GetCurrentTime(city);

    // Step 4: return the tool result so the model can compose the final reply.
    var argumentText = string.Join(", ", currentFunction?.Arguments?.Select(kvp => $"{kvp.Key}:{kvp.Value}") ?? new string[] { });
    // BUGFIX: argumentText is already a joined string; the original wrapped it in
    // another string.Join(", ", ...), which binds to Join<T>(string, IEnumerable<T>)
    // with T = char and joins the individual CHARACTERS. Interpolate it directly.
    var toolResultText = $"{currentFunction?.Name ?? "(unnamed tool)"}({argumentText})";
    Messages.Add(new Message() { Role = ChatRole.Tool, Content = $"Tool:{toolResultText}:\nResult:{callCurrentDate.ToString("yyyy-MM-dd HH:mm:ss")}"});
    chatRequest.Messages = Messages;

    // Ideally the model now answers using the tool result (may not on all models).
    var doneResponse2 = await _ollamaApiClient.ChatAsync(chatRequest).StreamToEndAsync();
    var lastContentText = doneResponse2?.Message.Content;
    _output.WriteLine(lastContentText);
}
#endregion
#region Chat completion via Chat helper (convenient but reduced feature set; still evolving)
/// <summary>
/// Streaming chat via the high-level Chat helper with thinking enabled.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ChatRequest_Streaming_Test()
{
    var myChat = new Chat(_ollamaApiClient)
    {
        Think = true,
    };

    var answer = new StringBuilder();
    await foreach (var piece in myChat.SendAsync("天空为什么是蓝的?"))
    {
        answer.Append(piece);
    }

    _output.WriteLine(answer.ToString());
}
/// <summary>
/// Chat-helper request collapsed to a single string via StreamToEndAsync.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ChatRequest_NoStreaming_Test()
{
    var myChat = new Chat(_ollamaApiClient)
    {
        Think = true,
    };

    var answer = await myChat.SendAsync("天空为什么是蓝的?").StreamToEndAsync();
    _output.WriteLine(answer);

    Assert.NotNull(answer);
}
/// <summary>
/// Structured outputs via the Chat helper — not directly supported by the
/// helper API, so the intended flow is kept below as commented-out reference
/// code (see ClientRequest_StructuredOutputs_Test for the working low-level version).
/// </summary>
/// <returns></returns>
[Fact]
public void ChatRequest_StructuredOutputs_Test()
{
// NOTE(review): intentionally empty — the Chat helper exposes no Format parameter.
//var chatRequest = new ChatRequest()
//{
//    Messages = new[]
//    {
//        new Message()
//        {
//            Role = ChatRole.User,
//            Content = "Ollama is 22 years old and busy saving the world. Return a JSON object with the age and availability.",
//        }
//    },
//    Think = false,
//    // (optional) disable streamed responses
//    Stream = false,
//    Format = new
//    {
//        type = "object",
//        properties = new { age = new { type = "integer" }, available = new { type = "boolean" } },
//        required = new string[] { "age", "available" }
//    },
//};
//var chatDoneResponse = await _ollamaApiClient.ChatAsync(chatRequest).StreamToEndAsync();
//_output.WriteLine(chatDoneResponse?.Message.Content);
//var jsonObject = new { age = 0, available = false };
//var responseObject = Newtonsoft.Json.JsonConvert.DeserializeAnonymousType(chatDoneResponse?.Message.Content ?? "{}", jsonObject);
//Assert.NotNull(responseObject);
//Assert.Equal(22, responseObject.age);
//Assert.True(responseObject.available);
}
/// <summary>
/// Multi-turn conversation via the Chat helper, which tracks history internally;
/// each turn is sent with its original role via SendAsAsync.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ChatRequest_WithHistory_Test()
{
    var myChat = new Chat(_ollamaApiClient);

    var turns = new List<Message>()
    {
        new Message { Role="user", Content = "为什么天空是蓝色的?"},
        new Message { Role="assistant", Content = "due to rayleigh scattering."},
        new Message { Role="user", Content = "how is that different than mie scattering?使用中文回答"}
    };

    foreach (var turn in turns)
    {
        var answer = await myChat.SendAsAsync(turn.Role!.Value, turn.Content??"").StreamToEndAsync();
        _output.WriteLine(answer);
        _output.WriteLine("----------------------------------------------------");
    }
}
/// <summary>
/// 带图像的对话请求 测试
/// </summary>
/// <returns></returns>
[Fact]
public async Task ChatRequest_WithImages_Test()
{
var myChat = new Chat(_ollamaApiClient)
{
Model = ModelSelecter.ModelWithVision,
};
var base64img1 ="iVBORw0KGgoAAAANSUhEUgAAAFcAAABWCAYAAAC6lArJAAAACXBIWXMAABnWAAAZ1gEY0crtAAAAEXRFWHRTb2Z0d2FyZQBTbmlwYXN0ZV0Xzt0AAAAXdEVYdFVzZXIgQ29tbWVudABTY3JlZW5zaG9093UNRwAAFi9JREFUeJztnXt0VEWexz91b7/SnRdBIBCIgCKIQYMZdVRcFQQGH8dBYVdR1jPrzAioOO7q4OiemdHVgw7r4zDjjiP4GB8wGgdGGJQoCshLMEp4CfIIAcIrpEnS6fTz3qr9o5NApztJd+iOzuN7zk2n6/7u/VV9b/WvflX1q7qCNnhr9c4RUokfA+OAAYDr9PNKqbaXdAghRNLy7elI9l7dgCYFhzTUR6Dm33n1+dtOP9ma27kf7LHnOsLPo2nTTk9vi3+S2y4UQrzU0ysevP76IUFoJnHuB3vsORnGh8C17RVAAE6HlZ6ZdnJddjIdVlwOKw6rjtWiYdE0NE2gCYEQIIh8fpegFCgUSoFUCikVhlSEDZNA2MQXNGgMhKlvCuJuDOAPGiRXlUDTtJU9Gplw/fVDghaAXEf4eYW4Np6wzaIxuE8O5+TnkOuyoX3XGEsCLQ8dAXrkD3YAuyVGVipFnTdI5XEPlTUewoZMVM21J13yOeBe8drqnSM0yRaaa3FLzdWE4Nz8HC4c2JMMW6zyfyT4ggYVVbXsr2ns1Cw286dQ5kUWXfFj1cbGOu0WrhiaT98ervh3+AdDCx9n98piw+5jBEJmZ5cIoel3a0ox7vTUvEw7Pygu/CexcVCQ52L8RYXkuuztyrTUbAXjNSHEACEEQgh6ZmUw5sIBuBzW7srv3xyyMqyMGVFArstOC29tDwAUAzSa/Vin3cI1F/TDYdW/xaz/bSDD1syVrUOuXBaINF5XDM3/ztTYUCiEz+fD5/MRDAZRSmGxWHA6nTidThwOB5qmfat5zHRYuXxIH1Z9fYT22jgLwLn5Od+qjQ2Hw+zdu5eVK1fyySefsG3bNmprawkGgxiGgVIKTdOw2WxkZWUxePBgrrzySsaPH09JSQnZ2dnfSr775bkY3Cebfcc8cc+Ld9buUTddMvBbcbc8Hg+LFy9m7ty5bNu2jXA4nNT1Qgh69+7NlClTmDZtGkOGDOn2XpwvaLD0ywMYZqwfLL7Ye1x975ze3ZohwzBYvHgxs2bNoqqqKukudTw4nU5+8pOf8Nhjj9GrV68U5DJxlO87wTdH6mPShbsxoPIy23ctUo0TJ04wY8YMFi1ahJQJ93oSRmFhIa+++ipjxoxJ+b3bw0lvkA83H4xJ13q4bN2WiV27djFq1Cjee++9tBALcPDgQW644QZ+97vfpU1HW+S6bDjjdKG17rJRX3/9Nddddx27d+9Ou65gMMjPfvYzZs+enRKT0xk0ITgryxGbnnbNQHV1NTfeeCOHDx/uDnUAmKbJr371K+bPn98t+nKcsRYg7eQGAgGmTp3K/v37060qBqZp8sADD7Bhw4a068rMiO0jpNX/Ukrx3HPPsWrVqg7l7HY7kyZNori4mC1btlBaWkowGGxXvri4mIkTJxIIBFiwYAEHDhxoV9bv9/OjH/2I8vJyMjMzu1qUTuGyxemAqTRi165dyuVyKaDdw263q0WLFkVd99e//lXZ7fa48pMnT1Z+v79VtqamRhUXF3eoA1APP/xwOouq6puC6q3PdkcdaSPXNE01adKkTgs9bdo0JaWMuf7RRx+Nke3Xr59yu90xsuXl5cpms3Wox+l0qsrKynQVV3kD4Rhy02Zzt2/fzpIlSzqUsVqtzJw5M26vavr06dhs0Y3EXXfdRV5eXoxscXExJSUlHery+Xw89dRTCeS8a7BosWVIC7lKKZ5//nlCoVCHcr1792bw4MGt39evX09xcTGPPvooBQUFDB8+/FRGNY1x48a13t/tdlNXV4dSCl3XE+o0vPPOOxw7dqyLpeoYWneR63a7Wbx4cad
yubm5WCyRNlVKycyZM9myZQvPPPMMmzdvZtCgQacyqmkUFhayefNmRo0aRf/+/SkoKGDMmDE0NDRQUFDQqT6v18vrr7/e5XJ1hHhzi2khd8mSJTQ0NHQqFwqFWntRpmly5MgRIEJ0dXV1TA/L6/UyZcoUbr75Zr788kv+8pe/cPHFF2O1WvH5fAnlbeHChWnpucXti6XasEsp1Q033NBpQwaonJwcVVdX13rt9OnTFaB69eqljh07poqKilplNU1TK1asUAcOHIir94477khIp67raufOnakutpJSpb9B83q9rFu3LiFZj8fD8uXLW7/PmTOHN954g1WrVuH1evnmm29az0kpWbZsGYWFhTH3qaurY8WKFQnpNE2TDz/8MCHZZBCv5qac3C1btlBfHzv8Fg9KKWbPnk1TUxMALpeLqVOncv755zNnzpyY8d0333yT6urqmHu89NJLHD9+POE8dtapSRVSTm6yXc2tW7cydepUampqkFLi9/t54YUX4o4J1NbWMnnyZKqqqpBSEggEeO2113j88ceT0lleXo5hGEld0xUIpVI7bHTnnXfy9ttvJ31dz549GTZsGMeOHaOysrLD0azs7GyKiopwu93s2bMn6QZK13UqKyvjmpgzwdtr9kR9T/nYws6dO7t0ndvtZsOGddgskJUBNovAZoGWeUilIGxAyIBQ0MOmjesxTZKO5YKI3d27d2/KyW2LlJIbDodjbGK7inXo20MwvFAwcrDGiLMFg/MFvbIFmQ6wWQW6iG4oTAmGCb6gor4JDp5Q7Dgk2bJfsbVKUlWjaAokltd9+/YxevToLpQycaSUXK/X26l/m+sS3HmNxl2jdYYWaNi7MJuf7RTk94Bh/QXjRkaqtinheL1i6SbJ75cb7DzUcZ1u8anTiZSSW19f3+EMrkWH9x6xctXw1PdddA365Qnu+YHOHdfoXPZQkL1H2ye4trY25Xloi5SW0ufzddi4GCY89qbB5krZJVuZCGo9iv95x2D/8Y41eDzxYw1SiZTW3I4GuFuwcbfkX34R4ubv6zxwk87IQRqWFERQHXYr3lxlMnepibux80eXSF7PFCklN1GvLmRA6VqTRetNRpwdsb/jR2oM7CPQk/gtuRsV63dJFqyWLP/KxJd+vpJCSsm1WpNrnUwJFfslFa9IrDoMyhdcW6TxvSEa5/YV9M4RuBygCQiG4aRXcfCEYtsBxcptkq/2SXyBrrljup7+gMOUkpsXNvi+zcGOcIhGlZxjHzZh92HF7sMmfygzESJCaiTUHqSieS3DmeXRgqDQYuFSPf1BhyklN3fJByzpNYBGKfkqHGBt0M9XoQDfGCHcpkGn8dinQSkwz5BIAbg0jbN1KxdZ7Vxqz2CULYP+Fiv6V1+jPI2I7KwzU3K6vjYrkVJGrmpqIrh4KQBZmsbVdidX250AGEpxVBpsD4fYEQ6yzwhRZYQ5ZhrUSUlASbo6wioAqxBkCY3eus4A3co5FhvnWW2MsNo5x2LFJWINuaqvJ7hsOY7bJ3dRc+dIGbnB5SuQJ+viKxGCAbqVAbqVCY5IqKoATMCvJG7T5IQ0qZEmJ00TjzJpUoqAUhhKIYkQaAWcQiNLE/TQdM7SLPTWdc7SdLKEhk2IpOyvf8G7OP7t1lN97BQjNeQqReCNBcldQsTJdgkNl0WjkDO3gclaEWPrdoyvd2EpGt65cII4fbI1JY/M2PkN4a8qUnGr7oVpElj4btpunxJyA38qpd3Y9e84gks/RKWpt3bG5KoGD8E/v5+KvHwrkCfrCC5PbIooWZwxucvLynj8YCWfh/x4VfrGDFKNsFLsM8K81tTAw//3YnpmhM9kJkIpxYQJEygrK0MAuZpOkdXOKHsGl9syOM9qI0fT+bYXXynApySHDIPysJ91QT/loQDVhoGJwm63U1FRwbBhw85Iz9tr9rQ2aEqpM/MWqqqqWLlyZWsB6qTJmqCPNUFfZJW70BjZJ58PnnsBsXMX4U1fYmz/+ow
K0CE0DWG1IDIz0fr1xTJ0CJYLi5i39jOe+OPr1Ekzrj8dDAZZsGABTzzxREqzc0bkLlq0qN2QJQU0KYmvXz
var chatDoneResponse = await myChat.SendAsync("这张图片的内容是什么?用中文回答!", new string[] { base64img1 }).StreamToEndAsync();
_output.WriteLine(chatDoneResponse);
Assert.NotNull(chatDoneResponse);
Assert.Contains("企鹅", chatDoneResponse);
}
/// <summary>
/// Reproducible-output chat request: the same fixed seed with temperature 0 must
/// yield identical completions across two independent chat sessions.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ChatRequest_ReproducibleOutputs_Test()
{
    // Local helper removes the duplicated setup the original had for the two
    // requests; one fresh Chat per call so no history leaks between runs.
    async Task<string> AskOnceAsync()
    {
        var chat = new Chat(_ollamaApiClient)
        {
            Think = false,
            Options = new RequestOptions()
            {
                Seed = 19491001,   // fixed seed -> deterministic sampling
                Temperature = 0f,  // greedy decoding, no randomness
            },
        };
        return await chat.SendAsync("通常情况,天空是什么颜色?").StreamToEndAsync();
    }

    // First request
    var firstAnswer = await AskOnceAsync();
    // Second request
    var secondAnswer = await AskOnceAsync();

    Assert.Equal(firstAnswer, secondAnswer);
    _output.WriteLine(firstAnswer);
    _output.WriteLine(secondAnswer);
}
/// <summary>
/// Chat request with tool calling enabled.
/// </summary>
/// <remarks>
/// The Chat class wraps the individual steps: it sends the prompt together with
/// the tool definitions, executes the locally implemented tools, and feeds their
/// results back to the model for the final answer.
/// </remarks>
/// <returns></returns>
[Fact]
public async Task ChatRequest_WithTools_Test()
{
    var chat = new Chat(_ollamaApiClient) { Model = ModelSelecter.ModelWithRawmodel };

    // camelCase for locals (was PascalCase "Tools", which reads as a type/property).
    List<object> tools = [new GetWeatherTool(), new GetLatLonAsyncTool(), new GetPopulationTool()];

    StringBuilder answer = new StringBuilder(1000);
    await foreach (var answerToken in chat.SendAsync("上海市的天气怎么样?", tools))
    {
        answer.Append(answerToken);
    }
    _output.WriteLine(answer.ToString());
}
#endregion
#region 列出本地模型
/// <summary>
/// Lists the locally installed models and verifies at least one exists.
/// </summary>
[Fact]
public async Task List_Local_Models_Test()
{
    var models = await _ollamaApiClient.ListLocalModelsAsync();

    // Assert before use: a null result should fail the assert,
    // not surface as a NullReferenceException inside the foreach.
    Assert.NotNull(models);

    // Materialize once — the original enumerated the sequence three times
    // (foreach, Count(), Any()).
    var modelList = models.ToList();

    _output.WriteLine("本地模型有:");
    foreach (var model in modelList)
    {
        _output.WriteLine(model.Name);
    }
    _output.WriteLine($"共:{modelList.Count} 个");

    Assert.NotEmpty(modelList);
}
/// <summary>
/// Verifies that each model required by the test suite is installed locally.
/// </summary>
/// <param name="selectedModel">Model name that must appear in the local model list.</param>
[Theory]
[InlineData(ModelSelecter.ModelWithTool)]
[InlineData(ModelSelecter.ModelWithVision)]
[InlineData(ModelSelecter.ModelWithEmbedding)]
public async Task Exist_Local_Models_Test(string selectedModel)
{
    var models = await _ollamaApiClient.ListLocalModelsAsync();

    // Assert before use so a null result fails cleanly instead of throwing NRE.
    Assert.NotNull(models);

    // Materialize once — the original enumerated the sequence three times
    // (foreach, Count(), Contains).
    var modelList = models.ToList();

    _output.WriteLine("本地模型有:");
    foreach (var model in modelList)
    {
        _output.WriteLine(model.Name);
    }
    _output.WriteLine($"共:{modelList.Count} 个");

    Assert.Contains(modelList, m => m.Name == selectedModel);
}
#endregion
#region 列出运行中的模型
/// <summary>
/// Lists the models currently loaded/running on the Ollama server.
/// </summary>
/// <returns></returns>
[Fact]
public async Task List_Running_Models_Test()
{
    var models = await _ollamaApiClient.ListRunningModelsAsync();

    // Assert before use so a null result fails cleanly instead of throwing NRE.
    Assert.NotNull(models);

    // Materialize once — the original enumerated the sequence twice (foreach, Count()).
    var runningModels = models.ToList();

    _output.WriteLine("运行中的模型有:");
    foreach (var model in runningModels)
    {
        _output.WriteLine(model.Name);
    }
    _output.WriteLine($"共:{runningModels.Count} 个");
}
/// <summary>
/// Reports whether the given model is currently running (informational only).
/// </summary>
/// <param name="selectedModel">Model name to look for among the running models.</param>
/// <returns></returns>
[Theory]
[InlineData(ModelSelecter.ModelWithTool)]
public async Task IsRunning_Models_Test(string selectedModel)
{
    var models = await _ollamaApiClient.ListRunningModelsAsync();
    Assert.NotNull(models);

    // Materialize once — the original enumerated the sequence three times
    // (foreach, Count(), Any()).
    var runningModels = models.ToList();

    _output.WriteLine("运行中的模型有:");
    foreach (var model in runningModels)
    {
        _output.WriteLine(model.Name);
    }
    _output.WriteLine($"共:{runningModels.Count} 个");

    var isRunning = runningModels.Any(m => m.Name == selectedModel);

    // NOTE(review): both ternary branches are empty strings — the running/not-running
    // labels appear to have been lost from this source; confirm and restore them.
    var message = $"{selectedModel}模型:{(isRunning ? "" : "")}";
    _output.WriteLine(message);

    // Deliberately non-failing: this test only reports state. Consider asserting
    // isRunning once CI guarantees the model is pre-loaded.
    Assert.True(true, message);
}
#endregion
#region Ollama服务运行状态
/// <summary>
/// Verifies that the Ollama server answers the health probe.
/// </summary>
[Fact]
public async Task OllamaServer_IsRunning_Test()
{
    bool alive = await _ollamaApiClient.IsRunningAsync();

    // NOTE(review): both ternary branches are empty strings; the original status
    // labels appear to be missing — verify against the repository history.
    string stateMessage = $"Ollama服务{(alive ? "" : "")}";

    _output.WriteLine(stateMessage);
    Assert.True(alive, stateMessage);
}
#endregion
#region 版本信息
/// <summary>
/// Fetches and prints the Ollama server version string.
/// </summary>
/// <returns></returns>
[Fact]
public async Task Get_Version_Test()
{
    // Query the server; a reachable server always reports its version.
    var serverVersion = await _ollamaApiClient.GetVersionAsync();

    _output.WriteLine($"Version: {serverVersion}");
    Assert.NotNull(serverVersion);
}
#endregion
#region tools 方法
/// <summary>
/// Gets the current weather for a given location.
/// </summary>
/// <param name="location">The location or city to get the weather for</param>
/// <param name="unit">The unit to measure the temperature in</param>
/// <returns>The weather for the given location</returns>
[OllamaTool]
public static string GetWeather(string location, Unit unit)
{
    // Stub implementation: always reports the same temperature, echoing
    // the requested unit and location back into the sentence.
    return $"It's cold at only 6° {unit} in {location}.";
}
/// <summary>
/// Gets the latitude and longitude for a given location.
/// </summary>
/// <param name="location">The location to get the latitude and longitude for</param>
/// <returns>The latitude and longitude of the given location, formatted as "lat, lon"</returns>
[OllamaTool]
public static async Task<string> GetLatLonAsync(string location)
{
    // Simulate the latency of a real geocoding call.
    await Task.Delay(200).ConfigureAwait(false);

    // Random.Shared instead of "new Random()" per call: thread-safe and avoids
    // allocating/re-seeding a generator on every invocation (.NET 6+).
    return $"{Random.Shared.Next(20, 50)}.4711, {Random.Shared.Next(3, 15)}.0815";
}
/// <summary>
/// Gets the amount of people living in a given city.
/// </summary>
/// <param name="city">The city to get the population info for (ignored by this stub)</param>
/// <returns>The population of a given city (randomized stub value)</returns>
[OllamaTool]
public static int GetPopulation(string city) =>
    // Random.Shared instead of "new Random()" per call: thread-safe shared
    // instance, no per-call allocation or re-seeding (.NET 6+).
    Random.Shared.Next(1000, 10000000);
/// <summary>
/// Gets the current time for a given city.
/// </summary>
/// <param name="city">The city to get the time for. Currently ignored — the local
/// server time is returned regardless; TODO confirm whether a per-city time-zone
/// lookup was intended.</param>
/// <returns>The current local time of the machine running the tests.</returns>
public static DateTime GetCurrentTime(string city) => DateTime.Now;
#endregion
}
/// <summary>
/// Temperature unit accepted by the <c>GetWeather</c> tool method.
/// </summary>
public enum Unit
{
    /// <summary>Degrees Celsius.</summary>
    Celsius,
    /// <summary>Degrees Fahrenheit.</summary>
    Fahrenheit
}
}