Function Calling 是賦予 LLM 應用強大能力的一個重要機制,它可以增強 LLM 模型延伸的能力(但並不是 LLM 真的自己能調用工具)。
假設我們為 Kernel 掛載一個可以取得天氣資訊的 Plugin
// Create a Kernel builder wired to an Azure OpenAI chat-completion service.
var builder = Kernel.CreateBuilder()
.AddAzureOpenAIChatCompletion(
endpoint: Config.aoai_endpoint,
deploymentName: Config.aoai_deployment,
apiKey: Config.aoai_apiKey);
// Register the weather plugin so its functions are visible to the model.
builder.Plugins.AddFromType<WeatherPlugin>();
在 Semantic Kernel 裡實現 Function Calling 有兩種方式
// Option 1: manual invocation — the caller decides when the function runs.
var weather = await kernel.Plugins.GetFunction("WeatherPlugin", "GetWeather").InvokeAsync(kernel);
// Option 2: automatic invocation — the model triggers kernel functions on its own.
OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
在手動調用的情況下,我們可以控制何時調用、取得的結果是什麼;但在自動調用的情況下,何時被調用了?取得的結果內容是什麼?有時我們可能需要監看或記錄它,以作為後續改善調整的依據。Semantic Kernel 提供了一個 IAutoFunctionInvocationFilter 介面可以讓開發者實作。
/// <summary>
/// Filter that observes every function invocation triggered automatically by
/// ToolCallBehavior.AutoInvokeKernelFunctions: it logs which function ran and
/// what result it produced, and shows how the result could be rewritten.
/// </summary>
public class AutoFunctionInvocationFilter : IAutoFunctionInvocationFilter
{
/// <summary>Invoked once per auto-invoked function; wraps the actual call via <paramref name="next"/>.</summary>
public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func<AutoFunctionInvocationContext, Task> next)
{
//get function information
Console.WriteLine($"functionName: {context.Function.Name}");
Console.WriteLine($"PluginName: {context.Function.PluginName}");
// Calling next filter in pipeline or function itself.
// By skipping this call, next filters and function won't be invoked, and function call loop will proceed to the next function.
await next(context);
// Example: get function result (Regex.Unescape turns \uXXXX escapes back into readable characters)
Console.WriteLine($"function result: {Regex.Unescape(context.Result.ToString())}");
// Example: override function result value
// context.Result = new FunctionResult(context.Result, "Result from auto function invocation filter");
// Example: terminate the function-calling loop early.
// Terminate defaults to false; set it to TRUE to stop further invocations:
//context.Terminate = true;
}
}
builder.Services.AddSingleton<IAutoFunctionInvocationFilter>(new AutoFunctionInvocationFilter());
這樣便可以追蹤監看在 Auto Function Calling 的情況下,調用了哪些 Function、分別得到了什麼結果,甚至可以在 Filter 裡改寫 Function 原始執行後的結果。
context.Result = new FunctionResult(context.Result, "Result from auto function invocation filter");
/// <summary>
/// Demo entry point: builds a Kernel against Azure OpenAI, registers the
/// weather plugin and an auto-function-invocation filter, then asks a travel
/// question whose answer requires the live weather data from the plugin.
/// </summary>
public async Task Run()
{
// use Azure OpenAI
var builder = Kernel.CreateBuilder()
.AddAzureOpenAIChatCompletion(
endpoint: Config.aoai_endpoint,
deploymentName: Config.aoai_deployment,
apiKey: Config.aoai_apiKey);
builder.Plugins.AddFromType<WeatherPlugin>();
// The filter logs every function the model auto-invokes (see AutoFunctionInvocationFilter).
builder.Services.AddSingleton<IAutoFunctionInvocationFilter>(new AutoFunctionInvocationFilter());
Kernel kernel = builder.Build();
// Let the model invoke kernel functions on its own, without manual dispatch.
OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
// NOTE: raw string literals ("""...""") do NOT process escape sequences, so
// the original "\n" would have appeared literally in the prompt text;
// a real line break is used instead.
var prompt =
"""
The weather is:
{{WeatherPlugin.GetWeather}}
想去高雄旅行,適合穿什麼衣服?
""";
Console.WriteLine(await kernel.InvokePromptAsync(prompt, new(settings)));
}
/// <summary>
/// Observes automatically invoked kernel functions: logs the function and
/// plugin names before execution and the unescaped result afterwards.
/// </summary>
public class AutoFunctionInvocationFilter : IAutoFunctionInvocationFilter
{
public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func<AutoFunctionInvocationContext, Task> next)
{
// Log which function is about to be auto-invoked.
var function = context.Function;
Console.WriteLine($"functionName: {function.Name}");
Console.WriteLine($"PluginName: {function.PluginName}");

// Hand off to the next filter in the pipeline (or the function itself).
// Skipping this call suppresses the invocation and the loop proceeds
// to the next function.
await next(context);

// Log the result, converting \uXXXX escapes back into readable characters.
var readable = Regex.Unescape(context.Result.ToString());
Console.WriteLine($"function result: {readable}");

// Example: replace the function's result before the model sees it.
// context.Result = new FunctionResult(context.Result, "Result from auto function invocation filter");
// Example: set context.Terminate to stop the auto function-calling loop.
}
}
Hello, World GenAI !
functionName: GetWeather
PluginName: WeatherPlugin
function result: {"\u53F0\u5317":"\u60B6\u71B1, 35\u00B0C","\u53F0\u4E2D":"\u6674\u6717, 32\u00B0C","\u9AD8\u96C4":"\u708E\u71B1, 35\u00B0C","\u53F0\u5357":"\u6674\u6717, 34\u00B0C","\u53F0\u6771":"\u9670\u96E8, 28\u00B0C"}
function result: {"台北":"悶熱, 35°C","台中":"晴朗, 32°C","高雄":"炎熱, 35°C","台南":"晴朗, 34°C","台東":"陰雨, 28°C"}
在高雄的天氣是「晴朗,34°C」。這樣的高溫建議您穿著輕便、透氣的衣物,例如:
- 短袖T恤或襯衫
- 輕薄的短褲或裙子
- 遮陽帽和太陽眼鏡以防曬
記得多補充水分,避免在中午時分長時間暴露在陽光下!希望您在高雄有個愉快的旅程!