Function calling greatly extends what a large language model can do: it can improve reasoning or drive external actions, covering tool-use scenarios such as information retrieval, database operations, knowledge-graph search and reasoning, operating-system commands, and triggering external operations. This makes it very useful when designing bots or agents.
Figure: the function-calling workflow
As the diagram shows, in the second step you can plug in specific interface functions (that is, external APIs) to feed the model extra data and extend its abilities. For well-defined workflows such as checking the weather or looking up train schedules, the model can then handle the task end to end.
Below I walk through the example from the OpenAI documentation; the code and the results I got from running it are included for reference.
import OpenAI from "openai";
import dotenv from "dotenv";
dotenv.config();
const apiKey = process.env.API_KEY;
const openai = new OpenAI({
  apiKey: apiKey,
});
// Example dummy function hard coded to return the same weather
// In production, this could be your backend API or an external API
function getCurrentWeather(location, unit = "fahrenheit") {
if (location.toLowerCase().includes("tokyo")) {
return JSON.stringify({ location: "Tokyo", temperature: "10", unit: "celsius" });
} else if (location.toLowerCase().includes("san francisco")) {
return JSON.stringify({ location: "San Francisco", temperature: "72", unit: "fahrenheit" });
} else if (location.toLowerCase().includes("paris")) {
return JSON.stringify({ location: "Paris", temperature: "22", unit: "fahrenheit" });
} else {
return JSON.stringify({ location, temperature: "unknown" });
}
}
async function runConversation() {
// Step 1: send the conversation and available functions to the model
const messages = [
{ role: "user", content: "What's the weather like in San Francisco, Tokyo, and Paris?" },
];
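// Each tool is described with a JSON Schema "parameters" block; the model uses the
// name, description, and required fields to decide when and how to call it.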
const tools = [
{
type: "function",
function: {
name: "get_current_weather",
description: "Get the current weather in a given location",
parameters: {
type: "object",
properties: {
location: {
type: "string",
description: "The city and state, e.g. San Francisco, CA",
},
unit: { type: "string", enum: ["celsius", "fahrenheit"] },
},
required: ["location"],
},
},
},
];
const response = await openai.chat.completions.create({
model: "gpt-3.5-turbo-0125",
messages: messages,
tools: tools,
tool_choice: "auto", // auto is default, but we'll be explicit
});
const responseMessage = response.choices[0].message;
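// In the logged response below, finish_reason is 'tool_calls', which means the model
// decided to call one or more of the provided tools instead of replying directly.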
console.log("response", response);
/*
{
id: 'chatcmpl-90oyNdEDQSxsvpMpwxZ3KeYShovtH',
object: 'chat.completion',
created: 1709982967,
model: 'gpt-3.5-turbo-0125',
choices: [
{
index: 0,
message: [Object],
logprobs: null,
finish_reason: 'tool_calls'
}
],
usage: { prompt_tokens: 88, completion_tokens: 77, total_tokens: 165 },
system_fingerprint: 'fp_4f0b692a78'
}
*/
console.log("responseMessage", responseMessage);
/*
{
role: 'assistant',
content: null,
tool_calls: [
{
id: 'call_0QMDlATqYjBjgdySQVCL0PI4',
type: 'function',
function: [Object]
},
{
id: 'call_e8ceipN3i2OUiF3Z38qNEEs5',
type: 'function',
function: [Object]
},
{
id: 'call_cHlSgOIcsOXwpiOpzHIev6vF',
type: 'function',
function: [Object]
}
]
}
*/
// console.log("function", responseMessage.tool_calls[0].function);
/*
{
name: 'get_current_weather',
arguments: '{"location": "San Francisco", "unit": "celsius"}'
}
*/
// Step 2: check if the model wanted to call a function
const toolCalls = responseMessage.tool_calls;
if (responseMessage.tool_calls) {
// call the function
// Note: the JSON response may not always be valid; be sure to handle errors
const availableFunctions = {
get_current_weather: getCurrentWeather,
}; // only one function in this example, but you can have multiple
messages.push(responseMessage); // extend conversation with assistant's reply
console.log("messages", messages);
/*
[{
role: 'user',
content: "What's the weather like in San Francisco, Tokyo, and Paris?"
},
{
role: 'assistant',
content: null,
tool_calls: [ [Object], [Object], [Object] ]
}]
*/
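// The model issued one tool call per city (parallel function calling), so we run the
// matching local function for each call and append one "tool" message per tool_call_id,
// which lets the model line up each result with the request it made.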
for (const toolCall of toolCalls) {
const functionName = toolCall.function.name;
const functionToCall = availableFunctions[functionName];
const functionArgs = JSON.parse(toolCall.function.arguments);
const functionResponse = functionToCall(
functionArgs.location,
functionArgs.unit
);
messages.push({
tool_call_id: toolCall.id,
role: "tool",
name: functionName,
content: functionResponse,
}); // extend conversation with function response
}
console.log("messages with tool results", messages);
/*
[{
role: 'user',
content: "What's the weather like in San Francisco, Tokyo, and Paris?"
},
{
role: 'assistant',
content: null,
tool_calls: [ [Object], [Object], [Object] ]
},
{
tool_call_id: 'call_0QMDlATqYjBjgdySQVCL0PI4',
role: 'tool',
name: 'get_current_weather',
content: '{"location":"San Francisco","temperature":"72","unit":"fahrenheit"}'
},
{
tool_call_id: 'call_e8ceipN3i2OUiF3Z38qNEEs5',
role: 'tool',
name: 'get_current_weather',
content: '{"location":"Tokyo","temperature":"10","unit":"celsius"}'
},
{
tool_call_id: 'call_cHlSgOIcsOXwpiOpzHIev6vF',
role: 'tool',
name: 'get_current_weather',
content: '{"location":"Paris","temperature":"22","unit":"fahrenheit"}'
}]
*/
// Step 3: get a new response from the model where it can see the function response
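// No tools are passed on this call; the model just reads the tool results that are
// now in the message history and writes a natural-language answer.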
const secondResponse = await openai.chat.completions.create({
model: "gpt-3.5-turbo-0125",
messages: messages,
});
console.log("secondResponse", secondResponse);
return secondResponse.choices;
}
}
runConversation().then(console.log).catch(console.error);
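The comment in Step 2 points out that toolCall.function.arguments is model-generated JSON and may not always parse cleanly. Here is a minimal sketch of a more defensive version of the dispatch loop; the try/catch and the unknown-function check are my additions, not part of the OpenAI example:

// Defensive version of the tool-call loop (sketch): handles invalid JSON arguments
// and tool names the model might request that we have not registered.
for (const toolCall of toolCalls) {
  const functionName = toolCall.function.name;
  const functionToCall = availableFunctions[functionName];

  let functionResponse;
  if (!functionToCall) {
    // The model asked for a tool we did not define
    functionResponse = JSON.stringify({ error: `unknown function: ${functionName}` });
  } else {
    try {
      const functionArgs = JSON.parse(toolCall.function.arguments);
      functionResponse = functionToCall(functionArgs.location, functionArgs.unit);
    } catch (err) {
      // Arguments were not valid JSON; report the problem back to the model
      functionResponse = JSON.stringify({ error: `invalid arguments: ${err.message}` });
    }
  }

  messages.push({
    tool_call_id: toolCall.id,
    role: "tool",
    name: functionName,
    content: functionResponse,
  });
}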
After these three steps you get the final result, and so far the results look quite good. Function calling actually has something in common with plugging in an external vector database: both supply the large language model with extra information so it can produce more accurate answers (a rough sketch of that retrieval-style alternative is shown below). If you are building a bot or an agent, give it a try; it will extend what the large language model can do.
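To make the comparison concrete, here is a rough sketch of the retrieval-style alternative: instead of exposing a callable tool, you look up relevant text yourself and put it into the prompt. The searchVectorStore helper is hypothetical and stands in for whatever vector database client you use:

// Sketch of the retrieval-augmented alternative: fetch context up front and place it
// in the prompt, instead of letting the model call a function.
// `searchVectorStore` is a hypothetical helper for your own vector database.
async function answerWithRetrievedContext(openai, searchVectorStore, question) {
  const passages = await searchVectorStore(question, { topK: 3 }); // e.g. [{ text: "..." }, ...]

  const messages = [
    {
      role: "system",
      content:
        "Answer using the provided context.\n\nContext:\n" +
        passages.map((p) => p.text).join("\n---\n"),
    },
    { role: "user", content: question },
  ];

  const response = await openai.chat.completions.create({
    model: "gpt-3.5-turbo-0125",
    messages: messages,
  });
  return response.choices[0].message.content;
}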