LLM Tool Calling
Overview

Tool calling refers to the ability of large language models to interface with external tools, enabling them to interact with APIs, fetch real-time data, and perform actions. These tools are functions made accessible to the model. For example, a weather tool might return live temperature data for a city. Cinna’s LLM Gateway supports tool calling in a way that is fully compatible with the OpenAI SDK.
Despite the name, tool calling does not involve the model directly invoking the function. Instead, each tool is described through a schema that includes its name, parameters, and purpose. When a user asks a question requiring tool use, the model stops generating text and returns a structured response with the relevant tool name and parameter values. This allows the external system to run the function and return its result, which the model can then use to complete its answer.
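For a weather question, the model's structured response might look roughly like this (an illustrative sketch following the OpenAI chat completions format; the id value is a placeholder, and the arguments arrive as a JSON-encoded string):

{
  "role": "assistant",
  "content": null,
  "tool_calls": [
    {
      "id": "call_abc123",
      "type": "function",
      "function": {
        "name": "get_weather",
        "arguments": "{\"city\": \"Tokyo\"}"
      }
    }
  ]
}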
Example Usage
The following script integrates a language model with two tools: one for fetching cryptocurrency prices and one for retrieving weather data.
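The script assumes the openai Python package (version 1.0 or later, which provides the client-based API used below) is installed, for example via pip install openai.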
from openai import OpenAI
import json
from typing import Dict, Any

# Use a model that supports function calling
model_id = "hermes-3-llama3.1-8b"

# Configure the client
client = OpenAI(
    api_key="your_user_id#your_api_key",
    base_url="https://llm-gateway.cinna.xyz"
)
def get_coin_price(token: str) -> float:
    # Mock price lookup; a real tool would query a live price API
    print("calling get_coin_price")
    prices = {
        "solana": 150.00,
        "dogecoin": 0.25,
    }
    return prices.get(token.lower(), 0.0)

def get_weather(city: str) -> Dict[str, Any]:
    # Mock weather lookup; a real tool would query a weather API
    print("calling get_weather")
    weathers = {
        "new york": {"temperature": 20, "condition": "Cloudy"},
        "london": {"temperature": 15, "condition": "Rainy"},
        "tokyo": {"temperature": 25, "condition": "Sunny"},
    }
    return weathers.get(city.lower(), {"temperature": 0, "condition": "Unknown"})

def format_price(price: float) -> str:
    return f"${price:.2f}"
# Define available tools
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_coin_price",
            "description": "Get the current price of a specific cryptocurrency in USD",
            "parameters": {
                "type": "object",
                "properties": {
                    "token": {
                        "type": "string",
                        "description": "The name or symbol of the cryptocurrency"
                    }
                },
                "required": ["token"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get the current weather for a specific city",
            "parameters": {
                "type": "object",
                "properties": {
                    "city": {
                        "type": "string",
                        "description": "The name of the city"
                    }
                },
                "required": ["city"]
            }
        }
    }
]
def query_llm_with_tools(prompt: str) -> str:
    messages = [{"role": "user", "content": prompt}]

    # First request: the model decides whether a tool is needed
    response = client.chat.completions.create(
        model=model_id,
        messages=messages,
        temperature=0.01,
        tools=tools,
        tool_choice="auto"
    )

    if response.choices[0].message.tool_calls:
        # The model requested a tool; run the matching local function
        tool_call = response.choices[0].message.tool_calls[0]
        function_name = tool_call.function.name
        function_args = json.loads(tool_call.function.arguments)

        if function_name == "get_coin_price":
            result = get_coin_price(function_args["token"])
            tool_response = format_price(result)
        elif function_name == "get_weather":
            result = get_weather(function_args["city"])
            tool_response = f"Temperature: {result['temperature']}°C, Condition: {result['condition']}"
        else:
            tool_response = "Unknown function"

        # Append the assistant's tool request and the tool result, then ask
        # the model to compose its final answer
        messages.append(response.choices[0].message)
        messages.append({
            "role": "tool",
            "content": tool_response,
            "tool_call_id": tool_call.id
        })
        final_response = client.chat.completions.create(
            model=model_id,
            messages=messages,
            temperature=0.01
        )
        return final_response.choices[0].message.content
    else:
        # No tool needed; return the model's direct answer
        return response.choices[0].message.content
# Example usage
if __name__ == "__main__":
    prompts = [
        "What is the current Solana price?",
        "How is the weather in Tokyo?",
        "Tell me a short story about space travel"
    ]
    for prompt in prompts:
        print(f"User: {prompt}")
        response = query_llm_with_tools(prompt)
        print(f"AI: {response}\n")