Overview
Function calling allows models to:
- Determine when to call a function
- Extract parameters from user input
- Return structured function calls
- Integrate with external systems
Smart Integration
Models decide when and how to call your functions
Parameter Extraction
Automatically extract arguments from natural language
Type Safety
Ensure correct parameter types and validation
Multi-step Workflows
Chain multiple function calls together
Basic Example
Copy
import json  # needed to decode tool-call arguments below (missing in the original)
import os

from openai import OpenAI

# Client configured for the OpenAI-compatible CheapestInference endpoint.
client = OpenAI(
    api_key=os.environ["CHEAPESTINFERENCE_API_KEY"],
    base_url="https://api.cheapestinference.ai/v1",
)

# Define function schema: one tool, described with JSON Schema parameters.
# Only "location" is required; "unit" is constrained to an enum.
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get the current weather for a location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "City name, e.g. San Francisco"
                    },
                    "unit": {
                        "type": "string",
                        "enum": ["celsius", "fahrenheit"],
                        "description": "Temperature unit"
                    }
                },
                "required": ["location"]
            }
        }
    }
]

# Make request; tool_choice="auto" lets the model decide whether to call.
response = client.chat.completions.create(
    model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
    messages=[{"role": "user", "content": "What's the weather in Paris?"}],
    tools=tools,
    tool_choice="auto"
)

# Check if model wants to call function.
# The arguments arrive as a JSON string and must be decoded.
if response.choices[0].message.tool_calls:
    tool_call = response.choices[0].message.tool_calls[0]
    function_name = tool_call.function.name
    arguments = json.loads(tool_call.function.arguments)

    print(f"Function: {function_name}")
    print(f"Arguments: {arguments}")
    # Output: Function: get_weather
    # Output: Arguments: {'location': 'Paris', 'unit': 'celsius'}
Complete Workflow
Here's a full example with function execution:
import json
def get_weather(location: str, unit: str = "celsius") -> dict:
    """Stand-in weather lookup returning a fixed sunny 22-degree reading.

    Replace the hard-coded payload with a real weather-API call.
    """
    # Your API call here
    report = {
        "location": location,
        "temperature": 22,
        "unit": unit,
        "condition": "sunny",
    }
    return report
# Step 1: Initial request
messages = [{"role": "user", "content": "What's the weather in Tokyo?"}]

response = client.chat.completions.create(
    model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
    messages=messages,
    tools=tools,
    tool_choice="auto"
)

# Step 2: Check for function call
if response.choices[0].message.tool_calls:
    # Add assistant's response to messages so the model sees its own call.
    messages.append(response.choices[0].message)

    # Execute each function call the model requested.
    for tool_call in response.choices[0].message.tool_calls:
        function_name = tool_call.function.name
        arguments = json.loads(tool_call.function.arguments)

        # Call the actual function
        if function_name == "get_weather":
            result = get_weather(**arguments)

        # Add function result to messages, keyed back by tool_call_id.
        messages.append({
            "role": "tool",
            "tool_call_id": tool_call.id,
            "name": function_name,
            "content": json.dumps(result)
        })

# Step 3: Get final response with function results
final_response = client.chat.completions.create(
    model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
    messages=messages
)

print(final_response.choices[0].message.content)
# Output: "The weather in Tokyo is sunny with a temperature of 22°C."
Multiple Functions
Define multiple tools for complex workflows:
# Catalog tools: search, per-product detail lookup, and stock check.
tools = [
    {
        "type": "function",
        "function": {
            "name": "search_products",
            "description": "Search for products in the catalog",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {"type": "string"},
                    "category": {"type": "string"},
                    "max_price": {"type": "number"}
                },
                "required": ["query"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "get_product_details",
            "description": "Get detailed information about a specific product",
            "parameters": {
                "type": "object",
                "properties": {
                    "product_id": {"type": "string"}
                },
                "required": ["product_id"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "check_inventory",
            "description": "Check product availability",
            "parameters": {
                "type": "object",
                "properties": {
                    "product_id": {"type": "string"},
                    "store_location": {"type": "string"}
                },
                "required": ["product_id"]
            }
        }
    }
]

# The model picks whichever tool(s) fit the user's request.
response = client.chat.completions.create(
    model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
    messages=[{"role": "user", "content": "Find wireless headphones under $100"}],
    tools=tools
)
Tool Choice Options
Control when functions are called.
Auto (Default)
Model decides whether to call functions:Copy
# tool_choice="auto" (the default): the model decides between a plain
# text reply and one or more tool calls.
response = client.chat.completions.create(
model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
messages=[{"role": "user", "content": "Hello!"}],
tools=tools,
tool_choice="auto" # Model may or may not call functions
)
Required
Force model to call a function:Copy
# tool_choice="required": the response must contain at least one tool
# call; the model cannot answer in plain text.
response = client.chat.completions.create(
model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
messages=[{"role": "user", "content": "Get weather"}],
tools=tools,
tool_choice="required" # Must call at least one function
)
Specific Function
Force a specific function:Copy
# Naming a specific function forces the model to call exactly that tool.
response = client.chat.completions.create(
model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
messages=[{"role": "user", "content": "Paris"}],
tools=tools,
tool_choice={"type": "function", "function": {"name": "get_weather"}}
)
None
Disable function calling:Copy
# tool_choice="none": the tools stay in the prompt but are never invoked.
response = client.chat.completions.create(
model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
messages=[{"role": "user", "content": "What's the weather?"}],
tools=tools,
tool_choice="none" # Never call functions
)
Parallel Function Calling
Models can call multiple functions simultaneously:Copy
# NOTE: the "..." placeholders stand for the full schema dicts shown in
# earlier examples; this snippet is illustrative and not runnable as-is.
tools = [
{"type": "function", "function": {"name": "get_weather", ...}},
{"type": "function", "function": {"name": "get_time", ...}},
{"type": "function", "function": {"name": "get_news", ...}}
]
# parallel_tool_calls=True allows one response to carry several tool calls.
response = client.chat.completions.create(
model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
messages=[{"role": "user", "content": "Get weather, time, and news for NYC"}],
tools=tools,
parallel_tool_calls=True
)
# Model may return multiple tool calls
for tool_call in response.choices[0].message.tool_calls:
print(f"Calling {tool_call.function.name}")
Common Use Cases
Database Queries
Copy
# Schema for a database-query tool: only "table" is required; "filters"
# (an open-ended object) and "limit" are optional refinements.
tools = [
{
"type": "function",
"function": {
"name": "query_database",
"description": "Query the customer database",
"parameters": {
"type": "object",
"properties": {
"table": {"type": "string"},
"filters": {"type": "object"},
"limit": {"type": "integer"}
},
"required": ["table"]
}
}
}
]
def query_database(table, filters=None, limit=10):
    """Query the customer database (stub implementation).

    Args:
        table: Name of the table to query.
        filters: Optional mapping of column -> value constraints.
        limit: Maximum number of rows to return.

    Returns:
        A list of matching rows. Empty placeholder until a real database
        driver is wired in.
    """
    # Execute database query here. The original returned an undefined
    # name (`results`), which raised NameError on every call.
    results = []  # placeholder for rows produced by the real query
    return results
API Integration
Copy
# Schema for an email-sending tool; all three fields are required, so the
# model must extract recipient, subject, and body before calling it.
tools = [
{
"type": "function",
"function": {
"name": "send_email",
"description": "Send an email",
"parameters": {
"type": "object",
"properties": {
"to": {"type": "string"},
"subject": {"type": "string"},
"body": {"type": "string"}
},
"required": ["to", "subject", "body"]
}
}
}
]
Calculations
Copy
# Schema for a calculator tool: the model passes the whole expression as
# one string, which the implementation is responsible for evaluating safely.
tools = [
{
"type": "function",
"function": {
"name": "calculate",
"description": "Perform mathematical calculations",
"parameters": {
"type": "object",
"properties": {
"expression": {
"type": "string",
"description": "Math expression, e.g. '2 + 2 * 3'"
}
},
"required": ["expression"]
}
}
}
]
def calculate(expression: str) -> float:
    """Safely evaluate a basic arithmetic expression.

    Only numeric literals and the operators + - * / // % ** (plus unary
    +/-) are accepted, so model-supplied input cannot execute arbitrary
    code the way a bare eval() would.

    Args:
        expression: Math expression, e.g. "2 + 2 * 3".

    Returns:
        The numeric result as a float.

    Raises:
        ValueError: If the expression contains anything but arithmetic.
        SyntaxError: If the expression is not valid expression syntax.
    """
    import ast
    import operator

    # Map AST operator node types to their arithmetic implementations.
    ops = {
        ast.Add: operator.add,
        ast.Sub: operator.sub,
        ast.Mult: operator.mul,
        ast.Div: operator.truediv,
        ast.FloorDiv: operator.floordiv,
        ast.Mod: operator.mod,
        ast.Pow: operator.pow,
        ast.USub: operator.neg,
        ast.UAdd: operator.pos,
    }

    def evaluate(node):
        # Recursively reduce the tree, rejecting any node type that is
        # not plain arithmetic (names, calls, attributes, subscripts...).
        if isinstance(node, ast.Expression):
            return evaluate(node.body)
        if isinstance(node, ast.Constant) and isinstance(node.value, (int, float)):
            return node.value
        if isinstance(node, ast.BinOp) and type(node.op) in ops:
            return ops[type(node.op)](evaluate(node.left), evaluate(node.right))
        if isinstance(node, ast.UnaryOp) and type(node.op) in ops:
            return ops[type(node.op)](evaluate(node.operand))
        raise ValueError(f"Unsupported element in expression: {type(node).__name__}")

    return float(evaluate(ast.parse(expression, mode="eval")))
Best Practices
Write clear descriptions
- Explain what the function does
- Describe when to use it
- Provide parameter examples
- Be specific and concise
Define precise schemas
- Use appropriate types
- Set required fields
- Add enums for categorical values
- Include parameter descriptions
Handle errors gracefully
- Validate function arguments
- Return error messages clearly
- Allow model to retry with corrections
- Log failures for debugging
Optimize performance
- Cache function results when possible
- Use parallel calls for independent operations
- Set reasonable timeouts
- Batch similar operations
Advanced Patterns
Conversational Agent
Copy
def run_agent(user_message):
    """Drive a tool-use loop until the model produces a plain text reply.

    Repeatedly sends the growing conversation to the model; each batch of
    tool calls is executed and fed back until the model answers in text.
    """
    conversation = [{"role": "user", "content": user_message}]

    while True:
        response = client.chat.completions.create(
            model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
            messages=conversation,
            tools=tools,
            tool_choice="auto"
        )
        reply = response.choices[0].message
        conversation.append(reply)

        # A reply without tool calls is the final answer.
        if not reply.tool_calls:
            return reply.content

        # Run each requested tool and append its result for the next turn.
        for call in reply.tool_calls:
            outcome = execute_function(call)
            conversation.append({
                "role": "tool",
                "tool_call_id": call.id,
                "name": call.function.name,
                "content": json.dumps(outcome)
            })
Chain of Thought with Tools
Copy
# A system prompt nudges the model to reason before (and between) tool calls.
response = client.chat.completions.create(
model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
messages=[
{
"role": "system",
"content": "Think step by step and use tools when needed"
},
{
"role": "user",
"content": "Plan a trip to Paris for 3 days"
}
],
tools=tools
)
Error Recovery
Copy
def execute_with_retry(messages, tools, max_retries=3):
    """Call the model and run its tool calls, retrying on execution errors.

    Each failed tool execution is reported back to the model so it can
    correct its arguments on the next attempt. Returns either the tool
    results or the model's plain-text reply.
    """
    for _ in range(max_retries):
        response = client.chat.completions.create(
            model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
            messages=messages,
            tools=tools
        )
        message = response.choices[0].message

        # No tool calls: the model answered directly, so we are done.
        if not message.tool_calls:
            return message.content

        try:
            return execute_tools(message.tool_calls)
        except Exception as e:
            # Surface the failure to the model and retry.
            messages.append({
                "role": "system",
                "content": f"Error: {str(e)}. Please try again."
            })

    raise Exception("Max retries exceeded")
Security Considerations
Always validate and sanitize function parameters before execution. Never execute arbitrary code from model outputs.
Copy
def safe_execute_function(function_name, arguments):
    """Validate and execute a model-requested function with a time limit.

    Args:
        function_name: Name of the function the model asked to call.
        arguments: Decoded keyword arguments for that function.

    Returns:
        The function's result, or {"error": ...} if execution failed or
        exceeded the time limit.

    Raises:
        ValueError: If the function is not whitelisted or the arguments
            are not a dictionary.
    """
    # Whitelist allowed functions
    allowed_functions = ["get_weather", "search_products"]
    if function_name not in allowed_functions:
        raise ValueError(f"Function {function_name} not allowed")

    # Validate arguments
    if not isinstance(arguments, dict):
        raise ValueError("Arguments must be a dictionary")

    # Execute with a 5-second limit. The original used an undefined
    # `timeout(...)` context manager (NameError at runtime); a futures
    # result timeout is a portable stdlib replacement. Note the worker
    # thread is not killed on timeout -- only the wait is bounded.
    from concurrent.futures import ThreadPoolExecutor

    pool = ThreadPoolExecutor(max_workers=1)
    try:
        future = pool.submit(globals()[function_name], **arguments)
        return future.result(timeout=5)  # 5 second timeout
    except Exception as e:
        return {"error": str(e)}
    finally:
        pool.shutdown(wait=False)