Use this file to discover all available pages before exploring further.
The Edgee Rust SDK supports OpenAI-compatible function calling (tools), allowing models to request execution of functions you define. This enables models to interact with external APIs, databases, and your application logic.
use serde_json::json;

// Force the model to call a specific function on this request
let forced_choice = json!({
    "type": "function",
    "function": { "name": "get_weather" }
});

let input = InputObject::new(vec![Message::user("What is the weather?")])
    .with_tools(vec![Tool::function(function)])
    .with_tool_choice(forced_choice);

let response = client.send("gpt-5.2", input).await?;
// Model will always call get_weather
Example - Disable Tool Calls:
use serde_json::json;

// "none" disables tool calling entirely for this request
let tool_choice = json!("none");

let input = InputObject::new(vec![Message::user("What is the weather?")])
    .with_tools(vec![Tool::function(function)])
    .with_tool_choice(tool_choice);

let response = client.send("gpt-5.2", input).await?;
// Model will not call tools, even though they're available
use serde_json;if let Some(tool_calls) = response.tool_calls() { let tool_call = &tool_calls[0]; let args: serde_json::Value = serde_json::from_str(&tool_call.function.arguments)?; // args is now a serde_json::Value println!("Location: {}", args["location"]);}
Here’s a complete end-to-end example with error handling:
use edgee::{Edgee, Message, InputObject, Tool, FunctionDefinition, JsonSchema};use std::collections::HashMap;use serde_json;#[tokio::main]async fn main() -> Result<(), Box<dyn std::error::Error>> { let client = Edgee::from_env()?; // Define the weather function let function = FunctionDefinition { name: "get_weather".to_string(), description: Some("Get the current weather for a location".to_string()), parameters: JsonSchema { schema_type: "object".to_string(), properties: Some({ let mut props = HashMap::new(); props.insert("location".to_string(), serde_json::json!({ "type": "string", "description": "The city name" })); props.insert("unit".to_string(), serde_json::json!({ "type": "string", "enum": ["celsius", "fahrenheit"], "description": "Temperature unit" })); props }), required: Some(vec!["location".to_string()]), description: None, }, }; // Step 1: Initial request with tools let input = InputObject::new(vec![ Message::user("What is the weather in Paris and Tokyo?") ]) .with_tools(vec![Tool::function(function)]); let response1 = client.send("gpt-5.2", input).await?; // Step 2: Execute all tool calls let mut messages = vec![ Message::user("What is the weather in Paris and Tokyo?") ]; // Add assistant's message if let Some(message) = response1.message() { messages.push(message.clone()); } if let Some(tool_calls) = response1.tool_calls() { for tool_call in tool_calls { let args: serde_json::Value = serde_json::from_str(&tool_call.function.arguments)?; let result = get_weather( args["location"].as_str().unwrap(), args.get("unit").and_then(|v| v.as_str()) ); messages.push(Message::tool( tool_call.id.clone(), serde_json::to_string(&result)? 
)); } } // Step 3: Send results back let function2 = FunctionDefinition { name: "get_weather".to_string(), description: Some("Get the current weather for a location".to_string()), parameters: JsonSchema { schema_type: "object".to_string(), properties: Some({ let mut props = HashMap::new(); props.insert("location".to_string(), serde_json::json!({ "type": "string", "description": "The city name" })); props.insert("unit".to_string(), serde_json::json!({ "type": "string", "enum": ["celsius", "fahrenheit"] })); props }), required: Some(vec!["location".to_string()]), description: None, }, }; let input2 = InputObject::new(messages) .with_tools(vec![Tool::function(function2)]); let response2 = client.send("gpt-5.2", input2).await?; println!("{}", response2.text().unwrap_or("")); Ok(())}fn get_weather(location: &str, unit: Option<&str>) -> serde_json::Value { serde_json::json!({ "location": location, "temperature": 15, "unit": unit.unwrap_or("celsius"), "condition": "sunny" })}
Example - Multiple Tools:You can provide multiple tools and let the model choose which ones to call:
// Register both functions; the model decides which (if any) to call
let tools = vec![
    Tool::function(get_weather_function),
    Tool::function(send_email_function),
];

let input = InputObject::new(vec![
    Message::user("Get the weather in Paris and send an email about it")
])
.with_tools(tools);

let response = client.send("gpt-5.2", input).await?;
use tokio_stream::StreamExt;let input = InputObject::new(vec![ Message::user("What is the weather in Paris?")]).with_tools(vec![Tool::function(function)]);let mut stream = client.stream("gpt-5.2", input).await?;while let Some(result) = stream.next().await { match result { Ok(chunk) => { if let Some(text) = chunk.text() { print!("{}", text); } // Check for tool calls in the delta if let Some(choice) = chunk.choices.first() { if let Some(tool_calls) = &choice.delta.tool_calls { println!("\nTool calls detected: {:?}", tool_calls); } } if chunk.finish_reason() == Some("tool_calls") { println!("\nModel requested tool calls"); } } Err(e) => eprintln!("Stream error: {}", e), }}
Models can request multiple tool calls in a single response. Execute them concurrently when possible:
use futures::future;if let Some(tool_calls) = response.tool_calls() { // Execute all tool calls in parallel let results: Vec<_> = future::join_all( tool_calls.iter().map(|tool_call| { let args: serde_json::Value = serde_json::from_str(&tool_call.function.arguments)?; let result = execute_function(&tool_call.function.name, &args)?; Ok((tool_call.id.clone(), result)) }) ).await; // Add all tool results to messages for (tool_call_id, result) in results { messages.push(Message::tool( tool_call_id, serde_json::to_string(&result)? )); }}
Example - Handling Multiple Tool Calls:
// Step 2: Execute all tool callslet mut messages = vec![ Message::user("What is the weather in Paris and Tokyo?"),];if let Some(message) = response1.message() { messages.push(message.clone());}if let Some(tool_calls) = response1.tool_calls() { for tool_call in tool_calls { let args: serde_json::Value = serde_json::from_str(&tool_call.function.arguments)?; let result = get_weather( args["location"].as_str().unwrap(), args.get("unit").and_then(|v| v.as_str()) ); messages.push(Message::tool( tool_call.id.clone(), serde_json::to_string(&result)? )); }}
Include tools in follow-up requests so the model can call them again if needed:
let input2 = InputObject::new(messages_with_tool_results) .with_tools(vec![ // Keep the same tools available Tool::function(function) ]);let response2 = client.send("gpt-5.2", input2).await?;
Example - Checking for Tool Calls:
if let Some(tool_calls) = response.tool_calls() { // Model wants to call a function for tool_call in tool_calls { println!("Function: {}", tool_call.function.name); println!("Arguments: {}", tool_call.function.arguments); }}
Example - Executing Functions and Sending Results:
// Execute the functionif let Some(tool_calls) = response.tool_calls() { let tool_call = &tool_calls[0]; let args: serde_json::Value = serde_json::from_str(&tool_call.function.arguments)?; let weather_result = get_weather( args["location"].as_str().unwrap(), args.get("unit").and_then(|v| v.as_str()) ); // Send the result back let mut messages = vec![ Message::user("What is the weather in Paris?"), ]; // Include assistant's message with tool_calls if let Some(message) = response.message() { messages.push(message.clone()); } messages.push(Message::tool( tool_call.id.clone(), serde_json::to_string(&weather_result)? )); let input2 = InputObject::new(messages) .with_tools(vec![Tool::function(function)]); let response2 = client.send("gpt-5.2", input2).await?; println!("{}", response2.text().unwrap_or("")); // "The weather in Paris is 15°C and sunny."}