// Example: tool_calling/tool_calling.rs

#![allow(clippy::uninlined_format_args)]
//! Modern tool/function calling example with streaming support.
//!
//! This example demonstrates:
//! - Function tool definition with parameters
//! - Tool calling in chat completions
//! - Handling tool responses
//! - Streaming with tool calls
//! - Error handling for tool execution
//!
//! Run with: `cargo run --example tool_calling`
use openai_ergonomic::{
    builders::chat::tool_function,
    responses::{chat::ToolCallExt, ToolChoiceHelper},
    Client, Result,
};
// Note: Complex message types are commented out for simplification
// use openai_client_base::models::{
//     ChatCompletionRequestAssistantMessage, ChatCompletionRequestMessage,
//     ChatCompletionRequestToolMessage, ChatCompletionRequestUserMessage,
// };
use serde::{Deserialize, Serialize};
use serde_json::json;
25
/// Arguments for the `get_weather` tool, deserialized from the JSON
/// argument string the model returns in a tool call.
#[derive(Debug, Serialize, Deserialize)]
struct WeatherParams {
    // City and state, e.g. "San Francisco, CA" (required by the schema).
    location: String,
    // Temperature unit; `None` falls back to "fahrenheit" in
    // `execute_weather_function`.
    unit: Option<String>,
}
31
/// Simulated weather API result; serialized to JSON and returned to the
/// model as the tool's output.
#[derive(Debug, Serialize)]
struct WeatherResponse {
    // Hard-coded to 72 in this demo regardless of unit.
    temperature: i32,
    unit: String,
    description: String,
}
38
39fn get_weather_tool() -> openai_client_base::models::ChatCompletionTool {
40    tool_function(
41        "get_weather",
42        "Get the current weather in a given location",
43        json!({
44            "type": "object",
45            "properties": {
46                "location": {
47                    "type": "string",
48                    "description": "The city and state, e.g. San Francisco, CA"
49                },
50                "unit": {
51                    "type": "string",
52                    "enum": ["celsius", "fahrenheit"],
53                    "description": "The temperature unit to use"
54                }
55            },
56            "required": ["location"]
57        }),
58    )
59}
60
61fn get_time_tool() -> openai_client_base::models::ChatCompletionTool {
62    tool_function(
63        "get_current_time",
64        "Get the current time in a specific timezone",
65        json!({
66            "type": "object",
67            "properties": {
68                "timezone": {
69                    "type": "string",
70                    "description": "The timezone, e.g. America/New_York"
71                }
72            },
73            "required": ["timezone"]
74        }),
75    )
76}
77
78fn execute_weather_function(params: WeatherParams) -> Result<String> {
79    // Simulated weather API call
80    let response = WeatherResponse {
81        temperature: 72,
82        unit: params.unit.unwrap_or_else(|| "fahrenheit".to_string()),
83        description: format!("Sunny in {}", params.location),
84    };
85
86    Ok(serde_json::to_string(&response)?)
87}
88
/// Simulates a time lookup, returning a fixed "2:30 PM" for any timezone.
fn execute_time_function(timezone: &str) -> String {
    // Build the canned reply without `format!`, piece by piece.
    let mut reply = String::from("Current time in ");
    reply.push_str(timezone);
    reply.push_str(": 2:30 PM");
    reply
}
93
94#[tokio::main]
95async fn main() -> Result<()> {
96    // Initialize client from environment
97    let client = Client::from_env()?.build();
98
99    println!("=== Tool Calling Example ===\n");
100
101    // Example 1: Simple tool call
102    println!("1. Simple Tool Call:");
103    simple_tool_call(&client).await?;
104
105    // Example 2: Multiple tools
106    println!("\n2. Multiple Tools:");
107    multiple_tools(&client).await?;
108
109    // Example 3: Tool choice control
110    println!("\n3. Tool Choice Control:");
111    tool_choice_control(&client).await?;
112
113    // Example 4: Conversation with tool calls
114    println!("\n4. Conversation with Tool Calls:");
115    conversation_with_tools(&client).await?;
116
117    // Example 5: Streaming with tools (simplified)
118    println!("\n5. Streaming with Tools (Simplified):");
119    streaming_with_tools(&client);
120
121    // Example 6: Parallel tool calls (simplified)
122    println!("\n6. Parallel Tool Calls (Simplified):");
123    parallel_tool_calls(&client).await?;
124
125    Ok(())
126}
127
128async fn simple_tool_call(client: &Client) -> Result<()> {
129    let builder = client
130        .chat()
131        .user("What's the weather like in San Francisco?")
132        .tools(vec![get_weather_tool()]);
133    let response = client.send_chat(builder).await?;
134
135    // Check for tool calls
136    let tool_calls = response.tool_calls();
137    if !tool_calls.is_empty() {
138        for tool_call in tool_calls {
139            println!("Tool called: {}", tool_call.function_name());
140            println!("Arguments: {}", tool_call.function_arguments());
141
142            // Execute the function
143            let params: WeatherParams = serde_json::from_str(tool_call.function_arguments())?;
144            let result = execute_weather_function(params)?;
145            println!("Function result: {}", result);
146        }
147    }
148
149    Ok(())
150}
151
152async fn multiple_tools(client: &Client) -> Result<()> {
153    let builder = client
154        .chat()
155        .user("What's the weather in NYC and what time is it there?")
156        .tools(vec![get_weather_tool(), get_time_tool()]);
157    let response = client.send_chat(builder).await?;
158
159    for tool_call in response.tool_calls() {
160        match tool_call.function_name() {
161            "get_weather" => {
162                let params: WeatherParams = serde_json::from_str(tool_call.function_arguments())?;
163                let result = execute_weather_function(params)?;
164                println!("Weather result: {}", result);
165            }
166            "get_current_time" => {
167                let params: serde_json::Value =
168                    serde_json::from_str(tool_call.function_arguments())?;
169                if let Some(timezone) = params["timezone"].as_str() {
170                    let result = execute_time_function(timezone);
171                    println!("Time result: {}", result);
172                }
173            }
174            _ => println!("Unknown tool: {}", tool_call.function_name()),
175        }
176    }
177
178    Ok(())
179}
180
181async fn tool_choice_control(client: &Client) -> Result<()> {
182    // Force specific tool
183    println!("Forcing weather tool:");
184    let builder = client
185        .chat()
186        .user("Tell me about Paris")
187        .tools(vec![get_weather_tool(), get_time_tool()])
188        .tool_choice(ToolChoiceHelper::specific("get_weather"));
189    let response = client.send_chat(builder).await?;
190
191    for tool_call in response.tool_calls() {
192        println!("Forced tool: {}", tool_call.function_name());
193    }
194
195    // Disable tools
196    println!("\nDisabling tools:");
197    let builder = client
198        .chat()
199        .user("What's the weather?")
200        .tools(vec![get_weather_tool()])
201        .tool_choice(ToolChoiceHelper::none());
202    let response = client.send_chat(builder).await?;
203
204    if let Some(content) = response.content() {
205        println!("Response without tools: {}", content);
206    }
207
208    Ok(())
209}
210
/// Runs a complete multi-turn tool-calling loop: model requests a tool,
/// we execute it locally, append the assistant turn and tool result to
/// the message history, and send a follow-up request for the final
/// answer.
async fn conversation_with_tools(client: &Client) -> Result<()> {
    // This example demonstrates proper multi-turn tool calling with full message history

    println!("=== Conversation with Tools (Full Implementation) ===");

    // Initialize the conversation; `builder` accumulates the message
    // history across the whole exchange.
    let mut builder = client
        .chat()
        .user("What's the weather in Tokyo?")
        .tools(vec![get_weather_tool()]);

    // First request - the model will call the tool.
    // Cloned so `builder` can keep growing after this send.
    let response = client.send_chat(builder.clone()).await?;

    // Check for tool calls
    let tool_calls = response.tool_calls();
    if !tool_calls.is_empty() {
        println!("Step 1: Model requests tool call");
        for tool_call in &tool_calls {
            println!("  Tool: {}", tool_call.function_name());
            println!("  Args: {}", tool_call.function_arguments());
        }

        // IMPORTANT: Add the assistant's response (with tool calls) to the history.
        // This is the key step for maintaining proper conversation context!
        builder = builder.assistant_with_tool_calls(
            response.content().unwrap_or(""),
            tool_calls.iter().map(|tc| (*tc).clone()).collect(),
        );

        // Execute the tools and add results
        println!("\nStep 2: Execute tools and add results to conversation");
        for tool_call in tool_calls {
            let params: WeatherParams = serde_json::from_str(tool_call.function_arguments())?;
            let result = execute_weather_function(params)?;
            println!("  Tool result: {}", result);

            // Add the tool result to the conversation history, keyed by
            // the tool call id so the model can match result to request.
            builder = builder.tool(tool_call.id(), result);
        }

        // Send the follow-up request with tool results
        // NOTE(review): tools are re-attached here — presumably the
        // builder does not carry them across sends; confirm against the
        // builder API.
        println!("\nStep 3: Send follow-up request with tool results");
        let final_response = client
            .send_chat(builder.tools(vec![get_weather_tool()]))
            .await?;

        if let Some(content) = final_response.content() {
            println!("  Final assistant response: {}", content);
        }
    }

    println!("\nNote: This demonstrates the complete tool calling loop with proper");
    println!("message history management using assistant_with_tool_calls()");

    Ok(())
}
268
269fn streaming_with_tools(_client: &Client) {
270    println!("Streaming response with tools:");
271
272    // Note: Streaming with tool calls is more complex and requires
273    // proper handling of partial tool call chunks. For now, this is
274    // a placeholder showing the concept.
275
276    println!("This would demonstrate streaming tool calls if streaming API was available");
277    println!("In streaming mode, tool calls would arrive as chunks that need to be assembled");
278}
279
280async fn parallel_tool_calls(client: &Client) -> Result<()> {
281    let builder = client
282        .chat()
283        .user("Check the weather in Tokyo, London, and New York")
284        .tools(vec![get_weather_tool()]);
285    let response = client.send_chat(builder).await?;
286
287    // Modern models can call multiple tools in parallel
288    let tool_calls = response.tool_calls();
289    println!("Parallel tool calls: {}", tool_calls.len());
290
291    // Collect arguments first to avoid lifetime issues
292    let args_vec: Vec<String> = tool_calls
293        .iter()
294        .map(|tc| tc.function_arguments().to_string())
295        .collect();
296
297    // Execute all in parallel using tokio
298    let mut handles = Vec::new();
299    for args in args_vec {
300        let handle = tokio::spawn(async move {
301            let params: WeatherParams = serde_json::from_str(&args)?;
302            execute_weather_function(params)
303        });
304        handles.push(handle);
305    }
306
307    // Wait for all results
308    for (i, handle) in handles.into_iter().enumerate() {
309        match handle.await {
310            Ok(Ok(result)) => println!("Location {}: {}", i + 1, result),
311            Ok(Err(e)) => println!("Location {} error: {}", i + 1, e),
312            Err(e) => println!("Task {} panicked: {}", i + 1, e),
313        }
314    }
315
316    Ok(())
317}