function_call_openai/
function_call_openai.rs

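//! OpenAI function-calling example for ai-lib: declare a tool, let the model
//! request it, execute it locally, and send the result back for a final reply.
//!
//! Run with a real key (assuming this file is wired up as a cargo example;
//! adjust the example name to match your layout):
//!   OPENAI_API_KEY=sk-... cargo run --example function_call_openai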
use ai_lib::types::common::Content;
use ai_lib::types::function_call::{FunctionCallPolicy, Tool};
use ai_lib::{AiClient, ChatCompletionRequest, Message, Provider, Role};
use serde_json::json;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("🔧 OpenAI Function Calling example (ai-lib)");

    // Ensure OPENAI_API_KEY is set in env before running
    let client = AiClient::new(Provider::OpenAI)?;

    // Build a simple user message
    let user_msg = Message {
        role: Role::User,
        content: Content::Text("Please call the ascii_horse tool with size=3".to_string()),
        function_call: None,
    };

    // Define a Tool (JSON Schema for parameters)
    let ascii_horse_tool = Tool {
        name: "ascii_horse".to_string(),
        description: Some("Draws an ASCII horse of given size".to_string()),
        parameters: Some(json!({
            "type": "object",
            "properties": {
                "size": { "type": "integer", "description": "Size of the horse" }
            },
            "required": ["size"]
        })),
    };
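    // `parameters` is plain JSON Schema, so richer constraints (enums,
    // "minimum"/"maximum", nested objects) can be declared the same way.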

    let mut req = ChatCompletionRequest::new("gpt-4o-mini".to_string(), vec![user_msg]);
    req.functions = Some(vec![ascii_horse_tool]);
    // "auto" lets the model decide whether (and when) to call the tool
    req.function_call = Some(FunctionCallPolicy::Auto("auto".to_string()));
    req = req.with_max_tokens(200).with_temperature(0.0);

    println!("📤 Sending request to OpenAI (model={})", req.model);

    let resp = client.chat_completion(req).await?;

    // Handle a possible function call from the model: execute locally and send the result back
    for choice in resp.choices {
        let msg = choice.message;
        if let Some(fc) = msg.function_call {
            println!("🛠️  Model invoked function: {}", fc.name);
            let args = fc.arguments.unwrap_or(json!(null));
            println!("   arguments: {}", args);

            // Simple local tool: ascii_horse
            if fc.name == "ascii_horse" {
                // Parse the size param; guard against negative values before
                // the cast, since a negative i64 would wrap to a huge usize.
                let size = args
                    .get("size")
                    .and_then(|v| v.as_i64())
                    .unwrap_or(3)
                    .max(1) as usize;
                let horse = generate_ascii_horse(size);
                println!("⚙️ Executed ascii_horse locally, output:\n{}", horse);

                // Send the tool result back as an assistant message. This is a
                // deliberate simplification: a production flow would replay the
                // full history (the user prompt, the model's function_call, and
                // the tool result) so the model keeps its context.
                let tool_msg = Message {
                    role: Role::Assistant,
                    content: Content::Text(horse.clone()),
                    function_call: None,
                };

                let mut followup =
                    ChatCompletionRequest::new("gpt-4o-mini".to_string(), vec![tool_msg]);
                followup = followup.with_max_tokens(200).with_temperature(0.0);
                let follow_resp = client.chat_completion(followup).await?;
                for fc_choice in follow_resp.choices {
                    println!(
                        "🗨️ Final model response: {}",
                        fc_choice.message.content.as_text()
                    );
                }
            }
        } else {
            println!("💬 Model message: {}", msg.content.as_text());
        }
    }

    Ok(())
}

/// Renders a toy ASCII "horse": one line of mane per unit of `size`.
fn generate_ascii_horse(size: usize) -> String {
    let mut out = String::new();
    let s = std::cmp::max(1, size);
    for _ in 0..s {
        out.push_str("  \\ \\__\\\n");
    }
    out.push_str(" (horse)\n");
    out
}
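
// NOTE: Some providers return `arguments` as a JSON-encoded *string* rather
// than a JSON object. Whether ai-lib normalizes that is an assumption worth
// checking; a hypothetical helper like this sketch tolerates both shapes:
//
//     let args = parse_tool_arguments(&fc.arguments.unwrap_or(json!(null)));
#[allow(dead_code)]
fn parse_tool_arguments(raw: &serde_json::Value) -> serde_json::Value {
    match raw {
        // String payload: try to parse the JSON embedded in it.
        serde_json::Value::String(s) => {
            serde_json::from_str(s).unwrap_or(serde_json::Value::Null)
        }
        // Already structured: pass it through unchanged.
        other => other.clone(),
    }
}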