// examples/function_calling/function_calling.rs
use gemini_client_api::gemini::ask::Gemini;
use gemini_client_api::gemini::types::request::Tool;
use gemini_client_api::gemini::types::sessions::Session;
use gemini_client_api::gemini::utils::{GeminiSchema, execute_function_calls, gemini_function};
use std::env;
use std::error::Error;

8/// This function will be made available to Gemini.
9/// The doc comments are used as descriptions for the tool.
10#[gemini_function]
11/// Returns the result of adding two numbers.
12fn add_numbers(
13    /// The first number.
14    a: f64,
15    /// The second number.
16    b: f64,
17) -> f64 {
18    println!("[Executing Tool] adding {} + {}", a, b);
19    a + b
20}
21
#[gemini_function]
/// Function to get the current temperature.
fn get_temperature(location: String) -> Result<String, &'static str> {
    // NOTE(review): this tool deliberately always fails — the example uses it
    // to show how a tool error is reported back to Gemini. The Ok arm of the
    // Result is never produced here.
    println!("[Executing Tool] getting temperature for {}", location);
    Err("API is out of service")
}

29#[tokio::main]
30async fn main() -> Result<(), Box<dyn Error>> {
31    let mut session = Session::new(10);
32    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
33
34    // 1. Initialize Gemini and register tools
35    let ai =
36        Gemini::new(api_key, "gemini-2.5-flash", None).set_tools(vec![Tool::FunctionDeclarations(
37            vec![
38                add_numbers::gemini_schema(),
39                get_temperature::gemini_schema(),
40            ],
41        )]);
42
43    let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
44    println!("User: {}\n", prompt);
45
46    // 2. Ask Gemini. It might reply with one or more function calls.
47    let mut response = ai.ask(session.ask(prompt)).await?;
48
49    // 3. Loop to handle potential multiple rounds of function calls
50    loop {
51        if response.get_chat().has_function_call() {
52            println!("Gemini requested function calls...");
53
54            // 4. Use the macro to execute all requested calls and update the session
55            let results = execute_function_calls!(session, add_numbers, get_temperature);
56
57            for (idx, res) in results.iter().enumerate() {
58                if let Some(r) = res {
59                    println!("  Call #{} result: {:?}", idx, r);
60                }
61            }
62
63            // 5. Send the results back to Gemini to get the final natural language response
64            response = ai.ask(&mut session).await?;
65        } else {
66            // No more function calls, show the final response
67            println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
68            break;
69        }
70    }
71
72    Ok(())
73}
74
75#[tokio::test]
76async fn handle_manually() {
77    let mut session = Session::new(10);
78    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
79
80    // 1. Initialize Gemini and register tools
81    let ai =
82        Gemini::new(api_key, "gemini-2.5-flash", None).set_tools(vec![Tool::FunctionDeclarations(
83            vec![
84                add_numbers::gemini_schema(),
85                get_temperature::gemini_schema(),
86            ],
87        )]);
88
89    println!("--- Function Calling Example ---");
90    let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
91    println!("User: {}\n", prompt);
92
93    // 2. Ask Gemini. It might reply with one or more function calls.
94    let mut response = ai.ask(session.ask(prompt)).await?;
95
96    // 3. Loop to handle potential multiple rounds of function calls
97    loop {
98        if response.get_chat().has_function_call() {
99            println!("Gemini requested function calls...");
100
101            // 4. Use the macro to execute all requested calls and update the session
102            let _ = execute_function_calls!(session, add_numbers);
103
104            for call in response.get_chat().get_function_calls() {
105                if call.name() == "get_temperature" {
106                    let (location,) =
107                        get_temperature::parse_arguments(call.args().as_ref().unwrap())
108                            .expect("Gemini responded with wrong argument format");
109
110                    println!("[Executing call] getting temperature for {}", location);
111                    session // Note: You must update session manually
112                        .add_function_response(
113                            "get_temperature",
114                            format!("temperature of {location} is 38 degree Celsius"),
115                        )
116                        .unwrap();
117                }
118            }
119
120            // 5. Send the results back to Gemini to get the final natural language response
121            response = ai.ask(&mut session).await?;
122        } else {
123            // No more function calls, show the final response
124            println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
125            break;
126        }
127    }
128}