// Example: function_calling/function_calling.rs
use gemini_client_api::gemini::ask::Gemini;
use gemini_client_api::gemini::types::request::Tool;
use gemini_client_api::gemini::types::sessions::Session;
use gemini_client_api::gemini::utils::{GeminiSchema, execute_function_calls, gemini_function};
use std::env;
use std::error::Error;
7
/// This function will be made available to Gemini.
/// The doc comments are used as descriptions for the tool.
#[gemini_function]
/// Returns the result of adding two numbers.
fn add_numbers(
    /// The first number.
    a: f64,
    /// The second number.
    b: f64,
) -> f64 {
    // NOTE: #[gemini_function] evidently generates an `add_numbers` module
    // (see `add_numbers::gemini_schema()` in `main`), so the doc comments
    // above — including the ones on the parameters — become the tool's
    // schema descriptions. Keep them accurate.
    // Log the invocation so tool execution is visible when the example runs.
    println!("[Executing Tool] adding {} + {}", a, b);
    a + b
}
21
#[gemini_function]
/// Function to get the current temperature.
fn get_temperature(location: String) -> Result<String, &'static str> {
    println!("[Executing Tool] getting temperature for {}", location);
    // Deliberately always fails: the example demonstrates how a tool error
    // is reported back to the model rather than crashing the program.
    Err("API is out of service")
}
28
29#[tokio::main]
30async fn main() -> Result<(), Box<dyn Error>> {
31    let mut session = Session::new(10);
32    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
33
34    // 1. Initialize Gemini and register tools
35    let ai =
36        Gemini::new(api_key, "gemini-2.5-flash", None).set_tools(vec![Tool::FunctionDeclarations(
37            vec![
38                add_numbers::gemini_schema(),
39                get_temperature::gemini_schema(),
40            ],
41        )]);
42
43    let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
44    println!("User: {}\n", prompt);
45
46    // 2. Ask Gemini. It might reply with one or more function calls.
47    let mut response = ai.ask(session.ask(prompt)).await?;
48
49    // 3. Loop to handle potential multiple rounds of function calls
50    loop {
51        if response.get_chat().has_function_call() {
52            println!("Gemini requested function calls...");
53
54            // 4. Use the macro to execute all requested calls and update the session
55            let results = execute_function_calls!(session, add_numbers, get_temperature);
56
57            for (idx, res) in results.iter().enumerate() {
58                if let Some(r) = res {
59                    println!("  Call #{} result: {:?}", idx, r);
60                }
61            }
62
63            // 5. Send the results back to Gemini to get the final natural language response
64            response = ai.ask(&mut session).await?;
65        } else {
66            // No more function calls, show the final response
67            println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
68            break;
69        }
70    }
71
72    Ok(())
73}
74
75#[tokio::test]
76async fn handle_manually() {
77    use gemini_client_api::gemini::types::request::PartType;
78    let mut session = Session::new(10);
79    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
80
81    // 1. Initialize Gemini and register tools
82    let ai =
83        Gemini::new(api_key, "gemini-2.5-flash", None).set_tools(vec![Tool::FunctionDeclarations(
84            vec![
85                add_numbers::gemini_schema(),
86                get_temperature::gemini_schema(),
87            ],
88        )]);
89
90    println!("--- Function Calling Example ---");
91    let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
92    println!("User: {}\n", prompt);
93
94    // 2. Ask Gemini. It might reply with one or more function calls.
95    let mut response = ai.ask(session.ask(prompt)).await?;
96
97    // 3. Loop to handle potential multiple rounds of function calls
98    loop {
99        if response.get_chat().has_function_call() {
100            println!("Gemini requested function calls...");
101
102            // 4. Use the macro to execute all requested calls and update the session
103            let _ = execute_function_calls!(session, add_numbers);
104
105            for call in response.get_chat().get_function_calls() {
106                if call.name() == "get_temperature" {
107                    get_temperature::execute_with_closure(
108                        call.args().as_ref().unwrap(),
109                        |location| {
110                            println!("[Executing Closure] getting temperature for {}", location);
111                            session // Note: You must update session manually
112                                .add_function_response(
113                                    "get_temperature",
114                                    format!("temperature of {location} is 38 degree Celsius"),
115                                )
116                                .unwrap();
117                        },
118                    )
119                    .expect("Gemini responded with wrong argument format")
120                }
121            }
122
123            // 5. Send the results back to Gemini to get the final natural language response
124            response = ai.ask(&mut session).await?;
125        } else {
126            // No more function calls, show the final response
127            println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
128            break;
129        }
130    }
131}