Struct GenerationResponse

pub struct GenerationResponse {
    pub candidates: Vec<Candidate>,
    pub prompt_feedback: Option<PromptFeedback>,
    pub usage_metadata: Option<UsageMetadata>,
}

Response from the Gemini API for content generation

Fields

candidates: Vec<Candidate>

The generated candidate responses

prompt_feedback: Option<PromptFeedback>

Feedback about the prompt, if provided

usage_metadata: Option<UsageMetadata>

Token usage metadata, if provided
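
The helper methods below cover the common cases, but the fields can also be read directly. Here is a minimal sketch (not one of the repository examples), assuming the same async main setup and crate imports as the examples below:

    let api_key = std::env::var("GEMINI_API_KEY")?;
    let client = Gemini::new(api_key);

    let response = client
        .generate_content()
        .with_user_message("Say hello")
        .execute()
        .await?;

    // `candidates` is a plain Vec and is always present; the other two fields are optional.
    println!("Candidates returned: {}", response.candidates.len());
    if response.prompt_feedback.is_some() {
        println!("The API attached feedback about the prompt");
    }
    if let Some(usage) = &response.usage_metadata {
        println!("Total tokens: {}", usage.total_token_count);
    }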

Implementations

impl GenerationResponse

pub fn text(&self) -> String

Get the text of the first candidate

Examples found in repository
examples/test_api.rs (line 20)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6    let api_key = env::var("GEMINI_API_KEY")?;
7
8    // Create client with the default model (gemini-2.0-flash)
9    let client = Gemini::new(api_key);
10
11    println!("Sending request to Gemini API...");
12
13    // Simple text completion with minimal content
14    let response = client
15        .generate_content()
16        .with_user_message("Say hello")
17        .execute()
18        .await?;
19
20    println!("Response: {}", response.text());
21
22    Ok(())
23}
examples/google_search.rs (line 25)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6    // Get API key from environment variable
7    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable not set");
8
9    // Create client
10    let client = Gemini::new(api_key);
11
12    println!("--- Google Search tool example ---");
13
14    // Create a Google Search tool
15    let google_search_tool = Tool::google_search();
16
17    // Create a request with Google Search tool
18    let response = client
19        .generate_content()
20        .with_user_message("What is the current Google stock price?")
21        .with_tool(google_search_tool)
22        .execute()
23        .await?;
24
25    println!("Response: {}", response.text());
26
27    Ok(())
28}
examples/custom_base_url.rs (line 28)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6    // Get API key from environment variable
7    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable not set");
8    // Using custom base URL
9    let custom_base_url = "https://generativelanguage.googleapis.com/v1beta/";
10    let client_custom = Gemini::with_model_and_base_url(
11        api_key,
12        "models/gemini-2.5-flash-lite-preview-06-17".to_string(),
13        custom_base_url.to_string(),
14    );
15    println!("Custom base URL client created successfully");
16    let response = client_custom
17        .generate_content()
18        .with_system_prompt("You are a helpful assistant.")
19        .with_user_message("Hello, can you tell me a joke about programming?")
20        .with_generation_config(GenerationConfig {
21            temperature: Some(0.7),
22            max_output_tokens: Some(100),
23            ..Default::default()
24        })
25        .execute()
26        .await?;
27
28    println!("Response: {}", response.text());
29
30    Ok(())
31}
examples/gemini_pro_example.rs (line 42)
6async fn main() -> Result<(), Box<dyn std::error::Error>> {
7    // Replace with your actual API key
8    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable not set");
9
10    // Create a Gemini client
11    let gemini = Gemini::pro(api_key);
12
13    // This example matches the exact curl request format:
14    // curl "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent?key=$GEMINI_API_KEY" \
15    //   -H 'Content-Type: application/json' \
16    //   -d '{
17    //     "system_instruction": {
18    //       "parts": [
19    //         {
20    //           "text": "You are a cat. Your name is Neko."
21    //         }
22    //       ]
23    //     },
24    //     "contents": [
25    //       {
26    //         "parts": [
27    //           {
28    //             "text": "Hello there"
29    //           }
30    //         ]
31    //       }
32    //     ]
33    //   }'
34    let response = gemini
35        .generate_content()
36        .with_system_instruction("You are a cat. Your name is Neko.")
37        .with_user_message("Hello there")
38        .execute()
39        .await?;
40
41    // Print the response
42    println!("Response: {}", response.text());
43
44    Ok(())
45}
examples/blob.rs (line 45)
9async fn main() -> Result<(), Box<dyn std::error::Error>> {
10    // Get API key from environment variable
11    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable not set");
12
13    // Image file path (in the same directory)
14    let image_path = Path::new(file!())
15        .parent()
16        .unwrap_or(Path::new("."))
17        .join("image-example.webp"); // Replace with your image filename
18
19    // Read the image file
20    let mut file = File::open(&image_path)?;
21    let mut buffer = Vec::new();
22    file.read_to_end(&mut buffer)?;
23
24    // Convert to base64
25    let data = general_purpose::STANDARD.encode(&buffer);
26
27    println!("Image loaded: {}", image_path.display());
28
29    // Create client
30    let client = Gemini::new(api_key);
31
32    println!("--- Describe Image ---");
33    let response = client
34        .generate_content()
35        .with_inline_data(data, "image/webp")
36        .with_response_mime_type("text/plain")
37        .with_generation_config(GenerationConfig {
38            temperature: Some(0.7),
39            max_output_tokens: Some(400),
40            ..Default::default()
41        })
42        .execute()
43        .await?;
44
45    println!("Response: {}", response.text());
46
47    Ok(())
48}
examples/curl_google_search.rs (line 54)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6    // Get API key from environment variable
7    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable not set");
8
9    println!("--- Curl equivalent with Google Search tool ---");
10
11    // This is equivalent to the curl example:
12    // curl "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent?key=$GEMINI_API_KEY" \
13    //   -H "Content-Type: application/json" \
14    //   -d '{
15    //       "contents": [
16    //           {
17    //               "parts": [
18    //                   {"text": "What is the current Google stock price?"}
19    //               ]
20    //           }
21    //       ],
22    //       "tools": [
23    //           {
24    //               "google_search": {}
25    //           }
26    //       ]
27    //   }'
28
29    // Create client
30    let client = Gemini::new(api_key);
31
32    // Create a content part that matches the JSON in the curl example
33    let text_part = Part::Text {
34        text: "What is the current Google stock price?".to_string(),
35        thought: None,
36    };
37
38    let content = Content {
39        parts: vec![text_part],
40        role: None,
41    };
42
43    // Create a Google Search tool
44    let google_search_tool = Tool::google_search();
45
46    // Add the content and tool directly to the request
47    // This exactly mirrors the JSON structure in the curl example
48    let mut content_builder = client.generate_content();
49    content_builder.contents.push(content);
50    content_builder = content_builder.with_tool(google_search_tool);
51
52    let response = content_builder.execute().await?;
53
54    println!("Response: {}", response.text());
55
56    Ok(())
57}

pub fn function_calls(&self) -> Vec<&FunctionCall>

Get function calls from the response

Examples found in repository
examples/advanced.rs (line 43)
9async fn main() -> Result<(), Box<dyn std::error::Error>> {
10    let api_key = env::var("GEMINI_API_KEY")?;
11
12    // Create client
13    let client = Gemini::new(api_key);
14
15    // Define a weather function
16    let get_weather = FunctionDeclaration::new(
17        "get_weather",
18        "Get the current weather for a location",
19        FunctionParameters::object()
20            .with_property(
21                "location",
22                PropertyDetails::string("The city and state, e.g., San Francisco, CA"),
23                true,
24            )
25            .with_property(
26                "unit",
27                PropertyDetails::enum_type("The unit of temperature", ["celsius", "fahrenheit"]),
28                false,
29            ),
30    );
31
32    // Create a request with function calling
33    println!("Sending function call request...");
34    let response = client
35        .generate_content()
36        .with_user_message("What's the weather like in Tokyo right now?")
37        .with_function(get_weather)
38        .with_function_calling_mode(FunctionCallingMode::Any)
39        .execute()
40        .await?;
41
42    // Check if there are function calls
43    if let Some(function_call) = response.function_calls().first() {
44        println!(
45            "Function call received: {} with args: {}",
46            function_call.name, function_call.args
47        );
48
49        // Get parameters from the function call
50        let location: String = function_call.get("location")?;
51        let unit = function_call
52            .get::<String>("unit")
53            .unwrap_or_else(|_| String::from("celsius"));
54
55        println!("Location: {}, Unit: {}", location, unit);
56
57        // Simulate function execution (in a real app, this would call a weather API)
58        // Create a JSON response object
59        let weather_response = serde_json::json!({
60            "temperature": 22,
61            "unit": unit,
62            "condition": "sunny",
63            "location": location
64        });
65
66        // Continue the conversation with the function result
67        // We need to replay the entire conversation with the function response
68        println!("Sending function response...");
69
70        // First, need to recreate the original prompt and the model's response
71        let mut final_request = client
72            .generate_content()
73            .with_user_message("What's the weather like in Tokyo right now?");
74
75        // Add the function call from the model's response
76        let mut call_content = Content::default();
77        call_content.parts.push(Part::FunctionCall {
78            function_call: (*function_call).clone(),
79        });
80        final_request.contents.push(call_content);
81
82        // Now add the function response using the JSON value
83        final_request = final_request.with_function_response("get_weather", weather_response);
84
85        // Execute the request
86        let final_response = final_request.execute().await?;
87
88        println!("Final response: {}", final_response.text());
89    } else {
90        println!("No function calls in the response.");
91        println!("Response text: {}", response.text());
92    }
93
94    Ok(())
95}
examples/simple.rs (line 62)
8async fn main() -> Result<(), Box<dyn std::error::Error>> {
9    // Get API key from environment variable
10    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable not set");
11
12    // Create client
13    let client = Gemini::new(api_key);
14
15    // Simple generation
16    println!("--- Simple generation ---");
17    let response = client
18        .generate_content()
19        .with_system_prompt("You are a helpful assistant.")
20        .with_user_message("Hello, can you tell me a joke about programming?")
21        .with_generation_config(GenerationConfig {
22            temperature: Some(0.7),
23            max_output_tokens: Some(100),
24            ..Default::default()
25        })
26        .execute()
27        .await?;
28
29    println!("Response: {}", response.text());
30
31    // Function calling example
32    println!("\n--- Function calling example ---");
33
34    // Define a weather function
35    let get_weather = FunctionDeclaration::new(
36        "get_weather",
37        "Get the current weather for a location",
38        FunctionParameters::object()
39            .with_property(
40                "location",
41                PropertyDetails::string("The city and state, e.g., San Francisco, CA"),
42                true,
43            )
44            .with_property(
45                "unit",
46                PropertyDetails::enum_type("The unit of temperature", ["celsius", "fahrenheit"]),
47                false,
48            ),
49    );
50
51    // Create a request with function calling
52    let response = client
53        .generate_content()
54        .with_system_prompt("You are a helpful weather assistant.")
55        .with_user_message("What's the weather like in San Francisco right now?")
56        .with_function(get_weather)
57        .with_function_calling_mode(FunctionCallingMode::Any)
58        .execute()
59        .await?;
60
61    // Check if there are function calls
62    if let Some(function_call) = response.function_calls().first() {
63        println!(
64            "Function call: {} with args: {}",
65            function_call.name, function_call.args
66        );
67
68        // Get parameters from the function call
69        let location: String = function_call.get("location")?;
70        let unit = function_call
71            .get::<String>("unit")
72            .unwrap_or_else(|_| String::from("celsius"));
73
74        println!("Location: {}, Unit: {}", location, unit);
75
76        // Create model content with function call
77        let model_content = Content::function_call((*function_call).clone());
78
79        // Add as model message
80        let model_message = Message {
81            content: model_content,
82            role: Role::Model,
83        };
84
85        // Simulate function execution
86        let weather_response = format!(
87            "{{\"temperature\": 22, \"unit\": \"{}\", \"condition\": \"sunny\"}}",
88            unit
89        );
90
91        // Continue the conversation with the function result
92        let final_response = client
93            .generate_content()
94            .with_system_prompt("You are a helpful weather assistant.")
95            .with_user_message("What's the weather like in San Francisco right now?")
96            .with_message(model_message)
97            .with_function_response_str("get_weather", weather_response)?
98            .with_generation_config(GenerationConfig {
99                temperature: Some(0.7),
100                max_output_tokens: Some(100),
101                ..Default::default()
102            })
103            .execute()
104            .await?;
105
106        println!("Final response: {}", final_response.text());
107    } else {
108        println!("No function calls in the response.");
109    }
110
111    Ok(())
112}
examples/google_search_with_functions.rs (line 48)
9async fn main() -> Result<(), Box<dyn std::error::Error>> {
10    // Get API key from environment variable
11    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable not set");
12
13    // Create client
14    let client = Gemini::new(api_key);
15
16    println!("--- Google Search with Function Calling example ---");
17
18    // Define a calculator function
19    let calculate = FunctionDeclaration::new(
20        "calculate",
21        "Perform a calculation",
22        FunctionParameters::object()
23            .with_property(
24                "operation",
25                PropertyDetails::enum_type(
26                    "The mathematical operation to perform",
27                    ["add", "subtract", "multiply", "divide"],
28                ),
29                true,
30            )
31            .with_property("a", PropertyDetails::number("The first number"), true)
32            .with_property("b", PropertyDetails::number("The second number"), true),
33    );
34
35    // Create function tool
36    let function_tool = Tool::new(calculate);
37
38    // Create a request with both tools
39    let response = client
40        .generate_content()
41        .with_user_message("What is the current Google stock price multiplied by 2?")
42        .with_tool(function_tool.clone())
43        .with_function_calling_mode(FunctionCallingMode::Any)
44        .execute()
45        .await?;
46
47    // Check if there are function calls
48    if let Some(function_call) = response.function_calls().first() {
49        println!(
50            "Function call: {} with args: {}",
51            function_call.name, function_call.args
52        );
53
54        // Handle the calculate function
55        if function_call.name == "calculate" {
56            let operation: String = function_call.get("operation")?;
57            let a: f64 = function_call.get("a")?;
58            let b: f64 = function_call.get("b")?;
59
60            println!("Calculation: {} {} {}", a, operation, b);
61
62            let result = match operation.as_str() {
63                "add" => a + b,
64                "subtract" => a - b,
65                "multiply" => a * b,
66                "divide" => a / b,
67                _ => panic!("Unknown operation"),
68            };
69
70            let function_response = json!({
71                "result": result,
72            })
73            .to_string();
74
75            // Based on the curl example, we need to structure the conversation properly:
76            // 1. A user message with the original query
77            // 2. A model message containing the function call
78            // 3. A user message containing the function response
79
80            // Construct conversation following the exact curl pattern
81            let mut conversation = client.generate_content();
82
83            // 1. Add user message with original query
84            conversation = conversation
85                .with_user_message("What is the current Google stock price multiplied by 2?");
86
87            // 2. Create model message with function call
88            let model_function_call = FunctionCall::new(
89                "calculate",
90                json!({
91                    "operation": operation,
92                    "a": a,
93                    "b": b
94                }),
95            );
96
97            // Create model content with function call
98            let model_content = Content::function_call(model_function_call).with_role(Role::Model);
99
100            // Add as model message
101            let model_message = Message {
102                content: model_content,
103                role: Role::Model,
104            };
105            conversation = conversation.with_message(model_message);
106
107            // 3. Add user message with function response
108            conversation =
109                conversation.with_function_response_str("calculate", function_response)?;
110
111            // Execute the request
112            let final_response = conversation.execute().await?;
113
114            println!("Final response: {}", final_response.text());
115        } else {
116            println!("Unknown function call: {}", function_call.name);
117        }
118    } else {
119        println!("No function calls in the response.");
120        println!("Direct response: {}", response.text());
121    }
122
123    Ok(())
124}
examples/thinking_advanced.rs (line 95)
8async fn main() -> Result<(), Box<dyn std::error::Error>> {
9    // Get API key from environment variable
10    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable not set");
11
12    // Create client
13    let client = Gemini::with_model(api_key, "models/gemini-2.5-pro".to_string());
14
15    println!("=== Gemini 2.5 Thinking Advanced Example ===\n");
16
17    // Example 1: Streaming with thinking
18    println!("--- Example 1: Streaming with thinking ---");
19    let mut stream = client
20        .generate_content()
21        .with_system_prompt("You are a mathematics expert skilled at solving complex mathematical problems.")
22        .with_user_message("Solve this math problem: Find the sum of the first 50 prime numbers. Please explain your solution process in detail.")
23        .with_thinking_budget(2048)
24        .with_thoughts_included(true)
25        .execute_stream()
26        .await?;
27
28    println!("Streaming response:");
29    let mut thoughts_shown = false;
30    while let Some(chunk_result) = stream.next().await {
31        match chunk_result {
32            Ok(chunk) => {
33                // Check if there's thinking content
34                let thoughts = chunk.thoughts();
35                if !thoughts.is_empty() && !thoughts_shown {
36                    println!("\nThinking process:");
37                    for (i, thought) in thoughts.iter().enumerate() {
38                        println!("Thought {}: {}", i + 1, thought);
39                    }
40                    println!("\nAnswer:");
41                    thoughts_shown = true;
42                }
43
44                // Display general text content
45                print!("{}", chunk.text());
46                std::io::Write::flush(&mut std::io::stdout())?;
47            }
48            Err(e) => eprintln!("Streaming error: {}", e),
49        }
50    }
51    println!("\n");
52
53    // Example 2: Thinking combined with function calls
54    println!("--- Example 2: Thinking combined with function calls ---");
55
56    // Define a calculator function
57    let calculator = FunctionDeclaration::new(
58        "calculate",
59        "Perform basic mathematical calculations",
60        FunctionParameters::object()
61            .with_property(
62                "expression",
63                PropertyDetails::string(
64                    "The mathematical expression to calculate, e.g., '2 + 3 * 4'",
65                ),
66                true,
67            )
68            .with_property(
69                "operation_type",
70                PropertyDetails::enum_type("Type of calculation", ["arithmetic", "advanced"]),
71                false,
72            ),
73    );
74
75    let response = client
76        .generate_content()
77        .with_system_prompt("You are a mathematics assistant. When calculations are needed, use the provided calculator function.")
78        .with_user_message("Calculate the result of (15 + 25) * 3 - 8 and explain the calculation steps.")
79        .with_function(calculator)
80        .with_thinking_budget(1024)
81        .with_thoughts_included(true)
82        .execute()
83        .await?;
84
85    // Display thinking process
86    let thoughts = response.thoughts();
87    if !thoughts.is_empty() {
88        println!("Thinking process:");
89        for (i, thought) in thoughts.iter().enumerate() {
90            println!("Thought {}: {}\n", i + 1, thought);
91        }
92    }
93
94    // Check for function calls
95    let function_calls = response.function_calls();
96    if !function_calls.is_empty() {
97        println!("Function calls:");
98        for (i, call) in function_calls.iter().enumerate() {
99            println!("Call {}: {} Args: {}", i + 1, call.name, call.args);
100        }
101        println!();
102    }
103
104    println!("Answer: {}\n", response.text());
105
106    // Example 3: Complex reasoning task
107    println!("--- Example 3: Complex reasoning task ---");
108    let complex_response = client
109        .generate_content()
110        .with_system_prompt("You are a logical reasoning expert.")
111        .with_user_message(
112            "There are three people: Alice, Bob, and Carol, who live in red, green, and blue houses respectively.\
113            Given:\
114            1. The person in the red house owns a cat\
115            2. Bob does not live in the green house\
116            3. Carol owns a dog\
117            4. The green house is to the left of the red house\
118            5. Alice does not own a cat\
119            Please reason out which color house each person lives in and what pets they own.",
120        )
121        .with_thinking_config(
122            ThinkingConfig::new()
123                .with_thinking_budget(3072)
124                .with_thoughts_included(true),
125        )
126        .execute()
127        .await?;
128
129    // Display thinking process
130    let complex_thoughts = complex_response.thoughts();
131    if !complex_thoughts.is_empty() {
132        println!("Reasoning process:");
133        for (i, thought) in complex_thoughts.iter().enumerate() {
134            println!("Reasoning step {}: {}\n", i + 1, thought);
135        }
136    }
137
138    println!("Conclusion: {}\n", complex_response.text());
139
140    // Display token usage statistics
141    if let Some(usage) = &complex_response.usage_metadata {
142        println!("Token usage statistics:");
143        println!("  Prompt tokens: {}", usage.prompt_token_count);
144        println!(
145            "  Response tokens: {}",
146            usage.candidates_token_count.unwrap_or(0)
147        );
148        if let Some(thinking_tokens) = usage.thoughts_token_count {
149            println!("  Thinking tokens: {}", thinking_tokens);
150        }
151        println!("  Total tokens: {}", usage.total_token_count);
152    }
153
154    Ok(())
155}
examples/tools.rs (line 68)
9async fn main() -> Result<(), Box<dyn std::error::Error>> {
10    // Get API key from environment variable
11    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable not set");
12
13    // Create client
14    let client = Gemini::new(api_key);
15
16    println!("--- Tools example with multiple functions ---");
17
18    // Define a weather function
19    let get_weather = FunctionDeclaration::new(
20        "get_weather",
21        "Get the current weather for a location",
22        FunctionParameters::object()
23            .with_property(
24                "location",
25                PropertyDetails::string("The city and state, e.g., San Francisco, CA"),
26                true,
27            )
28            .with_property(
29                "unit",
30                PropertyDetails::enum_type("The unit of temperature", ["celsius", "fahrenheit"]),
31                false,
32            ),
33    );
34
35    // Define a calculator function
36    let calculate = FunctionDeclaration::new(
37        "calculate",
38        "Perform a calculation",
39        FunctionParameters::object()
40            .with_property(
41                "operation",
42                PropertyDetails::enum_type(
43                    "The mathematical operation to perform",
44                    ["add", "subtract", "multiply", "divide"],
45                ),
46                true,
47            )
48            .with_property("a", PropertyDetails::number("The first number"), true)
49            .with_property("b", PropertyDetails::number("The second number"), true),
50    );
51
52    // Create a tool with multiple functions
53    let tool = Tool::with_functions(vec![get_weather, calculate]);
54
55    // Create a request with tool functions
56    let response = client
57        .generate_content()
58        .with_system_prompt(
59            "You are a helpful assistant that can check weather and perform calculations.",
60        )
61        .with_user_message("What's 42 times 12?")
62        .with_tool(tool)
63        .with_function_calling_mode(FunctionCallingMode::Any)
64        .execute()
65        .await?;
66
67    // Process function calls
68    if let Some(function_call) = response.function_calls().first() {
69        println!(
70            "Function call: {} with args: {}",
71            function_call.name, function_call.args
72        );
73
74        // Handle different function calls
75        match function_call.name.as_str() {
76            "calculate" => {
77                let operation: String = function_call.get("operation")?;
78                let a: f64 = function_call.get("a")?;
79                let b: f64 = function_call.get("b")?;
80
81                println!("Calculation: {} {} {}", a, operation, b);
82
83                let result = match operation.as_str() {
84                    "add" => a + b,
85                    "subtract" => a - b,
86                    "multiply" => a * b,
87                    "divide" => a / b,
88                    _ => panic!("Unknown operation"),
89                };
90
91                let function_response = json!({
92                    "result": result,
93                })
94                .to_string();
95
96                // Based on the curl example, we need to structure the conversation properly:
97                // 1. A user message with the original query
98                // 2. A model message containing the function call
99                // 3. A user message containing the function response
100
101                // Construct conversation following the exact curl pattern
102                let mut conversation = client.generate_content();
103
104                // 1. Add user message with original query and system prompt
105                conversation = conversation
106                    .with_system_prompt("You are a helpful assistant that can check weather and perform calculations.")
107                    .with_user_message("What's 42 times 12?");
108
109                // 2. Create model content with function call
110                let model_content = Content::function_call((*function_call).clone());
111
112                // Add as model message
113                let model_message = Message {
114                    content: model_content,
115                    role: Role::Model,
116                };
117                conversation = conversation.with_message(model_message);
118
119                // 3. Add user message with function response
120                conversation =
121                    conversation.with_function_response_str("calculate", function_response)?;
122
123                // Execute the request
124                let final_response = conversation.execute().await?;
125
126                println!("Final response: {}", final_response.text());
127            }
128            "get_weather" => {
129                let location: String = function_call.get("location")?;
130                let unit = function_call
131                    .get::<String>("unit")
132                    .unwrap_or_else(|_| String::from("celsius"));
133
134                println!("Weather request for: {}, Unit: {}", location, unit);
135
136                let weather_response = json!({
137                    "temperature": 22,
138                    "unit": unit,
139                    "condition": "sunny"
140                })
141                .to_string();
142
143                // Based on the curl example, we need to structure the conversation properly:
144                // 1. A user message with the original query
145                // 2. A model message containing the function call
146                // 3. A user message containing the function response
147
148                // Construct conversation following the exact curl pattern
149                let mut conversation = client.generate_content();
150
151                // 1. Add user message with original query and system prompt
152                conversation = conversation
153                    .with_system_prompt("You are a helpful assistant that can check weather and perform calculations.")
154                    .with_user_message("What's 42 times 12?");
155
156                // 2. Create model content with function call
157                let model_content = Content::function_call((*function_call).clone());
158
159                // Add as model message
160                let model_message = Message {
161                    content: model_content,
162                    role: Role::Model,
163                };
164                conversation = conversation.with_message(model_message);
165
166                // 3. Add user message with function response
167                conversation =
168                    conversation.with_function_response_str("get_weather", weather_response)?;
169
170                // Execute the request
171                let final_response = conversation.execute().await?;
172
173                println!("Final response: {}", final_response.text());
174            }
175            _ => println!("Unknown function"),
176        }
177    } else {
178        println!("No function calls in the response.");
179        println!("Response: {}", response.text());
180    }
181
182    Ok(())
183}

pub fn thoughts(&self) -> Vec<String>

Get thought summaries from the response

Examples found in repository
examples/thinking_basic.rs (line 30)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6    // Get API key from environment variable
7    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable not set");
8
9    // Create client
10    let client = Gemini::with_model(api_key, "models/gemini-2.5-pro".to_string());
11
12    println!("=== Gemini 2.5 Thinking Basic Example ===\n");
13
14    // Example 1: Using default dynamic thinking
15    println!(
16        "--- Example 1: Dynamic thinking (model automatically determines thinking budget) ---"
17    );
18    let response1 = client
19        .generate_content()
20        .with_system_prompt("You are a helpful mathematics assistant.")
21        .with_user_message(
22            "Explain Occam's razor principle and provide a simple example from daily life.",
23        )
24        .with_dynamic_thinking()
25        .with_thoughts_included(true)
26        .execute()
27        .await?;
28
29    // Display thinking process
30    let thoughts = response1.thoughts();
31    if !thoughts.is_empty() {
32        println!("Thinking summary:");
33        for (i, thought) in thoughts.iter().enumerate() {
34            println!("Thought {}: {}\n", i + 1, thought);
35        }
36    }
37
38    println!("Answer: {}\n", response1.text());
39
40    // Display token usage
41    if let Some(usage) = &response1.usage_metadata {
42        println!("Token usage:");
43        println!("  Prompt tokens: {}", usage.prompt_token_count);
44        println!(
45            "  Response tokens: {}",
46            usage.candidates_token_count.unwrap_or(0)
47        );
48        if let Some(thinking_tokens) = usage.thoughts_token_count {
49            println!("  Thinking tokens: {}", thinking_tokens);
50        }
51        println!("  Total tokens: {}\n", usage.total_token_count);
52    }
53
54    // Example 2: Set specific thinking budget
55    println!("--- Example 2: Set thinking budget (1024 tokens) ---");
56    let response2 = client
57        .generate_content()
58        .with_system_prompt("You are a helpful programming assistant.")
59        .with_user_message("List 3 main advantages of using the Rust programming language")
60        .with_thinking_budget(1024)
61        .with_thoughts_included(true)
62        .execute()
63        .await?;
64
65    // Display thinking process
66    let thoughts2 = response2.thoughts();
67    if !thoughts2.is_empty() {
68        println!("Thinking summary:");
69        for (i, thought) in thoughts2.iter().enumerate() {
70            println!("Thought {}: {}\n", i + 1, thought);
71        }
72    }
73
74    println!("Answer: {}\n", response2.text());
75
76    // Example 3: Disable thinking feature
77    println!("--- Example 3: Disable thinking feature ---");
78    let response3 = client
79        .generate_content()
80        .with_system_prompt("You are a helpful assistant.")
81        .with_user_message("What is artificial intelligence?")
82        .execute()
83        .await?;
84
85    println!("Answer: {}\n", response3.text());
86
87    // Example 4: Use GenerationConfig to set thinking
88    println!("--- Example 4: Use GenerationConfig to set thinking ---");
89    let thinking_config = ThinkingConfig::new()
90        .with_thinking_budget(2048)
91        .with_thoughts_included(true);
92
93    let generation_config = GenerationConfig {
94        temperature: Some(0.7),
95        max_output_tokens: Some(500),
96        thinking_config: Some(thinking_config),
97        ..Default::default()
98    };
99
100    let response4 = client
101        .generate_content()
102        .with_system_prompt("You are a creative writing assistant.")
103        .with_user_message(
104            "Write the opening of a short story about a robot learning to feel emotions.",
105        )
106        .with_generation_config(generation_config)
107        .execute()
108        .await?;
109
110    // Display thinking process
111    let thoughts4 = response4.thoughts();
112    if !thoughts4.is_empty() {
113        println!("Thinking summary:");
114        for (i, thought) in thoughts4.iter().enumerate() {
115            println!("Thought {}: {}\n", i + 1, thought);
116        }
117    }
118
119    println!("Answer: {}\n", response4.text());
120
121    Ok(())
122}
examples/thinking_curl_equivalent.rs (line 51)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6    // Get API key from environment variable
7    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable not set");
8
9    // This is equivalent to the following curl example:
10    // curl "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-pro:generateContent" \
11    //   -H "x-goog-api-key: $GEMINI_API_KEY" \
12    //   -H 'Content-Type: application/json' \
13    //   -X POST \
14    //   -d '{
15    //     "contents": [
16    //       {
17    //         "parts": [
18    //           {
19    //             "text": "Provide a list of the top 3 famous physicists and their major contributions"
20    //           }
21    //         ]
22    //       }
23    //     ],
24    //     "generationConfig": {
25    //       "thinkingConfig": {
26    //         "thinkingBudget": 1024,
27    //         "includeThoughts": true
28    //       }
29    //     }
30    //   }'
31
32    // Create client
33    let client = Gemini::with_model(api_key, "models/gemini-2.5-pro".to_string());
34
35    println!("=== Thinking Curl Equivalent Example ===\n");
36
37    // Method 1: Using high-level API (simplest approach)
38    println!("--- Method 1: Using high-level API ---");
39
40    let response1 = client
41        .generate_content()
42        .with_user_message(
43            "Provide a list of the top 3 famous physicists and their major contributions",
44        )
45        .with_thinking_budget(1024)
46        .with_thoughts_included(true)
47        .execute()
48        .await?;
49
50    // Display thinking process
51    let thoughts1 = response1.thoughts();
52    if !thoughts1.is_empty() {
53        println!("Thinking summary:");
54        for (i, thought) in thoughts1.iter().enumerate() {
55            println!("Thought {}: {}\n", i + 1, thought);
56        }
57    }
58
59    println!("Answer: {}\n", response1.text());
60
61    // Method 2: Using GenerationConfig to fully match curl example structure
62    println!("--- Method 2: Fully matching curl example structure ---");
63
64    let thinking_config = ThinkingConfig {
65        thinking_budget: Some(1024),
66        include_thoughts: Some(true),
67    };
68
69    let generation_config = GenerationConfig {
70        thinking_config: Some(thinking_config),
71        ..Default::default()
72    };
73
74    let response2 = client
75        .generate_content()
76        .with_user_message(
77            "Provide a list of the top 3 famous physicists and their major contributions",
78        )
79        .with_generation_config(generation_config)
80        .execute()
81        .await?;
82
83    // Display thinking process
84    let thoughts2 = response2.thoughts();
85    if !thoughts2.is_empty() {
86        println!("Thinking summary:");
87        for (i, thought) in thoughts2.iter().enumerate() {
88            println!("Thought {}: {}\n", i + 1, thought);
89        }
90    }
91
92    println!("Answer: {}\n", response2.text());
93
94    // Show token usage
95    if let Some(usage) = &response2.usage_metadata {
96        println!("Token usage:");
97        println!("  Prompt tokens: {}", usage.prompt_token_count);
98        println!(
99            "  Response tokens: {}",
100            usage.candidates_token_count.unwrap_or(0)
101        );
102        if let Some(thinking_tokens) = usage.thoughts_token_count {
103            println!("  Thinking tokens: {}", thinking_tokens);
104        }
105        println!("  Total tokens: {}", usage.total_token_count);
106    }
107
108    // Method 3: Demonstrate different thinking budget settings
109    println!("\n--- Method 3: Different thinking budget comparison ---");
110
111    // Thinking disabled
112    println!("Thinking disabled:");
113    let response_no_thinking = client
114        .generate_content()
115        .with_user_message("Explain the basic principles of quantum mechanics")
116        .execute()
117        .await?;
118    println!("Answer: {}\n", response_no_thinking.text());
119
120    // Dynamic thinking
121    println!("Dynamic thinking:");
122    let response_dynamic = client
123        .generate_content()
124        .with_user_message("Explain the basic principles of quantum mechanics")
125        .with_dynamic_thinking()
126        .with_thoughts_included(true)
127        .execute()
128        .await?;
129
130    let thoughts_dynamic = response_dynamic.thoughts();
131    if !thoughts_dynamic.is_empty() {
132        println!("Thinking summary:");
133        for (i, thought) in thoughts_dynamic.iter().enumerate() {
134            println!("Thought {}: {}\n", i + 1, thought);
135        }
136    }
137    println!("Answer: {}\n", response_dynamic.text());
138
139    // High thinking budget
140    println!("High thinking budget (4096 tokens):");
141    let response_high_budget = client
142        .generate_content()
143        .with_user_message("Explain the basic principles of quantum mechanics")
144        .with_thinking_budget(4096)
145        .with_thoughts_included(true)
146        .execute()
147        .await?;
148
149    let thoughts_high = response_high_budget.thoughts();
150    if !thoughts_high.is_empty() {
151        println!("Thinking summary:");
152        for (i, thought) in thoughts_high.iter().enumerate() {
153            println!("Thought {}: {}\n", i + 1, thought);
154        }
155    }
156    println!("Answer: {}", response_high_budget.text());
157
158    Ok(())
159}
examples/thinking_advanced.rs (line 34)
8async fn main() -> Result<(), Box<dyn std::error::Error>> {
9    // Get API key from environment variable
10    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable not set");
11
12    // Create client
13    let client = Gemini::with_model(api_key, "models/gemini-2.5-pro".to_string());
14
15    println!("=== Gemini 2.5 Thinking Advanced Example ===\n");
16
17    // Example 1: Streaming with thinking
18    println!("--- Example 1: Streaming with thinking ---");
19    let mut stream = client
20        .generate_content()
21        .with_system_prompt("You are a mathematics expert skilled at solving complex mathematical problems.")
22        .with_user_message("Solve this math problem: Find the sum of the first 50 prime numbers. Please explain your solution process in detail.")
23        .with_thinking_budget(2048)
24        .with_thoughts_included(true)
25        .execute_stream()
26        .await?;
27
28    println!("Streaming response:");
29    let mut thoughts_shown = false;
30    while let Some(chunk_result) = stream.next().await {
31        match chunk_result {
32            Ok(chunk) => {
33                // Check if there's thinking content
34                let thoughts = chunk.thoughts();
35                if !thoughts.is_empty() && !thoughts_shown {
36                    println!("\nThinking process:");
37                    for (i, thought) in thoughts.iter().enumerate() {
38                        println!("Thought {}: {}", i + 1, thought);
39                    }
40                    println!("\nAnswer:");
41                    thoughts_shown = true;
42                }
43
44                // Display general text content
45                print!("{}", chunk.text());
46                std::io::Write::flush(&mut std::io::stdout())?;
47            }
48            Err(e) => eprintln!("Streaming error: {}", e),
49        }
50    }
51    println!("\n");
52
53    // Example 2: Thinking combined with function calls
54    println!("--- Example 2: Thinking combined with function calls ---");
55
56    // Define a calculator function
57    let calculator = FunctionDeclaration::new(
58        "calculate",
59        "Perform basic mathematical calculations",
60        FunctionParameters::object()
61            .with_property(
62                "expression",
63                PropertyDetails::string(
64                    "The mathematical expression to calculate, e.g., '2 + 3 * 4'",
65                ),
66                true,
67            )
68            .with_property(
69                "operation_type",
70                PropertyDetails::enum_type("Type of calculation", ["arithmetic", "advanced"]),
71                false,
72            ),
73    );
74
75    let response = client
76        .generate_content()
77        .with_system_prompt("You are a mathematics assistant. When calculations are needed, use the provided calculator function.")
78        .with_user_message("Calculate the result of (15 + 25) * 3 - 8 and explain the calculation steps.")
79        .with_function(calculator)
80        .with_thinking_budget(1024)
81        .with_thoughts_included(true)
82        .execute()
83        .await?;
84
85    // Display thinking process
86    let thoughts = response.thoughts();
87    if !thoughts.is_empty() {
88        println!("Thinking process:");
89        for (i, thought) in thoughts.iter().enumerate() {
90            println!("Thought {}: {}\n", i + 1, thought);
91        }
92    }
93
94    // Check for function calls
95    let function_calls = response.function_calls();
96    if !function_calls.is_empty() {
97        println!("Function calls:");
98        for (i, call) in function_calls.iter().enumerate() {
99            println!("Call {}: {} Args: {}", i + 1, call.name, call.args);
100        }
101        println!();
102    }
103
104    println!("Answer: {}\n", response.text());
105
106    // Example 3: Complex reasoning task
107    println!("--- Example 3: Complex reasoning task ---");
108    let complex_response = client
109        .generate_content()
110        .with_system_prompt("You are a logical reasoning expert.")
111        .with_user_message(
112            "There are three people: Alice, Bob, and Carol, who live in red, green, and blue houses respectively.\
113            Given:\
114            1. The person in the red house owns a cat\
115            2. Bob does not live in the green house\
116            3. Carol owns a dog\
117            4. The green house is to the left of the red house\
118            5. Alice does not own a cat\
119            Please reason out which color house each person lives in and what pets they own.",
120        )
121        .with_thinking_config(
122            ThinkingConfig::new()
123                .with_thinking_budget(3072)
124                .with_thoughts_included(true),
125        )
126        .execute()
127        .await?;
128
129    // Display thinking process
130    let complex_thoughts = complex_response.thoughts();
131    if !complex_thoughts.is_empty() {
132        println!("Reasoning process:");
133        for (i, thought) in complex_thoughts.iter().enumerate() {
134            println!("Reasoning step {}: {}\n", i + 1, thought);
135        }
136    }
137
138    println!("Conclusion: {}\n", complex_response.text());
139
140    // Display token usage statistics
141    if let Some(usage) = &complex_response.usage_metadata {
142        println!("Token usage statistics:");
143        println!("  Prompt tokens: {}", usage.prompt_token_count);
144        println!(
145            "  Response tokens: {}",
146            usage.candidates_token_count.unwrap_or(0)
147        );
148        if let Some(thinking_tokens) = usage.thoughts_token_count {
149            println!("  Thinking tokens: {}", thinking_tokens);
150        }
151        println!("  Total tokens: {}", usage.total_token_count);
152    }
153
154    Ok(())
155}

pub fn all_text(&self) -> Vec<(String, bool)>

Get all text parts (both regular text and thoughts)
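
No repository example covers this method, so here is a minimal sketch. It assumes the boolean in each tuple marks whether that part is a thought summary (true) or regular response text (false), and it reuses the thinking-enabled setup from the examples above:

    let response = client
        .generate_content()
        .with_user_message("Explain Occam's razor in one paragraph.")
        .with_dynamic_thinking()
        .with_thoughts_included(true)
        .execute()
        .await?;

    // Walk every text part once instead of calling text() and thoughts() separately.
    for (text, is_thought) in response.all_text() {
        if is_thought {
            println!("[thought] {}", text);
        } else {
            println!("[answer] {}", text);
        }
    }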

Trait Implementations

impl Clone for GenerationResponse

fn clone(&self) -> GenerationResponse

Returns a duplicate of the value. Read more

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more

impl Debug for GenerationResponse

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

impl<'de> Deserialize<'de> for GenerationResponse

fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where __D: Deserializer<'de>,

Deserialize this value from the given Serde deserializer. Read more

impl Serialize for GenerationResponse

fn serialize<__S>(&self, __serializer: __S) -> Result<__S::Ok, __S::Error>
where __S: Serializer,

Serialize this value into the given Serde serializer. Read more
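
Because GenerationResponse derives both Serialize and Deserialize, a response can be written out and read back with serde_json (already used by the repository examples). A minimal sketch, assuming a response obtained as in the examples above:

    // Persist the response as pretty-printed JSON, then load it again.
    let json = serde_json::to_string_pretty(&response)?;
    std::fs::write("last_response.json", &json)?;

    let reloaded: GenerationResponse = serde_json::from_str(&json)?;
    println!("Candidates in saved response: {}", reloaded.candidates.len());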

Auto Trait Implementations

Blanket Implementations

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more

impl<T> CloneToUninit for T
where T: Clone,

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬 This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest. Read more

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T> Instrument for T

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T> PolicyExt for T
where T: ?Sized,

fn and<P, B, E>(self, other: P) -> And<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow only if self and other return Action::Follow. Read more

fn or<P, B, E>(self, other: P) -> Or<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow if either self or other returns Action::Follow. Read more

impl<T> ToOwned for T
where T: Clone,

type Owned = T

The resulting type after obtaining ownership.

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

impl<T> WithSubscriber for T

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more

impl<T> DeserializeOwned for T
where T: for<'de> Deserialize<'de>,

impl<T> ErasedDestructor for T
where T: 'static,