Skip to main content

Gemini

Struct Gemini 

Source
pub struct Gemini { /* private fields */ }
Expand description

The main client for interacting with the Gemini API.

Use Gemini::new or Gemini::new_with_timeout to create an instance. You can configure various aspects of the request like model, system instructions, generation config, safety settings, and tools using the provided builder-like methods.

Implementations§

Source§

impl Gemini

Source

pub fn new( api_key: impl Into<String>, model: impl Into<String>, sys_prompt: Option<SystemInstruction>, ) -> Self

Creates a new Gemini client.

§Arguments
Examples found in repository?
examples/multimodal.rs (line 11)
8async fn raw_multimodal() {
9    let mut session = Session::new(6);
10    let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12
13    session.ask("Where is there in this pdf");
14    session.ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());
15
16    let response = ai.ask(&mut session).await.unwrap();
17    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
18}
19#[tokio::main]
20async fn main() {
21    let mut session = Session::new(6);
22    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
23    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
24
25    println!("--- Multimodal (Images/Files) Example ---");
26
27    // Use MarkdownToParts to easily parse a string with image/file markers
28    // It supports both URLs and local file paths!
29    let content = "Describe this image: ![image](https://www.google.com/images/branding/googlelogo/1x/googlelogo_color_272x92dp.png)";
30    println!("Processing: {}", content);
31
32    let parts = MarkdownToParts::new(content, |_| mime::IMAGE_PNG)
33        .await
34        .process();
35
36    let response = ai.ask(session.ask_parts(parts)).await.unwrap();
37
38    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
39}
More examples
Hide additional examples
examples/structured_output.rs (line 27)
24async fn main() {
25    let mut session = Session::new(2).set_remember_reply(false);
26    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
27    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
28
29    println!("--- Structured Output (JSON Mode) Example ---");
30
31    // Enable JSON mode by passing the generated schema
32    let ai = ai.set_json_mode(MovieReview::gemini_schema());
33
34    let prompt = "Give me a review for the movie Interstellar.";
35    println!("User: {}", prompt);
36
37    let response = ai.ask(session.ask(prompt)).await.unwrap();
38
39    // Extract and deserialize the JSON response
40    if let Ok(review) = response.get_json::<MovieReview>() {
41        println!("\nGemini (Structured):");
42        println!("{:#?}", review);
43    } else {
44        println!("\nFailed to parse JSON response: {}", response.get_chat().get_text_no_think(""));
45    }
46}
examples/thinking.rs (line 12)
7async fn main() {
8    let mut session = Session::new(4);
9    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
10    
11    // Note: Thinking mode requires a supported model like gemini-2.0-flash-thinking-exp
12    let ai = Gemini::new(api_key, "gemini-2.0-flash-thinking-exp", None)
13        .set_thinking_config(ThinkingConfig::new(true, 1024));
14
15    println!("--- Thinking Mode Example ---");
16    let prompt = "How many 'r's are in the word strawberry? Think step by step.";
17    println!("User: {}\n", prompt);
18
19    let response = ai.ask(session.ask(prompt)).await.unwrap();
20
21    // Show the "thoughts" part separately
22    let thoughts = response.get_chat().get_thoughts("\n");
23    if !thoughts.is_empty() {
24        println!("--- Gemini's Thoughts ---\n{}\n", thoughts);
25    }
26
27    // Show the final answer
28    let answer = response.get_chat().get_text_no_think("");
29    println!("--- Gemini's Answer ---\n{}", answer);
30}
examples/basic_chat.rs (line 13)
6async fn main() {
7    // 1. Initialize the session with a history limit (e.g., 6 messages)
8    let mut session = Session::new(6);
9
10    // 2. Create the Gemini client
11    // Get your API key from https://aistudio.google.com/app/apikey
12    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
13    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
14
15    println!("--- Basic Chat Example ---");
16
17    // 3. Ask a question
18    let prompt = "What are the benefits of using Rust for systems programming?";
19    println!("User: {}", prompt);
20
21    let response = ai.ask(session.ask(prompt)).await.unwrap();
22
23    // 4. Print the reply
24    // get_text_no_think("") extracts text and ignores "thought" parts (if any)
25    let reply = response.get_chat().get_text_no_think("");
26    println!("\nGemini: {}", reply);
27
28    // 5. The session now contains the interaction
29    println!("\nMessages in history: {}", session.get_history_length());
30}
examples/streaming.rs (line 11)
8async fn main() {
9    let mut session = Session::new(10);
10    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12
13    println!("--- Streaming Example ---");
14    let prompt = "Write a short poem about crab-like robots on Mars.";
15    println!("User: {}\n", prompt);
16    print!("Gemini: ");
17    stdout().flush().unwrap();
18
19    // Start a streaming request
20    let mut response_stream = ai.ask_as_stream(session.ask(prompt).clone()).await.unwrap();
21
22    while let Some(chunk_result) = response_stream.next().await {
23        match chunk_result {
24            Ok(response) => {
25                // Get the text from the current chunk
26                let text = response.get_chat().get_text_no_think("");
27                print!("{}", text);
28                stdout().flush().unwrap();
29            }
30            Err(e) => {
31                eprintln!("\nError receiving chunk: {:?}", e);
32                break;
33            }
34        }
35    }
36
37    println!("\n\n--- Stream Complete ---");
38    // Note: The session passed to ask_as_stream is updated as you exhaust the stream.
39}
examples/function_calling.rs (line 35)
30async fn main() -> Result<(), Box<dyn Error>> {
31    let mut session = Session::new(10);
32    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
33    
34    // 1. Initialize Gemini and register tools
35    let ai = Gemini::new(api_key, "gemini-2.5-flash", None)
36        .set_tools(vec![Tool::FunctionDeclarations(vec![
37            add_numbers::gemini_schema(),
38            get_temperature::gemini_schema(),
39        ])]);
40
41    println!("--- Function Calling Example ---");
42    let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
43    println!("User: {}\n", prompt);
44
45    // 2. Ask Gemini. It might reply with one or more function calls.
46    let mut response = ai.ask(session.ask(prompt)).await?;
47
48    // 3. Loop to handle potential multiple rounds of function calls
49    loop {
50        if response.get_chat().has_function_call() {
51            println!("Gemini requested function calls...");
52            
53            // 4. Use the macro to execute all requested calls and update the session
54            let results = execute_function_calls!(session, add_numbers, get_temperature);
55            
56            for (idx, res) in results.iter().enumerate() {
57                if let Some(r) = res {
58                    println!("  Call #{} result: {:?}", idx, r);
59                }
60            }
61
62            // 5. Send the results back to Gemini to get the final natural language response
63            response = ai.ask(&mut session).await?;
64        } else {
65            // No more function calls, show the final response
66            println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
67            break;
68        }
69    }
70
71    Ok(())
72}
Source

pub fn new_with_timeout( api_key: impl Into<String>, model: impl Into<String>, sys_prompt: Option<SystemInstruction>, api_timeout: Duration, ) -> Self

Creates a new Gemini client with a custom API timeout.

§Arguments
  • api_key - Your Gemini API key.
  • model - The model variation to use.
  • sys_prompt - Optional system instructions.
  • api_timeout - Custom duration for request timeouts.
Source

pub fn set_generation_config(&mut self) -> &mut Value

Returns a mutable reference to the generation configuration. If not already set, initializes it to an empty object.

See Gemini docs for schema details.

Source

pub fn set_tool_config(self, config: ToolConfig) -> Self

Source

pub fn set_thinking_config(self, config: ThinkingConfig) -> Self

Examples found in repository?
examples/thinking.rs (line 13)
7async fn main() {
8    let mut session = Session::new(4);
9    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
10    
11    // Note: Thinking mode requires a supported model like gemini-2.0-flash-thinking-exp
12    let ai = Gemini::new(api_key, "gemini-2.0-flash-thinking-exp", None)
13        .set_thinking_config(ThinkingConfig::new(true, 1024));
14
15    println!("--- Thinking Mode Example ---");
16    let prompt = "How many 'r's are in the word strawberry? Think step by step.";
17    println!("User: {}\n", prompt);
18
19    let response = ai.ask(session.ask(prompt)).await.unwrap();
20
21    // Show the "thoughts" part separately
22    let thoughts = response.get_chat().get_thoughts("\n");
23    if !thoughts.is_empty() {
24        println!("--- Gemini's Thoughts ---\n{}\n", thoughts);
25    }
26
27    // Show the final answer
28    let answer = response.get_chat().get_text_no_think("");
29    println!("--- Gemini's Answer ---\n{}", answer);
30}
Source

pub fn set_model(self, model: impl Into<String>) -> Self

Source

pub fn set_sys_prompt(self, sys_prompt: Option<SystemInstruction>) -> Self

Source

pub fn set_safety_settings(self, settings: Option<Vec<SafetySetting>>) -> Self

Source

pub fn set_api_key(self, api_key: impl Into<String>) -> Self

Source

pub fn set_json_mode(self, schema: Value) -> Self

Sets the response format to JSON mode with a specific schema.

To use a Rust struct as a schema, decorate it with #[gemini_schema] and pass StructName::gemini_schema().

§Arguments
Examples found in repository?
examples/structured_output.rs (line 32)
24async fn main() {
25    let mut session = Session::new(2).set_remember_reply(false);
26    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
27    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
28
29    println!("--- Structured Output (JSON Mode) Example ---");
30
31    // Enable JSON mode by passing the generated schema
32    let ai = ai.set_json_mode(MovieReview::gemini_schema());
33
34    let prompt = "Give me a review for the movie Interstellar.";
35    println!("User: {}", prompt);
36
37    let response = ai.ask(session.ask(prompt)).await.unwrap();
38
39    // Extract and deserialize the JSON response
40    if let Ok(review) = response.get_json::<MovieReview>() {
41        println!("\nGemini (Structured):");
42        println!("{:#?}", review);
43    } else {
44        println!("\nFailed to parse JSON response: {}", response.get_chat().get_text_no_think(""));
45    }
46}
Source

pub fn remove_json_mode(self) -> Self

Source

pub fn set_tools(self, tools: Vec<Tool>) -> Self

Sets the tools (functions) available to the model.

Examples found in repository?
examples/function_calling.rs (lines 36-39)
30async fn main() -> Result<(), Box<dyn Error>> {
31    let mut session = Session::new(10);
32    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
33    
34    // 1. Initialize Gemini and register tools
35    let ai = Gemini::new(api_key, "gemini-2.5-flash", None)
36        .set_tools(vec![Tool::FunctionDeclarations(vec![
37            add_numbers::gemini_schema(),
38            get_temperature::gemini_schema(),
39        ])]);
40
41    println!("--- Function Calling Example ---");
42    let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
43    println!("User: {}\n", prompt);
44
45    // 2. Ask Gemini. It might reply with one or more function calls.
46    let mut response = ai.ask(session.ask(prompt)).await?;
47
48    // 3. Loop to handle potential multiple rounds of function calls
49    loop {
50        if response.get_chat().has_function_call() {
51            println!("Gemini requested function calls...");
52            
53            // 4. Use the macro to execute all requested calls and update the session
54            let results = execute_function_calls!(session, add_numbers, get_temperature);
55            
56            for (idx, res) in results.iter().enumerate() {
57                if let Some(r) = res {
58                    println!("  Call #{} result: {:?}", idx, r);
59                }
60            }
61
62            // 5. Send the results back to Gemini to get the final natural language response
63            response = ai.ask(&mut session).await?;
64        } else {
65            // No more function calls, show the final response
66            println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
67            break;
68        }
69    }
70
71    Ok(())
72}
Source

pub fn remove_tools(self) -> Self

Removes all tools.

Source

pub fn set_cached_content(self, name: impl Into<String>) -> Self

Examples found in repository?
examples/context_caching.rs (line 42)
9async fn main() {
10    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12    let mut session = Session::new(10);
13
14    session.ask("What is there in this pdf".repeat(200)); //Faking big context for example
15    session.ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());
16
17    let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
18        .display_name("Simulated Large Doc")
19        .contents(
20            session
21                .get_history()
22                .into_iter()
23                .map(|e| e.to_owned())
24                .collect(),
25        )
26        .ttl(Duration::from_secs(300))
27        .build().unwrap();
28
29    println!("Creating cache...");
30    match ai.create_cache(&cached_content_req).await {
31        Ok(cache) => {
32            println!("Cache created: {}", cache.name().as_ref().unwrap());
33
34            // 2. Use the cache in a request
35            let mut session = Session::new(10);
36            let prompt = "Summarize the cached document.";
37            println!("User: {}", prompt);
38
39            // Create a new client instance that uses the cache
40            let ai_with_cache = ai
41                .clone()
42                .set_cached_content(cache.name().as_ref().unwrap());
43
44            match ai_with_cache.ask(session.ask(prompt)).await {
45                Ok(response) => {
46                    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
47                }
48                Err(e) => eprintln!("Error asking Gemini: {:?}", e),
49            }
50
51            // 3. List caches
52            println!("\nListing caches...");
53            match ai.list_caches().await {
54                Ok(list) => {
55                    if let Some(caches) = list.cached_contents() {
56                        for c in caches {
57                            println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
58                        }
59                    } else {
60                        println!("No caches found.");
61                    }
62                }
63                Err(e) => eprintln!("Error listing caches: {:?}", e),
64            }
65
66            // 4. Delete the cache
67            println!("\nDeleting cache...");
68            match ai.delete_cache(cache.name().as_ref().unwrap()).await {
69                Ok(_) => println!("Cache deleted."),
70                Err(e) => eprintln!("Error deleting cache: {:?}", e),
71            }
72        }
73        Err(e) => {
74            eprintln!("Failed to create cache: {:?}", e);
75        }
76    }
77}
Source

pub fn remove_cached_content(self) -> Self

Source

pub async fn create_cache( &self, cached_content: &CachedContent, ) -> Result<CachedContent, GeminiResponseError>

Examples found in repository?
examples/context_caching.rs (line 30)
9async fn main() {
10    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12    let mut session = Session::new(10);
13
14    session.ask("What is there in this pdf".repeat(200)); //Faking big context for example
15    session.ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());
16
17    let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
18        .display_name("Simulated Large Doc")
19        .contents(
20            session
21                .get_history()
22                .into_iter()
23                .map(|e| e.to_owned())
24                .collect(),
25        )
26        .ttl(Duration::from_secs(300))
27        .build().unwrap();
28
29    println!("Creating cache...");
30    match ai.create_cache(&cached_content_req).await {
31        Ok(cache) => {
32            println!("Cache created: {}", cache.name().as_ref().unwrap());
33
34            // 2. Use the cache in a request
35            let mut session = Session::new(10);
36            let prompt = "Summarize the cached document.";
37            println!("User: {}", prompt);
38
39            // Create a new client instance that uses the cache
40            let ai_with_cache = ai
41                .clone()
42                .set_cached_content(cache.name().as_ref().unwrap());
43
44            match ai_with_cache.ask(session.ask(prompt)).await {
45                Ok(response) => {
46                    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
47                }
48                Err(e) => eprintln!("Error asking Gemini: {:?}", e),
49            }
50
51            // 3. List caches
52            println!("\nListing caches...");
53            match ai.list_caches().await {
54                Ok(list) => {
55                    if let Some(caches) = list.cached_contents() {
56                        for c in caches {
57                            println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
58                        }
59                    } else {
60                        println!("No caches found.");
61                    }
62                }
63                Err(e) => eprintln!("Error listing caches: {:?}", e),
64            }
65
66            // 4. Delete the cache
67            println!("\nDeleting cache...");
68            match ai.delete_cache(cache.name().as_ref().unwrap()).await {
69                Ok(_) => println!("Cache deleted."),
70                Err(e) => eprintln!("Error deleting cache: {:?}", e),
71            }
72        }
73        Err(e) => {
74            eprintln!("Failed to create cache: {:?}", e);
75        }
76    }
77}
Source

pub async fn list_caches( &self, ) -> Result<CachedContentList, GeminiResponseError>

Examples found in repository?
examples/context_caching.rs (line 53)
9async fn main() {
10    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12    let mut session = Session::new(10);
13
14    session.ask("What is there in this pdf".repeat(200)); //Faking big context for example
15    session.ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());
16
17    let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
18        .display_name("Simulated Large Doc")
19        .contents(
20            session
21                .get_history()
22                .into_iter()
23                .map(|e| e.to_owned())
24                .collect(),
25        )
26        .ttl(Duration::from_secs(300))
27        .build().unwrap();
28
29    println!("Creating cache...");
30    match ai.create_cache(&cached_content_req).await {
31        Ok(cache) => {
32            println!("Cache created: {}", cache.name().as_ref().unwrap());
33
34            // 2. Use the cache in a request
35            let mut session = Session::new(10);
36            let prompt = "Summarize the cached document.";
37            println!("User: {}", prompt);
38
39            // Create a new client instance that uses the cache
40            let ai_with_cache = ai
41                .clone()
42                .set_cached_content(cache.name().as_ref().unwrap());
43
44            match ai_with_cache.ask(session.ask(prompt)).await {
45                Ok(response) => {
46                    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
47                }
48                Err(e) => eprintln!("Error asking Gemini: {:?}", e),
49            }
50
51            // 3. List caches
52            println!("\nListing caches...");
53            match ai.list_caches().await {
54                Ok(list) => {
55                    if let Some(caches) = list.cached_contents() {
56                        for c in caches {
57                            println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
58                        }
59                    } else {
60                        println!("No caches found.");
61                    }
62                }
63                Err(e) => eprintln!("Error listing caches: {:?}", e),
64            }
65
66            // 4. Delete the cache
67            println!("\nDeleting cache...");
68            match ai.delete_cache(cache.name().as_ref().unwrap()).await {
69                Ok(_) => println!("Cache deleted."),
70                Err(e) => eprintln!("Error deleting cache: {:?}", e),
71            }
72        }
73        Err(e) => {
74            eprintln!("Failed to create cache: {:?}", e);
75        }
76    }
77}
Source

pub async fn get_cache( &self, name: &str, ) -> Result<CachedContent, GeminiResponseError>

Source

pub async fn update_cache( &self, name: &str, update: &CachedContentUpdate, ) -> Result<CachedContent, GeminiResponseError>

Source

pub async fn delete_cache(&self, name: &str) -> Result<(), GeminiResponseError>

Examples found in repository?
examples/context_caching.rs (line 68)
9async fn main() {
10    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12    let mut session = Session::new(10);
13
14    session.ask("What is there in this pdf".repeat(200)); //Faking big context for example
15    session.ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());
16
17    let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
18        .display_name("Simulated Large Doc")
19        .contents(
20            session
21                .get_history()
22                .into_iter()
23                .map(|e| e.to_owned())
24                .collect(),
25        )
26        .ttl(Duration::from_secs(300))
27        .build().unwrap();
28
29    println!("Creating cache...");
30    match ai.create_cache(&cached_content_req).await {
31        Ok(cache) => {
32            println!("Cache created: {}", cache.name().as_ref().unwrap());
33
34            // 2. Use the cache in a request
35            let mut session = Session::new(10);
36            let prompt = "Summarize the cached document.";
37            println!("User: {}", prompt);
38
39            // Create a new client instance that uses the cache
40            let ai_with_cache = ai
41                .clone()
42                .set_cached_content(cache.name().as_ref().unwrap());
43
44            match ai_with_cache.ask(session.ask(prompt)).await {
45                Ok(response) => {
46                    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
47                }
48                Err(e) => eprintln!("Error asking Gemini: {:?}", e),
49            }
50
51            // 3. List caches
52            println!("\nListing caches...");
53            match ai.list_caches().await {
54                Ok(list) => {
55                    if let Some(caches) = list.cached_contents() {
56                        for c in caches {
57                            println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
58                        }
59                    } else {
60                        println!("No caches found.");
61                    }
62                }
63                Err(e) => eprintln!("Error listing caches: {:?}", e),
64            }
65
66            // 4. Delete the cache
67            println!("\nDeleting cache...");
68            match ai.delete_cache(cache.name().as_ref().unwrap()).await {
69                Ok(_) => println!("Cache deleted."),
70                Err(e) => eprintln!("Error deleting cache: {:?}", e),
71            }
72        }
73        Err(e) => {
74            eprintln!("Failed to create cache: {:?}", e);
75        }
76    }
77}
Source

pub async fn ask( &self, session: &mut Session, ) -> Result<GeminiResponse, GeminiResponseError>

Sends a prompt to the model and waits for the full response.

Updates the session history with the model’s reply.

§Errors

Returns GeminiResponseError::NothingToRespond if the last message in history is from the model.

Examples found in repository?
examples/multimodal.rs (line 16)
8async fn raw_multimodal() {
9    let mut session = Session::new(6);
10    let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12
13    session.ask("Where is there in this pdf");
14    session.ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());
15
16    let response = ai.ask(&mut session).await.unwrap();
17    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
18}
19#[tokio::main]
20async fn main() {
21    let mut session = Session::new(6);
22    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
23    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
24
25    println!("--- Multimodal (Images/Files) Example ---");
26
27    // Use MarkdownToParts to easily parse a string with image/file markers
28    // It supports both URLs and local file paths!
29    let content = "Describe this image: ![image](https://www.google.com/images/branding/googlelogo/1x/googlelogo_color_272x92dp.png)";
30    println!("Processing: {}", content);
31
32    let parts = MarkdownToParts::new(content, |_| mime::IMAGE_PNG)
33        .await
34        .process();
35
36    let response = ai.ask(session.ask_parts(parts)).await.unwrap();
37
38    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
39}
More examples
Hide additional examples
examples/structured_output.rs (line 37)
24async fn main() {
25    let mut session = Session::new(2).set_remember_reply(false);
26    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
27    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
28
29    println!("--- Structured Output (JSON Mode) Example ---");
30
31    // Enable JSON mode by passing the generated schema
32    let ai = ai.set_json_mode(MovieReview::gemini_schema());
33
34    let prompt = "Give me a review for the movie Interstellar.";
35    println!("User: {}", prompt);
36
37    let response = ai.ask(session.ask(prompt)).await.unwrap();
38
39    // Extract and deserialize the JSON response
40    if let Ok(review) = response.get_json::<MovieReview>() {
41        println!("\nGemini (Structured):");
42        println!("{:#?}", review);
43    } else {
44        println!("\nFailed to parse JSON response: {}", response.get_chat().get_text_no_think(""));
45    }
46}
examples/thinking.rs (line 19)
7async fn main() {
8    let mut session = Session::new(4);
9    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
10    
11    // Note: Thinking mode requires a supported model like gemini-2.0-flash-thinking-exp
12    let ai = Gemini::new(api_key, "gemini-2.0-flash-thinking-exp", None)
13        .set_thinking_config(ThinkingConfig::new(true, 1024));
14
15    println!("--- Thinking Mode Example ---");
16    let prompt = "How many 'r's are in the word strawberry? Think step by step.";
17    println!("User: {}\n", prompt);
18
19    let response = ai.ask(session.ask(prompt)).await.unwrap();
20
21    // Show the "thoughts" part separately
22    let thoughts = response.get_chat().get_thoughts("\n");
23    if !thoughts.is_empty() {
24        println!("--- Gemini's Thoughts ---\n{}\n", thoughts);
25    }
26
27    // Show the final answer
28    let answer = response.get_chat().get_text_no_think("");
29    println!("--- Gemini's Answer ---\n{}", answer);
30}
examples/basic_chat.rs (line 21)
6async fn main() {
7    // 1. Initialize the session with a history limit (e.g., 6 messages)
8    let mut session = Session::new(6);
9
10    // 2. Create the Gemini client
11    // Get your API key from https://aistudio.google.com/app/apikey
12    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
13    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
14
15    println!("--- Basic Chat Example ---");
16
17    // 3. Ask a question
18    let prompt = "What are the benefits of using Rust for systems programming?";
19    println!("User: {}", prompt);
20
21    let response = ai.ask(session.ask(prompt)).await.unwrap();
22
23    // 4. Print the reply
24    // get_text_no_think("") extracts text and ignores "thought" parts (if any)
25    let reply = response.get_chat().get_text_no_think("");
26    println!("\nGemini: {}", reply);
27
28    // 5. The session now contains the interaction
29    println!("\nMessages in history: {}", session.get_history_length());
30}
examples/function_calling.rs (line 46)
30async fn main() -> Result<(), Box<dyn Error>> {
31    let mut session = Session::new(10);
32    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
33    
34    // 1. Initialize Gemini and register tools
35    let ai = Gemini::new(api_key, "gemini-2.5-flash", None)
36        .set_tools(vec![Tool::FunctionDeclarations(vec![
37            add_numbers::gemini_schema(),
38            get_temperature::gemini_schema(),
39        ])]);
40
41    println!("--- Function Calling Example ---");
42    let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
43    println!("User: {}\n", prompt);
44
45    // 2. Ask Gemini. It might reply with one or more function calls.
46    let mut response = ai.ask(session.ask(prompt)).await?;
47
48    // 3. Loop to handle potential multiple rounds of function calls
49    loop {
50        if response.get_chat().has_function_call() {
51            println!("Gemini requested function calls...");
52            
53            // 4. Use the macro to execute all requested calls and update the session
54            let results = execute_function_calls!(session, add_numbers, get_temperature);
55            
56            for (idx, res) in results.iter().enumerate() {
57                if let Some(r) = res {
58                    println!("  Call #{} result: {:?}", idx, r);
59                }
60            }
61
62            // 5. Send the results back to Gemini to get the final natural language response
63            response = ai.ask(&mut session).await?;
64        } else {
65            // No more function calls, show the final response
66            println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
67            break;
68        }
69    }
70
71    Ok(())
72}
examples/context_caching.rs (line 44)
9async fn main() {
10    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12    let mut session = Session::new(10);
13
14    session.ask("What is there in this pdf".repeat(200)); //Faking big context for example
15    session.ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());
16
17    let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
18        .display_name("Simulated Large Doc")
19        .contents(
20            session
21                .get_history()
22                .into_iter()
23                .map(|e| e.to_owned())
24                .collect(),
25        )
26        .ttl(Duration::from_secs(300))
27        .build().unwrap();
28
29    println!("Creating cache...");
30    match ai.create_cache(&cached_content_req).await {
31        Ok(cache) => {
32            println!("Cache created: {}", cache.name().as_ref().unwrap());
33
34            // 2. Use the cache in a request
35            let mut session = Session::new(10);
36            let prompt = "Summarize the cached document.";
37            println!("User: {}", prompt);
38
39            // Create a new client instance that uses the cache
40            let ai_with_cache = ai
41                .clone()
42                .set_cached_content(cache.name().as_ref().unwrap());
43
44            match ai_with_cache.ask(session.ask(prompt)).await {
45                Ok(response) => {
46                    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
47                }
48                Err(e) => eprintln!("Error asking Gemini: {:?}", e),
49            }
50
51            // 3. List caches
52            println!("\nListing caches...");
53            match ai.list_caches().await {
54                Ok(list) => {
55                    if let Some(caches) = list.cached_contents() {
56                        for c in caches {
57                            println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
58                        }
59                    } else {
60                        println!("No caches found.");
61                    }
62                }
63                Err(e) => eprintln!("Error listing caches: {:?}", e),
64            }
65
66            // 4. Delete the cache
67            println!("\nDeleting cache...");
68            match ai.delete_cache(cache.name().as_ref().unwrap()).await {
69                Ok(_) => println!("Cache deleted."),
70                Err(e) => eprintln!("Error deleting cache: {:?}", e),
71            }
72        }
73        Err(e) => {
74            eprintln!("Failed to create cache: {:?}", e);
75        }
76    }
77}
Source

pub async fn ask_as_stream_with_extractor<F, StreamType>( &self, session: Session, data_extractor: F, ) -> Result<ResponseStream<F, StreamType>, (Session, GeminiResponseError)>
where F: FnMut(&Session, GeminiResponse) -> StreamType,

§Warning

You must read the response stream to completion to ensure the model's reply is stored in the session's context. `data_extractor` is the function used to extract the data that you receive as a stream of futures.

§Example
use futures::StreamExt;
let mut response_stream = gemini.ask_as_stream_with_extractor(session,
     |session, _gemini_response| session.get_last_chat().unwrap().get_text_no_think("\n"))
    .await.unwrap(); // Use _gemini_response.get_text("") to just get the text received in every chunk
while let Some(response) = response_stream.next().await {
    println!("{}", response.unwrap());
}
Source

pub async fn ask_as_stream( &self, session: Session, ) -> Result<GeminiResponseStream, (Session, GeminiResponseError)>

Sends a prompt to the model and returns a stream of responses.

§Warning

You must exhaust the response stream to ensure the session history is correctly updated.

§Example
use futures::StreamExt;
let mut response_stream = gemini.ask_as_stream(session).await.unwrap();

while let Some(response) = response_stream.next().await {
    if let Ok(response) = response {
        println!("{}", response.get_chat().get_text_no_think("\n"));
    }
}
Examples found in repository?
examples/streaming.rs (line 20)
8async fn main() {
9    let mut session = Session::new(10);
10    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12
13    println!("--- Streaming Example ---");
14    let prompt = "Write a short poem about crab-like robots on Mars.";
15    println!("User: {}\n", prompt);
16    print!("Gemini: ");
17    stdout().flush().unwrap();
18
19    // Start a streaming request
20    let mut response_stream = ai.ask_as_stream(session.ask(prompt).clone()).await.unwrap();
21
22    while let Some(chunk_result) = response_stream.next().await {
23        match chunk_result {
24            Ok(response) => {
25                // Get the text from the current chunk
26                let text = response.get_chat().get_text_no_think("");
27                print!("{}", text);
28                stdout().flush().unwrap();
29            }
30            Err(e) => {
31                eprintln!("\nError receiving chunk: {:?}", e);
32                break;
33            }
34        }
35    }
36
37    println!("\n\n--- Stream Complete ---");
38    // Note: The session passed to ask_as_stream is updated as you exhaust the stream.
39}

Trait Implementations§

Source§

impl Clone for Gemini

Source§

fn clone(&self) -> Gemini

Returns a duplicate of the value. Read more
1.0.0 · Source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
Source§

impl Debug for Gemini

Source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
Source§

impl Default for Gemini

Source§

fn default() -> Gemini

Returns the “default value” for a type. Read more

Auto Trait Implementations§

§

impl Freeze for Gemini

§

impl !RefUnwindSafe for Gemini

§

impl Send for Gemini

§

impl Sync for Gemini

§

impl Unpin for Gemini

§

impl !UnwindSafe for Gemini

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> CloneToUninit for T
where T: Clone,

Source§

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T> Instrument for T

Source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
Source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> PolicyExt for T
where T: ?Sized,

Source§

fn and<P, B, E>(self, other: P) -> And<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow only if self and other return Action::Follow. Read more
Source§

fn or<P, B, E>(self, other: P) -> Or<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow if either self or other returns Action::Follow. Read more
Source§

impl<T> ToOwned for T
where T: Clone,

Source§

type Owned = T

The resulting type after obtaining ownership.
Source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
Source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<T> WithSubscriber for T

Source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more