pub struct Gemini { /* private fields */ }
The main client for interacting with the Gemini API.
Use Gemini::new or Gemini::new_with_timeout to create an instance.
You can configure aspects of the request such as the model, system instructions,
generation config, safety settings, and tools using the provided builder-style methods.
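For orientation, a minimal construction sketch (the model name and configuration values are illustrative; crate items are assumed to be in scope):

// Construct a client and chain configuration; each builder call returns Self.
let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
let ai = Gemini::new(api_key, "gemini-2.5-flash", None)
    .set_safety_settings(None)
    .set_thinking_config(ThinkingConfig::new(true, 1024));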
Implementations

impl Gemini
pub fn new(
    api_key: impl Into<String>,
    model: impl Into<String>,
    sys_prompt: Option<SystemInstruction>,
) -> Self
Creates a new Gemini client.
§Arguments
- api_key - Your Gemini API key. Get one from Google AI Studio.
- model - The model variant to use (e.g., "gemini-2.5-flash"). See model variations.
- sys_prompt - Optional system instructions. See system instructions.
Examples found in repository
The basic chat example below is the canonical starting point. Multimodal, structured-output, thinking, streaming, and function-calling examples appear under ask, set_json_mode, set_thinking_config, ask_as_stream, and set_tools respectively.

async fn main() {
    // 1. Initialize the session with a history limit (e.g., 6 messages)
    let mut session = Session::new(6);

    // 2. Create the Gemini client
    // Get your API key from https://aistudio.google.com/app/apikey
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);

    println!("--- Basic Chat Example ---");

    // 3. Ask a question
    let prompt = "What are the benefits of using Rust for systems programming?";
    println!("User: {}", prompt);

    let response = ai.ask(session.ask(prompt)).await.unwrap();

    // 4. Print the reply
    // get_text_no_think("") extracts text and ignores "thought" parts (if any)
    let reply = response.get_chat().get_text_no_think("");
    println!("\nGemini: {}", reply);

    // 5. The session now contains the interaction
    println!("\nMessages in history: {}", session.get_history_length());
}
pub fn new_with_timeout(
    api_key: impl Into<String>,
    model: impl Into<String>,
    sys_prompt: Option<SystemInstruction>,
    api_timeout: Duration,
) -> Self
Creates a new Gemini client with a custom API timeout.
§Arguments
- api_key - Your Gemini API key.
- model - The model variant to use.
- sys_prompt - Optional system instructions.
- api_timeout - Custom duration for request timeouts.
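For example, a brief sketch with a longer timeout (the 120-second value is illustrative):

use std::time::Duration;

// Allow slow, long-form generations before the request times out.
let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
let ai = Gemini::new_with_timeout(api_key, "gemini-2.5-flash", None, Duration::from_secs(120));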
pub fn set_generation_config(&mut self) -> &mut Value
Returns a mutable reference to the generation configuration. If not already set, initializes it to an empty object.
See Gemini docs for schema details.
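A hedged sketch of tweaking the config in place (assuming Value is serde_json::Value; the field names follow the Gemini generationConfig schema):

let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
let mut ai = Gemini::new(api_key, "gemini-2.5-flash", None);

// Initialized to an empty object if unset; index assignment inserts the keys.
let config = ai.set_generation_config();
config["temperature"] = serde_json::json!(0.2);
config["maxOutputTokens"] = serde_json::json!(1024);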
pub fn set_tool_config(self, config: ToolConfig) -> Self
pub fn set_thinking_config(self, config: ThinkingConfig) -> Self
Examples found in repository
async fn main() {
    let mut session = Session::new(4);
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");

    // Note: Thinking mode requires a supported model like gemini-2.0-flash-thinking-exp
    let ai = Gemini::new(api_key, "gemini-2.0-flash-thinking-exp", None)
        .set_thinking_config(ThinkingConfig::new(true, 1024));

    println!("--- Thinking Mode Example ---");
    let prompt = "How many 'r's are in the word strawberry? Think step by step.";
    println!("User: {}\n", prompt);

    let response = ai.ask(session.ask(prompt)).await.unwrap();

    // Show the "thoughts" part separately
    let thoughts = response.get_chat().get_thoughts("\n");
    if !thoughts.is_empty() {
        println!("--- Gemini's Thoughts ---\n{}\n", thoughts);
    }

    // Show the final answer
    let answer = response.get_chat().get_text_no_think("");
    println!("--- Gemini's Answer ---\n{}", answer);
}

pub fn set_model(self, model: impl Into<String>) -> Self
pub fn set_sys_prompt(self, sys_prompt: Option<SystemInstruction>) -> Self
pub fn set_safety_settings(self, settings: Option<Vec<SafetySetting>>) -> Self
pub fn set_api_key(self, api_key: impl Into<String>) -> Self
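These setters consume and return Self, so they chain; a short illustrative sketch:

let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
let ai = Gemini::new(api_key, "gemini-2.5-flash", None)
    .set_model("gemini-2.5-pro")   // swap the model after construction
    .set_sys_prompt(None)
    .set_safety_settings(None);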
pub fn set_json_mode(self, schema: Value) -> Self
Sets the response format to JSON mode with a specific schema.
To use a Rust struct as a schema, decorate it with #[gemini_schema] and pass
StructName::gemini_schema().
§Arguments
- schema - The JSON schema for the response. See the Gemini Schema docs.
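A hedged sketch of the derivation pattern described above (the struct, its fields, and the exact derives required are illustrative assumptions; the example below uses the same MovieReview name):

#[gemini_schema]
#[derive(Debug, serde::Deserialize)] // Deserialize is assumed so get_json::<MovieReview>() can parse the reply
struct MovieReview {
    title: String,
    rating: f32,    // hypothetical fields; shape them to your schema
    summary: String,
}

let ai = ai.set_json_mode(MovieReview::gemini_schema());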
Examples found in repository
async fn main() {
    let mut session = Session::new(2).set_remember_reply(false);
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);

    println!("--- Structured Output (JSON Mode) Example ---");

    // Enable JSON mode by passing the generated schema
    let ai = ai.set_json_mode(MovieReview::gemini_schema());

    let prompt = "Give me a review for the movie Interstellar.";
    println!("User: {}", prompt);

    let response = ai.ask(session.ask(prompt)).await.unwrap();

    // Extract and deserialize the JSON response
    if let Ok(review) = response.get_json::<MovieReview>() {
        println!("\nGemini (Structured):");
        println!("{:#?}", review);
    } else {
        println!("\nFailed to parse JSON response: {}", response.get_chat().get_text_no_think(""));
    }
}

pub fn remove_json_mode(self) -> Self
pub fn set_tools(self, tools: Vec<Tool>) -> Self
Sets the tools (functions) available to the model.
Examples found in repository
async fn main() -> Result<(), Box<dyn Error>> {
    let mut session = Session::new(10);
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");

    // 1. Initialize Gemini and register tools
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None)
        .set_tools(vec![Tool::FunctionDeclarations(vec![
            add_numbers::gemini_schema(),
            get_temperature::gemini_schema(),
        ])]);

    println!("--- Function Calling Example ---");
    let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
    println!("User: {}\n", prompt);

    // 2. Ask Gemini. It might reply with one or more function calls.
    let mut response = ai.ask(session.ask(prompt)).await?;

    // 3. Loop to handle potentially multiple rounds of function calls
    loop {
        if response.get_chat().has_function_call() {
            println!("Gemini requested function calls...");

            // 4. Use the macro to execute all requested calls and update the session
            let results = execute_function_calls!(session, add_numbers, get_temperature);

            for (idx, res) in results.iter().enumerate() {
                if let Some(r) = res {
                    println!("  Call #{} result: {:?}", idx, r);
                }
            }

            // 5. Send the results back to Gemini to get the final natural-language response
            response = ai.ask(&mut session).await?;
        } else {
            // No more function calls, show the final response
            println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
            break;
        }
    }

    Ok(())
}
pub fn remove_tools(self) -> Self
Removes all tools.
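Like the other builders it returns Self, so it can be chained; an illustrative one-liner:

// Drop all registered function declarations before plain-text requests.
let ai = ai.remove_tools();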
pub fn set_cached_content(self, name: impl Into<String>) -> Self
Examples found in repository
async fn main() {
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
    let mut session = Session::new(10);

    // 1. Build the content to cache (repeating the prompt fakes a big context for the example)
    session.ask("What is there in this pdf".repeat(200));
    session.ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());

    let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
        .display_name("Simulated Large Doc")
        .contents(
            session
                .get_history()
                .into_iter()
                .map(|e| e.to_owned())
                .collect(),
        )
        .ttl(Duration::from_secs(300))
        .build()
        .unwrap();

    println!("Creating cache...");
    match ai.create_cache(&cached_content_req).await {
        Ok(cache) => {
            println!("Cache created: {}", cache.name().as_ref().unwrap());

            // 2. Use the cache in a request
            let mut session = Session::new(10);
            let prompt = "Summarize the cached document.";
            println!("User: {}", prompt);

            // Create a new client instance that uses the cache
            let ai_with_cache = ai
                .clone()
                .set_cached_content(cache.name().as_ref().unwrap());

            match ai_with_cache.ask(session.ask(prompt)).await {
                Ok(response) => {
                    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
                }
                Err(e) => eprintln!("Error asking Gemini: {:?}", e),
            }

            // 3. List caches
            println!("\nListing caches...");
            match ai.list_caches().await {
                Ok(list) => {
                    if let Some(caches) = list.cached_contents() {
                        for c in caches {
                            println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
                        }
                    } else {
                        println!("No caches found.");
                    }
                }
                Err(e) => eprintln!("Error listing caches: {:?}", e),
            }

            // 4. Delete the cache
            println!("\nDeleting cache...");
            match ai.delete_cache(cache.name().as_ref().unwrap()).await {
                Ok(_) => println!("Cache deleted."),
                Err(e) => eprintln!("Error deleting cache: {:?}", e),
            }
        }
        Err(e) => {
            eprintln!("Failed to create cache: {:?}", e);
        }
    }
}

pub fn remove_cached_content(self) -> Self
pub async fn create_cache(
    &self,
    cached_content: &CachedContent,
) -> Result<CachedContent, GeminiResponseError>
See the caching example under set_cached_content.

pub async fn list_caches(&self) -> Result<CachedContentList, GeminiResponseError>
See the caching example under set_cached_content.

pub async fn get_cache(&self, name: &str) -> Result<CachedContent, GeminiResponseError>
pub async fn update_cache(&self, name: &str, update: &CachedContentUpdate) -> Result<CachedContent, GeminiResponseError>
pub async fn delete_cache(&self, name: &str) -> Result<(), GeminiResponseError>
See the caching example under set_cached_content.

pub async fn ask(
    &self,
    session: &mut Session,
) -> Result<GeminiResponse, GeminiResponseError>
Sends a prompt to the model and waits for the full response.
Updates the session history with the model’s reply.
§Errors
Returns GeminiResponseError::NothingToRespond if the last message in history is from the model.
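A hedged sketch of handling that variant alongside other errors (the match assumes only the variant named above):

match ai.ask(&mut session).await {
    Ok(response) => println!("{}", response.get_chat().get_text_no_think("")),
    Err(GeminiResponseError::NothingToRespond) => {
        // The last history entry is already a model reply; add a user message before asking again.
    }
    Err(e) => eprintln!("Request failed: {:?}", e),
}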
Examples found in repository
async fn raw_multimodal() {
    let mut session = Session::new(6);
    let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);

    session.ask("What is there in this pdf");
    session.ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());

    let response = ai.ask(&mut session).await.unwrap();
    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
}

#[tokio::main]
async fn main() {
    let mut session = Session::new(6);
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);

    println!("--- Multimodal (Images/Files) Example ---");

    // Use MarkdownToParts to easily parse a string with image/file markers.
    // It supports both URLs and local file paths!
    // NOTE: the original marker was lost in extraction; a placeholder markdown image marker is assumed here.
    let content = "Describe this image: ![image](image.png)";
    println!("Processing: {}", content);

    let parts = MarkdownToParts::new(content, |_| mime::IMAGE_PNG)
        .await
        .process();

    let response = ai.ask(session.ask_parts(parts)).await.unwrap();

    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
}
More examples: see the structured-output, thinking, basic chat, function-calling, and caching examples under set_json_mode, set_thinking_config, Gemini::new, set_tools, and set_cached_content.

pub async fn ask_as_stream_with_extractor<F, StreamType>(
    &self,
    session: Session,
    data_extractor: F,
) -> Result<ResponseStream<F, StreamType>, (Session, GeminiResponseError)>
§Warning
You must read the response stream to completion for the reply to be stored in the session's context.
The data_extractor closure extracts the data yielded by each item of the stream.
§Example
use futures::StreamExt;

// Use _gemini_response.get_text("") to get just the text received in each chunk.
let mut response_stream = gemini
    .ask_as_stream_with_extractor(session, |session, _gemini_response| {
        session.get_last_chat().unwrap().get_text_no_think("\n")
    })
    .await
    .unwrap();

while let Some(response) = response_stream.next().await {
    println!("{}", response.unwrap());
}

pub async fn ask_as_stream(
    &self,
    session: Session,
) -> Result<GeminiResponseStream, (Session, GeminiResponseError)>
Sends a prompt to the model and returns a stream of responses.
§Warning
You must exhaust the response stream to ensure the session history is correctly updated.
§Example
use futures::StreamExt;

let mut response_stream = gemini.ask_as_stream(session).await.unwrap();
while let Some(response) = response_stream.next().await {
    if let Ok(response) = response {
        println!("{}", response.get_chat().get_text_no_think("\n"));
    }
}

Examples found in repository
async fn main() {
    let mut session = Session::new(10);
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);

    println!("--- Streaming Example ---");
    let prompt = "Write a short poem about crab-like robots on Mars.";
    println!("User: {}\n", prompt);
    print!("Gemini: ");
    stdout().flush().unwrap();

    // Start a streaming request
    let mut response_stream = ai.ask_as_stream(session.ask(prompt).clone()).await.unwrap();

    while let Some(chunk_result) = response_stream.next().await {
        match chunk_result {
            Ok(response) => {
                // Get the text from the current chunk
                let text = response.get_chat().get_text_no_think("");
                print!("{}", text);
                stdout().flush().unwrap();
            }
            Err(e) => {
                eprintln!("\nError receiving chunk: {:?}", e);
                break;
            }
        }
    }

    println!("\n\n--- Stream Complete ---");
    // Note: The session passed to ask_as_stream is updated as you exhaust the stream.
}