pub struct Gemini { /* private fields */ }

Expand description
The main client for interacting with the Gemini API.
Use Gemini::new or Gemini::new_with_client to create an instance (Gemini::new_with_timeout is deprecated).
You can configure various aspects of the request like model, system instructions,
generation config, safety settings, and tools using the provided builder-like methods.
Implementations§
Source§impl Gemini
impl Gemini
Sourcepub fn new(
api_key: impl Into<String>,
model: impl Into<String>,
sys_prompt: Option<SystemInstruction>,
) -> Self
pub fn new( api_key: impl Into<String>, model: impl Into<String>, sys_prompt: Option<SystemInstruction>, ) -> Self
Creates a new Gemini client.
§Arguments
api_key - Your Gemini API key. Get one from Google AI Studio.
model - The model variation to use (e.g., “gemini-2.5-flash”). See model variations.
sys_prompt - Optional system instructions. See system instructions.
Examples found in repository?
24async fn main() {
25 let mut session = Session::new(2).set_remember_reply(false);
26 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
27
28 // Enable JSON mode by passing the generated schema
29 let ai =
30 Gemini::new(api_key, "gemini-2.5-flash", None).set_json_mode(MovieReview::gemini_schema());
31
32 let prompt = "Give me a review for the movie Interstellar.";
33 println!("User: {}", prompt);
34
35 let response = ai.ask(session.ask(prompt)).await.unwrap();
36 let review: MovieReview = response
37 .get_json()
38 .expect("Gemini responded with wrong structure");
39
40 println!("Gemini structured output:\n{review:#?}");
41}More examples
22async fn main() {
23 let mut session = Session::new(6);
24 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
25 let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
26
27 // Use MarkdownToParts to easily parse a string with image/file markers
28 // It supports both URLs and local file paths!
29 let content = "Describe this image: ";
30 println!("Processing: {}", content);
31
32 let parts = MarkdownToParts::new(content, |_| mime::IMAGE_PNG)
33 .await
34 .process();
35
36 let response = ai.ask(session.ask_parts(parts)).await.unwrap();
37
38 println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
39}7async fn main() {
8 let mut session = Session::new(4);
9 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
10
11 // Note: Thinking mode requires a supported model like gemini-2.5+
12 let ai = Gemini::new(api_key, "gemini-3-flash-preview", None)
13 .set_thinking_config(ThinkingConfig::new(true, ThinkingLevel::Low)); //For gemini-2.5 only
14 //budget is allowed. Eg. `.set_thinking_config(ThinkingConfig::new(true, 1024))`
15
16 let prompt = "How many 'r's are in the word strawberry?";
17 println!("User: {}\n", prompt);
18
19 let response = ai.ask(session.ask(prompt)).await.unwrap();
20
21 // Show the "thoughts" part separately
22 let thoughts = response.get_chat().get_thoughts("\n");
23 if !thoughts.is_empty() {
24 println!("--- Gemini's Thoughts ---\n{}\n", thoughts);
25 }
26
27 // Show the final answer
28 let answer = response.get_chat().get_text_no_think("");
29 println!("--- Gemini's Answer ---\n{}", answer);
30}6async fn main() {
7 // 1. Initialize the session with a history limit (e.g., 6 messages)
8 let mut session = Session::new(6);
9
10 // 2. Create the Gemini client
11 // Get your API key from https://aistudio.google.com/app/apikey
12 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
13 let ai = Gemini::new(
14 api_key,
15 "gemini-2.5-flash",
16 Some("You are a senior engineer at google".into()),
17 );
18
19 // 3. Ask a question
20 let prompt = "What are the benefits of using Rust for systems programming?";
21 session.ask(prompt).ask("\nKeep you answer short"); // consecutive asks gets concatenated
22
23 println!("User: {:?}", session.get_last_chat().unwrap().parts());
24 let response = ai.ask(&mut session).await.unwrap();
25
26 // 4. Print the reply
27 // get_text_no_think("") extracts text and ignores "thought" parts (if any)
28 let reply = response.get_chat().get_text_no_think("");
29 println!("\nGemini: {}", reply);
30
31 // 5. The session now contains the interaction
32 println!("\nMessages in history: {}", session.get_history_length());
33}8async fn main() {
9 let mut session = Session::new(10);
10 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11 let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12
13 let prompt = "Write a poem about crab-like robots on Mars.";
14 println!("User: {}\n", prompt);
15 print!("Gemini: ");
16 stdout().flush().unwrap();
17 session.ask(prompt);
18
19 // Start a streaming request
20 let mut response_stream = ai.ask_as_stream(session).await.unwrap();
21
22 while let Some(chunk_result) = response_stream.next().await {
23 match chunk_result {
24 Ok(response) => {
25 // Get the text from the current chunk
26 let text = response.get_chat().get_text_no_think("");
27 print!("{text}");
28 stdout().flush().unwrap();
29 }
30 Err(e) => {
31 eprintln!("\nError receiving chunk: {e}",);
32 break;
33 }
34 }
35 }
36
37 println!("\n\n--- Stream Complete ---");
38 // Note: The session passed to ask_as_stream is updated as you exhaust the stream.
39 session = response_stream.get_session_owned();
40 println!("Updated session: {session:?}")
41}30async fn main() -> Result<(), Box<dyn Error>> {
31 let mut session = Session::new(10);
32 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
33
34 // 1. Initialize Gemini and register tools
35 let ai =
36 Gemini::new(api_key, "gemini-2.5-flash", None).set_tools(vec![Tool::FunctionDeclarations(
37 vec![
38 add_numbers::gemini_schema(),
39 get_temperature::gemini_schema(),
40 ],
41 )]);
42
43 let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
44 println!("User: {}\n", prompt);
45
46 // 2. Ask Gemini. It might reply with one or more function calls.
47 let mut response = ai.ask(session.ask(prompt)).await?;
48
49 // 3. Loop to handle potential multiple rounds of function calls
50 loop {
51 if response.get_chat().has_function_call() {
52 println!("Gemini requested function calls...");
53
54 // 4. Use the macro to execute all requested calls and update the session
55 let results = execute_function_calls!(session, add_numbers, get_temperature);
56
57 for (idx, res) in results.iter().enumerate() {
58 if let Some(r) = res {
59 println!(" Call #{} result: {:?}", idx, r);
60 }
61 }
62
63 // 5. Send the results back to Gemini to get the final natural language response
64 response = ai.ask(&mut session).await?;
65 } else {
66 // No more function calls, show the final response
67 println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
68 break;
69 }
70 }
71
72 Ok(())
73}Sourcepub fn new_with_timeout(
api_key: impl Into<String>,
model: impl Into<String>,
sys_prompt: Option<SystemInstruction>,
api_timeout: Duration,
) -> Self
👎Deprecated
pub fn new_with_timeout( api_key: impl Into<String>, model: impl Into<String>, sys_prompt: Option<SystemInstruction>, api_timeout: Duration, ) -> Self
Creates a new Gemini client with a custom API timeout.
§Arguments
api_key - Your Gemini API key.
model - The model variation to use.
sys_prompt - Optional system instructions.
api_timeout - Custom duration for request timeouts.
Sourcepub fn new_with_client(
api_key: impl Into<String>,
model: impl Into<String>,
sys_prompt: Option<SystemInstruction>,
client: Client,
) -> Self
pub fn new_with_client( api_key: impl Into<String>, model: impl Into<String>, sys_prompt: Option<SystemInstruction>, client: Client, ) -> Self
Creates a new Gemini client with a custom API reqwest::Client.
§Arguments
api_key - Your Gemini API key.
model - The model variation to use.
sys_prompt - Optional system instructions.
client - A reqwest::Client used to send requests to the Gemini API.
Sourcepub fn set_generation_config(&mut self) -> &mut Value
pub fn set_generation_config(&mut self) -> &mut Value
Returns a mutable reference to the generation configuration. If not already set, initializes it to an empty object.
See Gemini docs for schema details.
pub fn set_tool_config(self, config: ToolConfig) -> Self
Sourcepub fn set_thinking_config(self, config: ThinkingConfig) -> Self
pub fn set_thinking_config(self, config: ThinkingConfig) -> Self
Examples found in repository?
7async fn main() {
8 let mut session = Session::new(4);
9 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
10
11 // Note: Thinking mode requires a supported model like gemini-2.5+
12 let ai = Gemini::new(api_key, "gemini-3-flash-preview", None)
13 .set_thinking_config(ThinkingConfig::new(true, ThinkingLevel::Low)); //For gemini-2.5 only
14 //budget is allowed. Eg. `.set_thinking_config(ThinkingConfig::new(true, 1024))`
15
16 let prompt = "How many 'r's are in the word strawberry?";
17 println!("User: {}\n", prompt);
18
19 let response = ai.ask(session.ask(prompt)).await.unwrap();
20
21 // Show the "thoughts" part separately
22 let thoughts = response.get_chat().get_thoughts("\n");
23 if !thoughts.is_empty() {
24 println!("--- Gemini's Thoughts ---\n{}\n", thoughts);
25 }
26
27 // Show the final answer
28 let answer = response.get_chat().get_text_no_think("");
29 println!("--- Gemini's Answer ---\n{}", answer);
30}pub fn set_model(self, model: impl Into<String>) -> Self
Sourcepub fn set_sys_prompt(self, sys_prompt: Option<SystemInstruction>) -> Self
pub fn set_sys_prompt(self, sys_prompt: Option<SystemInstruction>) -> Self
§Warning
Changing the sys_prompt in the middle of a conversation can confuse the model.
pub fn set_safety_settings(self, settings: Option<Vec<SafetySetting>>) -> Self
pub fn set_api_key(self, api_key: impl Into<String>) -> Self
Sourcepub fn set_json_mode(self, schema: Value) -> Self
pub fn set_json_mode(self, schema: Value) -> Self
Sets the response format to JSON mode with a specific schema.
To use a Rust struct as a schema, decorate it with #[gemini_schema] and pass
StructName::gemini_schema().
§Arguments
schema- The JSON schema for the response. See Gemini Schema docs.
Examples found in repository?
24async fn main() {
25 let mut session = Session::new(2).set_remember_reply(false);
26 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
27
28 // Enable JSON mode by passing the generated schema
29 let ai =
30 Gemini::new(api_key, "gemini-2.5-flash", None).set_json_mode(MovieReview::gemini_schema());
31
32 let prompt = "Give me a review for the movie Interstellar.";
33 println!("User: {}", prompt);
34
35 let response = ai.ask(session.ask(prompt)).await.unwrap();
36 let review: MovieReview = response
37 .get_json()
38 .expect("Gemini responded with wrong structure");
39
40 println!("Gemini structured output:\n{review:#?}");
41}pub fn remove_json_mode(self) -> Self
Sourcepub fn set_tools(self, tools: Vec<Tool>) -> Self
pub fn set_tools(self, tools: Vec<Tool>) -> Self
Sets the tools (functions) available to the model.
Examples found in repository?
30async fn main() -> Result<(), Box<dyn Error>> {
31 let mut session = Session::new(10);
32 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
33
34 // 1. Initialize Gemini and register tools
35 let ai =
36 Gemini::new(api_key, "gemini-2.5-flash", None).set_tools(vec![Tool::FunctionDeclarations(
37 vec![
38 add_numbers::gemini_schema(),
39 get_temperature::gemini_schema(),
40 ],
41 )]);
42
43 let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
44 println!("User: {}\n", prompt);
45
46 // 2. Ask Gemini. It might reply with one or more function calls.
47 let mut response = ai.ask(session.ask(prompt)).await?;
48
49 // 3. Loop to handle potential multiple rounds of function calls
50 loop {
51 if response.get_chat().has_function_call() {
52 println!("Gemini requested function calls...");
53
54 // 4. Use the macro to execute all requested calls and update the session
55 let results = execute_function_calls!(session, add_numbers, get_temperature);
56
57 for (idx, res) in results.iter().enumerate() {
58 if let Some(r) = res {
59 println!(" Call #{} result: {:?}", idx, r);
60 }
61 }
62
63 // 5. Send the results back to Gemini to get the final natural language response
64 response = ai.ask(&mut session).await?;
65 } else {
66 // No more function calls, show the final response
67 println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
68 break;
69 }
70 }
71
72 Ok(())
73}Sourcepub fn remove_tools(self) -> Self
pub fn remove_tools(self) -> Self
Removes all tools.
Sourcepub fn set_cached_content(self, name: impl Into<String>) -> Self
pub fn set_cached_content(self, name: impl Into<String>) -> Self
Examples found in repository?
9async fn main() {
10 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11 let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12 let mut session = Session::new(10);
13
14 //Faking big context for example
15 session.ask("What is there in this pdf".repeat(200))
16 .ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());
17
18 let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
19 .display_name("Simulated Large Doc")
20 .contents(session.get_history_owned().into_iter().collect())
21 .ttl(Duration::from_secs(300))
22 .build()
23 .unwrap();
24
25 println!("Creating cache...");
26 match ai.create_cache(&cached_content_req).await {
27 Ok(cache) => {
28 println!("Cache created: {}", cache.name().as_ref().unwrap());
29
30 // 2. Use the cache in a request
31 let mut session = Session::new(10);
32 let prompt = "Summarize the cached document.";
33 println!("User: {}", prompt);
34
35 // Create a new client instance that uses the cache
36 let ai_with_cache = ai
37 .clone()
38 .set_cached_content(cache.name().as_ref().unwrap());
39
40 match ai_with_cache.ask(session.ask(prompt)).await {
41 Ok(response) => {
42 println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
43 }
44 Err(e) => eprintln!("Error asking Gemini: {:?}", e),
45 }
46
47 // 3. List caches
48 println!("\nListing caches...");
49 match ai.list_caches().await {
50 Ok(list) => {
51 if let Some(caches) = list.cached_contents() {
52 for c in caches {
53 println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
54 }
55 } else {
56 println!("No caches found.");
57 }
58 }
59 Err(e) => eprintln!("Error listing caches: {:?}", e),
60 }
61
62 // 4. Delete the cache
63 println!("\nDeleting cache...");
64 match ai.delete_cache(cache.name().as_ref().unwrap()).await {
65 Ok(_) => println!("Cache deleted."),
66 Err(e) => eprintln!("Error deleting cache: {:?}", e),
67 }
68 }
69 Err(e) => {
70 eprintln!("Failed to create cache: {:?}", e);
71 }
72 }
73}pub fn remove_cached_content(self) -> Self
Sourcepub async fn create_cache(
&self,
cached_content: &CachedContent,
) -> Result<CachedContent, GeminiResponseError>
pub async fn create_cache( &self, cached_content: &CachedContent, ) -> Result<CachedContent, GeminiResponseError>
Examples found in repository?
9async fn main() {
10 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11 let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12 let mut session = Session::new(10);
13
14 //Faking big context for example
15 session.ask("What is there in this pdf".repeat(200))
16 .ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());
17
18 let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
19 .display_name("Simulated Large Doc")
20 .contents(session.get_history_owned().into_iter().collect())
21 .ttl(Duration::from_secs(300))
22 .build()
23 .unwrap();
24
25 println!("Creating cache...");
26 match ai.create_cache(&cached_content_req).await {
27 Ok(cache) => {
28 println!("Cache created: {}", cache.name().as_ref().unwrap());
29
30 // 2. Use the cache in a request
31 let mut session = Session::new(10);
32 let prompt = "Summarize the cached document.";
33 println!("User: {}", prompt);
34
35 // Create a new client instance that uses the cache
36 let ai_with_cache = ai
37 .clone()
38 .set_cached_content(cache.name().as_ref().unwrap());
39
40 match ai_with_cache.ask(session.ask(prompt)).await {
41 Ok(response) => {
42 println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
43 }
44 Err(e) => eprintln!("Error asking Gemini: {:?}", e),
45 }
46
47 // 3. List caches
48 println!("\nListing caches...");
49 match ai.list_caches().await {
50 Ok(list) => {
51 if let Some(caches) = list.cached_contents() {
52 for c in caches {
53 println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
54 }
55 } else {
56 println!("No caches found.");
57 }
58 }
59 Err(e) => eprintln!("Error listing caches: {:?}", e),
60 }
61
62 // 4. Delete the cache
63 println!("\nDeleting cache...");
64 match ai.delete_cache(cache.name().as_ref().unwrap()).await {
65 Ok(_) => println!("Cache deleted."),
66 Err(e) => eprintln!("Error deleting cache: {:?}", e),
67 }
68 }
69 Err(e) => {
70 eprintln!("Failed to create cache: {:?}", e);
71 }
72 }
73}Sourcepub async fn list_caches(
&self,
) -> Result<CachedContentList, GeminiResponseError>
pub async fn list_caches( &self, ) -> Result<CachedContentList, GeminiResponseError>
Examples found in repository?
9async fn main() {
10 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11 let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12 let mut session = Session::new(10);
13
14 //Faking big context for example
15 session.ask("What is there in this pdf".repeat(200))
16 .ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());
17
18 let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
19 .display_name("Simulated Large Doc")
20 .contents(session.get_history_owned().into_iter().collect())
21 .ttl(Duration::from_secs(300))
22 .build()
23 .unwrap();
24
25 println!("Creating cache...");
26 match ai.create_cache(&cached_content_req).await {
27 Ok(cache) => {
28 println!("Cache created: {}", cache.name().as_ref().unwrap());
29
30 // 2. Use the cache in a request
31 let mut session = Session::new(10);
32 let prompt = "Summarize the cached document.";
33 println!("User: {}", prompt);
34
35 // Create a new client instance that uses the cache
36 let ai_with_cache = ai
37 .clone()
38 .set_cached_content(cache.name().as_ref().unwrap());
39
40 match ai_with_cache.ask(session.ask(prompt)).await {
41 Ok(response) => {
42 println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
43 }
44 Err(e) => eprintln!("Error asking Gemini: {:?}", e),
45 }
46
47 // 3. List caches
48 println!("\nListing caches...");
49 match ai.list_caches().await {
50 Ok(list) => {
51 if let Some(caches) = list.cached_contents() {
52 for c in caches {
53 println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
54 }
55 } else {
56 println!("No caches found.");
57 }
58 }
59 Err(e) => eprintln!("Error listing caches: {:?}", e),
60 }
61
62 // 4. Delete the cache
63 println!("\nDeleting cache...");
64 match ai.delete_cache(cache.name().as_ref().unwrap()).await {
65 Ok(_) => println!("Cache deleted."),
66 Err(e) => eprintln!("Error deleting cache: {:?}", e),
67 }
68 }
69 Err(e) => {
70 eprintln!("Failed to create cache: {:?}", e);
71 }
72 }
73}pub async fn get_cache( &self, name: &str, ) -> Result<CachedContent, GeminiResponseError>
pub async fn update_cache( &self, name: &str, update: &CachedContentUpdate, ) -> Result<CachedContent, GeminiResponseError>
Sourcepub async fn delete_cache(&self, name: &str) -> Result<(), GeminiResponseError>
pub async fn delete_cache(&self, name: &str) -> Result<(), GeminiResponseError>
Examples found in repository?
9async fn main() {
10 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11 let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12 let mut session = Session::new(10);
13
14 //Faking big context for example
15 session.ask("What is there in this pdf".repeat(200))
16 .ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());
17
18 let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
19 .display_name("Simulated Large Doc")
20 .contents(session.get_history_owned().into_iter().collect())
21 .ttl(Duration::from_secs(300))
22 .build()
23 .unwrap();
24
25 println!("Creating cache...");
26 match ai.create_cache(&cached_content_req).await {
27 Ok(cache) => {
28 println!("Cache created: {}", cache.name().as_ref().unwrap());
29
30 // 2. Use the cache in a request
31 let mut session = Session::new(10);
32 let prompt = "Summarize the cached document.";
33 println!("User: {}", prompt);
34
35 // Create a new client instance that uses the cache
36 let ai_with_cache = ai
37 .clone()
38 .set_cached_content(cache.name().as_ref().unwrap());
39
40 match ai_with_cache.ask(session.ask(prompt)).await {
41 Ok(response) => {
42 println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
43 }
44 Err(e) => eprintln!("Error asking Gemini: {:?}", e),
45 }
46
47 // 3. List caches
48 println!("\nListing caches...");
49 match ai.list_caches().await {
50 Ok(list) => {
51 if let Some(caches) = list.cached_contents() {
52 for c in caches {
53 println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
54 }
55 } else {
56 println!("No caches found.");
57 }
58 }
59 Err(e) => eprintln!("Error listing caches: {:?}", e),
60 }
61
62 // 4. Delete the cache
63 println!("\nDeleting cache...");
64 match ai.delete_cache(cache.name().as_ref().unwrap()).await {
65 Ok(_) => println!("Cache deleted."),
66 Err(e) => eprintln!("Error deleting cache: {:?}", e),
67 }
68 }
69 Err(e) => {
70 eprintln!("Failed to create cache: {:?}", e);
71 }
72 }
73}Sourcepub async fn ask(
&self,
session: &mut Session,
) -> Result<GeminiResponse, GeminiResponseError>
pub async fn ask( &self, session: &mut Session, ) -> Result<GeminiResponse, GeminiResponseError>
Sends a prompt to the model and waits for the full response.
Updates the session history with the model’s reply.
§Errors
Returns GeminiResponseError::NothingToRespond if the last message in history is from the model.
Examples found in repository?
24async fn main() {
25 let mut session = Session::new(2).set_remember_reply(false);
26 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
27
28 // Enable JSON mode by passing the generated schema
29 let ai =
30 Gemini::new(api_key, "gemini-2.5-flash", None).set_json_mode(MovieReview::gemini_schema());
31
32 let prompt = "Give me a review for the movie Interstellar.";
33 println!("User: {}", prompt);
34
35 let response = ai.ask(session.ask(prompt)).await.unwrap();
36 let review: MovieReview = response
37 .get_json()
38 .expect("Gemini responded with wrong structure");
39
40 println!("Gemini structured output:\n{review:#?}");
41}More examples
22async fn main() {
23 let mut session = Session::new(6);
24 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
25 let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
26
27 // Use MarkdownToParts to easily parse a string with image/file markers
28 // It supports both URLs and local file paths!
29 let content = "Describe this image: ";
30 println!("Processing: {}", content);
31
32 let parts = MarkdownToParts::new(content, |_| mime::IMAGE_PNG)
33 .await
34 .process();
35
36 let response = ai.ask(session.ask_parts(parts)).await.unwrap();
37
38 println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
39}7async fn main() {
8 let mut session = Session::new(4);
9 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
10
11 // Note: Thinking mode requires a supported model like gemini-2.5+
12 let ai = Gemini::new(api_key, "gemini-3-flash-preview", None)
13 .set_thinking_config(ThinkingConfig::new(true, ThinkingLevel::Low)); //For gemini-2.5 only
14 //budget is allowed. Eg. `.set_thinking_config(ThinkingConfig::new(true, 1024))`
15
16 let prompt = "How many 'r's are in the word strawberry?";
17 println!("User: {}\n", prompt);
18
19 let response = ai.ask(session.ask(prompt)).await.unwrap();
20
21 // Show the "thoughts" part separately
22 let thoughts = response.get_chat().get_thoughts("\n");
23 if !thoughts.is_empty() {
24 println!("--- Gemini's Thoughts ---\n{}\n", thoughts);
25 }
26
27 // Show the final answer
28 let answer = response.get_chat().get_text_no_think("");
29 println!("--- Gemini's Answer ---\n{}", answer);
30}6async fn main() {
7 // 1. Initialize the session with a history limit (e.g., 6 messages)
8 let mut session = Session::new(6);
9
10 // 2. Create the Gemini client
11 // Get your API key from https://aistudio.google.com/app/apikey
12 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
13 let ai = Gemini::new(
14 api_key,
15 "gemini-2.5-flash",
16 Some("You are a senior engineer at google".into()),
17 );
18
19 // 3. Ask a question
20 let prompt = "What are the benefits of using Rust for systems programming?";
21 session.ask(prompt).ask("\nKeep you answer short"); // consecutive asks gets concatenated
22
23 println!("User: {:?}", session.get_last_chat().unwrap().parts());
24 let response = ai.ask(&mut session).await.unwrap();
25
26 // 4. Print the reply
27 // get_text_no_think("") extracts text and ignores "thought" parts (if any)
28 let reply = response.get_chat().get_text_no_think("");
29 println!("\nGemini: {}", reply);
30
31 // 5. The session now contains the interaction
32 println!("\nMessages in history: {}", session.get_history_length());
33}30async fn main() -> Result<(), Box<dyn Error>> {
31 let mut session = Session::new(10);
32 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
33
34 // 1. Initialize Gemini and register tools
35 let ai =
36 Gemini::new(api_key, "gemini-2.5-flash", None).set_tools(vec![Tool::FunctionDeclarations(
37 vec![
38 add_numbers::gemini_schema(),
39 get_temperature::gemini_schema(),
40 ],
41 )]);
42
43 let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
44 println!("User: {}\n", prompt);
45
46 // 2. Ask Gemini. It might reply with one or more function calls.
47 let mut response = ai.ask(session.ask(prompt)).await?;
48
49 // 3. Loop to handle potential multiple rounds of function calls
50 loop {
51 if response.get_chat().has_function_call() {
52 println!("Gemini requested function calls...");
53
54 // 4. Use the macro to execute all requested calls and update the session
55 let results = execute_function_calls!(session, add_numbers, get_temperature);
56
57 for (idx, res) in results.iter().enumerate() {
58 if let Some(r) = res {
59 println!(" Call #{} result: {:?}", idx, r);
60 }
61 }
62
63 // 5. Send the results back to Gemini to get the final natural language response
64 response = ai.ask(&mut session).await?;
65 } else {
66 // No more function calls, show the final response
67 println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
68 break;
69 }
70 }
71
72 Ok(())
73}9async fn main() {
10 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11 let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12 let mut session = Session::new(10);
13
14 //Faking big context for example
15 session.ask("What is there in this pdf".repeat(200))
16 .ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());
17
18 let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
19 .display_name("Simulated Large Doc")
20 .contents(session.get_history_owned().into_iter().collect())
21 .ttl(Duration::from_secs(300))
22 .build()
23 .unwrap();
24
25 println!("Creating cache...");
26 match ai.create_cache(&cached_content_req).await {
27 Ok(cache) => {
28 println!("Cache created: {}", cache.name().as_ref().unwrap());
29
30 // 2. Use the cache in a request
31 let mut session = Session::new(10);
32 let prompt = "Summarize the cached document.";
33 println!("User: {}", prompt);
34
35 // Create a new client instance that uses the cache
36 let ai_with_cache = ai
37 .clone()
38 .set_cached_content(cache.name().as_ref().unwrap());
39
40 match ai_with_cache.ask(session.ask(prompt)).await {
41 Ok(response) => {
42 println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
43 }
44 Err(e) => eprintln!("Error asking Gemini: {:?}", e),
45 }
46
47 // 3. List caches
48 println!("\nListing caches...");
49 match ai.list_caches().await {
50 Ok(list) => {
51 if let Some(caches) = list.cached_contents() {
52 for c in caches {
53 println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
54 }
55 } else {
56 println!("No caches found.");
57 }
58 }
59 Err(e) => eprintln!("Error listing caches: {:?}", e),
60 }
61
62 // 4. Delete the cache
63 println!("\nDeleting cache...");
64 match ai.delete_cache(cache.name().as_ref().unwrap()).await {
65 Ok(_) => println!("Cache deleted."),
66 Err(e) => eprintln!("Error deleting cache: {:?}", e),
67 }
68 }
69 Err(e) => {
70 eprintln!("Failed to create cache: {:?}", e);
71 }
72 }
73}Sourcepub async fn ask_as_stream_with_extractor<F, StreamType>(
&self,
session: Session,
data_extractor: F,
) -> Result<ResponseStream<F, StreamType>, (Session, GeminiResponseError)>
pub async fn ask_as_stream_with_extractor<F, StreamType>( &self, session: Session, data_extractor: F, ) -> Result<ResponseStream<F, StreamType>, (Session, GeminiResponseError)>
§Warning
You must read the response stream to completion for the reply to be stored in the session's context.
data_extractor is used to extract data that you get as a stream of futures.
§Example
use futures::StreamExt;
let mut response_stream = gemini.ask_as_stream_with_extractor(session,
|session, _gemini_response| session.get_last_chat().unwrap().get_text_no_think("\n"))
.await.unwrap(); // Use _gemini_response.get_text("") to just get the text received in every chunk
while let Some(response) = response_stream.next().await {
println!("{}", response);
}Sourcepub async fn ask_as_stream(
&self,
session: Session,
) -> Result<GeminiResponseStream, (Session, GeminiResponseError)>
pub async fn ask_as_stream( &self, session: Session, ) -> Result<GeminiResponseStream, (Session, GeminiResponseError)>
Sends a prompt to the model and returns a stream of responses.
§Warning
You must exhaust the response stream to ensure the session history is correctly updated.
§Example
use futures::StreamExt;
let mut response_stream = gemini.ask_as_stream(session).await.unwrap();
while let Some(response) = response_stream.next().await {
if let Ok(response) = response {
println!("{}", response.get_chat().get_text_no_think("\n"));
}
}Examples found in repository?
8async fn main() {
9 let mut session = Session::new(10);
10 let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11 let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12
13 let prompt = "Write a poem about crab-like robots on Mars.";
14 println!("User: {}\n", prompt);
15 print!("Gemini: ");
16 stdout().flush().unwrap();
17 session.ask(prompt);
18
19 // Start a streaming request
20 let mut response_stream = ai.ask_as_stream(session).await.unwrap();
21
22 while let Some(chunk_result) = response_stream.next().await {
23 match chunk_result {
24 Ok(response) => {
25 // Get the text from the current chunk
26 let text = response.get_chat().get_text_no_think("");
27 print!("{text}");
28 stdout().flush().unwrap();
29 }
30 Err(e) => {
31 eprintln!("\nError receiving chunk: {e}",);
32 break;
33 }
34 }
35 }
36
37 println!("\n\n--- Stream Complete ---");
38 // Note: The session passed to ask_as_stream is updated as you exhaust the stream.
39 session = response_stream.get_session_owned();
40 println!("Updated session: {session:?}")
41}