pub struct Gemini { /* private fields */ }
The main client for interacting with the Gemini API.
Use Gemini::new or Gemini::new_with_timeout to create an instance.
You can configure various aspects of the request, such as the model, system instructions,
generation config, safety settings, and tools, using the provided builder-like methods.
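A minimal creation sketch (the model name is illustrative; assumes the GEMINI_API_KEY environment variable is set):

use std::env;

// Create a client with no system instructions.
let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
let ai = Gemini::new(api_key, "gemini-2.5-flash", None);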
Implementations

impl Gemini
pub fn new(
    api_key: impl Into<String>,
    model: impl Into<String>,
    sys_prompt: Option<SystemInstruction>,
) -> Self
Creates a new Gemini client.
§Arguments
api_key - Your Gemini API key. Get one from Google AI Studio.
model - The model variation to use (e.g., "gemini-2.5-flash"). See model variations.
sys_prompt - Optional system instructions. See system instructions.
Examples found in repository

async fn raw_multimodal() {
    let mut session = Session::new(6);
    let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);

    session.ask(vec!["What is there in this pdf".into(), InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap().into()]);

    let response = ai.ask(&mut session).await.unwrap();
    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
}

#[tokio::main]
async fn main() {
    let mut session = Session::new(6);
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);

    println!("--- Multimodal (Images/Files) Example ---");

    // Use MarkdownToParts to easily parse a string with image/file markers.
    // It supports both URLs and local file paths!
    let content = "Describe this image: ";
    println!("Processing: {}", content);

    let parts = MarkdownToParts::new(content, |_| mime::IMAGE_PNG)
        .await
        .process();

    let response = ai.ask(session.ask(parts)).await.unwrap();

    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
}

More examples
async fn main() {
    let mut session = Session::new(2).set_remember_reply(false);
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);

    println!("--- Structured Output (JSON Mode) Example ---");

    // Enable JSON mode by passing the generated schema
    let ai = ai.set_json_mode(MovieReview::gemini_schema());

    let prompt = "Give me a review for the movie Interstellar.";
    println!("User: {}", prompt);

    let response = ai.ask(session.ask_string(prompt)).await.unwrap();

    // Extract and deserialize the JSON response
    if let Ok(review) = response.get_json::<MovieReview>() {
        println!("\nGemini (Structured):");
        println!("{:#?}", review);
    } else {
        println!("\nFailed to parse JSON response: {}", response.get_chat().get_text_no_think(""));
    }
}

async fn main() {
    let mut session = Session::new(4);
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");

    // Note: Thinking mode requires a supported model like gemini-2.0-flash-thinking-exp
    let ai = Gemini::new(api_key, "gemini-2.0-flash-thinking-exp", None)
        .set_thinking_config(ThinkingConfig::new(true, 1024));

    println!("--- Thinking Mode Example ---");
    let prompt = "How many 'r's are in the word strawberry? Think step by step.";
    println!("User: {}\n", prompt);

    let response = ai.ask(session.ask_string(prompt)).await.unwrap();

    // Show the "thoughts" part separately
    let thoughts = response.get_chat().get_thoughts("\n");
    if !thoughts.is_empty() {
        println!("--- Gemini's Thoughts ---\n{}\n", thoughts);
    }

    // Show the final answer
    let answer = response.get_chat().get_text_no_think("");
    println!("--- Gemini's Answer ---\n{}", answer);
}

async fn main() {
    // 1. Initialize the session with a history limit (e.g., 6 messages)
    let mut session = Session::new(6);

    // 2. Create the Gemini client
    //    Get your API key from https://aistudio.google.com/app/apikey
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);

    println!("--- Basic Chat Example ---");

    // 3. Ask a question
    let prompt = "What are the benefits of using Rust for systems programming?";
    println!("User: {}", prompt);

    let response = ai.ask(session.ask_string(prompt)).await.unwrap();

    // 4. Print the reply
    //    get_text_no_think("") extracts text and ignores "thought" parts (if any)
    let reply = response.get_chat().get_text_no_think("");
    println!("\nGemini: {}", reply);

    // 5. The session now contains the interaction
    println!("\nMessages in history: {}", session.get_history_length());
}

async fn main() {
    let mut session = Session::new(10);
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);

    println!("--- Streaming Example ---");
    let prompt = "Write a short poem about crab-like robots on Mars.";
    println!("User: {}\n", prompt);
    print!("Gemini: ");
    stdout().flush().unwrap();

    // Start a streaming request
    let mut response_stream = ai.ask_as_stream(session.ask_string(prompt).clone()).await.unwrap();

    while let Some(chunk_result) = response_stream.next().await {
        match chunk_result {
            Ok(response) => {
                // Get the text from the current chunk
                let text = response.get_chat().get_text_no_think("");
                print!("{}", text);
                stdout().flush().unwrap();
            }
            Err(e) => {
                eprintln!("\nError receiving chunk: {:?}", e);
                break;
            }
        }
    }

    println!("\n\n--- Stream Complete ---");
    // Note: The session passed to ask_as_stream is updated as you exhaust the stream.
}

async fn main() -> Result<(), Box<dyn Error>> {
    let mut session = Session::new(10);
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");

    // 1. Initialize Gemini and register tools
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None)
        .set_tools(vec![Tool::FunctionDeclarations(vec![
            add_numbers::gemini_schema(),
            get_temperature::gemini_schema(),
        ])]);

    println!("--- Function Calling Example ---");
    let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
    println!("User: {}\n", prompt);

    // 2. Ask Gemini. It might reply with one or more function calls.
    let mut response = ai.ask(session.ask_string(prompt)).await?;

    // 3. Loop to handle potential multiple rounds of function calls
    loop {
        if response.get_chat().has_function_call() {
            println!("Gemini requested function calls...");

            // 4. Use the macro to execute all requested calls and update the session
            let results = execute_function_calls!(session, add_numbers, get_temperature);

            for (idx, res) in results.iter().enumerate() {
                if let Some(r) = res {
                    println!("  Call #{} result: {:?}", idx, r);
                }
            }

            // 5. Send the results back to Gemini to get the final natural language response
            response = ai.ask(&mut session).await?;
        } else {
            // No more function calls, show the final response
            println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
            break;
        }
    }

    Ok(())
}

pub fn new_with_timeout(
api_key: impl Into<String>,
model: impl Into<String>,
sys_prompt: Option<SystemInstruction>,
api_timeout: Duration,
) -> Self
Creates a new Gemini client with a custom API timeout.
§Arguments
api_key - Your Gemini API key.
model - The model variation to use.
sys_prompt - Optional system instructions.
api_timeout - Custom duration for request timeouts.
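For example, a minimal sketch (the 30-second timeout is illustrative):

use std::time::Duration;

let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
let ai = Gemini::new_with_timeout(api_key, "gemini-2.5-flash", None, Duration::from_secs(30));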
pub fn set_generation_config(&mut self) -> &mut Value
Returns a mutable reference to the generation configuration. If not already set, initializes it to an empty object.
See Gemini docs for schema details.
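A minimal sketch of mutating the config in place, assuming Value is serde_json::Value; the field names are assumptions based on the Gemini REST generationConfig schema, not verified against this crate:

use serde_json::json;

let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
let mut ai = Gemini::new(api_key, "gemini-2.5-flash", None);
let config = ai.set_generation_config();
// Assumed REST-schema field names; adjust to the schema linked above.
config["temperature"] = json!(0.2);
config["maxOutputTokens"] = json!(1024);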
pub fn set_tool_config(self, config: ToolConfig) -> Self
pub fn set_thinking_config(self, config: ThinkingConfig) -> Self
Examples found in repository: see the Thinking Mode example under new above.

pub fn set_model(self, model: impl Into<String>) -> Self
pub fn set_sys_prompt(self, sys_prompt: Option<SystemInstruction>) -> Self
pub fn set_safety_settings(self, settings: Option<Vec<SafetySetting>>) -> Self
pub fn set_api_key(self, api_key: impl Into<String>) -> Self
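These setters follow the same consuming builder pattern as the methods above. A minimal sketch (values are illustrative; passing None to set_safety_settings is assumed to restore the API defaults):

let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
let ai = Gemini::new(api_key, "gemini-2.5-flash", None)
    .set_model("gemini-2.5-pro")   // swap models without rebuilding the client
    .set_sys_prompt(None)          // clear any system instructions
    .set_safety_settings(None);    // assumed to fall back to API defaults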
pub fn set_json_mode(self, schema: Value) -> Self
Sets the response format to JSON mode with a specific schema.
To use a Rust struct as a schema, decorate it with #[gemini_schema] and pass
StructName::gemini_schema().
§Arguments
schema - The JSON schema for the response. See Gemini Schema docs.
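For instance, a hypothetical struct matching the Structured Output example under new above (the derives are assumptions; get_json presumably requires serde::Deserialize):

#[gemini_schema]
#[derive(Debug, serde::Deserialize)]
struct MovieReview {
    // Illustrative fields; define whatever shape you need.
    title: String,
    rating: f32,
    summary: String,
}

let ai = ai.set_json_mode(MovieReview::gemini_schema());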
Examples found in repository: see the Structured Output (JSON Mode) example under new above.

pub fn unset_json_mode(self) -> Self
pub fn set_tools(self, tools: Vec<Tool>) -> Self
Sets the tools (functions) available to the model.
Examples found in repository: see the Function Calling example under new above.

pub fn unset_tools(self) -> Self
Removes all tools.
pub async fn ask(
    &self,
    session: &mut Session,
) -> Result<GeminiResponse, GeminiResponseError>
Sends a prompt to the model and waits for the full response.
Updates the session history with the model’s reply.
§Errors
Returns GeminiResponseError::NothingToRespond if the last message in history is from the model.
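A minimal sketch of handling the result (assuming the session's last message is from the user):

match ai.ask(&mut session).await {
    Ok(response) => println!("{}", response.get_chat().get_text_no_think("")),
    // Covers NothingToRespond as well as transport/API errors.
    Err(e) => eprintln!("request failed: {:?}", e),
}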
Examples found in repository: see the Multimodal, Structured Output, Thinking Mode, Basic Chat, and Function Calling examples under new above.

pub async fn ask_as_stream_with_extractor<F, StreamType>(
&self,
session: Session,
data_extractor: F,
) -> Result<ResponseStream<F, StreamType>, (Session, GeminiResponseError)>
§Warning
You must exhaust the response stream for the model's reply to be stored in the session's context.
data_extractor extracts the data yielded by each item of the stream.
§Example
use futures::StreamExt;

// Use _gemini_response.get_text("") to get just the text received in each chunk.
let mut response_stream = gemini.ask_as_stream_with_extractor(session,
        |session, _gemini_response| session.get_last_message_text("").unwrap())
    .await
    .unwrap();
while let Some(response) = response_stream.next().await {
    if let Ok(response) = response {
        println!("{}", response);
    }
}

pub async fn ask_as_stream(
&self,
session: Session,
) -> Result<GeminiResponseStream, (Session, GeminiResponseError)>
Sends a prompt to the model and returns a stream of responses.
§Warning
You must exhaust the response stream to ensure the session history is correctly updated.
§Example
use futures::StreamExt;
let mut response_stream = gemini.ask_as_stream(session).await.unwrap();
while let Some(response) = response_stream.next().await {
    if let Ok(response) = response {
        println!("{}", response.get_text(""));
    }
}

Examples found in repository: see the Streaming example under new above.