Skip to main content

Gemini

Struct Gemini 

Source
pub struct Gemini { /* private fields */ }
Expand description

The main client for interacting with the Gemini API.

Use Gemini::new or Gemini::new_with_timeout to create an instance. You can configure various aspects of the request like model, system instructions, generation config, safety settings, and tools using the provided builder-like methods.

Implementations§

Source§

impl Gemini

Source

pub fn new( api_key: impl Into<String>, model: impl Into<String>, sys_prompt: Option<SystemInstruction>, ) -> Self

Creates a new Gemini client.

§Arguments
  • api_key - Your Gemini API key.
  • model - The model variation to use.
  • sys_prompt - Optional system instructions.
Examples found in repository?
examples/multimodal.rs (line 10)
7async fn raw_multimodal() {
8    let mut session = Session::new(6);
9    let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
10    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
11
12    session.ask(vec!["Where is there in this pdf".into(), InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap().into()]);
13
14    let response = ai.ask(&mut session).await.unwrap();
15    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
16}
17#[tokio::main]
18async fn main() {
19    let mut session = Session::new(6);
20    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
21    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
22
23    println!("--- Multimodal (Images/Files) Example ---");
24
25    // Use MarkdownToParts to easily parse a string with image/file markers
26    // It supports both URLs and local file paths!
27    let content = "Describe this image: ![image](https://www.google.com/images/branding/googlelogo/1x/googlelogo_color_272x92dp.png)";
28    println!("Processing: {}", content);
29
30    let parts = MarkdownToParts::new(content, |_| mime::IMAGE_PNG)
31        .await
32        .process();
33
34    let response = ai.ask(session.ask(parts)).await.unwrap();
35
36    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
37}
More examples
Hide additional examples
examples/structured_output.rs (line 26)
23async fn main() {
24    let mut session = Session::new(2).set_remember_reply(false);
25    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
26    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
27
28    println!("--- Structured Output (JSON Mode) Example ---");
29
30    // Enable JSON mode by passing the generated schema
31    let ai = ai.set_json_mode(MovieReview::gemini_schema());
32
33    let prompt = "Give me a review for the movie Interstellar.";
34    println!("User: {}", prompt);
35
36    let response = ai.ask(session.ask_string(prompt)).await.unwrap();
37
38    // Extract and deserialize the JSON response
39    if let Ok(review) = response.get_json::<MovieReview>() {
40        println!("\nGemini (Structured):");
41        println!("{:#?}", review);
42    } else {
43        println!("\nFailed to parse JSON response: {}", response.get_chat().get_text_no_think(""));
44    }
45}
examples/thinking.rs (line 12)
7async fn main() {
8    let mut session = Session::new(4);
9    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
10    
11    // Note: Thinking mode requires a supported model like gemini-2.0-flash-thinking-exp
12    let ai = Gemini::new(api_key, "gemini-2.0-flash-thinking-exp", None)
13        .set_thinking_config(ThinkingConfig::new(true, 1024));
14
15    println!("--- Thinking Mode Example ---");
16    let prompt = "How many 'r's are in the word strawberry? Think step by step.";
17    println!("User: {}\n", prompt);
18
19    let response = ai.ask(session.ask_string(prompt)).await.unwrap();
20
21    // Show the "thoughts" part separately
22    let thoughts = response.get_chat().get_thoughts("\n");
23    if !thoughts.is_empty() {
24        println!("--- Gemini's Thoughts ---\n{}\n", thoughts);
25    }
26
27    // Show the final answer
28    let answer = response.get_chat().get_text_no_think("");
29    println!("--- Gemini's Answer ---\n{}", answer);
30}
examples/basic_chat.rs (line 13)
6async fn main() {
7    // 1. Initialize the session with a history limit (e.g., 6 messages)
8    let mut session = Session::new(6);
9
10    // 2. Create the Gemini client
11    // Get your API key from https://aistudio.google.com/app/apikey
12    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
13    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
14
15    println!("--- Basic Chat Example ---");
16
17    // 3. Ask a question
18    let prompt = "What are the benefits of using Rust for systems programming?";
19    println!("User: {}", prompt);
20
21    let response = ai.ask(session.ask_string(prompt)).await.unwrap();
22
23    // 4. Print the reply
24    // get_text_no_think("") extracts text and ignores "thought" parts (if any)
25    let reply = response.get_chat().get_text_no_think("");
26    println!("\nGemini: {}", reply);
27
28    // 5. The session now contains the interaction
29    println!("\nMessages in history: {}", session.get_history_length());
30}
examples/streaming.rs (line 11)
8async fn main() {
9    let mut session = Session::new(10);
10    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12
13    println!("--- Streaming Example ---");
14    let prompt = "Write a short poem about crab-like robots on Mars.";
15    println!("User: {}\n", prompt);
16    print!("Gemini: ");
17    stdout().flush().unwrap();
18
19    // Start a streaming request
20    let mut response_stream = ai.ask_as_stream(session.ask_string(prompt).clone()).await.unwrap();
21
22    while let Some(chunk_result) = response_stream.next().await {
23        match chunk_result {
24            Ok(response) => {
25                // Get the text from the current chunk
26                let text = response.get_chat().get_text_no_think("");
27                print!("{}", text);
28                stdout().flush().unwrap();
29            }
30            Err(e) => {
31                eprintln!("\nError receiving chunk: {:?}", e);
32                break;
33            }
34        }
35    }
36
37    println!("\n\n--- Stream Complete ---");
38    // Note: The session passed to ask_as_stream is updated as you exhaust the stream.
39}
examples/function_calling.rs (line 35)
30async fn main() -> Result<(), Box<dyn Error>> {
31    let mut session = Session::new(10);
32    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
33    
34    // 1. Initialize Gemini and register tools
35    let ai = Gemini::new(api_key, "gemini-2.5-flash", None)
36        .set_tools(vec![Tool::FunctionDeclarations(vec![
37            add_numbers::gemini_schema(),
38            get_temperature::gemini_schema(),
39        ])]);
40
41    println!("--- Function Calling Example ---");
42    let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
43    println!("User: {}\n", prompt);
44
45    // 2. Ask Gemini. It might reply with one or more function calls.
46    let mut response = ai.ask(session.ask_string(prompt)).await?;
47
48    // 3. Loop to handle potential multiple rounds of function calls
49    loop {
50        if response.get_chat().has_function_call() {
51            println!("Gemini requested function calls...");
52            
53            // 4. Use the macro to execute all requested calls and update the session
54            let results = execute_function_calls!(session, add_numbers, get_temperature);
55            
56            for (idx, res) in results.iter().enumerate() {
57                if let Some(r) = res {
58                    println!("  Call #{} result: {:?}", idx, r);
59                }
60            }
61
62            // 5. Send the results back to Gemini to get the final natural language response
63            response = ai.ask(&mut session).await?;
64        } else {
65            // No more function calls, show the final response
66            println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
67            break;
68        }
69    }
70
71    Ok(())
72}
Source

pub fn new_with_timeout( api_key: impl Into<String>, model: impl Into<String>, sys_prompt: Option<SystemInstruction>, api_timeout: Duration, ) -> Self

Creates a new Gemini client with a custom API timeout.

§Arguments
  • api_key - Your Gemini API key.
  • model - The model variation to use.
  • sys_prompt - Optional system instructions.
  • api_timeout - Custom duration for request timeouts.
Source

pub fn set_generation_config(&mut self) -> &mut Value

Returns a mutable reference to the generation configuration. If not already set, initializes it to an empty object.

See Gemini docs for schema details.

Source

pub fn set_tool_config(self, config: ToolConfig) -> Self

Source

pub fn set_thinking_config(self, config: ThinkingConfig) -> Self

Examples found in repository?
examples/thinking.rs (line 13)
7async fn main() {
8    let mut session = Session::new(4);
9    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
10    
11    // Note: Thinking mode requires a supported model like gemini-2.0-flash-thinking-exp
12    let ai = Gemini::new(api_key, "gemini-2.0-flash-thinking-exp", None)
13        .set_thinking_config(ThinkingConfig::new(true, 1024));
14
15    println!("--- Thinking Mode Example ---");
16    let prompt = "How many 'r's are in the word strawberry? Think step by step.";
17    println!("User: {}\n", prompt);
18
19    let response = ai.ask(session.ask_string(prompt)).await.unwrap();
20
21    // Show the "thoughts" part separately
22    let thoughts = response.get_chat().get_thoughts("\n");
23    if !thoughts.is_empty() {
24        println!("--- Gemini's Thoughts ---\n{}\n", thoughts);
25    }
26
27    // Show the final answer
28    let answer = response.get_chat().get_text_no_think("");
29    println!("--- Gemini's Answer ---\n{}", answer);
30}
Source

pub fn set_model(self, model: impl Into<String>) -> Self

Source

pub fn set_sys_prompt(self, sys_prompt: Option<SystemInstruction>) -> Self

Source

pub fn set_safety_settings(self, settings: Option<Vec<SafetySetting>>) -> Self

Source

pub fn set_api_key(self, api_key: impl Into<String>) -> Self

Source

pub fn set_json_mode(self, schema: Value) -> Self

Sets the response format to JSON mode with a specific schema.

To use a Rust struct as a schema, decorate it with #[gemini_schema] and pass StructName::gemini_schema().

§Arguments
  • schema - The JSON schema (as a Value) that the model's response must conform to.
Examples found in repository?
examples/structured_output.rs (line 31)
23async fn main() {
24    let mut session = Session::new(2).set_remember_reply(false);
25    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
26    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
27
28    println!("--- Structured Output (JSON Mode) Example ---");
29
30    // Enable JSON mode by passing the generated schema
31    let ai = ai.set_json_mode(MovieReview::gemini_schema());
32
33    let prompt = "Give me a review for the movie Interstellar.";
34    println!("User: {}", prompt);
35
36    let response = ai.ask(session.ask_string(prompt)).await.unwrap();
37
38    // Extract and deserialize the JSON response
39    if let Ok(review) = response.get_json::<MovieReview>() {
40        println!("\nGemini (Structured):");
41        println!("{:#?}", review);
42    } else {
43        println!("\nFailed to parse JSON response: {}", response.get_chat().get_text_no_think(""));
44    }
45}
Source

pub fn unset_json_mode(self) -> Self

Source

pub fn set_tools(self, tools: Vec<Tool>) -> Self

Sets the tools (functions) available to the model.

Examples found in repository?
examples/function_calling.rs (lines 36-39)
30async fn main() -> Result<(), Box<dyn Error>> {
31    let mut session = Session::new(10);
32    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
33    
34    // 1. Initialize Gemini and register tools
35    let ai = Gemini::new(api_key, "gemini-2.5-flash", None)
36        .set_tools(vec![Tool::FunctionDeclarations(vec![
37            add_numbers::gemini_schema(),
38            get_temperature::gemini_schema(),
39        ])]);
40
41    println!("--- Function Calling Example ---");
42    let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
43    println!("User: {}\n", prompt);
44
45    // 2. Ask Gemini. It might reply with one or more function calls.
46    let mut response = ai.ask(session.ask_string(prompt)).await?;
47
48    // 3. Loop to handle potential multiple rounds of function calls
49    loop {
50        if response.get_chat().has_function_call() {
51            println!("Gemini requested function calls...");
52            
53            // 4. Use the macro to execute all requested calls and update the session
54            let results = execute_function_calls!(session, add_numbers, get_temperature);
55            
56            for (idx, res) in results.iter().enumerate() {
57                if let Some(r) = res {
58                    println!("  Call #{} result: {:?}", idx, r);
59                }
60            }
61
62            // 5. Send the results back to Gemini to get the final natural language response
63            response = ai.ask(&mut session).await?;
64        } else {
65            // No more function calls, show the final response
66            println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
67            break;
68        }
69    }
70
71    Ok(())
72}
Source

pub fn unset_tools(self) -> Self

Removes all tools.

Source

pub async fn ask( &self, session: &mut Session, ) -> Result<GeminiResponse, GeminiResponseError>

Sends a prompt to the model and waits for the full response.

Updates the session history with the model’s reply.

§Errors

Returns GeminiResponseError::NothingToRespond if the last message in history is from the model.

Examples found in repository?
examples/multimodal.rs (line 14)
7async fn raw_multimodal() {
8    let mut session = Session::new(6);
9    let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
10    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
11
12    session.ask(vec!["Where is there in this pdf".into(), InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap().into()]);
13
14    let response = ai.ask(&mut session).await.unwrap();
15    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
16}
17#[tokio::main]
18async fn main() {
19    let mut session = Session::new(6);
20    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
21    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
22
23    println!("--- Multimodal (Images/Files) Example ---");
24
25    // Use MarkdownToParts to easily parse a string with image/file markers
26    // It supports both URLs and local file paths!
27    let content = "Describe this image: ![image](https://www.google.com/images/branding/googlelogo/1x/googlelogo_color_272x92dp.png)";
28    println!("Processing: {}", content);
29
30    let parts = MarkdownToParts::new(content, |_| mime::IMAGE_PNG)
31        .await
32        .process();
33
34    let response = ai.ask(session.ask(parts)).await.unwrap();
35
36    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
37}
More examples
Hide additional examples
examples/structured_output.rs (line 36)
23async fn main() {
24    let mut session = Session::new(2).set_remember_reply(false);
25    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
26    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
27
28    println!("--- Structured Output (JSON Mode) Example ---");
29
30    // Enable JSON mode by passing the generated schema
31    let ai = ai.set_json_mode(MovieReview::gemini_schema());
32
33    let prompt = "Give me a review for the movie Interstellar.";
34    println!("User: {}", prompt);
35
36    let response = ai.ask(session.ask_string(prompt)).await.unwrap();
37
38    // Extract and deserialize the JSON response
39    if let Ok(review) = response.get_json::<MovieReview>() {
40        println!("\nGemini (Structured):");
41        println!("{:#?}", review);
42    } else {
43        println!("\nFailed to parse JSON response: {}", response.get_chat().get_text_no_think(""));
44    }
45}
examples/thinking.rs (line 19)
7async fn main() {
8    let mut session = Session::new(4);
9    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
10    
11    // Note: Thinking mode requires a supported model like gemini-2.0-flash-thinking-exp
12    let ai = Gemini::new(api_key, "gemini-2.0-flash-thinking-exp", None)
13        .set_thinking_config(ThinkingConfig::new(true, 1024));
14
15    println!("--- Thinking Mode Example ---");
16    let prompt = "How many 'r's are in the word strawberry? Think step by step.";
17    println!("User: {}\n", prompt);
18
19    let response = ai.ask(session.ask_string(prompt)).await.unwrap();
20
21    // Show the "thoughts" part separately
22    let thoughts = response.get_chat().get_thoughts("\n");
23    if !thoughts.is_empty() {
24        println!("--- Gemini's Thoughts ---\n{}\n", thoughts);
25    }
26
27    // Show the final answer
28    let answer = response.get_chat().get_text_no_think("");
29    println!("--- Gemini's Answer ---\n{}", answer);
30}
examples/basic_chat.rs (line 21)
6async fn main() {
7    // 1. Initialize the session with a history limit (e.g., 6 messages)
8    let mut session = Session::new(6);
9
10    // 2. Create the Gemini client
11    // Get your API key from https://aistudio.google.com/app/apikey
12    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
13    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
14
15    println!("--- Basic Chat Example ---");
16
17    // 3. Ask a question
18    let prompt = "What are the benefits of using Rust for systems programming?";
19    println!("User: {}", prompt);
20
21    let response = ai.ask(session.ask_string(prompt)).await.unwrap();
22
23    // 4. Print the reply
24    // get_text_no_think("") extracts text and ignores "thought" parts (if any)
25    let reply = response.get_chat().get_text_no_think("");
26    println!("\nGemini: {}", reply);
27
28    // 5. The session now contains the interaction
29    println!("\nMessages in history: {}", session.get_history_length());
30}
examples/function_calling.rs (line 46)
30async fn main() -> Result<(), Box<dyn Error>> {
31    let mut session = Session::new(10);
32    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
33    
34    // 1. Initialize Gemini and register tools
35    let ai = Gemini::new(api_key, "gemini-2.5-flash", None)
36        .set_tools(vec![Tool::FunctionDeclarations(vec![
37            add_numbers::gemini_schema(),
38            get_temperature::gemini_schema(),
39        ])]);
40
41    println!("--- Function Calling Example ---");
42    let prompt = "What is 123.45 plus 678.9, and what's the weather like in London?";
43    println!("User: {}\n", prompt);
44
45    // 2. Ask Gemini. It might reply with one or more function calls.
46    let mut response = ai.ask(session.ask_string(prompt)).await?;
47
48    // 3. Loop to handle potential multiple rounds of function calls
49    loop {
50        if response.get_chat().has_function_call() {
51            println!("Gemini requested function calls...");
52            
53            // 4. Use the macro to execute all requested calls and update the session
54            let results = execute_function_calls!(session, add_numbers, get_temperature);
55            
56            for (idx, res) in results.iter().enumerate() {
57                if let Some(r) = res {
58                    println!("  Call #{} result: {:?}", idx, r);
59                }
60            }
61
62            // 5. Send the results back to Gemini to get the final natural language response
63            response = ai.ask(&mut session).await?;
64        } else {
65            // No more function calls, show the final response
66            println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
67            break;
68        }
69    }
70
71    Ok(())
72}
Source

pub async fn ask_as_stream_with_extractor<F, StreamType>( &self, session: Session, data_extractor: F, ) -> Result<ResponseStream<F, StreamType>, (Session, GeminiResponseError)>
where F: FnMut(&Session, GeminiResponse) -> StreamType,

§Warning

You must read the response stream to completion to ensure the reply is stored in the session's context. data_extractor is used to extract the data that you receive as a stream of futures.

§Example
use futures::StreamExt;
let mut response_stream = gemini.ask_as_stream_with_extractor(session,
|session, _gemini_response| session.get_last_message_text("").unwrap())
.await.unwrap(); // Use _gemini_response.get_text("") to just get the text received in every chunk

while let Some(response) = response_stream.next().await {
    if let Ok(response) = response {
        println!("{}", response);
    }
}
Source

pub async fn ask_as_stream( &self, session: Session, ) -> Result<GeminiResponseStream, (Session, GeminiResponseError)>

Sends a prompt to the model and returns a stream of responses.

§Warning

You must exhaust the response stream to ensure the session history is correctly updated.

§Example
use futures::StreamExt;
let mut response_stream = gemini.ask_as_stream(session).await.unwrap();

while let Some(response) = response_stream.next().await {
    if let Ok(response) = response {
        println!("{}", response.get_text(""));
    }
}
Examples found in repository?
examples/streaming.rs (line 20)
8async fn main() {
9    let mut session = Session::new(10);
10    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
11    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
12
13    println!("--- Streaming Example ---");
14    let prompt = "Write a short poem about crab-like robots on Mars.";
15    println!("User: {}\n", prompt);
16    print!("Gemini: ");
17    stdout().flush().unwrap();
18
19    // Start a streaming request
20    let mut response_stream = ai.ask_as_stream(session.ask_string(prompt).clone()).await.unwrap();
21
22    while let Some(chunk_result) = response_stream.next().await {
23        match chunk_result {
24            Ok(response) => {
25                // Get the text from the current chunk
26                let text = response.get_chat().get_text_no_think("");
27                print!("{}", text);
28                stdout().flush().unwrap();
29            }
30            Err(e) => {
31                eprintln!("\nError receiving chunk: {:?}", e);
32                break;
33            }
34        }
35    }
36
37    println!("\n\n--- Stream Complete ---");
38    // Note: The session passed to ask_as_stream is updated as you exhaust the stream.
39}

Trait Implementations§

Source§

impl Clone for Gemini

Source§

fn clone(&self) -> Gemini

Returns a duplicate of the value. Read more
1.0.0 · Source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
Source§

impl Debug for Gemini

Source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
Source§

impl Default for Gemini

Source§

fn default() -> Gemini

Returns the “default value” for a type. Read more

Auto Trait Implementations§

§

impl Freeze for Gemini

§

impl !RefUnwindSafe for Gemini

§

impl Send for Gemini

§

impl Sync for Gemini

§

impl Unpin for Gemini

§

impl !UnwindSafe for Gemini

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> CloneToUninit for T
where T: Clone,

Source§

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T> Instrument for T

Source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
Source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> PolicyExt for T
where T: ?Sized,

Source§

fn and<P, B, E>(self, other: P) -> And<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow only if self and other return Action::Follow. Read more
Source§

fn or<P, B, E>(self, other: P) -> Or<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow if either self or other returns Action::Follow. Read more
Source§

impl<T> ToOwned for T
where T: Clone,

Source§

type Owned = T

The resulting type after obtaining ownership.
Source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
Source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<T> WithSubscriber for T

Source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more