pub struct Gemini { /* private fields */ }

Implementations§
impl Gemini
pub fn new(
    api_key: impl Into<String>,
    model: impl Into<String>,
    sys_prompt: Option<SystemInstruction>,
) -> Self
§Arguments
api_key — obtain one from Google AI Studio.
model — one of the model names listed in the Gemini models documentation.
sys_prompt — optional system instruction; its structure should follow the Gemini API documentation.
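A minimal construction sketch, assuming an API key from Google AI Studio; the model id "gemini-2.0-flash" is a placeholder assumption, not taken from this crate's documentation:

// Sketch only: substitute a model id currently listed in the Gemini documentation.
let gemini = Gemini::new("YOUR_API_KEY", "gemini-2.0-flash", None);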
pub fn new_with_timeout(
    api_key: impl Into<String>,
    model: impl Into<String>,
    sys_prompt: Option<SystemInstruction>,
    api_timeout: Duration,
) -> Self
sys_prompt should follow the Gemini API documentation; api_timeout bounds how long each API request is allowed to take.
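A construction sketch with a request timeout; the model id and the 30-second value are illustrative assumptions:

use std::time::Duration;

// Sketch only: model id and timeout value are placeholders.
let gemini = Gemini::new_with_timeout(
    "YOUR_API_KEY",
    "gemini-2.0-flash",
    None,
    Duration::from_secs(30),
);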
pub fn set_generation_config(self, generation_config: Value) -> Self
The generation_config value should follow the GenerationConfig schema in the Gemini API documentation.
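A minimal sketch, assuming the config is built with serde_json's json! macro; the field names (temperature, maxOutputTokens) come from the Gemini API's GenerationConfig and the values are illustrative:

use serde_json::json;

// Sketch only: add or change fields to match the GenerationConfig you need.
let gemini = gemini.set_generation_config(json!({
    "temperature": 0.7,
    "maxOutputTokens": 1024
}));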
pub fn set_model(self, model: impl Into<String>) -> Self
pub fn set_api_key(self, api_key: impl Into<String>) -> Self
pub fn set_json_mode(self, schema: Value) -> Self
schema should follow the Schema object described in the Gemini API documentation.
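A sketch of such a schema, again assuming the json! macro; the structure and type casing here follow the Gemini REST API's Schema object as an assumption and the field names are purely illustrative:

use serde_json::json;

// Sketch only: describes a reply shaped like {"answer": "...", "confidence": 0.9}.
// Adjust the type names to whatever the Gemini Schema documentation specifies.
let gemini = gemini.set_json_mode(json!({
    "type": "OBJECT",
    "properties": {
        "answer": { "type": "STRING" },
        "confidence": { "type": "NUMBER" }
    },
    "required": ["answer"]
}));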
pub fn unset_json_mode(self) -> Self
pub fn set_tools(self, tools: Vec<Tool>) -> Self
pub fn unset_tools(self) -> Self
pub async fn ask( &self, session: &mut Session, ) -> Result<GeminiResponse, GeminiResponseError>
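A usage sketch, assuming the session already contains the user's prompt (building a Session is not shown here); response.get_text("") is the same accessor used in the streaming examples below:

// Sketch only: `session` must already hold the message to send.
let response = gemini.ask(&mut session).await.unwrap();
println!("{}", response.get_text(""));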
pub async fn ask_as_stream_with_extractor<F, StreamType>(
    &self,
    session: Session,
    data_extractor: F,
) -> Result<ResponseStream<F, StreamType>, GeminiResponseError>
§Warning
You must read the response stream to completion; the reply is stored in the session's context only as the stream is consumed.
data_extractor is called on each incoming chunk and decides what each item yielded by the stream contains.
§Example
use futures::StreamExt;

let mut response_stream = gemini
    .ask_as_stream_with_extractor(session, |session, _gemini_response| {
        // Use _gemini_response.get_text("") to get just the text received in each chunk.
        session.get_last_message_text("").unwrap()
    })
    .await
    .unwrap();
while let Some(response) = response_stream.next().await {
    if let Ok(response) = response {
        println!("{}", response);
    }
}

pub async fn ask_as_stream(
    &self,
    session: Session,
) -> Result<GeminiResponseStream, GeminiResponseError>
§Warning
You must read the response stream to completion; the reply is stored in the session's context only as the stream is consumed.
§Example
use futures::StreamExt;
let mut response_stream = gemini.ask_as_stream(session).await.unwrap();
while let Some(response) = response_stream.next().await {
if let Ok(response) = response {
println!("{}", response.get_text(""));
}
}

Trait Implementations§
Auto Trait Implementations§
impl Freeze for Gemini
impl !RefUnwindSafe for Gemini
impl Send for Gemini
impl Sync for Gemini
impl Unpin for Gemini
impl !UnwindSafe for Gemini
Blanket Implementations§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.