google_ai_rs/lib.rs
#![allow(unused_imports)]
#![deny(clippy::future_not_send)]
#![allow(clippy::doc_lazy_continuation)]

//! Rust client for Google's Generative AI APIs
//!
//! Provides an ergonomic, type-safe, and low-overhead interface for
//! interacting with Google's Generative AI services, including Gemini.
//!
//! ## 💡 Highlights
//! - **Minimal Overhead**: The core `Client` is tiny and allocation-light.
//! - **Configurable**: TLS, JWT auth, and dependency minimization via Cargo features.
//! - **Fluent API**: Builder-style configuration for temperature, safety settings, tools, etc.
//! - **Type-Safe Schemas**: Derive response schemas at compile time with `AsSchema` and parse replies into your own types.
//! - **Stateful Chat**: The `Session` struct handles conversation history for you.
//! - **Multi-Modal Input**: Mix text and images with `Part` or your own `TryIntoContents` impl.
//!
//! ## 🚀 Quickstart (Chat Session)
//!
//! A simple example of starting a chat session and streaming a response.
//!
//! ```rust,no_run
//! use google_ai_rs::{Client, GenerativeModel};
//! use std::io::{stdout, Write};
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let client = Client::new("YOUR_API_KEY").await?;
//!     let model = client.generative_model("gemini-1.5-pro");
//!
//!     let mut chat = model.start_chat();
//!     println!("🤖 Initializing chat session...");
//!
//!     let prompt = "Explain 'Zero-shot learning' with a simple analogy.";
//!
//!     let mut stream = chat.stream_send_message(prompt).await?;
//!
//!     print!("🤖 ");
//!     let _ = stdout().flush();
//!     stream.write_to_sync(&mut stdout()).await?;
//!
//!     println!();
//!     Ok(())
//! }
//! ```
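//!
//! If you don't need streaming or chat history, the same model can answer a
//! single prompt in one call. The sketch below assumes plain string prompts are
//! accepted by `generate_content` through the crate's `TryIntoContents`
//! conversions, as the string prompts in the typed-model example below suggest.
//!
//! ```rust,ignore
//! use google_ai_rs::Client;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let client = Client::new("YOUR_API_KEY").await?;
//!     let model = client.generative_model("gemini-1.5-flash");
//!
//!     // One-shot request: no session state, the whole reply arrives at once.
//!     let response = model
//!         .generate_content("Summarize zero-shot learning in two sentences.")
//!         .await?;
//!     println!("{}", response.text());
//!     Ok(())
//! }
//! ```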
//!
//! ## 📎 Multi-modal Input with `TryIntoContents`
//!
//! Build your own structs that can be turned into model input. Great for combining text and images.
//!
//! ```rust,no_run
//! use google_ai_rs::{Client, Part, Error, content::TryIntoContents, Content};
//!
//! struct UserQuery {
//!     text: String,
//!     attachments: Vec<Part>,
//! }
//!
//! impl TryIntoContents for UserQuery {
//!     fn try_into_contents(self) -> Result<Vec<Content>, Error> {
//!         let mut parts = vec![Part::from(self.text)];
//!         parts.extend(self.attachments);
//!         Ok(vec![Content { role: "user".into(), parts }])
//!     }
//! }
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let client = Client::new("YOUR_API_KEY").await?;
//!     let model = client.generative_model("gemini-1.5-flash");
//!
//!     let product_image = std::fs::read("path/to/product.jpg")?;
//!
//!     let user_query = UserQuery {
//!         text: "Analyze this product shot for defects".into(),
//!         attachments: vec![Part::blob("image/jpeg", product_image)],
//!     };
//!
//!     let response = model.generate_content(user_query).await?;
//!     println!("{}", response.text());
//!     Ok(())
//! }
//! ```
//!
//! ## 🧾 Type-Safe Response Parsing with `AsSchema`
//!
//! Strongly typed schemas ensure you get the structure you expect.
//!
//! To enable type-safe response parsing, turn on the `serde` feature:
//!
//! ```rust,ignore
//! use google_ai_rs::{AsSchema, AsSchemaWithSerde, Client, Map};
//! use serde::Deserialize;
//! use std::collections::HashMap;
//!
//! #[derive(AsSchemaWithSerde)]
//! struct PriceInfo(f32, bool); // (price, in stock)
//!
//! #[derive(AsSchema, Deserialize, PartialEq, Eq, Hash)]
//! struct FashionBag {
//!     brand: String,
//!     style: String,
//!     material: String,
//! }
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let client = Client::new("YOUR_API_KEY").await?;
//!
//!     let model = client.typed_model::<Map<HashMap<FashionBag, PriceInfo>>>("gemini-1.5-flash");
//!
//!     let inventory = model
//!         .generate_content("List 3 luxury bags with prices and stock status.")
//!         .await?;
//!
//!     for (bag, price) in &inventory {
//!         println!("{} {}: ${} (in stock: {})", bag.brand, bag.style, price.0, price.1);
//!     }
//!     Ok(())
//! }
//! ```

pub mod auth;
pub mod chat;
pub mod client;
pub mod content;
pub mod embedding;
pub mod error;
pub mod genai;
pub mod schema;

pub use auth::Auth;
pub use client::{Client, SharedClient};
pub use error::Error;
pub use genai::{GenerativeModel, TypedModel, TypedResponse};

pub use crate::proto::Schema;
pub use crate::schema::{AsSchema, Map, MapTrait, SchemaType, Tuple};

pub use content::{
    IntoContent, IntoContents, IntoParts, TryFromCandidates, TryFromContents, TryIntoContent,
    TryIntoContents,
};
pub use proto::{
    part::Data, CachedContent, Candidate, Content, FunctionCall, GenerationConfig, Part, TaskType,
    Tool,
};

extern crate google_ai_schema_derive;

pub use google_ai_schema_derive::AsSchema;

#[cfg(feature = "serde")]
pub use google_ai_schema_derive::AsSchemaWithSerde;

#[doc(hidden)]
pub mod proto;

/// Formats a model name as a full resource path.
///
/// Bare names are prefixed to follow the `models/{model}` format; names that
/// already contain a `/` (such as `tunedModels/{model}`) are returned unchanged.
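///
/// A quick illustration, mirroring the unit test below:
///
/// ```ignore
/// assert_eq!(full_model_name("gemini-1.5-flash"), "models/gemini-1.5-flash");
/// assert_eq!(full_model_name("tunedModels/my-model"), "tunedModels/my-model");
/// ```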
fn full_model_name(name: &str) -> std::borrow::Cow<'_, str> {
    if name.contains('/') {
        name.into()
    } else {
        format!("models/{name}").into()
    }
}

#[test]
fn full_model_name_test() {
    let tests = [
        ("modelName", "models/modelName"),
        ("tunedModels/modelName", "tunedModels/modelName"),
    ];

    for (input, expected) in tests {
        assert_eq!(full_model_name(input), expected);
    }
}