context_caching/context_caching.rs

use gemini_client_api::gemini::ask::Gemini;
use gemini_client_api::gemini::types::caching::CachedContentBuilder;
use gemini_client_api::gemini::types::request::InlineData;
use gemini_client_api::gemini::types::sessions::Session;
use std::env;
use std::time::Duration;

#[tokio::main]
async fn main() {
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
    let mut session = Session::new(10);

    // Fake a large context for the example: a long repeated prompt plus a PDF fetched from a URL.
    session.ask("What is there in this pdf".repeat(200))
        .ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());

    // Build the cached-content request from the session history, with a five-minute TTL.
    let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
        .display_name("Simulated Large Doc")
        .contents(
            session
                .get_history()
                .into_iter()
                .map(|e| e.to_owned())
                .collect(),
        )
        .ttl(Duration::from_secs(300))
        .build()
        .unwrap();

    // 1. Create the cache
    println!("Creating cache...");
    match ai.create_cache(&cached_content_req).await {
        Ok(cache) => {
            println!("Cache created: {}", cache.name().as_ref().unwrap());

            // 2. Use the cache in a request
            let mut session = Session::new(10);
            let prompt = "Summarize the cached document.";
            println!("User: {}", prompt);

            // Create a new client instance that uses the cache
            let ai_with_cache = ai
                .clone()
                .set_cached_content(cache.name().as_ref().unwrap());

            match ai_with_cache.ask(session.ask(prompt)).await {
                Ok(response) => {
                    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
                }
                Err(e) => eprintln!("Error asking Gemini: {:?}", e),
            }

            // 3. List caches
            println!("\nListing caches...");
            match ai.list_caches().await {
                Ok(list) => {
                    if let Some(caches) = list.cached_contents() {
                        for c in caches {
                            println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
                        }
                    } else {
                        println!("No caches found.");
                    }
                }
                Err(e) => eprintln!("Error listing caches: {:?}", e),
            }

            // 4. Delete the cache
            println!("\nDeleting cache...");
            match ai.delete_cache(cache.name().as_ref().unwrap()).await {
                Ok(_) => println!("Cache deleted."),
                Err(e) => eprintln!("Error deleting cache: {:?}", e),
            }
        }
        Err(e) => {
            eprintln!("Failed to create cache: {:?}", e);
        }
    }
}
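
The cache created above is deleted at the end of the example, but in general a cached-content entry lives server-side until it is deleted or its TTL expires, and it is addressed by the name returned from create_cache. As a minimal sketch (not part of the example above), a later run could reuse such a name instead of rebuilding the cache; the GEMINI_CACHE_NAME environment variable and the prompt are hypothetical, and only calls already shown in the example are used:

use gemini_client_api::gemini::ask::Gemini;
use gemini_client_api::gemini::types::sessions::Session;
use std::env;

#[tokio::main]
async fn main() {
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    // Hypothetical: a cache name saved from an earlier create_cache call.
    let cache_name = env::var("GEMINI_CACHE_NAME").expect("GEMINI_CACHE_NAME must be set");

    // Point the client at the existing server-side cache instead of re-creating it.
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None).set_cached_content(&cache_name);

    let mut session = Session::new(10);
    match ai.ask(session.ask("List the holidays mentioned in the cached document.")).await {
        Ok(response) => println!("Gemini: {}", response.get_chat().get_text_no_think("")),
        Err(e) => eprintln!("Error asking Gemini: {:?}", e),
    }
}

If the referenced cache has already expired or been deleted, such a request is expected to fail, and the cache has to be created again with create_cache.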