context_caching/context_caching.rs
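
This example walks through the context-caching workflow end to end: it builds up a session with a deliberately large prompt and an inline PDF, caches that history for five minutes, answers a question against the cache, lists the existing caches, and finally deletes the cache. Set GEMINI_API_KEY before running it; if the file is wired up as a Cargo example, an invocation along the lines of cargo run --example context_caching should work, depending on how the crate's examples are laid out.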

use gemini_client_api::gemini::ask::Gemini;
use gemini_client_api::gemini::types::caching::CachedContentBuilder;
use gemini_client_api::gemini::types::request::InlineData;
use gemini_client_api::gemini::types::sessions::Session;
use std::env;
use std::time::Duration;

#[tokio::main]
async fn main() {
    let api_key = env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY must be set");
    let ai = Gemini::new(api_key, "gemini-2.5-flash", None);
    let mut session = Session::new(10);

    session.ask("What is there in this pdf".repeat(200)); // Fake a large context for the example
    session.ask(InlineData::from_url("https://bitmesra.ac.in/UploadedDocuments/admingo/files/221225_List%20of%20Holiday_2026_26.pdf").await.unwrap());

    // 1. Build the cached-content request from the session history, with a 5-minute TTL
    let cached_content_req = CachedContentBuilder::new("gemini-2.5-flash")
        .display_name("Simulated Large Doc")
        .contents(
            session
                .get_history()
                .into_iter()
                .map(|e| e.to_owned())
                .collect(),
        )
        .ttl(Duration::from_secs(300))
        .build().unwrap();

    println!("Creating cache...");
    match ai.create_cache(&cached_content_req).await {
        Ok(cache) => {
            println!("Cache created: {}", cache.name().as_ref().unwrap());

            // 2. Use the cache in a request
            let mut session = Session::new(10);
            let prompt = "Summarize the cached document.";
            println!("User: {}", prompt);

            // Create a new client instance that uses the cache
            let ai_with_cache = ai
                .clone()
                .set_cached_content(cache.name().as_ref().unwrap());

            match ai_with_cache.ask(session.ask(prompt)).await {
                Ok(response) => {
                    println!("\nGemini: {}", response.get_chat().get_text_no_think(""));
                }
                Err(e) => eprintln!("Error asking Gemini: {:?}", e),
            }

            // 3. List caches
            println!("\nListing caches...");
            match ai.list_caches().await {
                Ok(list) => {
                    if let Some(caches) = list.cached_contents() {
                        for c in caches {
                            println!("- {}", c.name().as_ref().unwrap_or(&"Unknown".to_string()));
                        }
                    } else {
                        println!("No caches found.");
                    }
                }
                Err(e) => eprintln!("Error listing caches: {:?}", e),
            }

            // 4. Delete the cache
            println!("\nDeleting cache...");
            match ai.delete_cache(cache.name().as_ref().unwrap()).await {
                Ok(_) => println!("Cache deleted."),
                Err(e) => eprintln!("Error deleting cache: {:?}", e),
            }
        }
        Err(e) => {
            eprintln!("Failed to create cache: {:?}", e);
        }
    }
}