// vertex_text_request_stream/vertex_text_request_stream.rs
use std::env;
use std::io::{stdout, Write};

use google_generative_ai_rs::v1::{
    api::Client,
    gemini::{request::Request, response::GeminiResponse, Content, Part, Role},
};
9#[tokio::main]
23async fn main() -> Result<(), Box<dyn std::error::Error>> {
24 env_logger::init();
25 let region = env::var("GCP_REGION_NAME").unwrap().to_string();
26 let project_id = env::var("GCP_PROJECT_ID").unwrap().to_string();
27
28 let client = Client::new_from_region_project_id(region.to_string(), project_id.to_string());
29
30 let txt_request = Request {
31 contents: vec![Content {
32 role: Role::User,
33 parts: vec![Part {
34 text: Some("Give me a recipe for banana bread.".to_string()),
35 inline_data: None,
36 file_data: None,
37 video_metadata: None,
38 }],
39 }],
40 tools: vec![],
41 safety_settings: vec![],
42 generation_config: None,
43
44 #[cfg(feature = "beta")]
45 system_instruction: None,
46 };
47
48 let response = client.post(30, &txt_request).await?;
49
50 println!("output streaming content");
51
52 if let Some(stream_response) = response.streamed() {
53 if let Some(json_stream) = stream_response.response_stream {
54 Client::for_each_async(json_stream, move |response: GeminiResponse| async move {
55 let mut lock = stdout().lock();
56 write!(
57 lock,
58 "{}",
59 response.candidates[0].content.parts[0]
60 .text
61 .clone()
62 .unwrap()
63 .as_str()
64 )
65 .unwrap();
66 })
67 .await
68 }
69 }
70
71 Ok(())
72}