// examples/parallel_example.rs

//! This example demonstrates advanced chain logic: conditional steps, parallel execution, and fallbacks.
//!
//! Pre-requisites:
//! 1. Run `prompt-store new` to create the following prompts:
//!    - Title: "Extract Topic", Content: "What is the main topic of this text? Text: {{query}}"
//!    - Title: "Summarizer", Content: "Summarize this text about {{topic}}: {{query}}"
//!    - Title: "Keyword Extractor", Content: "List 5 keywords for this text about {{topic}}: {{query}}"
//!    - Title: "Basic Keyword Extractor", Content: "List basic keywords for: {{query}}"
//!    - Title: "Generate Tweet", Content: "Write a tweet about this summary: {{summary}}"
//! 2. Set `OPENAI_API_KEY` environment variable.
use llm::builder::{LLMBackend, LLMBuilder};
use llm::chain::LLMRegistry;
use prompt_store::{PromptStore, RunError, RunOutput};

16#[tokio::main]
17async fn main() -> Result<(), RunError> {
18    let store = PromptStore::init()?;
19
20    let openai_llm = LLMBuilder::new()
21        .backend(LLMBackend::OpenAI)
22        .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
23        .model("gpt-4o-mini")
24        .build()
25        .unwrap();
26
27    let mut registry = LLMRegistry::new();
28    registry.insert("openai", openai_llm);
29
30    let user_query = "Rust is a systems programming language focused on safety, speed, and concurrency. It achieves these goals without a garbage collector, using a unique ownership model with a borrow checker.";
31
32    println!("--- Running Advanced Chain ---");
33
34    let outputs = store
35        .chain(&registry)
36        // 1. First step runs sequentially
37        .step("topic", "Extract Topic")
38        .with_provider("openai")
39        // 2. These two steps run in parallel, as they only depend on the previous context
40        .parallel(|group| {
41            group
42                .step("summary", "Summarizer")
43                // This step will fail because the provider doesn't exist
44                .step("keywords", "Keyword Extractor")
45                .with_provider("failing_provider")
46        })
47        .with_provider("openai") // Default provider for the group
48        // 3. This is a fallback for the "keywords" step. It runs only if the main step fails.
49        .on_error_stored("Basic Keyword Extractor")
50        .with_provider("openai")
51        // 4. This step runs only if the summary contains the word "safety"
52        .step_if("tweet", "Generate Tweet", |ctx| {
53            ctx.get("summary")
54                .map_or(false, |s| s.to_lowercase().contains("safety"))
55        })
56        .with_provider("openai")
57        .vars([("query", user_query)])
58        .run()
59        .await?;
60
61    if let RunOutput::Chain(map) = outputs {
62        println!("\n--- Chain Execution Complete ---");
63        println!("\n[1] Topic: {}", map.get("topic").unwrap_or(&"N/A".into()));
64        println!(
65            "\n[2a] Summary: {}",
66            map.get("summary").unwrap_or(&"N/A".into())
67        );
68        println!(
69            "\n[2b] Keywords (used fallback): {}",
70            map.get("keywords").unwrap_or(&"N/A".into())
71        );
72
73        if let Some(tweet) = map.get("tweet") {
74            println!("\n[3] Conditional Tweet: {}", tweet);
75        } else {
76            println!("\n[3] Conditional Tweet: SKIPPED (condition not met)");
77        }
78    }
79
80    Ok(())
81}