// simple_example/simple_example.rs

//! Simple example demonstrating basic usage of the prompt store with OpenAI.
//!
//! This example shows how to:
//! - Initialize a prompt store
//! - Configure an OpenAI LLM backend
//! - Execute a prompt with variables
//! - Handle the result

use llm::builder::{LLMBackend, LLMBuilder};
use prompt_store::PromptStore;
11
12#[tokio::main]
13async fn main() {
14    // Get the OpenAI API key from environment variables
15    let api_key = std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set");
16
17    // Configure the OpenAI LLM backend
18    let openai_llm = LLMBuilder::new()
19        .backend(LLMBackend::OpenAI)
20        .api_key(api_key)
21        .model("gpt-4o")
22        .build()
23        .unwrap();
24
25    // Initialize the prompt store
26    let store = PromptStore::init().unwrap();
27
28    // Execute a prompt with variables and get the result
29    let result = store
30        .prompt("prompt-store-example::nin5pgu6")
31        .vars([("name", "Alice")])
32        .backend(openai_llm.as_ref())
33        .run()
34        .await
35        .expect("Prompt execution failed");
36
37    println!("Result: {:?}", result);
38}