pub struct PromptStore { /* private fields */ }
Expand description
The main entry point for interacting with the prompt store.
This structure is designed to be created once and shared throughout your application. It holds the necessary context, including the encryption cipher.
Implementations§
Source§impl PromptStore
impl PromptStore
Source
pub fn init() -> Result<Self, StoreError>
pub fn init() -> Result<Self, StoreError>
Initializes the PromptStore by prompting for a password if the key is encrypted.
This function will locate ~/.prompt-store, load the encryption key,
and interactively prompt for a password if required.
Examples found in repository?
examples/simple_example.rs (line 26)
13async fn main() {
14 // Get the OpenAI API key from environment variables
15 let api_key = std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set");
16
17 // Configure the OpenAI LLM backend
18 let openai_llm = LLMBuilder::new()
19 .backend(LLMBackend::OpenAI)
20 .api_key(api_key)
21 .model("gpt-4o")
22 .build()
23 .unwrap();
24
25 // Initialize the prompt store
26 let store = PromptStore::init().unwrap();
27
28 // Execute a prompt with variables and get the result
29 let result = store
30 .prompt("prompt-store-example::nin5pgu6")
31 .vars([("name", "Alice")])
32 .backend(openai_llm.as_ref())
33 .run()
34 .await
35 .expect("Prompt execution failed");
36
37 println!("Result: {:?}", result);
38}
More examples
examples/parallel_example.rs (line 18)
17async fn main() -> Result<(), RunError> {
18 let store = PromptStore::init()?;
19
20 let openai_llm = LLMBuilder::new()
21 .backend(LLMBackend::OpenAI)
22 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
23 .model("gpt-4o-mini")
24 .build()
25 .unwrap();
26
27 let mut registry = LLMRegistry::new();
28 registry.insert("openai", openai_llm);
29
30 let user_query = "Rust is a systems programming language focused on safety, speed, and concurrency. It achieves these goals without a garbage collector, using a unique ownership model with a borrow checker.";
31
32 println!("--- Running Advanced Chain ---");
33
34 let outputs = store
35 .chain(&registry)
36 // 1. First step runs sequentially
37 .step("topic", "Extract Topic")
38 .with_provider("openai")
39 // 2. These two steps run in parallel, as they only depend on the previous context
40 .parallel(|group| {
41 group
42 .step("summary", "Summarizer")
43 // This step will fail because the provider doesn't exist
44 .step("keywords", "Keyword Extractor")
45 .with_provider("failing_provider")
46 })
47 .with_provider("openai") // Default provider for the group
48 // 3. This is a fallback for the "keywords" step. It runs only if the main step fails.
49 .on_error_stored("Basic Keyword Extractor")
50 .with_provider("openai")
51 // 4. This step runs only if the summary contains the word "safety"
52 .step_if("tweet", "Generate Tweet", |ctx| {
53 ctx.get("summary")
54 .map_or(false, |s| s.to_lowercase().contains("safety"))
55 })
56 .with_provider("openai")
57 .vars([("query", user_query)])
58 .run()
59 .await?;
60
61 if let RunOutput::Chain(map) = outputs {
62 println!("\n--- Chain Execution Complete ---");
63 println!("\n[1] Topic: {}", map.get("topic").unwrap_or(&"N/A".into()));
64 println!(
65 "\n[2a] Summary: {}",
66 map.get("summary").unwrap_or(&"N/A".into())
67 );
68 println!(
69 "\n[2b] Keywords (used fallback): {}",
70 map.get("keywords").unwrap_or(&"N/A".into())
71 );
72
73 if let Some(tweet) = map.get("tweet") {
74 println!("\n[3] Conditional Tweet: {}", tweet);
75 } else {
76 println!("\n[3] Conditional Tweet: SKIPPED (condition not met)");
77 }
78 }
79
80 Ok(())
81}
Source
pub fn with_password(password: &str) -> Result<Self, StoreError>
pub fn with_password(password: &str) -> Result<Self, StoreError>
Initializes the PromptStore non-interactively with a password.
This is useful for server environments where interactive prompts are not possible. The password can be provided from an environment variable or a secret manager.
§Arguments
password - The password to decrypt the master key.
Examples found in repository?
examples/advanced_chain_example.rs (line 19)
16async fn main() -> Result<(), RunError> {
17 let password = std::env::var("PROMPT_STORE_PASSWORD")
18 .expect("PROMPT_STORE_PASSWORD must be set for this example.");
19 let store = PromptStore::with_password(&password)?;
20
21 let openai_llm = LLMBuilder::new()
22 .backend(LLMBackend::OpenAI)
23 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
24 .model("gpt-4o-mini")
25 .build()
26 .unwrap();
27
28 let mut registry = LLMRegistry::new();
29 registry.insert("openai", openai_llm);
30
31 // --- Test with negative feedback ---
32 println!("--- Testing with NEGATIVE feedback ---");
33 let user_feedback_negative = "The app keeps crashing, it's unusable!";
34 let outputs_neg = run_chain(&store, &registry, user_feedback_negative).await?;
35 if let RunOutput::Chain(map) = outputs_neg {
36 // We expect `negative_reply` to exist, but `positive_reply` should not.
37 assert!(map.contains_key("negative_reply"));
38 assert!(!map.contains_key("positive_reply"));
39 println!("\nFinal Response:\n{}", map.get("negative_reply").unwrap());
40 }
41
42 // --- Test with positive feedback ---
43 println!("\n--- Testing with POSITIVE feedback ---");
44 let user_feedback_positive = "I love the new update, it's so fast!";
45 let outputs_pos = run_chain(&store, &registry, user_feedback_positive).await?;
46 if let RunOutput::Chain(map) = outputs_pos {
47 // We expect `positive_reply` to exist, but `negative_reply` should not.
48 assert!(map.contains_key("positive_reply"));
49 assert!(!map.contains_key("negative_reply"));
50 println!("\nFinal Response:\n{}", map.get("positive_reply").unwrap());
51 }
52
53 Ok(())
54}More examples
examples/chain_example.rs (line 21)
16async fn main() -> Result<(), RunError> {
17 // 1. Initialize the store once. This loads keys and configuration.
18 // Or use with_password("password") to use a password to decrypt the vault of the prompts.
19 let password = std::env::var("PROMPT_STORE_PASSWORD")
20 .expect("PROMPT_STORE_PASSWORD must be set for this example.");
21 let store = PromptStore::with_password(&password)?;
22
23 // 2. Set up the LLM providers and a registry to hold them.
24 let openai_llm = LLMBuilder::new()
25 .backend(LLMBackend::OpenAI)
26 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
27 .model("gpt-4o-mini")
28 .max_tokens(1000)
29 .build()
30 .unwrap();
31
32 let anthropic_llm = LLMBuilder::new()
33 .backend(LLMBackend::Anthropic)
34 .api_key(std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY must be set"))
35 .model("claude-3-5-sonnet-20240620")
36 .max_tokens(1000)
37 .build()
38 .unwrap();
39
40 let mut registry = LLMRegistry::new();
41 registry.insert("openai_fast", openai_llm);
42 registry.insert("anthropic_strong", anthropic_llm);
43
44 // 3. Define and run the chain fluently, loading prompts from the store.
45 let user_question = "How does photosynthesis work at the molecular level?";
46
47 println!("Executing prompt chain for: \"{}\"", user_question);
48
49 let outputs = store
50 .chain(&registry) // Start a chain with the provider registry.
51 // Step 1: uses the prompt with id "9k6zezem".
52 // Its output will be available as the `{{analyse}}` variable.
53 .step("analyse", "9k6zezem")
54 .with_mode(MultiChainStepMode::Chat)
55 .with_provider("openai_fast")
56 // Step 2: uses the prompt with id "uetgwnq1".
57 // It implicitly uses the `{{analyse}}` output from the previous step.
58 .step("suggestions", "uetgwnq1")
59 .with_mode(MultiChainStepMode::Chat)
60 .with_provider("anthropic_strong")
61 // Step 3: uses the prompt with id "dkeodfyp".
62 // It can use both the initial `{{query}}` and `{{suggestions}}`.
63 .step("final_response", "dkeodfyp")
64 .with_mode(MultiChainStepMode::Chat)
65 .with_provider("anthropic_strong")
66 .step_raw(
67 "raw",
68 "Synthesize the following: {{final_response}} in 2 sentences.",
69 )
70 .with_mode(MultiChainStepMode::Chat)
71 .with_provider("anthropic_strong")
72 // Provide the initial variable for the first step.
73 .vars([("query", user_question)])
74 .run()
75 .await?;
76
77 // 4. Process the results.
78 if let RunOutput::Chain(map) = outputs {
79 println!("\n--- Chain Execution Complete ---");
80 println!(
81 "\n[✅] Final Answer (from 'final_response' step):\n{}",
82 map.get("final_response").unwrap_or(&"N/A".to_string())
83 );
84 println!("\n--- Intermediate Steps ---");
85 println!(
86 "\n[1] Analysis ('analyse'):\n{}",
87 map.get("analyse").unwrap_or(&"N/A".to_string())
88 );
89 println!(
90 "\n[2] Suggestions ('suggestions'):\n{}",
91 map.get("suggestions").unwrap_or(&"N/A".to_string())
92 );
93 println!(
94 "\n[3] Raw ('raw'):\n{}",
95 map.get("raw").unwrap_or(&"N/A".to_string())
96 );
97 }
98
99 Ok(())
100}
Source
pub fn prompt<'a>(&'a self, id_or_title: &'a str) -> PromptRunner<'a>
pub fn prompt<'a>(&'a self, id_or_title: &'a str) -> PromptRunner<'a>
Creates a runner for executing a single prompt.
§Arguments
id_or_title - The ID or exact title of the prompt to run.
Examples found in repository?
examples/simple_example.rs (line 30)
13async fn main() {
14 // Get the OpenAI API key from environment variables
15 let api_key = std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set");
16
17 // Configure the OpenAI LLM backend
18 let openai_llm = LLMBuilder::new()
19 .backend(LLMBackend::OpenAI)
20 .api_key(api_key)
21 .model("gpt-4o")
22 .build()
23 .unwrap();
24
25 // Initialize the prompt store
26 let store = PromptStore::init().unwrap();
27
28 // Execute a prompt with variables and get the result
29 let result = store
30 .prompt("prompt-store-example::nin5pgu6")
31 .vars([("name", "Alice")])
32 .backend(openai_llm.as_ref())
33 .run()
34 .await
35 .expect("Prompt execution failed");
36
37 println!("Result: {:?}", result);
38}
Source
pub fn chain<'a, B: Into<LLMBackendRef<'a>>>(
&'a self,
backend: B,
) -> ChainRunner<'a>
pub fn chain<'a, B: Into<LLMBackendRef<'a>>>( &'a self, backend: B, ) -> ChainRunner<'a>
Creates a runner to define and execute a chain of prompts.
§Arguments
backend - The LLM backend to use for the chain. This must be a type that can be converted into LLMBackendRef, typically a &LLMRegistry.
Examples found in repository?
examples/advanced_chain_example.rs (line 62)
56async fn run_chain(
57 store: &PromptStore,
58 registry: &LLMRegistry,
59 feedback: &str,
60) -> Result<RunOutput, RunError> {
61 store
62 .chain(registry)
63 // Step 1: Always run sentiment analysis.
64 .step("sentiment", "Sentiment Check")
65 .with_provider("openai")
66 // Step 2 (Conditional): Only run if the sentiment is "positive".
67 .step_if("positive_reply", "Positive Reply", |prev_outputs| {
68 matches!(prev_outputs.get("sentiment"), Some(s) if s.trim().eq_ignore_ascii_case("positive"))
69 })
70 .with_provider("openai")
71
72 // Step 3 (Conditional): Only run if the sentiment is "negative".
73 .step_if("negative_reply", "Negative Reply", |prev_outputs| {
74 matches!(prev_outputs.get("sentiment"), Some(s) if s.trim().eq_ignore_ascii_case("negative"))
75 })
76 .with_provider("openai")
77
78 .vars([("feedback", feedback)])
79 .run()
80 .await
81}More examples
examples/parallel_example.rs (line 35)
17async fn main() -> Result<(), RunError> {
18 let store = PromptStore::init()?;
19
20 let openai_llm = LLMBuilder::new()
21 .backend(LLMBackend::OpenAI)
22 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
23 .model("gpt-4o-mini")
24 .build()
25 .unwrap();
26
27 let mut registry = LLMRegistry::new();
28 registry.insert("openai", openai_llm);
29
30 let user_query = "Rust is a systems programming language focused on safety, speed, and concurrency. It achieves these goals without a garbage collector, using a unique ownership model with a borrow checker.";
31
32 println!("--- Running Advanced Chain ---");
33
34 let outputs = store
35 .chain(&registry)
36 // 1. First step runs sequentially
37 .step("topic", "Extract Topic")
38 .with_provider("openai")
39 // 2. These two steps run in parallel, as they only depend on the previous context
40 .parallel(|group| {
41 group
42 .step("summary", "Summarizer")
43 // This step will fail because the provider doesn't exist
44 .step("keywords", "Keyword Extractor")
45 .with_provider("failing_provider")
46 })
47 .with_provider("openai") // Default provider for the group
48 // 3. This is a fallback for the "keywords" step. It runs only if the main step fails.
49 .on_error_stored("Basic Keyword Extractor")
50 .with_provider("openai")
51 // 4. This step runs only if the summary contains the word "safety"
52 .step_if("tweet", "Generate Tweet", |ctx| {
53 ctx.get("summary")
54 .map_or(false, |s| s.to_lowercase().contains("safety"))
55 })
56 .with_provider("openai")
57 .vars([("query", user_query)])
58 .run()
59 .await?;
60
61 if let RunOutput::Chain(map) = outputs {
62 println!("\n--- Chain Execution Complete ---");
63 println!("\n[1] Topic: {}", map.get("topic").unwrap_or(&"N/A".into()));
64 println!(
65 "\n[2a] Summary: {}",
66 map.get("summary").unwrap_or(&"N/A".into())
67 );
68 println!(
69 "\n[2b] Keywords (used fallback): {}",
70 map.get("keywords").unwrap_or(&"N/A".into())
71 );
72
73 if let Some(tweet) = map.get("tweet") {
74 println!("\n[3] Conditional Tweet: {}", tweet);
75 } else {
76 println!("\n[3] Conditional Tweet: SKIPPED (condition not met)");
77 }
78 }
79
80 Ok(())
81}
examples/chain_example.rs (line 50)
16async fn main() -> Result<(), RunError> {
17 // 1. Initialize the store once. This loads keys and configuration.
18 // Or use with_password("password") to use a password to decrypt the vault of the prompts.
19 let password = std::env::var("PROMPT_STORE_PASSWORD")
20 .expect("PROMPT_STORE_PASSWORD must be set for this example.");
21 let store = PromptStore::with_password(&password)?;
22
23 // 2. Set up the LLM providers and a registry to hold them.
24 let openai_llm = LLMBuilder::new()
25 .backend(LLMBackend::OpenAI)
26 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
27 .model("gpt-4o-mini")
28 .max_tokens(1000)
29 .build()
30 .unwrap();
31
32 let anthropic_llm = LLMBuilder::new()
33 .backend(LLMBackend::Anthropic)
34 .api_key(std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY must be set"))
35 .model("claude-3-5-sonnet-20240620")
36 .max_tokens(1000)
37 .build()
38 .unwrap();
39
40 let mut registry = LLMRegistry::new();
41 registry.insert("openai_fast", openai_llm);
42 registry.insert("anthropic_strong", anthropic_llm);
43
44 // 3. Define and run the chain fluently, loading prompts from the store.
45 let user_question = "How does photosynthesis work at the molecular level?";
46
47 println!("Executing prompt chain for: \"{}\"", user_question);
48
49 let outputs = store
50 .chain(&registry) // Start a chain with the provider registry.
51 // Step 1: uses the prompt with id "9k6zezem".
52 // Its output will be available as the `{{analyse}}` variable.
53 .step("analyse", "9k6zezem")
54 .with_mode(MultiChainStepMode::Chat)
55 .with_provider("openai_fast")
56 // Step 2: uses the prompt with id "uetgwnq1".
57 // It implicitly uses the `{{analyse}}` output from the previous step.
58 .step("suggestions", "uetgwnq1")
59 .with_mode(MultiChainStepMode::Chat)
60 .with_provider("anthropic_strong")
61 // Step 3: uses the prompt with id "dkeodfyp".
62 // It can use both the initial `{{query}}` and `{{suggestions}}`.
63 .step("final_response", "dkeodfyp")
64 .with_mode(MultiChainStepMode::Chat)
65 .with_provider("anthropic_strong")
66 .step_raw(
67 "raw",
68 "Synthesize the following: {{final_response}} in 2 sentences.",
69 )
70 .with_mode(MultiChainStepMode::Chat)
71 .with_provider("anthropic_strong")
72 // Provide the initial variable for the first step.
73 .vars([("query", user_question)])
74 .run()
75 .await?;
76
77 // 4. Process the results.
78 if let RunOutput::Chain(map) = outputs {
79 println!("\n--- Chain Execution Complete ---");
80 println!(
81 "\n[✅] Final Answer (from 'final_response' step):\n{}",
82 map.get("final_response").unwrap_or(&"N/A".to_string())
83 );
84 println!("\n--- Intermediate Steps ---");
85 println!(
86 "\n[1] Analysis ('analyse'):\n{}",
87 map.get("analyse").unwrap_or(&"N/A".to_string())
88 );
89 println!(
90 "\n[2] Suggestions ('suggestions'):\n{}",
91 map.get("suggestions").unwrap_or(&"N/A".to_string())
92 );
93 println!(
94 "\n[3] Raw ('raw'):\n{}",
95 map.get("raw").unwrap_or(&"N/A".to_string())
96 );
97 }
98
99 Ok(())
100}
Auto Trait Implementations§
impl Freeze for PromptStore
impl RefUnwindSafe for PromptStore
impl Send for PromptStore
impl Sync for PromptStore
impl Unpin for PromptStore
impl UnwindSafe for PromptStore
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T where
T: ?Sized,
impl<T> BorrowMut<T> for T where
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> Downcast for T where
T: Any,
impl<T> Downcast for T where
T: Any,
Source§fn into_any(self: Box<T>) -> Box<dyn Any>
fn into_any(self: Box<T>) -> Box<dyn Any>
Convert
Box<dyn Trait> (where Trait: Downcast) to Box<dyn Any>. Box<dyn Any> can
then be further downcast into Box<ConcreteType> where ConcreteType implements Trait.
Source§fn into_any_rc(self: Rc<T>) -> Rc<dyn Any>
fn into_any_rc(self: Rc<T>) -> Rc<dyn Any>
Convert
Rc<Trait> (where Trait: Downcast) to Rc<Any>. Rc<Any> can then be
further downcast into Rc<ConcreteType> where ConcreteType implements Trait.
Source§fn as_any(&self) -> &(dyn Any + 'static)
fn as_any(&self) -> &(dyn Any + 'static)
Convert
&Trait (where Trait: Downcast) to &Any. This is needed since Rust cannot
generate &Any’s vtable from &Trait’s.
Source§fn as_any_mut(&mut self) -> &mut (dyn Any + 'static)
fn as_any_mut(&mut self) -> &mut (dyn Any + 'static)
Convert
&mut Trait (where Trait: Downcast) to &Any. This is needed since Rust cannot
generate &mut Any’s vtable from &mut Trait’s.
Source§impl<T> DowncastSync for T
impl<T> DowncastSync for T
Source§impl<T> Instrument for T
impl<T> Instrument for T
Source§fn instrument(self, span: Span) -> Instrumented<Self>
fn instrument(self, span: Span) -> Instrumented<Self>
Source§fn in_current_span(self) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts
self into a Left variant of Either<Self, Self>
if into_left is true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more
Source§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts
self into a Left variant of Either<Self, Self>
if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more