pub struct ChainRunner<'a> { /* private fields */ }
Expand description
A fluent builder to define and execute a multi-step prompt chain.
Implementations§
Source§impl<'a> ChainRunner<'a>
impl<'a> ChainRunner<'a>
Sourcepub fn step(self, output_key: &str, prompt_id_or_title: &str) -> Self
pub fn step(self, output_key: &str, prompt_id_or_title: &str) -> Self
Adds a sequential step from the store.
Examples found in repository?
examples/advanced_chain_example.rs (line 64)
56async fn run_chain(
57 store: &PromptStore,
58 registry: &LLMRegistry,
59 feedback: &str,
60) -> Result<RunOutput, RunError> {
61 store
62 .chain(registry)
63 // Step 1: Always run sentiment analysis.
64 .step("sentiment", "Sentiment Check")
65 .with_provider("openai")
66 // Step 2 (Conditional): Only run if the sentiment is "positive".
67 .step_if("positive_reply", "Positive Reply", |prev_outputs| {
68 matches!(prev_outputs.get("sentiment"), Some(s) if s.trim().eq_ignore_ascii_case("positive"))
69 })
70 .with_provider("openai")
71
72 // Step 3 (Conditional): Only run if the sentiment is "negative".
73 .step_if("negative_reply", "Negative Reply", |prev_outputs| {
74 matches!(prev_outputs.get("sentiment"), Some(s) if s.trim().eq_ignore_ascii_case("negative"))
75 })
76 .with_provider("openai")
77
78 .vars([("feedback", feedback)])
79 .run()
80 .await
81}
More examples
examples/parallel_example.rs (line 37)
17async fn main() -> Result<(), RunError> {
18 let store = PromptStore::init()?;
19
20 let openai_llm = LLMBuilder::new()
21 .backend(LLMBackend::OpenAI)
22 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
23 .model("gpt-4o-mini")
24 .build()
25 .unwrap();
26
27 let mut registry = LLMRegistry::new();
28 registry.insert("openai", openai_llm);
29
30 let user_query = "Rust is a systems programming language focused on safety, speed, and concurrency. It achieves these goals without a garbage collector, using a unique ownership model with a borrow checker.";
31
32 println!("--- Running Advanced Chain ---");
33
34 let outputs = store
35        .chain(&registry)
36 // 1. First step runs sequentially
37 .step("topic", "Extract Topic")
38 .with_provider("openai")
39 // 2. These two steps run in parallel, as they only depend on the previous context
40 .parallel(|group| {
41 group
42 .step("summary", "Summarizer")
43 // This step will fail because the provider doesn't exist
44 .step("keywords", "Keyword Extractor")
45 .with_provider("failing_provider")
46 })
47 .with_provider("openai") // Default provider for the group
48 // 3. This is a fallback for the "keywords" step. It runs only if the main step fails.
49 .on_error_stored("Basic Keyword Extractor")
50 .with_provider("openai")
51 // 4. This step runs only if the summary contains the word "safety"
52 .step_if("tweet", "Generate Tweet", |ctx| {
53 ctx.get("summary")
54 .map_or(false, |s| s.to_lowercase().contains("safety"))
55 })
56 .with_provider("openai")
57 .vars([("query", user_query)])
58 .run()
59 .await?;
60
61 if let RunOutput::Chain(map) = outputs {
62 println!("\n--- Chain Execution Complete ---");
63 println!("\n[1] Topic: {}", map.get("topic").unwrap_or(&"N/A".into()));
64 println!(
65 "\n[2a] Summary: {}",
66 map.get("summary").unwrap_or(&"N/A".into())
67 );
68 println!(
69 "\n[2b] Keywords (used fallback): {}",
70 map.get("keywords").unwrap_or(&"N/A".into())
71 );
72
73 if let Some(tweet) = map.get("tweet") {
74 println!("\n[3] Conditional Tweet: {}", tweet);
75 } else {
76 println!("\n[3] Conditional Tweet: SKIPPED (condition not met)");
77 }
78 }
79
80 Ok(())
81}
examples/chain_example.rs (line 53)
16async fn main() -> Result<(), RunError> {
17 // 1. Initialize the store once. This loads keys and configuration.
18 // Or use with_password("password") to use a password to decrypt the vault of the prompts.
19 let password = std::env::var("PROMPT_STORE_PASSWORD")
20 .expect("PROMPT_STORE_PASSWORD must be set for this example.");
21 let store = PromptStore::with_password(&password)?;
22
23 // 2. Set up the LLM providers and a registry to hold them.
24 let openai_llm = LLMBuilder::new()
25 .backend(LLMBackend::OpenAI)
26 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
27 .model("gpt-4o-mini")
28 .max_tokens(1000)
29 .build()
30 .unwrap();
31
32 let anthropic_llm = LLMBuilder::new()
33 .backend(LLMBackend::Anthropic)
34 .api_key(std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY must be set"))
35 .model("claude-3-5-sonnet-20240620")
36 .max_tokens(1000)
37 .build()
38 .unwrap();
39
40 let mut registry = LLMRegistry::new();
41 registry.insert("openai_fast", openai_llm);
42 registry.insert("anthropic_strong", anthropic_llm);
43
44 // 3. Define and run the chain fluently, loading prompts from the store.
45 let user_question = "How does photosynthesis work at the molecular level?";
46
47 println!("Executing prompt chain for: \"{}\"", user_question);
48
49 let outputs = store
50        .chain(&registry) // Start a chain with the provider registry.
51 // Step 1: uses the prompt with id "9k6zezem".
52 // Its output will be available as the `{{analyse}}` variable.
53 .step("analyse", "9k6zezem")
54 .with_mode(MultiChainStepMode::Chat)
55 .with_provider("openai_fast")
56 // Step 2: uses the prompt with id "uetgwnq1".
57 // It implicitly uses the `{{analyse}}` output from the previous step.
58 .step("suggestions", "uetgwnq1")
59 .with_mode(MultiChainStepMode::Chat)
60 .with_provider("anthropic_strong")
61 // Step 3: uses the prompt with id "dkeodfyp".
62 // It can use both the initial `{{query}}` and `{{suggestions}}`.
63 .step("final_response", "dkeodfyp")
64 .with_mode(MultiChainStepMode::Chat)
65 .with_provider("anthropic_strong")
66 .step_raw(
67 "raw",
68 "Synthesize the following: {{final_response}} in 2 sentences.",
69 )
70 .with_mode(MultiChainStepMode::Chat)
71 .with_provider("anthropic_strong")
72 // Provide the initial variable for the first step.
73 .vars([("query", user_question)])
74 .run()
75 .await?;
76
77 // 4. Process the results.
78 if let RunOutput::Chain(map) = outputs {
79 println!("\n--- Chain Execution Complete ---");
80 println!(
81 "\n[✅] Final Answer (from 'final_response' step):\n{}",
82 map.get("final_response").unwrap_or(&"N/A".to_string())
83 );
84 println!("\n--- Intermediate Steps ---");
85 println!(
86 "\n[1] Analysis ('analyse'):\n{}",
87 map.get("analyse").unwrap_or(&"N/A".to_string())
88 );
89 println!(
90 "\n[2] Suggestions ('suggestions'):\n{}",
91 map.get("suggestions").unwrap_or(&"N/A".to_string())
92 );
93 println!(
94 "\n[3] Raw ('raw'):\n{}",
95 map.get("raw").unwrap_or(&"N/A".to_string())
96 );
97 }
98
99 Ok(())
100}
Source
pub fn step_raw(self, output_key: &str, prompt_content: &str) -> Self
pub fn step_raw(self, output_key: &str, prompt_content: &str) -> Self
Adds a sequential step with a raw prompt.
Examples found in repository?
examples/chain_example.rs (lines 66-69)
16async fn main() -> Result<(), RunError> {
17 // 1. Initialize the store once. This loads keys and configuration.
18 // Or use with_password("password") to use a password to decrypt the vault of the prompts.
19 let password = std::env::var("PROMPT_STORE_PASSWORD")
20 .expect("PROMPT_STORE_PASSWORD must be set for this example.");
21 let store = PromptStore::with_password(&password)?;
22
23 // 2. Set up the LLM providers and a registry to hold them.
24 let openai_llm = LLMBuilder::new()
25 .backend(LLMBackend::OpenAI)
26 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
27 .model("gpt-4o-mini")
28 .max_tokens(1000)
29 .build()
30 .unwrap();
31
32 let anthropic_llm = LLMBuilder::new()
33 .backend(LLMBackend::Anthropic)
34 .api_key(std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY must be set"))
35 .model("claude-3-5-sonnet-20240620")
36 .max_tokens(1000)
37 .build()
38 .unwrap();
39
40 let mut registry = LLMRegistry::new();
41 registry.insert("openai_fast", openai_llm);
42 registry.insert("anthropic_strong", anthropic_llm);
43
44 // 3. Define and run the chain fluently, loading prompts from the store.
45 let user_question = "How does photosynthesis work at the molecular level?";
46
47 println!("Executing prompt chain for: \"{}\"", user_question);
48
49 let outputs = store
50        .chain(&registry) // Start a chain with the provider registry.
51 // Step 1: uses the prompt with id "9k6zezem".
52 // Its output will be available as the `{{analyse}}` variable.
53 .step("analyse", "9k6zezem")
54 .with_mode(MultiChainStepMode::Chat)
55 .with_provider("openai_fast")
56 // Step 2: uses the prompt with id "uetgwnq1".
57 // It implicitly uses the `{{analyse}}` output from the previous step.
58 .step("suggestions", "uetgwnq1")
59 .with_mode(MultiChainStepMode::Chat)
60 .with_provider("anthropic_strong")
61 // Step 3: uses the prompt with id "dkeodfyp".
62 // It can use both the initial `{{query}}` and `{{suggestions}}`.
63 .step("final_response", "dkeodfyp")
64 .with_mode(MultiChainStepMode::Chat)
65 .with_provider("anthropic_strong")
66 .step_raw(
67 "raw",
68 "Synthesize the following: {{final_response}} in 2 sentences.",
69 )
70 .with_mode(MultiChainStepMode::Chat)
71 .with_provider("anthropic_strong")
72 // Provide the initial variable for the first step.
73 .vars([("query", user_question)])
74 .run()
75 .await?;
76
77 // 4. Process the results.
78 if let RunOutput::Chain(map) = outputs {
79 println!("\n--- Chain Execution Complete ---");
80 println!(
81 "\n[✅] Final Answer (from 'final_response' step):\n{}",
82 map.get("final_response").unwrap_or(&"N/A".to_string())
83 );
84 println!("\n--- Intermediate Steps ---");
85 println!(
86 "\n[1] Analysis ('analyse'):\n{}",
87 map.get("analyse").unwrap_or(&"N/A".to_string())
88 );
89 println!(
90 "\n[2] Suggestions ('suggestions'):\n{}",
91 map.get("suggestions").unwrap_or(&"N/A".to_string())
92 );
93 println!(
94 "\n[3] Raw ('raw'):\n{}",
95 map.get("raw").unwrap_or(&"N/A".to_string())
96 );
97 }
98
99 Ok(())
100}
Source
pub fn step_if<F>(
self,
output_key: &str,
prompt_id_or_title: &str,
condition: F,
) -> Self
pub fn step_if<F>( self, output_key: &str, prompt_id_or_title: &str, condition: F, ) -> Self
Adds a conditional step from the store. It runs only if the condition is met.
Examples found in repository?
examples/advanced_chain_example.rs (lines 67-69)
56async fn run_chain(
57 store: &PromptStore,
58 registry: &LLMRegistry,
59 feedback: &str,
60) -> Result<RunOutput, RunError> {
61 store
62 .chain(registry)
63 // Step 1: Always run sentiment analysis.
64 .step("sentiment", "Sentiment Check")
65 .with_provider("openai")
66 // Step 2 (Conditional): Only run if the sentiment is "positive".
67 .step_if("positive_reply", "Positive Reply", |prev_outputs| {
68 matches!(prev_outputs.get("sentiment"), Some(s) if s.trim().eq_ignore_ascii_case("positive"))
69 })
70 .with_provider("openai")
71
72 // Step 3 (Conditional): Only run if the sentiment is "negative".
73 .step_if("negative_reply", "Negative Reply", |prev_outputs| {
74 matches!(prev_outputs.get("sentiment"), Some(s) if s.trim().eq_ignore_ascii_case("negative"))
75 })
76 .with_provider("openai")
77
78 .vars([("feedback", feedback)])
79 .run()
80 .await
81}
More examples
examples/parallel_example.rs (lines 52-55)
17async fn main() -> Result<(), RunError> {
18 let store = PromptStore::init()?;
19
20 let openai_llm = LLMBuilder::new()
21 .backend(LLMBackend::OpenAI)
22 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
23 .model("gpt-4o-mini")
24 .build()
25 .unwrap();
26
27 let mut registry = LLMRegistry::new();
28 registry.insert("openai", openai_llm);
29
30 let user_query = "Rust is a systems programming language focused on safety, speed, and concurrency. It achieves these goals without a garbage collector, using a unique ownership model with a borrow checker.";
31
32 println!("--- Running Advanced Chain ---");
33
34 let outputs = store
35        .chain(&registry)
36 // 1. First step runs sequentially
37 .step("topic", "Extract Topic")
38 .with_provider("openai")
39 // 2. These two steps run in parallel, as they only depend on the previous context
40 .parallel(|group| {
41 group
42 .step("summary", "Summarizer")
43 // This step will fail because the provider doesn't exist
44 .step("keywords", "Keyword Extractor")
45 .with_provider("failing_provider")
46 })
47 .with_provider("openai") // Default provider for the group
48 // 3. This is a fallback for the "keywords" step. It runs only if the main step fails.
49 .on_error_stored("Basic Keyword Extractor")
50 .with_provider("openai")
51 // 4. This step runs only if the summary contains the word "safety"
52 .step_if("tweet", "Generate Tweet", |ctx| {
53 ctx.get("summary")
54 .map_or(false, |s| s.to_lowercase().contains("safety"))
55 })
56 .with_provider("openai")
57 .vars([("query", user_query)])
58 .run()
59 .await?;
60
61 if let RunOutput::Chain(map) = outputs {
62 println!("\n--- Chain Execution Complete ---");
63 println!("\n[1] Topic: {}", map.get("topic").unwrap_or(&"N/A".into()));
64 println!(
65 "\n[2a] Summary: {}",
66 map.get("summary").unwrap_or(&"N/A".into())
67 );
68 println!(
69 "\n[2b] Keywords (used fallback): {}",
70 map.get("keywords").unwrap_or(&"N/A".into())
71 );
72
73 if let Some(tweet) = map.get("tweet") {
74 println!("\n[3] Conditional Tweet: {}", tweet);
75 } else {
76 println!("\n[3] Conditional Tweet: SKIPPED (condition not met)");
77 }
78 }
79
80 Ok(())
81}
Source
pub fn parallel<F>(self, build_group: F) -> Self
where
F: for<'b> FnOnce(ParallelGroupBuilder<'b>) -> ParallelGroupBuilder<'b>,
pub fn parallel<F>(self, build_group: F) -> Self
where
F: for<'b> FnOnce(ParallelGroupBuilder<'b>) -> ParallelGroupBuilder<'b>,
Adds a group of steps that will be executed in parallel.
Examples found in repository?
examples/parallel_example.rs (lines 40-46)
17async fn main() -> Result<(), RunError> {
18 let store = PromptStore::init()?;
19
20 let openai_llm = LLMBuilder::new()
21 .backend(LLMBackend::OpenAI)
22 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
23 .model("gpt-4o-mini")
24 .build()
25 .unwrap();
26
27 let mut registry = LLMRegistry::new();
28 registry.insert("openai", openai_llm);
29
30 let user_query = "Rust is a systems programming language focused on safety, speed, and concurrency. It achieves these goals without a garbage collector, using a unique ownership model with a borrow checker.";
31
32 println!("--- Running Advanced Chain ---");
33
34 let outputs = store
35        .chain(&registry)
36 // 1. First step runs sequentially
37 .step("topic", "Extract Topic")
38 .with_provider("openai")
39 // 2. These two steps run in parallel, as they only depend on the previous context
40 .parallel(|group| {
41 group
42 .step("summary", "Summarizer")
43 // This step will fail because the provider doesn't exist
44 .step("keywords", "Keyword Extractor")
45 .with_provider("failing_provider")
46 })
47 .with_provider("openai") // Default provider for the group
48 // 3. This is a fallback for the "keywords" step. It runs only if the main step fails.
49 .on_error_stored("Basic Keyword Extractor")
50 .with_provider("openai")
51 // 4. This step runs only if the summary contains the word "safety"
52 .step_if("tweet", "Generate Tweet", |ctx| {
53 ctx.get("summary")
54 .map_or(false, |s| s.to_lowercase().contains("safety"))
55 })
56 .with_provider("openai")
57 .vars([("query", user_query)])
58 .run()
59 .await?;
60
61 if let RunOutput::Chain(map) = outputs {
62 println!("\n--- Chain Execution Complete ---");
63 println!("\n[1] Topic: {}", map.get("topic").unwrap_or(&"N/A".into()));
64 println!(
65 "\n[2a] Summary: {}",
66 map.get("summary").unwrap_or(&"N/A".into())
67 );
68 println!(
69 "\n[2b] Keywords (used fallback): {}",
70 map.get("keywords").unwrap_or(&"N/A".into())
71 );
72
73 if let Some(tweet) = map.get("tweet") {
74 println!("\n[3] Conditional Tweet: {}", tweet);
75 } else {
76 println!("\n[3] Conditional Tweet: SKIPPED (condition not met)");
77 }
78 }
79
80 Ok(())
81}
Source
pub fn on_error_stored(self, fallback_id_or_title: &str) -> Self
pub fn on_error_stored(self, fallback_id_or_title: &str) -> Self
Sets a fallback prompt from the store for the last added step. This is executed if the primary prompt execution fails.
Examples found in repository?
examples/parallel_example.rs (line 49)
17async fn main() -> Result<(), RunError> {
18 let store = PromptStore::init()?;
19
20 let openai_llm = LLMBuilder::new()
21 .backend(LLMBackend::OpenAI)
22 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
23 .model("gpt-4o-mini")
24 .build()
25 .unwrap();
26
27 let mut registry = LLMRegistry::new();
28 registry.insert("openai", openai_llm);
29
30 let user_query = "Rust is a systems programming language focused on safety, speed, and concurrency. It achieves these goals without a garbage collector, using a unique ownership model with a borrow checker.";
31
32 println!("--- Running Advanced Chain ---");
33
34 let outputs = store
35        .chain(&registry)
36 // 1. First step runs sequentially
37 .step("topic", "Extract Topic")
38 .with_provider("openai")
39 // 2. These two steps run in parallel, as they only depend on the previous context
40 .parallel(|group| {
41 group
42 .step("summary", "Summarizer")
43 // This step will fail because the provider doesn't exist
44 .step("keywords", "Keyword Extractor")
45 .with_provider("failing_provider")
46 })
47 .with_provider("openai") // Default provider for the group
48 // 3. This is a fallback for the "keywords" step. It runs only if the main step fails.
49 .on_error_stored("Basic Keyword Extractor")
50 .with_provider("openai")
51 // 4. This step runs only if the summary contains the word "safety"
52 .step_if("tweet", "Generate Tweet", |ctx| {
53 ctx.get("summary")
54 .map_or(false, |s| s.to_lowercase().contains("safety"))
55 })
56 .with_provider("openai")
57 .vars([("query", user_query)])
58 .run()
59 .await?;
60
61 if let RunOutput::Chain(map) = outputs {
62 println!("\n--- Chain Execution Complete ---");
63 println!("\n[1] Topic: {}", map.get("topic").unwrap_or(&"N/A".into()));
64 println!(
65 "\n[2a] Summary: {}",
66 map.get("summary").unwrap_or(&"N/A".into())
67 );
68 println!(
69 "\n[2b] Keywords (used fallback): {}",
70 map.get("keywords").unwrap_or(&"N/A".into())
71 );
72
73 if let Some(tweet) = map.get("tweet") {
74 println!("\n[3] Conditional Tweet: {}", tweet);
75 } else {
76 println!("\n[3] Conditional Tweet: SKIPPED (condition not met)");
77 }
78 }
79
80 Ok(())
81}
Source
pub fn on_error_raw(self, fallback_content: &str) -> Self
pub fn on_error_raw(self, fallback_content: &str) -> Self
Sets a raw fallback prompt for the last added step.
Sourcepub fn with_provider(self, provider_id: &str) -> Self
pub fn with_provider(self, provider_id: &str) -> Self
Specifies the provider for the last added step or all steps in the last parallel group.
Examples found in repository?
examples/advanced_chain_example.rs (line 65)
56async fn run_chain(
57 store: &PromptStore,
58 registry: &LLMRegistry,
59 feedback: &str,
60) -> Result<RunOutput, RunError> {
61 store
62 .chain(registry)
63 // Step 1: Always run sentiment analysis.
64 .step("sentiment", "Sentiment Check")
65 .with_provider("openai")
66 // Step 2 (Conditional): Only run if the sentiment is "positive".
67 .step_if("positive_reply", "Positive Reply", |prev_outputs| {
68 matches!(prev_outputs.get("sentiment"), Some(s) if s.trim().eq_ignore_ascii_case("positive"))
69 })
70 .with_provider("openai")
71
72 // Step 3 (Conditional): Only run if the sentiment is "negative".
73 .step_if("negative_reply", "Negative Reply", |prev_outputs| {
74 matches!(prev_outputs.get("sentiment"), Some(s) if s.trim().eq_ignore_ascii_case("negative"))
75 })
76 .with_provider("openai")
77
78 .vars([("feedback", feedback)])
79 .run()
80 .await
81}
More examples
examples/parallel_example.rs (line 38)
17async fn main() -> Result<(), RunError> {
18 let store = PromptStore::init()?;
19
20 let openai_llm = LLMBuilder::new()
21 .backend(LLMBackend::OpenAI)
22 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
23 .model("gpt-4o-mini")
24 .build()
25 .unwrap();
26
27 let mut registry = LLMRegistry::new();
28 registry.insert("openai", openai_llm);
29
30 let user_query = "Rust is a systems programming language focused on safety, speed, and concurrency. It achieves these goals without a garbage collector, using a unique ownership model with a borrow checker.";
31
32 println!("--- Running Advanced Chain ---");
33
34 let outputs = store
35        .chain(&registry)
36 // 1. First step runs sequentially
37 .step("topic", "Extract Topic")
38 .with_provider("openai")
39 // 2. These two steps run in parallel, as they only depend on the previous context
40 .parallel(|group| {
41 group
42 .step("summary", "Summarizer")
43 // This step will fail because the provider doesn't exist
44 .step("keywords", "Keyword Extractor")
45 .with_provider("failing_provider")
46 })
47 .with_provider("openai") // Default provider for the group
48 // 3. This is a fallback for the "keywords" step. It runs only if the main step fails.
49 .on_error_stored("Basic Keyword Extractor")
50 .with_provider("openai")
51 // 4. This step runs only if the summary contains the word "safety"
52 .step_if("tweet", "Generate Tweet", |ctx| {
53 ctx.get("summary")
54 .map_or(false, |s| s.to_lowercase().contains("safety"))
55 })
56 .with_provider("openai")
57 .vars([("query", user_query)])
58 .run()
59 .await?;
60
61 if let RunOutput::Chain(map) = outputs {
62 println!("\n--- Chain Execution Complete ---");
63 println!("\n[1] Topic: {}", map.get("topic").unwrap_or(&"N/A".into()));
64 println!(
65 "\n[2a] Summary: {}",
66 map.get("summary").unwrap_or(&"N/A".into())
67 );
68 println!(
69 "\n[2b] Keywords (used fallback): {}",
70 map.get("keywords").unwrap_or(&"N/A".into())
71 );
72
73 if let Some(tweet) = map.get("tweet") {
74 println!("\n[3] Conditional Tweet: {}", tweet);
75 } else {
76 println!("\n[3] Conditional Tweet: SKIPPED (condition not met)");
77 }
78 }
79
80 Ok(())
81}
examples/chain_example.rs (line 55)
16async fn main() -> Result<(), RunError> {
17 // 1. Initialize the store once. This loads keys and configuration.
18 // Or use with_password("password") to use a password to decrypt the vault of the prompts.
19 let password = std::env::var("PROMPT_STORE_PASSWORD")
20 .expect("PROMPT_STORE_PASSWORD must be set for this example.");
21 let store = PromptStore::with_password(&password)?;
22
23 // 2. Set up the LLM providers and a registry to hold them.
24 let openai_llm = LLMBuilder::new()
25 .backend(LLMBackend::OpenAI)
26 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
27 .model("gpt-4o-mini")
28 .max_tokens(1000)
29 .build()
30 .unwrap();
31
32 let anthropic_llm = LLMBuilder::new()
33 .backend(LLMBackend::Anthropic)
34 .api_key(std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY must be set"))
35 .model("claude-3-5-sonnet-20240620")
36 .max_tokens(1000)
37 .build()
38 .unwrap();
39
40 let mut registry = LLMRegistry::new();
41 registry.insert("openai_fast", openai_llm);
42 registry.insert("anthropic_strong", anthropic_llm);
43
44 // 3. Define and run the chain fluently, loading prompts from the store.
45 let user_question = "How does photosynthesis work at the molecular level?";
46
47 println!("Executing prompt chain for: \"{}\"", user_question);
48
49 let outputs = store
50        .chain(&registry) // Start a chain with the provider registry.
51 // Step 1: uses the prompt with id "9k6zezem".
52 // Its output will be available as the `{{analyse}}` variable.
53 .step("analyse", "9k6zezem")
54 .with_mode(MultiChainStepMode::Chat)
55 .with_provider("openai_fast")
56 // Step 2: uses the prompt with id "uetgwnq1".
57 // It implicitly uses the `{{analyse}}` output from the previous step.
58 .step("suggestions", "uetgwnq1")
59 .with_mode(MultiChainStepMode::Chat)
60 .with_provider("anthropic_strong")
61 // Step 3: uses the prompt with id "dkeodfyp".
62 // It can use both the initial `{{query}}` and `{{suggestions}}`.
63 .step("final_response", "dkeodfyp")
64 .with_mode(MultiChainStepMode::Chat)
65 .with_provider("anthropic_strong")
66 .step_raw(
67 "raw",
68 "Synthesize the following: {{final_response}} in 2 sentences.",
69 )
70 .with_mode(MultiChainStepMode::Chat)
71 .with_provider("anthropic_strong")
72 // Provide the initial variable for the first step.
73 .vars([("query", user_question)])
74 .run()
75 .await?;
76
77 // 4. Process the results.
78 if let RunOutput::Chain(map) = outputs {
79 println!("\n--- Chain Execution Complete ---");
80 println!(
81 "\n[✅] Final Answer (from 'final_response' step):\n{}",
82 map.get("final_response").unwrap_or(&"N/A".to_string())
83 );
84 println!("\n--- Intermediate Steps ---");
85 println!(
86 "\n[1] Analysis ('analyse'):\n{}",
87 map.get("analyse").unwrap_or(&"N/A".to_string())
88 );
89 println!(
90 "\n[2] Suggestions ('suggestions'):\n{}",
91 map.get("suggestions").unwrap_or(&"N/A".to_string())
92 );
93 println!(
94 "\n[3] Raw ('raw'):\n{}",
95 map.get("raw").unwrap_or(&"N/A".to_string())
96 );
97 }
98
99 Ok(())
100}
Source
pub fn with_mode(self, mode: MultiChainStepMode) -> Self
pub fn with_mode(self, mode: MultiChainStepMode) -> Self
Sets the execution mode for the last added step.
Examples found in repository?
examples/chain_example.rs (line 54)
16async fn main() -> Result<(), RunError> {
17 // 1. Initialize the store once. This loads keys and configuration.
18 // Or use with_password("password") to use a password to decrypt the vault of the prompts.
19 let password = std::env::var("PROMPT_STORE_PASSWORD")
20 .expect("PROMPT_STORE_PASSWORD must be set for this example.");
21 let store = PromptStore::with_password(&password)?;
22
23 // 2. Set up the LLM providers and a registry to hold them.
24 let openai_llm = LLMBuilder::new()
25 .backend(LLMBackend::OpenAI)
26 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
27 .model("gpt-4o-mini")
28 .max_tokens(1000)
29 .build()
30 .unwrap();
31
32 let anthropic_llm = LLMBuilder::new()
33 .backend(LLMBackend::Anthropic)
34 .api_key(std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY must be set"))
35 .model("claude-3-5-sonnet-20240620")
36 .max_tokens(1000)
37 .build()
38 .unwrap();
39
40 let mut registry = LLMRegistry::new();
41 registry.insert("openai_fast", openai_llm);
42 registry.insert("anthropic_strong", anthropic_llm);
43
44 // 3. Define and run the chain fluently, loading prompts from the store.
45 let user_question = "How does photosynthesis work at the molecular level?";
46
47 println!("Executing prompt chain for: \"{}\"", user_question);
48
49 let outputs = store
50        .chain(&registry) // Start a chain with the provider registry.
51 // Step 1: uses the prompt with id "9k6zezem".
52 // Its output will be available as the `{{analyse}}` variable.
53 .step("analyse", "9k6zezem")
54 .with_mode(MultiChainStepMode::Chat)
55 .with_provider("openai_fast")
56 // Step 2: uses the prompt with id "uetgwnq1".
57 // It implicitly uses the `{{analyse}}` output from the previous step.
58 .step("suggestions", "uetgwnq1")
59 .with_mode(MultiChainStepMode::Chat)
60 .with_provider("anthropic_strong")
61 // Step 3: uses the prompt with id "dkeodfyp".
62 // It can use both the initial `{{query}}` and `{{suggestions}}`.
63 .step("final_response", "dkeodfyp")
64 .with_mode(MultiChainStepMode::Chat)
65 .with_provider("anthropic_strong")
66 .step_raw(
67 "raw",
68 "Synthesize the following: {{final_response}} in 2 sentences.",
69 )
70 .with_mode(MultiChainStepMode::Chat)
71 .with_provider("anthropic_strong")
72 // Provide the initial variable for the first step.
73 .vars([("query", user_question)])
74 .run()
75 .await?;
76
77 // 4. Process the results.
78 if let RunOutput::Chain(map) = outputs {
79 println!("\n--- Chain Execution Complete ---");
80 println!(
81 "\n[✅] Final Answer (from 'final_response' step):\n{}",
82 map.get("final_response").unwrap_or(&"N/A".to_string())
83 );
84 println!("\n--- Intermediate Steps ---");
85 println!(
86 "\n[1] Analysis ('analyse'):\n{}",
87 map.get("analyse").unwrap_or(&"N/A".to_string())
88 );
89 println!(
90 "\n[2] Suggestions ('suggestions'):\n{}",
91 map.get("suggestions").unwrap_or(&"N/A".to_string())
92 );
93 println!(
94 "\n[3] Raw ('raw'):\n{}",
95 map.get("raw").unwrap_or(&"N/A".to_string())
96 );
97 }
98
99 Ok(())
100}
Source
pub fn vars(
self,
vars: impl IntoIterator<Item = (impl Into<String>, impl Into<String>)>,
) -> Self
pub fn vars( self, vars: impl IntoIterator<Item = (impl Into<String>, impl Into<String>)>, ) -> Self
Sets initial variables for the chain.
Examples found in repository?
examples/advanced_chain_example.rs (line 78)
56async fn run_chain(
57 store: &PromptStore,
58 registry: &LLMRegistry,
59 feedback: &str,
60) -> Result<RunOutput, RunError> {
61 store
62 .chain(registry)
63 // Step 1: Always run sentiment analysis.
64 .step("sentiment", "Sentiment Check")
65 .with_provider("openai")
66 // Step 2 (Conditional): Only run if the sentiment is "positive".
67 .step_if("positive_reply", "Positive Reply", |prev_outputs| {
68 matches!(prev_outputs.get("sentiment"), Some(s) if s.trim().eq_ignore_ascii_case("positive"))
69 })
70 .with_provider("openai")
71
72 // Step 3 (Conditional): Only run if the sentiment is "negative".
73 .step_if("negative_reply", "Negative Reply", |prev_outputs| {
74 matches!(prev_outputs.get("sentiment"), Some(s) if s.trim().eq_ignore_ascii_case("negative"))
75 })
76 .with_provider("openai")
77
78 .vars([("feedback", feedback)])
79 .run()
80 .await
81}
More examples
examples/parallel_example.rs (line 57)
17async fn main() -> Result<(), RunError> {
18 let store = PromptStore::init()?;
19
20 let openai_llm = LLMBuilder::new()
21 .backend(LLMBackend::OpenAI)
22 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
23 .model("gpt-4o-mini")
24 .build()
25 .unwrap();
26
27 let mut registry = LLMRegistry::new();
28 registry.insert("openai", openai_llm);
29
30 let user_query = "Rust is a systems programming language focused on safety, speed, and concurrency. It achieves these goals without a garbage collector, using a unique ownership model with a borrow checker.";
31
32 println!("--- Running Advanced Chain ---");
33
34 let outputs = store
35        .chain(&registry)
36 // 1. First step runs sequentially
37 .step("topic", "Extract Topic")
38 .with_provider("openai")
39 // 2. These two steps run in parallel, as they only depend on the previous context
40 .parallel(|group| {
41 group
42 .step("summary", "Summarizer")
43 // This step will fail because the provider doesn't exist
44 .step("keywords", "Keyword Extractor")
45 .with_provider("failing_provider")
46 })
47 .with_provider("openai") // Default provider for the group
48 // 3. This is a fallback for the "keywords" step. It runs only if the main step fails.
49 .on_error_stored("Basic Keyword Extractor")
50 .with_provider("openai")
51 // 4. This step runs only if the summary contains the word "safety"
52 .step_if("tweet", "Generate Tweet", |ctx| {
53 ctx.get("summary")
54 .map_or(false, |s| s.to_lowercase().contains("safety"))
55 })
56 .with_provider("openai")
57 .vars([("query", user_query)])
58 .run()
59 .await?;
60
61 if let RunOutput::Chain(map) = outputs {
62 println!("\n--- Chain Execution Complete ---");
63 println!("\n[1] Topic: {}", map.get("topic").unwrap_or(&"N/A".into()));
64 println!(
65 "\n[2a] Summary: {}",
66 map.get("summary").unwrap_or(&"N/A".into())
67 );
68 println!(
69 "\n[2b] Keywords (used fallback): {}",
70 map.get("keywords").unwrap_or(&"N/A".into())
71 );
72
73 if let Some(tweet) = map.get("tweet") {
74 println!("\n[3] Conditional Tweet: {}", tweet);
75 } else {
76 println!("\n[3] Conditional Tweet: SKIPPED (condition not met)");
77 }
78 }
79
80 Ok(())
81}
examples/chain_example.rs (line 73)
16async fn main() -> Result<(), RunError> {
17 // 1. Initialize the store once. This loads keys and configuration.
18 // Or use with_password("password") to use a password to decrypt the vault of the prompts.
19 let password = std::env::var("PROMPT_STORE_PASSWORD")
20 .expect("PROMPT_STORE_PASSWORD must be set for this example.");
21 let store = PromptStore::with_password(&password)?;
22
23 // 2. Set up the LLM providers and a registry to hold them.
24 let openai_llm = LLMBuilder::new()
25 .backend(LLMBackend::OpenAI)
26 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
27 .model("gpt-4o-mini")
28 .max_tokens(1000)
29 .build()
30 .unwrap();
31
32 let anthropic_llm = LLMBuilder::new()
33 .backend(LLMBackend::Anthropic)
34 .api_key(std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY must be set"))
35 .model("claude-3-5-sonnet-20240620")
36 .max_tokens(1000)
37 .build()
38 .unwrap();
39
40 let mut registry = LLMRegistry::new();
41 registry.insert("openai_fast", openai_llm);
42 registry.insert("anthropic_strong", anthropic_llm);
43
44 // 3. Define and run the chain fluently, loading prompts from the store.
45 let user_question = "How does photosynthesis work at the molecular level?";
46
47 println!("Executing prompt chain for: \"{}\"", user_question);
48
49 let outputs = store
50 .chain(&registry) // Start a chain with the provider registry.
51 // Step 1: uses the prompt with id "9k6zezem".
52 // Its output will be available as the `{{analyse}}` variable.
53 .step("analyse", "9k6zezem")
54 .with_mode(MultiChainStepMode::Chat)
55 .with_provider("openai_fast")
56 // Step 2: uses the prompt with id "uetgwnq1".
57 // It implicitly uses the `{{analyse}}` output from the previous step.
58 .step("suggestions", "uetgwnq1")
59 .with_mode(MultiChainStepMode::Chat)
60 .with_provider("anthropic_strong")
61 // Step 3: uses the prompt with id "dkeodfyp".
62 // It can use both the initial `{{query}}` and `{{suggestions}}`.
63 .step("final_response", "dkeodfyp")
64 .with_mode(MultiChainStepMode::Chat)
65 .with_provider("anthropic_strong")
66 .step_raw(
67 "raw",
68 "Synthesize the following: {{final_response}} in 2 sentences.",
69 )
70 .with_mode(MultiChainStepMode::Chat)
71 .with_provider("anthropic_strong")
72 // Provide the initial variable for the first step.
73 .vars([("query", user_question)])
74 .run()
75 .await?;
76
77 // 4. Process the results.
78 if let RunOutput::Chain(map) = outputs {
79 println!("\n--- Chain Execution Complete ---");
80 println!(
81 "\n[✅] Final Answer (from 'final_response' step):\n{}",
82 map.get("final_response").unwrap_or(&"N/A".to_string())
83 );
84 println!("\n--- Intermediate Steps ---");
85 println!(
86 "\n[1] Analysis ('analyse'):\n{}",
87 map.get("analyse").unwrap_or(&"N/A".to_string())
88 );
89 println!(
90 "\n[2] Suggestions ('suggestions'):\n{}",
91 map.get("suggestions").unwrap_or(&"N/A".to_string())
92 );
93 println!(
94 "\n[3] Raw ('raw'):\n{}",
95 map.get("raw").unwrap_or(&"N/A".to_string())
96 );
97 }
98
99 Ok(())
100}Sourcepub async fn run(self) -> Result<RunOutput, RunError>
pub async fn run(self) -> Result<RunOutput, RunError>
Executes the chain.
Examples found in repository?
examples/advanced_chain_example.rs (line 79)
56async fn run_chain(
57 store: &PromptStore,
58 registry: &LLMRegistry,
59 feedback: &str,
60) -> Result<RunOutput, RunError> {
61 store
62 .chain(registry)
63 // Step 1: Always run sentiment analysis.
64 .step("sentiment", "Sentiment Check")
65 .with_provider("openai")
66 // Step 2 (Conditional): Only run if the sentiment is "positive".
67 .step_if("positive_reply", "Positive Reply", |prev_outputs| {
68 matches!(prev_outputs.get("sentiment"), Some(s) if s.trim().eq_ignore_ascii_case("positive"))
69 })
70 .with_provider("openai")
71
72 // Step 3 (Conditional): Only run if the sentiment is "negative".
73 .step_if("negative_reply", "Negative Reply", |prev_outputs| {
74 matches!(prev_outputs.get("sentiment"), Some(s) if s.trim().eq_ignore_ascii_case("negative"))
75 })
76 .with_provider("openai")
77
78 .vars([("feedback", feedback)])
79 .run()
80 .await
81}More examples
examples/parallel_example.rs (line 58)
17async fn main() -> Result<(), RunError> {
18 let store = PromptStore::init()?;
19
20 let openai_llm = LLMBuilder::new()
21 .backend(LLMBackend::OpenAI)
22 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
23 .model("gpt-4o-mini")
24 .build()
25 .unwrap();
26
27 let mut registry = LLMRegistry::new();
28 registry.insert("openai", openai_llm);
29
30 let user_query = "Rust is a systems programming language focused on safety, speed, and concurrency. It achieves these goals without a garbage collector, using a unique ownership model with a borrow checker.";
31
32 println!("--- Running Advanced Chain ---");
33
34 let outputs = store
35 .chain(&registry)
36 // 1. First step runs sequentially
37 .step("topic", "Extract Topic")
38 .with_provider("openai")
39 // 2. These two steps run in parallel, as they only depend on the previous context
40 .parallel(|group| {
41 group
42 .step("summary", "Summarizer")
43 // This step will fail because the provider doesn't exist
44 .step("keywords", "Keyword Extractor")
45 .with_provider("failing_provider")
46 })
47 .with_provider("openai") // Default provider for the group
48 // 3. This is a fallback for the "keywords" step. It runs only if the main step fails.
49 .on_error_stored("Basic Keyword Extractor")
50 .with_provider("openai")
51 // 4. This step runs only if the summary contains the word "safety"
52 .step_if("tweet", "Generate Tweet", |ctx| {
53 ctx.get("summary")
54 .map_or(false, |s| s.to_lowercase().contains("safety"))
55 })
56 .with_provider("openai")
57 .vars([("query", user_query)])
58 .run()
59 .await?;
60
61 if let RunOutput::Chain(map) = outputs {
62 println!("\n--- Chain Execution Complete ---");
63 println!("\n[1] Topic: {}", map.get("topic").unwrap_or(&"N/A".into()));
64 println!(
65 "\n[2a] Summary: {}",
66 map.get("summary").unwrap_or(&"N/A".into())
67 );
68 println!(
69 "\n[2b] Keywords (used fallback): {}",
70 map.get("keywords").unwrap_or(&"N/A".into())
71 );
72
73 if let Some(tweet) = map.get("tweet") {
74 println!("\n[3] Conditional Tweet: {}", tweet);
75 } else {
76 println!("\n[3] Conditional Tweet: SKIPPED (condition not met)");
77 }
78 }
79
80 Ok(())
81}examples/chain_example.rs (line 74)
16async fn main() -> Result<(), RunError> {
17 // 1. Initialize the store once. This loads keys and configuration.
18 // Or use with_password("password") to use a password to decrypt the vault of the prompts.
19 let password = std::env::var("PROMPT_STORE_PASSWORD")
20 .expect("PROMPT_STORE_PASSWORD must be set for this example.");
21 let store = PromptStore::with_password(&password)?;
22
23 // 2. Set up the LLM providers and a registry to hold them.
24 let openai_llm = LLMBuilder::new()
25 .backend(LLMBackend::OpenAI)
26 .api_key(std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"))
27 .model("gpt-4o-mini")
28 .max_tokens(1000)
29 .build()
30 .unwrap();
31
32 let anthropic_llm = LLMBuilder::new()
33 .backend(LLMBackend::Anthropic)
34 .api_key(std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY must be set"))
35 .model("claude-3-5-sonnet-20240620")
36 .max_tokens(1000)
37 .build()
38 .unwrap();
39
40 let mut registry = LLMRegistry::new();
41 registry.insert("openai_fast", openai_llm);
42 registry.insert("anthropic_strong", anthropic_llm);
43
44 // 3. Define and run the chain fluently, loading prompts from the store.
45 let user_question = "How does photosynthesis work at the molecular level?";
46
47 println!("Executing prompt chain for: \"{}\"", user_question);
48
49 let outputs = store
50 .chain(&registry) // Start a chain with the provider registry.
51 // Step 1: uses the prompt with id "9k6zezem".
52 // Its output will be available as the `{{analyse}}` variable.
53 .step("analyse", "9k6zezem")
54 .with_mode(MultiChainStepMode::Chat)
55 .with_provider("openai_fast")
56 // Step 2: uses the prompt with id "uetgwnq1".
57 // It implicitly uses the `{{analyse}}` output from the previous step.
58 .step("suggestions", "uetgwnq1")
59 .with_mode(MultiChainStepMode::Chat)
60 .with_provider("anthropic_strong")
61 // Step 3: uses the prompt with id "dkeodfyp".
62 // It can use both the initial `{{query}}` and `{{suggestions}}`.
63 .step("final_response", "dkeodfyp")
64 .with_mode(MultiChainStepMode::Chat)
65 .with_provider("anthropic_strong")
66 .step_raw(
67 "raw",
68 "Synthesize the following: {{final_response}} in 2 sentences.",
69 )
70 .with_mode(MultiChainStepMode::Chat)
71 .with_provider("anthropic_strong")
72 // Provide the initial variable for the first step.
73 .vars([("query", user_question)])
74 .run()
75 .await?;
76
77 // 4. Process the results.
78 if let RunOutput::Chain(map) = outputs {
79 println!("\n--- Chain Execution Complete ---");
80 println!(
81 "\n[✅] Final Answer (from 'final_response' step):\n{}",
82 map.get("final_response").unwrap_or(&"N/A".to_string())
83 );
84 println!("\n--- Intermediate Steps ---");
85 println!(
86 "\n[1] Analysis ('analyse'):\n{}",
87 map.get("analyse").unwrap_or(&"N/A".to_string())
88 );
89 println!(
90 "\n[2] Suggestions ('suggestions'):\n{}",
91 map.get("suggestions").unwrap_or(&"N/A".to_string())
92 );
93 println!(
94 "\n[3] Raw ('raw'):\n{}",
95 map.get("raw").unwrap_or(&"N/A".to_string())
96 );
97 }
98
99 Ok(())
100}Auto Trait Implementations§
impl<'a> Freeze for ChainRunner<'a>
impl<'a> !RefUnwindSafe for ChainRunner<'a>
impl<'a> Send for ChainRunner<'a>
impl<'a> Sync for ChainRunner<'a>
impl<'a> Unpin for ChainRunner<'a>
impl<'a> !UnwindSafe for ChainRunner<'a>
Blanket Implementations§
Source§impl<T> BorrowMut<T> for Twhere
T: ?Sized,
impl<T> BorrowMut<T> for Twhere
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> Downcast for Twhere
T: Any,
impl<T> Downcast for Twhere
T: Any,
Source§fn into_any(self: Box<T>) -> Box<dyn Any>
fn into_any(self: Box<T>) -> Box<dyn Any>
Convert
Box<dyn Trait> (where Trait: Downcast) to Box<dyn Any>. Box<dyn Any> can
then be further downcast into Box<ConcreteType> where ConcreteType implements Trait.Source§fn into_any_rc(self: Rc<T>) -> Rc<dyn Any>
fn into_any_rc(self: Rc<T>) -> Rc<dyn Any>
Convert
Rc<Trait> (where Trait: Downcast) to Rc<Any>. Rc<Any> can then be
further downcast into Rc<ConcreteType> where ConcreteType implements Trait.Source§fn as_any(&self) -> &(dyn Any + 'static)
fn as_any(&self) -> &(dyn Any + 'static)
Convert
&Trait (where Trait: Downcast) to &Any. This is needed since Rust cannot
generate &Any’s vtable from &Trait’s.Source§fn as_any_mut(&mut self) -> &mut (dyn Any + 'static)
fn as_any_mut(&mut self) -> &mut (dyn Any + 'static)
Convert
&mut Trait (where Trait: Downcast) to &Any. This is needed since Rust cannot
generate &mut Any’s vtable from &mut Trait’s.Source§impl<T> DowncastSync for T
impl<T> DowncastSync for T
Source§impl<T> Instrument for T
impl<T> Instrument for T
Source§fn instrument(self, span: Span) -> Instrumented<Self>
fn instrument(self, span: Span) -> Instrumented<Self>
Source§fn in_current_span(self) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts
self into a Left variant of Either<Self, Self>
if into_left is true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read moreSource§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts
self into a Left variant of Either<Self, Self>
if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more