Struct LMBuilder

Source
pub struct LMBuilder<S: State = Empty> { /* private fields */ }

Use builder syntax to set the inputs and finish with build().
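
For instance, a minimal build that accepts the documented defaults (model "openai:gpt-4o-mini", temperature 0.7, max_tokens 512) is sketched below. As in the repository examples further down, this assumes an async context returning a Result, with imports elided.

let lm = LM::builder()
    .build() // every field is optional, so the documented defaults apply
    .await?;
configure(lm, ChatAdapter); // register the LM globally, as the repository examples do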

Implementations§

Source§

impl<S: State> LMBuilder<S>

Source

pub fn base_url(self, value: String) -> LMBuilder<SetBaseUrl<S>>
where S::BaseUrl: IsUnset,

Optional (Some / Option setters).

Source

pub fn maybe_base_url(self, value: Option<String>) -> LMBuilder<SetBaseUrl<S>>
where S::BaseUrl: IsUnset,

Optional (Some / Option setters).

Source

pub fn api_key(self, value: String) -> LMBuilder<SetApiKey<S>>
where S::ApiKey: IsUnset,

Optional (Some / Option setters).

Source

pub fn maybe_api_key(self, value: Option<String>) -> LMBuilder<SetApiKey<S>>
where S::ApiKey: IsUnset,

Optional (Some / Option setters).
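
These setters matter mostly for OpenAI-compatible endpoints (see the build cases under build() below). A hedged sketch follows: the URL and the environment-variable name are illustrative placeholders rather than values prescribed by the crate, and the maybe_ form is assumed to behave like not calling the setter when given None.

let lm = LM::builder()
    .base_url("http://localhost:8000/v1".to_string()) // e.g. a local vLLM-style server (URL is illustrative)
    .maybe_api_key(std::env::var("MY_API_KEY").ok())   // only set when the variable is present
    .build()
    .await?;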

Source

pub fn model(self, value: String) -> LMBuilder<SetModel<S>>
where S::Model: IsUnset,

Optional (Some / Option setters). Default: "openai:gpt-4o-mini".to_string().

Examples found in repository
examples/01-simple.rs (line 74)
71async fn main() -> Result<()> {
72    configure(
73        LM::builder()
74            .model("openai:gpt-4o-mini".to_string())
75            .build()
76            .await
77            .unwrap(),
78        ChatAdapter,
79    );
80
81    let example = example! {
82        "question": "input" => "What is the capital of France?",
83    };
84
85    let qa_rater = QARater::builder().build();
86    let prediction = qa_rater.forward(example).await.unwrap();
87    println!("{prediction:?}");
88
89    Ok(())
90}
More examples
examples/05-heterogenous-examples.rs (line 16)
13async fn main() -> anyhow::Result<()> {
14    configure(
15        LM::builder()
16            .model("openai:gpt-4o-mini".to_string())
17            .build()
18            .await
19            .unwrap(),
20        ChatAdapter {},
21    );
22
23    let exp = example! {
24        "number": "input" => 10,
25    };
26    let predict = Predict::new(sign! {
27        (number: i32) -> number_squared: i32, number_cubed: i32
28    });
29
30    let prediction = predict.forward(exp).await?;
31    println!("{prediction:?}");
32
33    Ok(())
34}
examples/07-inspect-history.rs (line 32)
30async fn main() {
31    let lm = LM::builder()
32        .model("openai:gpt-4o-mini".to_string())
33        .build()
34        .await
35        .unwrap();
36    configure(lm, ChatAdapter);
37
38    let example = example! {
39        "question": "input" => "What is the capital of France?",
40    };
41
42    let qa_rater = QARater::builder().build();
43    let prediction = qa_rater.forward(example.clone()).await.unwrap();
44    println!("Prediction: {prediction:?}");
45
46    let history = get_lm().inspect_history(1).await;
47    println!("History: {history:?}");
48}
examples/03-evaluate-hotpotqa.rs (line 67)
64async fn main() -> anyhow::Result<()> {
65    configure(
66        LM::builder()
67            .model("openai:gpt-4o-mini".to_string())
68            .build()
69            .await?,
70        ChatAdapter {},
71    );
72
73    let examples = DataLoader::load_hf(
74        "hotpotqa/hotpot_qa",
75        vec!["question".to_string()],
76        vec!["answer".to_string()],
77        "fullwiki",
78        "validation",
79        true,
80    )?[..128]
81        .to_vec();
82
83    let evaluator = QARater::builder().build();
84    let metric = evaluator.evaluate(examples).await;
85
86    println!("Metric: {metric}");
87    Ok(())
88}
examples/04-optimize-hotpotqa.rs (line 63)
60async fn main() -> anyhow::Result<()> {
61    configure(
62        LM::builder()
63            .model("openai:gpt-4o-mini".to_string())
64            .build()
65            .await
66            .unwrap(),
67        ChatAdapter {},
68    );
69
70    let examples = DataLoader::load_hf(
71        "hotpotqa/hotpot_qa",
72        vec!["question".to_string()],
73        vec!["answer".to_string()],
74        "fullwiki",
75        "validation",
76        true,
77    )?[..10]
78        .to_vec();
79
80    let mut rater = QARater::builder().build();
81    let optimizer = COPRO::builder().breadth(10).depth(1).build();
82
83    println!("Rater: {:?}", rater.answerer.get_signature().instruction());
84
85    optimizer.compile(&mut rater, examples.clone()).await?;
86
87    println!("Rater: {:?}", rater.answerer.get_signature().instruction());
88
89    Ok(())
90}
examples/06-other-providers-batch.rs (line 81)
77async fn main() {
78    // Anthropic
79    configure(
80        LM::builder()
81            .model("anthropic:claude-sonnet-4-5-20250929".to_string())
82            .build()
83            .await
84            .unwrap(),
85        ChatAdapter,
86    );
87
88    let example = vec![
89        example! {
90            "question": "input" => "What is the capital of France?",
91        },
92        example! {
93            "question": "input" => "What is the capital of Germany?",
94        },
95        example! {
96            "question": "input" => "What is the capital of Italy?",
97        },
98    ];
99
100    let qa_rater = QARater::builder().build();
101    let prediction = qa_rater.batch(example.clone(), 2, true).await.unwrap();
102    println!("Anthropic: {prediction:?}");
103
104    // Gemini
105    configure(
106        LM::builder()
107            .model("gemini:gemini-2.0-flash".to_string())
108            .build()
109            .await
110            .unwrap(),
111        ChatAdapter,
112    );
113
114    let prediction = qa_rater.batch(example, 2, true).await.unwrap();
115    println!("Gemini: {prediction:?}");
116}
Source

pub fn maybe_model(self, value: Option<String>) -> LMBuilder<SetModel<S>>
where S::Model: IsUnset,

Optional (Some / Option setters). Default: "openai:gpt-4o-mini".to_string().
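
The maybe_ variant is handy when the model name comes from configuration that may be absent. In the sketch below the environment variable is illustrative, and passing None is assumed to behave like not calling the setter, leaving the default "openai:gpt-4o-mini" in effect.

let lm = LM::builder()
    .maybe_model(std::env::var("DSRS_MODEL").ok()) // illustrative variable; falls back to the default when unset
    .build()
    .await?;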

Source

pub fn temperature(self, value: f32) -> LMBuilder<SetTemperature<S>>
where S::Temperature: IsUnset,

Optional (Some / Option setters). Default: 0.7.

Examples found in repository
examples/09-gepa-sentiment.rs (line 120)
116async fn main() -> Result<()> {
117    println!("GEPA Sentiment Analysis Optimization Example\n");
118
119    // Setup LM
120    let lm = LM::builder().temperature(0.7).build().await.unwrap();
121
122    configure(lm.clone(), ChatAdapter);
123
124    // Create training examples with diverse sentiments
125    let trainset = vec![
126        example! {
127            "text": "input" => "This movie was absolutely fantastic! I loved every minute of it.",
128            "expected_sentiment": "input" => "positive"
129        },
130        example! {
131            "text": "input" => "Terrible service, will never come back again.",
132            "expected_sentiment": "input" => "negative"
133        },
134        example! {
135            "text": "input" => "The weather is okay, nothing special.",
136            "expected_sentiment": "input" => "neutral"
137        },
138        example! {
139            "text": "input" => "Despite some minor issues, I'm quite happy with the purchase.",
140            "expected_sentiment": "input" => "positive"
141        },
142        example! {
143            "text": "input" => "I have mixed feelings about this product.",
144            "expected_sentiment": "input" => "neutral"
145        },
146        example! {
147            "text": "input" => "This is the worst experience I've ever had!",
148            "expected_sentiment": "input" => "negative"
149        },
150        example! {
151            "text": "input" => "It's fine. Does what it's supposed to do.",
152            "expected_sentiment": "input" => "neutral"
153        },
154        example! {
155            "text": "input" => "Exceeded all my expectations! Highly recommend!",
156            "expected_sentiment": "input" => "positive"
157        },
158        example! {
159            "text": "input" => "Disappointed and frustrated with the outcome.",
160            "expected_sentiment": "input" => "negative"
161        },
162        example! {
163            "text": "input" => "Standard quality, nothing remarkable.",
164            "expected_sentiment": "input" => "neutral"
165        },
166    ];
167
168    // Create module
169    let mut module = SentimentAnalyzer::builder()
170        .predictor(Predict::new(SentimentSignature::new()))
171        .build();
172
173    // Evaluate baseline performance
174    println!("Baseline Performance:");
175    let baseline_score = module.evaluate(trainset.clone()).await;
176    println!("  Average score: {:.3}\n", baseline_score);
177
178    // Configure GEPA optimizer
179    let gepa = GEPA::builder()
180        .num_iterations(5)
181        .minibatch_size(5)
182        .num_trials(3)
183        .temperature(0.9)
184        .track_stats(true)
185        .build();
186
187    // Run optimization
188    println!("Starting GEPA optimization...\n");
189    let result = gepa
190        .compile_with_feedback(&mut module, trainset.clone())
191        .await?;
192
193    // Display results
194    println!("\nOptimization Results:");
195    println!(
196        "  Best average score: {:.3}",
197        result.best_candidate.average_score()
198    );
199    println!("  Total rollouts: {}", result.total_rollouts);
200    println!("  Total LM calls: {}", result.total_lm_calls);
201    println!("  Generations: {}", result.evolution_history.len());
202
203    println!("\nBest Instruction:");
204    println!("  {}", result.best_candidate.instruction);
205
206    if !result.evolution_history.is_empty() {
207        println!("\nEvolution History:");
208        for entry in &result.evolution_history {
209            println!("  Generation {}: {:.3}", entry.0, entry.1);
210        }
211    }
212
213    // Test optimized module on a new example
214    println!("\nTesting Optimized Module:");
215    let test_example = example! {
216        "text": "input" => "This product changed my life! Absolutely amazing!",
217        "expected_sentiment": "input" => "positive"
218    };
219
220    let test_prediction = module.forward(test_example.clone()).await?;
221    let test_feedback = module
222        .feedback_metric(&test_example, &test_prediction)
223        .await;
224
225    println!(
226        "  Test prediction: {}",
227        test_prediction.get("sentiment", None)
228    );
229    println!("  Test score: {:.3}", test_feedback.score);
230    println!("  Feedback:\n{}", test_feedback.feedback);
231
232    Ok(())
233}
More examples
examples/10-gepa-llm-judge.rs (line 226)
219async fn main() -> Result<()> {
220    println!("GEPA with LLM-as-a-Judge Example\n");
221    println!("This example shows how to use an LLM judge to automatically");
222    println!("generate rich feedback for optimizing a math solver.\n");
223
224    // Setup: Configure the LLM
225    // Main LM for the task
226    let task_lm = LM::builder().temperature(0.7).build().await.unwrap();
227
228    // Judge LM (could use a different/cheaper model)
229    let judge_lm = LM::builder().temperature(0.3).build().await.unwrap();
230
231    configure(task_lm, ChatAdapter);
232
233    // Create training examples
234    let trainset = vec![
235        example! {
236            "problem": "input" => "Sarah has 12 apples. She gives 3 to her friend and buys 5 more. How many apples does she have now?",
237            "expected_answer": "input" => "14"
238        },
239        example! {
240            "problem": "input" => "A train travels 60 miles in 1 hour. How far will it travel in 3.5 hours at the same speed?",
241            "expected_answer": "input" => "210"
242        },
243        example! {
244            "problem": "input" => "There are 24 students in a class. If 1/3 of them are absent, how many students are present?",
245            "expected_answer": "input" => "16"
246        },
247        example! {
248            "problem": "input" => "A rectangle has length 8 cm and width 5 cm. What is its area?",
249            "expected_answer": "input" => "40"
250        },
251        example! {
252            "problem": "input" => "John has $50. He spends $12 on lunch and $8 on a book. How much money does he have left?",
253            "expected_answer": "input" => "30"
254        },
255    ];
256
257    // Create the module
258    let mut module = MathSolver::builder()
259        .solver(Predict::new(MathWordProblem::new()))
260        .judge(Predict::new(MathJudge::new()))
261        .judge_lm(Arc::new(judge_lm))
262        .build();
263
264    // Evaluate baseline performance
265    println!("Step 1: Baseline Performance");
266    println!("Testing the solver before optimization...\n");
267    let baseline_score = module.evaluate(trainset.clone()).await;
268    println!("  Baseline average score: {:.3}\n", baseline_score);
269
270    // Configure GEPA optimizer
271    println!("Step 2: Configure GEPA");
272    println!("Setting up the optimizer with budget controls...\n");
273
274    let gepa = GEPA::builder()
275        .num_iterations(3) // Fewer iterations for demo
276        .minibatch_size(3) // Smaller batches
277        .temperature(0.9)
278        .track_stats(true)
279        .maybe_max_lm_calls(Some(100)) // Important: we're using 2x LM calls (task + judge)
280        .build();
281
282    // Run GEPA optimization
283    println!("Step 3: Run GEPA Optimization");
284    println!("The judge will analyze reasoning quality and provide feedback...\n");
285
286    let result = gepa
287        .compile_with_feedback(&mut module, trainset.clone())
288        .await?;
289
290    // Display results
291    println!("\nStep 4: Results");
292    println!("===============\n");
293    println!("Optimization complete!");
294    println!(
295        "  Best average score: {:.3}",
296        result.best_candidate.average_score()
297    );
298    println!(
299        "  Improvement: {:.3}",
300        result.best_candidate.average_score() - baseline_score
301    );
302    println!("  Total rollouts: {}", result.total_rollouts);
303    println!(
304        "  Total LM calls: {} (includes judge evaluations)",
305        result.total_lm_calls
306    );
307
308    println!("\nEvolution over time:");
309    for (generation, score) in &result.evolution_history {
310        println!("  Generation {}: {:.3}", generation, score);
311    }
312
313    println!("\nOptimized instruction:");
314    println!("  {}", result.best_candidate.instruction);
315
316    // Test the optimized solver
317    println!("\nStep 5: Test Optimized Solver");
318    println!("==============================\n");
319
320    let test_problem = example! {
321        "problem": "input" => "A store sells pencils for $0.25 each. If you buy 8 pencils, how much will you pay?",
322        "expected_answer": "input" => "2"
323    };
324
325    let test_prediction = module.forward(test_problem.clone()).await?;
326    let test_feedback = module
327        .feedback_metric(&test_problem, &test_prediction)
328        .await;
329
330    println!(
331        "Test problem: A store sells pencils for $0.25 each. If you buy 8 pencils, how much will you pay?"
332    );
333    println!("\nAnswer: {}", test_prediction.get("answer", None));
334    println!("Score: {:.3}\n", test_feedback.score);
335    println!("Detailed Feedback from Judge:");
336    println!("{}", test_feedback.feedback);
337
338    Ok(())
339}
Source

pub fn maybe_temperature(self, value: Option<f32>) -> LMBuilder<SetTemperature<S>>
where S::Temperature: IsUnset,

Optional (Some / Option setters). Default: 0.7.

Source

pub fn max_tokens(self, value: u32) -> LMBuilder<SetMaxTokens<S>>
where S::MaxTokens: IsUnset,

Optional (Some / Option setters). Default: 512.

Source

pub fn maybe_max_tokens(self, value: Option<u32>) -> LMBuilder<SetMaxTokens<S>>
where S::MaxTokens: IsUnset,

Optional (Some / Option setters). Default: 512.

Source

pub fn max_tool_iterations(self, value: u32) -> LMBuilder<SetMaxToolIterations<S>>
where S::MaxToolIterations: IsUnset,

Optional (Some / Option setters). Default: 10.

Source

pub fn maybe_max_tool_iterations(self, value: Option<u32>) -> LMBuilder<SetMaxToolIterations<S>>
where S::MaxToolIterations: IsUnset,

Optional (Some / Option setters). Default: 10.
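
Several of the generation-related knobs are commonly chained in one builder expression. A sketch with arbitrary override values (the documented defaults are 0.7, 512 and 10 respectively):

let lm = LM::builder()
    .temperature(0.2)       // default 0.7
    .max_tokens(1024)       // default 512
    .max_tool_iterations(4) // default 10
    .build()
    .await?;

Because every setter requires its field to be unset (the S::...: IsUnset bounds), calling the same setter twice is a compile-time error; beyond that, the calls may appear in any order.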

Source

pub fn cache(self, value: bool) -> LMBuilder<SetCache<S>>
where S::Cache: IsUnset,

Optional (Some / Option setters). Default: false.

Source

pub fn maybe_cache(self, value: Option<bool>) -> LMBuilder<SetCache<S>>
where S::Cache: IsUnset,

Optional (Some / Option setters). Default: false.

Source

pub fn cache_handler(self, value: Arc<Mutex<ResponseCache>>) -> LMBuilder<SetCacheHandler<S>>
where S::CacheHandler: IsUnset,

Optional (Some / Option setters).

Source

pub fn maybe_cache_handler(self, value: Option<Arc<Mutex<ResponseCache>>>) -> LMBuilder<SetCacheHandler<S>>
where S::CacheHandler: IsUnset,

Optional (Some / Option setters).
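
Caching is off by default. The sketch below enables it and supplies a handler that could also be shared with other LM instances. Note that ResponseCache::default() and the choice of Mutex flavour are assumptions made purely for illustration; consult the ResponseCache documentation for its actual constructor.

let handler = Arc::new(Mutex::new(ResponseCache::default())); // constructor assumed for illustration
let lm = LM::builder()
    .cache(true)
    .cache_handler(handler.clone())
    .build()
    .await?;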

Source§

impl<S: State> LMBuilder<S>

Source

pub async fn build(self) -> Result<LM>

Builds the LM instance with proper client initialization.

Supports three build cases:

  1. OpenAI-compatible with auth: base_url + api_key provided
  2. Local OpenAI-compatible: base_url only (for vLLM, etc.)
  3. Provider via model string: model in "provider:model" format
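
A hedged sketch of the three cases side by side; the URLs, the gateway host and the environment-variable names are illustrative, not values prescribed by the crate.

// 1. OpenAI-compatible endpoint with authentication
let gateway = LM::builder()
    .base_url("https://gateway.example.com/v1".to_string())
    .api_key(std::env::var("GATEWAY_API_KEY")?)
    .build()
    .await?;

// 2. Local OpenAI-compatible server (vLLM, etc.), no key required
let local = LM::builder()
    .base_url("http://localhost:8000/v1".to_string())
    .build()
    .await?;

// 3. Hosted provider selected with a "provider:model" string
let hosted = LM::builder()
    .model("anthropic:claude-sonnet-4-5-20250929".to_string())
    .build()
    .await?;
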
Examples found in repository
examples/01-simple.rs (line 75)
71async fn main() -> Result<()> {
72    configure(
73        LM::builder()
74            .model("openai:gpt-4o-mini".to_string())
75            .build()
76            .await
77            .unwrap(),
78        ChatAdapter,
79    );
80
81    let example = example! {
82        "question": "input" => "What is the capital of France?",
83    };
84
85    let qa_rater = QARater::builder().build();
86    let prediction = qa_rater.forward(example).await.unwrap();
87    println!("{prediction:?}");
88
89    Ok(())
90}
More examples
examples/05-heterogenous-examples.rs (line 17)
13async fn main() -> anyhow::Result<()> {
14    configure(
15        LM::builder()
16            .model("openai:gpt-4o-mini".to_string())
17            .build()
18            .await
19            .unwrap(),
20        ChatAdapter {},
21    );
22
23    let exp = example! {
24        "number": "input" => 10,
25    };
26    let predict = Predict::new(sign! {
27        (number: i32) -> number_squared: i32, number_cubed: i32
28    });
29
30    let prediction = predict.forward(exp).await?;
31    println!("{prediction:?}");
32
33    Ok(())
34}
examples/07-inspect-history.rs (line 33)
30async fn main() {
31    let lm = LM::builder()
32        .model("openai:gpt-4o-mini".to_string())
33        .build()
34        .await
35        .unwrap();
36    configure(lm, ChatAdapter);
37
38    let example = example! {
39        "question": "input" => "What is the capital of France?",
40    };
41
42    let qa_rater = QARater::builder().build();
43    let prediction = qa_rater.forward(example.clone()).await.unwrap();
44    println!("Prediction: {prediction:?}");
45
46    let history = get_lm().inspect_history(1).await;
47    println!("History: {history:?}");
48}
examples/03-evaluate-hotpotqa.rs (line 68)
64async fn main() -> anyhow::Result<()> {
65    configure(
66        LM::builder()
67            .model("openai:gpt-4o-mini".to_string())
68            .build()
69            .await?,
70        ChatAdapter {},
71    );
72
73    let examples = DataLoader::load_hf(
74        "hotpotqa/hotpot_qa",
75        vec!["question".to_string()],
76        vec!["answer".to_string()],
77        "fullwiki",
78        "validation",
79        true,
80    )?[..128]
81        .to_vec();
82
83    let evaluator = QARater::builder().build();
84    let metric = evaluator.evaluate(examples).await;
85
86    println!("Metric: {metric}");
87    Ok(())
88}
examples/04-optimize-hotpotqa.rs (line 64)
60async fn main() -> anyhow::Result<()> {
61    configure(
62        LM::builder()
63            .model("openai:gpt-4o-mini".to_string())
64            .build()
65            .await
66            .unwrap(),
67        ChatAdapter {},
68    );
69
70    let examples = DataLoader::load_hf(
71        "hotpotqa/hotpot_qa",
72        vec!["question".to_string()],
73        vec!["answer".to_string()],
74        "fullwiki",
75        "validation",
76        true,
77    )?[..10]
78        .to_vec();
79
80    let mut rater = QARater::builder().build();
81    let optimizer = COPRO::builder().breadth(10).depth(1).build();
82
83    println!("Rater: {:?}", rater.answerer.get_signature().instruction());
84
85    optimizer.compile(&mut rater, examples.clone()).await?;
86
87    println!("Rater: {:?}", rater.answerer.get_signature().instruction());
88
89    Ok(())
90}
examples/06-other-providers-batch.rs (line 82)
77async fn main() {
78    // Anthropic
79    configure(
80        LM::builder()
81            .model("anthropic:claude-sonnet-4-5-20250929".to_string())
82            .build()
83            .await
84            .unwrap(),
85        ChatAdapter,
86    );
87
88    let example = vec![
89        example! {
90            "question": "input" => "What is the capital of France?",
91        },
92        example! {
93            "question": "input" => "What is the capital of Germany?",
94        },
95        example! {
96            "question": "input" => "What is the capital of Italy?",
97        },
98    ];
99
100    let qa_rater = QARater::builder().build();
101    let prediction = qa_rater.batch(example.clone(), 2, true).await.unwrap();
102    println!("Anthropic: {prediction:?}");
103
104    // Gemini
105    configure(
106        LM::builder()
107            .model("gemini:gemini-2.0-flash".to_string())
108            .build()
109            .await
110            .unwrap(),
111        ChatAdapter,
112    );
113
114    let prediction = qa_rater.batch(example, 2, true).await.unwrap();
115    println!("Gemini: {prediction:?}");
116}

Auto Trait Implementations§

§

impl<S> Freeze for LMBuilder<S>

§

impl<S = Empty> !RefUnwindSafe for LMBuilder<S>

§

impl<S> Send for LMBuilder<S>

§

impl<S> Sync for LMBuilder<S>

§

impl<S> Unpin for LMBuilder<S>

§

impl<S = Empty> !UnwindSafe for LMBuilder<S>

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T> Instrument for T

Source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
Source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> IntoEither for T

Source§

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
where F: FnOnce(&Self) -> bool,

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§

impl<T> Pointable for T

Source§

const ALIGN: usize

The alignment of pointer.
Source§

type Init = T

The type for initializers.
Source§

unsafe fn init(init: <T as Pointable>::Init) -> usize

Initializes a with the given initializer. Read more
Source§

unsafe fn deref<'a>(ptr: usize) -> &'a T

Dereferences the given pointer. Read more
Source§

unsafe fn deref_mut<'a>(ptr: usize) -> &'a mut T

Mutably dereferences the given pointer. Read more
Source§

unsafe fn drop(ptr: usize)

Drops the object pointed to by the given pointer. Read more
Source§

impl<T> PolicyExt for T
where T: ?Sized,

Source§

fn and<P, B, E>(self, other: P) -> And<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow only if self and other return Action::Follow. Read more
Source§

fn or<P, B, E>(self, other: P) -> Or<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow if either self or other returns Action::Follow. Read more
Source§

impl<T> Scope for T

Source§

fn with<F, R>(self, f: F) -> R
where Self: Sized, F: FnOnce(Self) -> R,

Scoped with ownership.
Source§

fn with_ref<F, R>(&self, f: F) -> R
where F: FnOnce(&Self) -> R,

Scoped with reference.
Source§

fn with_mut<F, R>(&mut self, f: F) -> R
where F: FnOnce(&mut Self) -> R,

Scoped with mutable reference.
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

Source§

fn vzip(self) -> V

Source§

impl<T> WithSubscriber for T

Source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

impl<T> ErasedDestructor for T
where T: 'static,

Source§

impl<T> Value for T
where T: Send + Sync + 'static,

Source§

impl<T> WasmCompatSend for T
where T: Send,

Source§

impl<T> WasmCompatSync for T
where T: Sync,