Struct LearningCurve
pub struct LearningCurve<F: Float + Debug + Display> {
    pub train_sizes: Array1<usize>,
    pub train_scores: Array2<F>,
    pub val_scores: Array2<F>,
    pub train_mean: Array1<F>,
    pub train_std: Array1<F>,
    pub val_mean: Array1<F>,
    pub val_std: Array1<F>,
}

Learning curve data structure for visualizing model performance

This structure represents learning curves that show how model performance changes as the training set size increases, comparing training and validation metrics to help diagnose overfitting, underfitting, and other training issues.
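For example, once a curve has been built with new (below), the aggregated fields can give a rough diagnostic of over- or underfitting by comparing training and validation means at the largest training size. This is only an illustrative sketch; the thresholds (0.1 gap, 0.6 score) are arbitrary choices for the example, not part of the API.

use scirs2_neural::utils::evaluation::LearningCurve;

// Rough diagnostic based on the gap between mean training and validation
// scores at the largest training-set size. Thresholds are illustrative only.
fn diagnose(curve: &LearningCurve<f64>) -> &'static str {
    let last = curve.train_mean.len() - 1;
    let gap = curve.train_mean[last] - curve.val_mean[last];
    if gap > 0.1 {
        "large train/validation gap: likely overfitting"
    } else if curve.val_mean[last] < 0.6 {
        "low validation score with a small gap: likely underfitting"
    } else {
        "training and validation scores are close: reasonable fit"
    }
}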

Fields

train_sizes: Array1<usize>

Training set sizes used for evaluation

train_scores: Array2<F>

Training scores for each size and fold (rows = sizes, cols = folds)

val_scores: Array2<F>

Validation scores for each size and fold (rows = sizes, cols = folds)

train_mean: Array1<F>

Mean training scores across folds

train_std: Array1<F>

Standard deviation of training scores across folds

val_mean: Array1<F>

Mean validation scores across folds

val_std: Array1<F>

Standard deviation of validation scores across folds
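The shapes are linked: the score matrices have one row per training size and one column per CV fold, while the mean and standard-deviation arrays have one entry per training size. A minimal sketch of that invariant, assuming the aggregates are taken over the fold axis (axis 1) and that Axis is re-exported by scirs2_core::ndarray alongside Array1/Array2:

use scirs2_core::ndarray::{Array2, Axis};

// Scores for 5 training sizes evaluated with 3 CV folds.
let train_scores = Array2::<f64>::from_elem((5, 3), 0.8);

// Aggregating across folds yields one value per training size,
// matching the shapes of train_mean / train_std.
let mean_over_folds = train_scores.mean_axis(Axis(1)).unwrap();
assert_eq!(mean_over_folds.len(), 5);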

Implementations

impl<F: Float + Debug + Display + FromPrimitive> LearningCurve<F>

pub fn new(train_sizes: Array1<usize>, train_scores: Array2<F>, val_scores: Array2<F>) -> Result<Self>

Create a new learning curve from training and validation scores

Arguments
  • train_sizes - Array of training set sizes
  • train_scores - 2D array of training scores (rows = sizes, cols = CV folds)
  • val_scores - 2D array of validation scores (rows = sizes, cols = CV folds)
Returns
  • Result<LearningCurve<F>> - Learning curve data
Example
use scirs2_core::ndarray::{Array1, Array2};
use scirs2_neural::utils::evaluation::LearningCurve;
// Create sample data
let train_sizes = Array1::from_vec(vec![100, 200, 300, 400, 500]);
let train_scores = Array2::from_shape_vec((5, 3), vec![
    0.6, 0.62, 0.58,    // 100 samples, 3 folds
    0.7, 0.72, 0.68,    // 200 samples, 3 folds
    0.8, 0.78, 0.79,    // 300 samples, 3 folds
    0.85, 0.83, 0.84,   // 400 samples, 3 folds
    0.87, 0.88, 0.86,   // 500 samples, 3 folds
]).unwrap();
let val_scores = Array2::from_shape_vec((5, 3), vec![
    0.55, 0.53, 0.54,   // 100 samples, 3 folds
    0.65, 0.63, 0.64,   // 200 samples, 3 folds
    0.75, 0.73, 0.74,   // 300 samples, 3 folds
    0.76, 0.74, 0.75,   // 400 samples, 3 folds
    0.77, 0.76, 0.76,   // 500 samples, 3 folds
]).unwrap();
// Create learning curve
let curve = LearningCurve::<f64>::new(train_sizes, train_scores, val_scores).unwrap();
Examples found in repository
examples/colored_curve_visualization.rs (line 68)
9 fn main() {
10    // Create a reproducible random number generator
11    let mut rng = SmallRng::from_seed([42; 32]);
12    // Example 1: ROC Curve with color
13    println!("Example 1: ROC Curve Visualization (with color)\n");
14    // Generate synthetic binary classification data
15    let n_samples = 200;
16    // Generate true labels: 0 or 1
17    let y_true: Vec<usize> = (0..n_samples)
18        .map(|_| if rng.random::<f64>() > 0.5 { 1 } else { 0 })
19        .collect();
20    // Generate scores with some separability
21    let y_score: Vec<f64> = y_true
22        .iter()
23        .map(|&label| {
24            if label == 1 {
25                0.7 + 0.3 * rng.sample::<f64, _>(StandardNormal)
26            } else {
27                0.3 + 0.3 * rng.sample::<f64, _>(StandardNormal)
28            }
29        })
30        .collect();
31    // Convert to ndarray views
32    let y_true_array = Array1::from(y_true.clone());
33    let y_score_array = Array1::from(y_score.clone());
34    // Create ROC curve
35    let roc = ROCCurve::new(&y_true_array.view(), &y_score_array.view()).unwrap();
36    // Enable color options
37    let color_options = ColorOptions {
38        enabled: true,
39        use_bright: true,
40        use_background: false,
41    };
42    // Plot ROC curve with color
43    let roc_plot = roc.to_ascii_with_options(
44        Some("Binary Classification ROC Curve"),
45        60,
46        20,
47        &color_options,
48    );
49    println!("{roc_plot}");
50    // Example 2: Learning Curve with color
51    println!("\nExample 2: Learning Curve Visualization (with color)\n");
52    // Simulate learning curves for different training set sizes
53    let train_sizes = Array1::from(vec![100, 200, 300, 400, 500]);
54    // Simulated training scores for each size (5 sizes, 3 CV folds)
55    let train_scores = Array2::from_shape_fn((5, 3), |(i, _j)| {
56        let base = 0.5 + 0.4 * (i as f64 / 4.0);
57        let noise = 0.05 * rng.sample::<f64, _>(StandardNormal);
58        base + noise
59    });
60    // Simulated validation scores (typically lower than training)
61    let val_scores = Array2::from_shape_fn((5, 3), |(i, _j)| {
62        let base = 0.4 + 0.3 * (i as f64 / 4.0);
63        let noise = 0.07 * rng.sample::<f64, _>(StandardNormal);
64        base + noise
65    });
66
67    // Create learning curve
68    let learning_curve = LearningCurve::new(train_sizes, train_scores, val_scores).unwrap();
69
70    // Plot learning curve with color
71    let learning_plot = learning_curve.to_ascii_with_options(
72        Some("Neural Network Training"),
73        70,
74        20,
75        "Accuracy",
76        &color_options,
77    );
78
79    println!("{learning_plot}");
80 }
More examples
examples/colored_eval_visualization.rs (line 186)
11 fn main() -> Result<()> {
12    println!(
13        "{}",
14        stylize("Neural Network Model Evaluation with Color", Style::Bold)
15    );
16    println!("{}", "-".repeat(50));
17    // Set up color options
18    let color_options = ColorOptions {
19        enabled: true,
20        use_background: false,
21        use_bright: true,
22    };
23    // Generate some example data
24    let n_samples = 500;
25    let n_features = 10;
26    let n_classes = 4;
27    println!(
28        "\n{} {} {} {} {} {}",
29        colorize("Generating", Color::BrightGreen),
30        colorize(n_samples.to_string(), Color::BrightYellow),
31        colorize("samples with", Color::BrightGreen),
32        colorize(n_features.to_string(), Color::BrightYellow),
33        colorize("features for", Color::BrightGreen),
34        colorize(n_classes.to_string(), Color::BrightYellow),
35    );
36
37    // Create a deterministic RNG for reproducibility
38    let mut rng = SmallRng::from_seed([42; 32]);
39
40    // 1. Confusion Matrix Example
41    println!(
42        "\n{}",
43        stylize("1. CONFUSION MATRIX VISUALIZATION", Style::Bold)
44    );
45    // Generate random predictions and true labels
46    let y_true = Array::from_shape_fn(n_samples, |_| rng.random_range(0..n_classes));
47    // Create slightly correlated predictions (not completely random)
48    let y_pred = Array::from_shape_fn(n_samples, |i| {
49        if rng.random::<f32>() < 0.7 {
50            // 70% chance of correct prediction
51            y_true[i]
52        } else {
53            // 30% chance of random class
54            rng.random_range(0..n_classes)
55        }
56    });
57    // Create confusion matrix
58    let class_labels = vec![
59        "Class A".to_string(),
60        "Class B".to_string(),
61        "Class C".to_string(),
62        "Class D".to_string(),
63    ];
64    let cm = ConfusionMatrix::<f32>::new(
65        &y_true.view(),
66        &y_pred.view(),
67        Some(n_classes),
68        Some(class_labels),
69    )?;
70    // Print raw and normalized confusion matrices with color
71    println!("\n{}", colorize("Raw Confusion Matrix:", Color::BrightCyan));
72    println!(
73        "{}",
74        cm.to_ascii_with_options(Some("Confusion Matrix"), false, &color_options)
75    );
76    println!(
77        "\n{}",
78        colorize("Normalized Confusion Matrix:", Color::BrightCyan)
79    );
80    println!(
81        "{}",
82        cm.to_ascii_with_options(Some("Normalized Confusion Matrix"), true, &color_options)
83    );
84    // Print metrics
85    println!(
86        "\n{} {:.3}",
87        colorize("Overall Accuracy:", Color::BrightMagenta),
88        cm.accuracy()
89    );
90    let precision = cm.precision();
91    let recall = cm.recall();
92    let f1 = cm.f1_score();
93    println!("{}", colorize("Per-class metrics:", Color::BrightMagenta));
94    for i in 0..n_classes {
95        println!(
96            "  {}: {}={:.3}, {}={:.3}, {}={:.3}",
97            colorize(format!("Class {i}"), Color::BrightYellow),
98            colorize("Precision", Color::BrightCyan),
99            precision[i],
100            colorize("Recall", Color::BrightGreen),
101            recall[i],
102            colorize("F1", Color::BrightBlue),
103            f1[i]
104        );
105    }
106    println!(
107        "{} {:.3}",
108        colorize("Macro F1 Score:", Color::BrightMagenta),
109        cm.macro_f1()
110    );
111    // 2. Feature Importance Visualization
112    println!(
113        "{}",
114        stylize("2. FEATURE IMPORTANCE VISUALIZATION", Style::Bold)
115    );
116    // Generate random feature importance scores
117    let feature_names = (0..n_features)
118        .map(|i| format!("Feature_{i}"))
119        .collect::<Vec<String>>();
120    let importance = Array1::from_shape_fn(n_features, |i| {
121        // Make some features more important than others
122        let base = (n_features - i) as f32 / n_features as f32;
123        base + 0.2 * rng.random::<f32>()
124    });
125
126    let fi = FeatureImportance::new(feature_names, importance)?;
127
128    // Print full feature importance with color
129    println!(
130        "{}",
131        fi.to_ascii_with_options(Some("Feature Importance"), 60, None, &color_options)
132    );
133
134    // Print top-5 features with color
135    println!(
136        "{}",
137        colorize("Top 5 Most Important Features:", Color::BrightCyan)
138    );
139    println!(
140        "{}",
141        fi.to_ascii_with_options(Some("Top 5 Features"), 60, Some(5), &color_options)
142    );
143    // 3. ROC Curve for Binary Classification
144    println!("\n{}", stylize("3. ROC CURVE VISUALIZATION", Style::Bold));
145    // Generate binary classification data
146    let n_binary = 200;
147    let y_true_binary = Array::from_shape_fn(n_binary, |_| rng.random_range(0..2));
148    // Generate scores with some predictive power
149    let y_scores = Array1::from_shape_fn(n_binary, |i| {
150        if y_true_binary[i] == 1 {
151            // Higher scores for positive class
152            0.6 + 0.4 * rng.random::<f32>()
153        } else {
154            // Lower scores for negative class
155            0.4 * rng.random::<f32>()
156        }
157    });
158
159    let roc = ROCCurve::new(&y_true_binary.view(), &y_scores.view())?;
160    println!(
161        "{}: {:.3}",
162        colorize("ROC AUC:", Color::BrightMagenta),
163        roc.auc
164    );
165    println!("\n{}", roc.to_ascii(None, 50, 20));
166
167    // 4. Learning Curve Visualization
168    println!(
169        "\n{}",
170        stylize("4. LEARNING CURVE VISUALIZATION", Style::Bold)
171    );
172    // Generate learning curve data
173    let n_points = 10;
174    let n_cv = 5;
175    let train_sizes = Array1::from_shape_fn(n_points, |i| 50 + i * 50);
176    // Generate training scores (decreasing with size due to overfitting)
177    let train_scores = Array2::from_shape_fn((n_points, n_cv), |(i, _)| {
178        0.95 - 0.05 * (i as f32 / n_points as f32) + 0.03 * rng.random::<f32>()
179    });
180
181    // Generate validation scores (increasing with size)
182    let val_scores = Array2::from_shape_fn((n_points, n_cv), |(i, _)| {
183        0.7 + 0.2 * (i as f32 / n_points as f32) + 0.05 * rng.random::<f32>()
184    });
185
186    let lc = LearningCurve::new(train_sizes, train_scores, val_scores)?;
187    println!("{}", lc.to_ascii(None, 60, 20, "Accuracy"));
188
189    // Print final message with color
190    println!(
191        "{}",
192        colorize(
193            "Model evaluation visualizations completed successfully!",
194            Color::BrightGreen
195        )
196    );
197    Ok(())
198 }
pub fn to_ascii(&self, title: Option<&str>, width: usize, height: usize, metric_name: &str) -> String

Create an ASCII line plot of the learning curve

Arguments
  • title - Optional title for the plot
  • width - Width of the plot
  • height - Height of the plot
  • metric_name - Name of the metric (e.g., “Accuracy”)
Returns
  • String - ASCII line plot
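Building on the curve constructed in the new example above, a plot can be produced directly (the exact ASCII output is implementation-defined, so this is only a usage sketch):

// `curve` is the LearningCurve<f64> built in the constructor example above.
let plot = curve.to_ascii(Some("Learning Curve"), 60, 20, "Accuracy");
println!("{plot}");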
Examples found in repository
examples/colored_eval_visualization.rs (line 187)
This is the same examples/colored_eval_visualization.rs listing shown in full under new above; only the learning-curve portion (including the referenced line 187) is repeated here:

167    // 4. Learning Curve Visualization
168    println!(
169        "\n{}",
170        stylize("4. LEARNING CURVE VISUALIZATION", Style::Bold)
171    );
172    // Generate learning curve data
173    let n_points = 10;
174    let n_cv = 5;
175    let train_sizes = Array1::from_shape_fn(n_points, |i| 50 + i * 50);
176    // Generate training scores (decreasing with size due to overfitting)
177    let train_scores = Array2::from_shape_fn((n_points, n_cv), |(i, _)| {
178        0.95 - 0.05 * (i as f32 / n_points as f32) + 0.03 * rng.random::<f32>()
179    });
180
181    // Generate validation scores (increasing with size)
182    let val_scores = Array2::from_shape_fn((n_points, n_cv), |(i, _)| {
183        0.7 + 0.2 * (i as f32 / n_points as f32) + 0.05 * rng.random::<f32>()
184    });
185
186    let lc = LearningCurve::new(train_sizes, train_scores, val_scores)?;
187    println!("{}", lc.to_ascii(None, 60, 20, "Accuracy"));
pub fn to_ascii_with_options(&self, title: Option<&str>, width: usize, height: usize, metric_name: &str, color_options: &ColorOptions) -> String

Create an ASCII line plot of the learning curve with customizable colors. This method allows fine-grained control over the color scheme via the provided ColorOptions parameter.

Arguments
  • title, width, height, metric_name - Same as to_ascii
  • color_options - Color options for visualization
Returns
  • String - ASCII line plot with colors
Examples found in repository
examples/colored_curve_visualization.rs (lines 71-77)
This is the same examples/colored_curve_visualization.rs listing shown in full under new above; only the portion relevant to to_ascii_with_options (the referenced lines 71-77) is repeated here:

37    let color_options = ColorOptions {
38        enabled: true,
39        use_bright: true,
40        use_background: false,
41    };
...
67    // Create learning curve
68    let learning_curve = LearningCurve::new(train_sizes, train_scores, val_scores).unwrap();
69
70    // Plot learning curve with color
71    let learning_plot = learning_curve.to_ascii_with_options(
72        Some("Neural Network Training"),
73        70,
74        20,
75        "Accuracy",
76        &color_options,
77    );
78
79    println!("{learning_plot}");
80 }

Auto Trait Implementations

impl<F> Freeze for LearningCurve<F>

impl<F> RefUnwindSafe for LearningCurve<F>
where F: RefUnwindSafe,

impl<F> Send for LearningCurve<F>
where F: Send,

impl<F> Sync for LearningCurve<F>
where F: Sync,

impl<F> Unpin for LearningCurve<F>

impl<F> UnwindSafe for LearningCurve<F>
where F: RefUnwindSafe,

Blanket Implementations

impl<T> Any for T
where T: 'static + ?Sized,

    fn type_id(&self) -> TypeId
    Gets the TypeId of self.

impl<T> Borrow<T> for T
where T: ?Sized,

    fn borrow(&self) -> &T
    Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
where T: ?Sized,

    fn borrow_mut(&mut self) -> &mut T
    Mutably borrows from an owned value.

impl<T> From<T> for T

    fn from(t: T) -> T
    Returns the argument unchanged.

impl<T, U> Into<U> for T
where U: From<T>,

    fn into(self) -> U
    Calls U::from(self). That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T> IntoEither for T

    fn into_either(self, into_left: bool) -> Either<Self, Self>
    Converts self into a Left variant of Either<Self, Self> if into_left is true; converts self into a Right variant otherwise.

    fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
    where F: FnOnce(&Self) -> bool,
    Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true; converts self into a Right variant otherwise.

impl<T> Pointable for T

    const ALIGN: usize
    The alignment of pointer.

    type Init = T
    The type for initializers.

    unsafe fn init(init: <T as Pointable>::Init) -> usize
    Initializes a with the given initializer.

    unsafe fn deref<'a>(ptr: usize) -> &'a T
    Dereferences the given pointer.

    unsafe fn deref_mut<'a>(ptr: usize) -> &'a mut T
    Mutably dereferences the given pointer.

    unsafe fn drop(ptr: usize)
    Drops the object pointed to by the given pointer.

impl<T, U> TryFrom<U> for T
where U: Into<T>,

    type Error = Infallible
    The type returned in the event of a conversion error.

    fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>
    Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

    type Error = <U as TryFrom<T>>::Error
    The type returned in the event of a conversion error.

    fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>
    Performs the conversion.

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

    fn vzip(self) -> V