Struct PreprocessingConfig

pub struct PreprocessingConfig {
    pub image_size: (usize, usize),
    pub normalize: bool,
    pub mean: Vec<f64>,
    pub std: Vec<f64>,
    pub augmentation: AugmentationConfig,
    pub color_space: ColorSpace,
}

Configuration for the image preprocessing stage of a quantum vision pipeline: target image size, normalization statistics, data augmentation, and color space.

Fields

image_size: (usize, usize)

Target image size

normalize: bool

Whether to apply normalization

mean: Vec<f64>

Per-channel mean used for normalization

std: Vec<f64>

Per-channel standard deviation used for normalization

augmentation: AugmentationConfig

Data augmentation

color_space: ColorSpace

Color space
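
For orientation, a minimal manual construction might look like the sketch below. The ImageNet-style mean/std values are purely illustrative, and AugmentationConfig::default() and the ColorSpace::RGB variant are assumptions about the surrounding API rather than confirmed items.

let config = PreprocessingConfig {
    image_size: (224, 224),          // target size; tuple order assumed
    normalize: true,                 // apply mean/std normalization
    mean: vec![0.485, 0.456, 0.406], // illustrative per-channel means
    std: vec![0.229, 0.224, 0.225],  // illustrative per-channel std devs
    augmentation: AugmentationConfig::default(), // assumed constructor
    color_space: ColorSpace::RGB,    // assumed variant
};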

Implementations

impl PreprocessingConfig

pub fn default() -> Self

Returns the default preprocessing configuration.
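
In the simplest case this is a one-liner:

let preprocessing = PreprocessingConfig::default();

which is exactly how the repository example below fills the preprocessing field of each QuantumVisionConfig.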

Examples found in repository
examples/computer_vision.rs (line 166)
131 fn vision_backbone_demo() -> Result<()> {
132    println!("   Testing quantum vision backbone architectures...");
133
134    // Different backbone configurations
135    let backbones = vec![
136        (
137            "Quantum CNN",
138            QuantumVisionConfig {
139                num_qubits: 12,
140                encoding_method: ImageEncodingMethod::AmplitudeEncoding,
141                backbone: VisionBackbone::QuantumCNN {
142                    conv_layers: vec![
143                        ConvolutionalConfig {
144                            num_filters: 32,
145                            kernel_size: 3,
146                            stride: 1,
147                            padding: 1,
148                            quantum_kernel: true,
149                            circuit_depth: 4,
150                        },
151                        ConvolutionalConfig {
152                            num_filters: 64,
153                            kernel_size: 3,
154                            stride: 2,
155                            padding: 1,
156                            quantum_kernel: true,
157                            circuit_depth: 6,
158                        },
159                    ],
160                    pooling_type: PoolingType::Quantum,
161                },
162                task_config: VisionTaskConfig::Classification {
163                    num_classes: 10,
164                    multi_label: false,
165                },
166                preprocessing: PreprocessingConfig::default(),
167                quantum_enhancement: QuantumEnhancement::Medium,
168            },
169        ),
170        (
171            "Quantum ViT",
172            QuantumVisionConfig {
173                num_qubits: 16,
174                encoding_method: ImageEncodingMethod::QPIE,
175                backbone: VisionBackbone::QuantumViT {
176                    patch_size: 16,
177                    embed_dim: 768,
178                    num_heads: 12,
179                    depth: 12,
180                },
181                task_config: VisionTaskConfig::Classification {
182                    num_classes: 10,
183                    multi_label: false,
184                },
185                preprocessing: PreprocessingConfig::default(),
186                quantum_enhancement: QuantumEnhancement::High,
187            },
188        ),
189        (
190            "Hybrid CNN-Transformer",
191            QuantumVisionConfig {
192                num_qubits: 14,
193                encoding_method: ImageEncodingMethod::HierarchicalEncoding { levels: 3 },
194                backbone: VisionBackbone::HybridBackbone {
195                    cnn_layers: 4,
196                    transformer_layers: 2,
197                },
198                task_config: VisionTaskConfig::Classification {
199                    num_classes: 10,
200                    multi_label: false,
201                },
202                preprocessing: PreprocessingConfig::default(),
203                quantum_enhancement: QuantumEnhancement::High,
204            },
205        ),
206    ];
207
208    for (name, config) in backbones {
209        println!("\n   --- {name} Backbone ---");
210
211        let mut pipeline = QuantumVisionPipeline::new(config)?;
212
213        // Test forward pass
214        let test_images = create_test_image(2, 3, 224, 224)?;
215        let output = pipeline.forward(&test_images)?;
216
217        if let TaskOutput::Classification {
218            logits,
219            probabilities,
220        } = &output
221        {
222            println!("   Output shape: {:?}", logits.dim());
223            println!("   Probability shape: {:?}", probabilities.dim());
224        }
225
226        // Get metrics
227        let metrics = pipeline.metrics();
228        println!("   Quantum metrics:");
229        println!(
230            "   - Circuit depth: {}",
231            metrics.quantum_metrics.circuit_depth
232        );
233        println!(
234            "   - Quantum advantage: {:.2}x",
235            metrics.quantum_metrics.quantum_advantage
236        );
237        println!(
238            "   - Coherence utilization: {:.1}%",
239            metrics.quantum_metrics.coherence_utilization * 100.0
240        );
241
242        // Architecture-specific properties
243        match name {
244            "Quantum CNN" => {
245                println!("   ✓ Hierarchical feature extraction with quantum convolutions");
246            }
247            "Quantum ViT" => {
248                println!("   ✓ Global context modeling with quantum attention");
249            }
250            "Hybrid CNN-Transformer" => {
251                println!("   ✓ Local features + global context integration");
252            }
253            _ => {}
254        }
255    }
256
257    Ok(())
258 }
259
260 /// Demonstrate image classification
261 fn classification_demo() -> Result<()> {
262    println!("   Quantum image classification demo...");
263
264    // Create classification pipeline
265    let config = QuantumVisionConfig::default();
266    let mut pipeline = QuantumVisionPipeline::new(config)?;
267
268    // Create synthetic dataset
269    let num_classes = 10;
270    let num_samples = 20;
271    let (train_data, val_data) = create_classification_dataset(num_samples, num_classes)?;
272
273    println!(
274        "   Dataset: {} training, {} validation samples",
275        train_data.len(),
276        val_data.len()
277    );
278
279    // Train the model (simplified)
280    println!("\n   Training quantum classifier...");
281    let history = pipeline.train(
282        &train_data,
283        &val_data,
284        5, // epochs
285        OptimizationMethod::Adam,
286    )?;
287
288    // Display training results
289    println!("\n   Training results:");
290    for (epoch, train_loss, val_loss) in history
291        .epochs
292        .iter()
293        .zip(history.train_losses.iter())
294        .zip(history.val_losses.iter())
295        .map(|((e, t), v)| (e, t, v))
296    {
297        println!(
298            "   Epoch {}: train_loss={:.4}, val_loss={:.4}",
299            epoch + 1,
300            train_loss,
301            val_loss
302        );
303    }
304
305    // Test on new images
306    println!("\n   Testing on new images...");
307    let test_images = create_test_image(5, 3, 224, 224)?;
308    let predictions = pipeline.forward(&test_images)?;
309
310    if let TaskOutput::Classification { probabilities, .. } = predictions {
311        for (i, prob_row) in probabilities.outer_iter().enumerate() {
312            let (predicted_class, confidence) = prob_row
313                .iter()
314                .enumerate()
315                .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap())
316                .map_or((0, 0.0), |(idx, &prob)| (idx, prob));
317
318            println!(
319                "   Image {}: Class {} (confidence: {:.2}%)",
320                i + 1,
321                predicted_class,
322                confidence * 100.0
323            );
324        }
325    }
326
327    // Analyze quantum advantage
328    let quantum_advantage = analyze_classification_quantum_advantage(&pipeline)?;
329    println!("\n   Quantum advantage analysis:");
330    println!(
331        "   - Parameter efficiency: {:.2}x classical",
332        quantum_advantage.param_efficiency
333    );
334    println!(
335        "   - Feature expressiveness: {:.2}x",
336        quantum_advantage.expressiveness
337    );
338    println!(
339        "   - Training speedup: {:.2}x",
340        quantum_advantage.training_speedup
341    );
342
343    Ok(())
344 }
345
346 /// Demonstrate object detection
347 fn object_detection_demo() -> Result<()> {
348    println!("   Quantum object detection demo...");
349
350    // Create detection pipeline
351    let config = QuantumVisionConfig::object_detection(80); // 80 classes (COCO-like)
352    let mut pipeline = QuantumVisionPipeline::new(config)?;
353
354    // Test image
355    let test_images = create_test_image(2, 3, 416, 416)?;
356
357    println!(
358        "   Processing {} images for object detection...",
359        test_images.dim().0
360    );
361
362    // Run detection
363    let detections = pipeline.forward(&test_images)?;
364
365    if let TaskOutput::Detection {
366        boxes,
367        scores,
368        classes,
369    } = detections
370    {
371        println!("   Detection results:");
372
373        for batch_idx in 0..boxes.dim().0 {
374            println!("\n   Image {}:", batch_idx + 1);
375
376            // Filter detections by score threshold
377            let threshold = 0.5;
378            let mut num_detections = 0;
379
380            for det_idx in 0..boxes.dim().1 {
381                let score = scores[[batch_idx, det_idx]];
382
383                if score > threshold {
384                    let class_id = classes[[batch_idx, det_idx]];
385                    let bbox = boxes.slice(scirs2_core::ndarray::s![batch_idx, det_idx, ..]);
386
387                    println!(
388                        "   - Object {}: Class {}, Score {:.3}, Box [{:.1}, {:.1}, {:.1}, {:.1}]",
389                        num_detections + 1,
390                        class_id,
391                        score,
392                        bbox[0],
393                        bbox[1],
394                        bbox[2],
395                        bbox[3]
396                    );
397
398                    num_detections += 1;
399                }
400            }
401
402            if num_detections == 0 {
403                println!("   - No objects detected above threshold");
404            } else {
405                println!("   Total objects detected: {num_detections}");
406            }
407        }
408    }
409
410    // Analyze detection performance
411    println!("\n   Detection performance analysis:");
412    println!("   - Quantum anchor generation improves localization");
413    println!("   - Entangled features enhance multi-scale detection");
414    println!("   - Quantum NMS reduces redundant detections");
415
416    Ok(())
417 }
418
419 /// Demonstrate semantic segmentation
420 fn segmentation_demo() -> Result<()> {
421    println!("   Quantum semantic segmentation demo...");
422
423    // Create segmentation pipeline
424    let config = QuantumVisionConfig::segmentation(21); // 21 classes (Pascal VOC-like)
425    let mut pipeline = QuantumVisionPipeline::new(config)?;
426
427    // Test images
428    let test_images = create_test_image(1, 3, 512, 512)?;
429
430    println!("   Processing image for semantic segmentation...");
431
432    // Run segmentation
433    let segmentation = pipeline.forward(&test_images)?;
434
435    if let TaskOutput::Segmentation {
436        masks,
437        class_scores,
438    } = segmentation
439    {
440        println!("   Segmentation results:");
441        println!("   - Mask shape: {:?}", masks.dim());
442        println!("   - Class scores shape: {:?}", class_scores.dim());
443
444        // Analyze segmentation quality
445        let seg_metrics = analyze_segmentation_quality(&masks, &class_scores)?;
446        println!("\n   Segmentation metrics:");
447        println!("   - Mean IoU: {:.3}", seg_metrics.mean_iou);
448        println!(
449            "   - Pixel accuracy: {:.1}%",
450            seg_metrics.pixel_accuracy * 100.0
451        );
452        println!(
453            "   - Boundary precision: {:.3}",
454            seg_metrics.boundary_precision
455        );
456
457        // Class distribution
458        println!("\n   Predicted class distribution:");
459        let class_counts = compute_class_distribution(&masks)?;
460        for (class_id, count) in class_counts.iter().take(5) {
461            let percentage = *count as f64 / (512.0 * 512.0) * 100.0;
462            println!("   - Class {class_id}: {percentage:.1}% of pixels");
463        }
464    }
465
466    // Quantum advantages for segmentation
467    println!("\n   Quantum segmentation advantages:");
468    println!("   - Quantum attention captures long-range dependencies");
469    println!("   - Hierarchical encoding preserves multi-scale features");
470    println!("   - Entanglement enables pixel-to-pixel correlations");
471
472    Ok(())
473 }
474
475 /// Demonstrate feature extraction
476 fn feature_extraction_demo() -> Result<()> {
477    println!("   Quantum feature extraction demo...");
478
479    // Create feature extraction pipeline
480    let config = QuantumVisionConfig {
481        num_qubits: 14,
482        encoding_method: ImageEncodingMethod::QPIE,
483        backbone: VisionBackbone::QuantumResNet {
484            blocks: vec![
485                ResidualBlock {
486                    channels: 64,
487                    kernel_size: 3,
488                    stride: 1,
489                    quantum_conv: true,
490                },
491                ResidualBlock {
492                    channels: 128,
493                    kernel_size: 3,
494                    stride: 2,
495                    quantum_conv: true,
496                },
497            ],
498            skip_connections: true,
499        },
500        task_config: VisionTaskConfig::FeatureExtraction {
501            feature_dim: 512,
502            normalize: true,
503        },
504        preprocessing: PreprocessingConfig::default(),
505        quantum_enhancement: QuantumEnhancement::High,
506    };
507
508    let mut pipeline = QuantumVisionPipeline::new(config)?;
509
510    // Extract features from multiple images
511    let num_images = 10;
512    let test_images = create_test_image(num_images, 3, 224, 224)?;
513
514    println!("   Extracting features from {num_images} images...");
515
516    let features_output = pipeline.forward(&test_images)?;
517
518    if let TaskOutput::Features {
519        features,
520        attention_maps,
521    } = features_output
522    {
523        println!("   Feature extraction results:");
524        println!("   - Feature dimension: {}", features.dim().1);
525        println!("   - Features normalized: Yes");
526
527        // Compute feature statistics
528        let feature_stats = compute_feature_statistics(&features)?;
529        println!("\n   Feature statistics:");
530        println!("   - Mean magnitude: {:.4}", feature_stats.mean_magnitude);
531        println!("   - Variance: {:.4}", feature_stats.variance);
532        println!("   - Sparsity: {:.1}%", feature_stats.sparsity * 100.0);
533
534        // Compute pairwise similarities
535        println!("\n   Feature similarity matrix (first 5 images):");
536        let similarities = compute_cosine_similarities(&features)?;
537
538        print!("       ");
539        for i in 0..5.min(num_images) {
540            print!("Img{}  ", i + 1);
541        }
542        println!();
543
544        for i in 0..5.min(num_images) {
545            print!("   Img{} ", i + 1);
546            for j in 0..5.min(num_images) {
547                print!("{:.3} ", similarities[[i, j]]);
548            }
549            println!();
550        }
551
552        // Quantum feature properties
553        println!("\n   Quantum feature properties:");
554        println!("   - Entanglement enhances discriminative power");
555        println!("   - Quantum superposition encodes multiple views");
556        println!("   - Phase information captures subtle variations");
557    }
558
559    Ok(())
560 }
561
562 /// Demonstrate multi-task learning
563 fn multitask_demo() -> Result<()> {
564    println!("   Multi-task quantum vision demo...");
565
566    // Create a pipeline that can handle multiple tasks
567    let tasks = vec![
568        (
569            "Classification",
570            VisionTaskConfig::Classification {
571                num_classes: 10,
572                multi_label: false,
573            },
574        ),
575        (
576            "Detection",
577            VisionTaskConfig::ObjectDetection {
578                num_classes: 20,
579                anchor_sizes: vec![(32, 32), (64, 64)],
580                iou_threshold: 0.5,
581            },
582        ),
583        (
584            "Segmentation",
585            VisionTaskConfig::Segmentation {
586                num_classes: 10,
587                output_stride: 8,
588            },
589        ),
590    ];
591
592    println!(
593        "   Testing {} vision tasks with shared backbone...",
594        tasks.len()
595    );
596
597    // Use same backbone for all tasks
598    let base_config = QuantumVisionConfig {
599        num_qubits: 16,
600        encoding_method: ImageEncodingMethod::HierarchicalEncoding { levels: 3 },
601        backbone: VisionBackbone::HybridBackbone {
602            cnn_layers: 4,
603            transformer_layers: 2,
604        },
605        task_config: tasks[0].1.clone(), // Will be replaced for each task
606        preprocessing: PreprocessingConfig::default(),
607        quantum_enhancement: QuantumEnhancement::High,
608    };
609
610    // Test each task
611    let test_images = create_test_image(2, 3, 416, 416)?;
612
613    for (task_name, task_config) in tasks {
614        println!("\n   --- {task_name} Task ---");
615
616        let mut config = base_config.clone();
617        config.task_config = task_config;
618
619        let mut pipeline = QuantumVisionPipeline::new(config)?;
620        let output = pipeline.forward(&test_images)?;
621
622        match output {
623            TaskOutput::Classification { logits, .. } => {
624                println!("   Classification output shape: {:?}", logits.dim());
625            }
626            TaskOutput::Detection { boxes, scores, .. } => {
627                println!(
628                    "   Detection: {} anchors, score shape: {:?}",
629                    boxes.dim().1,
630                    scores.dim()
631                );
632            }
633            TaskOutput::Segmentation { masks, .. } => {
634                println!("   Segmentation mask shape: {:?}", masks.dim());
635            }
636            _ => {}
637        }
638
639        // Task-specific quantum advantages
640        match task_name {
641            "Classification" => {
642                println!("   ✓ Quantum features improve class discrimination");
643            }
644            "Detection" => {
645                println!("   ✓ Quantum anchors adapt to object scales");
646            }
647            "Segmentation" => {
648                println!("   ✓ Quantum correlations enhance boundary detection");
649            }
650            _ => {}
651        }
652    }
653
654    println!("\n   Multi-task benefits:");
655    println!("   - Shared quantum backbone reduces parameters");
656    println!("   - Task-specific quantum heads optimize performance");
657    println!("   - Quantum entanglement enables cross-task learning");
658
659    Ok(())
660 }
661
662 /// Demonstrate performance analysis
663 fn performance_analysis_demo() -> Result<()> {
664    println!("   Analyzing quantum vision performance...");
665
666    // Compare different quantum enhancement levels
667    let enhancement_levels = vec![
668        ("Low", QuantumEnhancement::Low),
669        ("Medium", QuantumEnhancement::Medium),
670        ("High", QuantumEnhancement::High),
671        (
672            "Custom",
673            QuantumEnhancement::Custom {
674                quantum_layers: vec![0, 2, 4, 6],
675                entanglement_strength: 0.8,
676            },
677        ),
678    ];
679
680    println!("\n   Quantum Enhancement Level Comparison:");
681    println!("   Level    | FLOPs   | Memory  | Accuracy | Q-Advantage");
682    println!("   ---------|---------|---------|----------|------------");
683
684    for (level_name, enhancement) in enhancement_levels {
685        let config = QuantumVisionConfig {
686            num_qubits: 12,
687            encoding_method: ImageEncodingMethod::AmplitudeEncoding,
688            backbone: VisionBackbone::QuantumCNN {
689                conv_layers: vec![ConvolutionalConfig {
690                    num_filters: 32,
691                    kernel_size: 3,
692                    stride: 1,
693                    padding: 1,
694                    quantum_kernel: true,
695                    circuit_depth: 4,
696                }],
697                pooling_type: PoolingType::Quantum,
698            },
699            task_config: VisionTaskConfig::Classification {
700                num_classes: 10,
701                multi_label: false,
702            },
703            preprocessing: PreprocessingConfig::default(),
704            quantum_enhancement: enhancement,
705        };
706
707        let pipeline = QuantumVisionPipeline::new(config)?;
708        let metrics = pipeline.metrics();
709
710        // Simulate performance metrics
711        let (flops, memory, accuracy, q_advantage) = match level_name {
712            "Low" => (1.2, 50.0, 0.85, 1.2),
713            "Medium" => (2.5, 80.0, 0.88, 1.5),
714            "High" => (4.1, 120.0, 0.91, 2.1),
715            "Custom" => (3.2, 95.0, 0.90, 1.8),
716            _ => (0.0, 0.0, 0.0, 0.0),
717        };
718
719        println!(
720            "   {:<8} | {:.1}G | {:.0}MB | {:.1}%  | {:.1}x",
721            level_name,
722            flops,
723            memory,
724            accuracy * 100.0,
725            q_advantage
726        );
727    }
728
729    // Scalability analysis
730    println!("\n   Scalability Analysis:");
731    let image_sizes = vec![64, 128, 224, 416, 512];
732
733    println!("   Image Size | Inference Time | Throughput");
734    println!("   -----------|----------------|------------");
735
736    for size in image_sizes {
737        let inference_time = (f64::from(size) / 100.0).mul_add(f64::from(size) / 100.0, 5.0);
738        let throughput = 1000.0 / inference_time;
739
740        println!("   {size}x{size}   | {inference_time:.1}ms        | {throughput:.0} img/s");
741    }
742
743    // Quantum advantages summary
744    println!("\n   Quantum Computer Vision Advantages:");
745    println!("   1. Exponential feature space with limited qubits");
746    println!("   2. Natural multi-scale representation via entanglement");
747    println!("   3. Quantum attention for global context modeling");
748    println!("   4. Phase encoding for rotation-invariant features");
749    println!("   5. Quantum pooling preserves superposition information");
750
751    // Hardware requirements
752    println!("\n   Hardware Requirements:");
753    println!("   - Minimum qubits: 10 (basic tasks)");
754    println!("   - Recommended: 16-20 qubits (complex tasks)");
755    println!("   - Coherence time: >100μs for deep networks");
756    println!("   - Gate fidelity: >99.9% for accurate predictions");
757
758    Ok(())
759 }
pub fn detection_default() -> Self

Default preprocessing for object detection pipelines

pub fn segmentation_default() -> Self

Default preprocessing for semantic segmentation pipelines
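
As a short sketch, the task-specific defaults pair naturally with the matching pipeline constructors used in the example above. Overwriting the preprocessing field after construction assumes QuantumVisionConfig exposes it as a public field, as the struct literals elsewhere on this page suggest.

// Hedged sketch: swap task-specific preprocessing defaults into the
// matching pipeline configs from the repository example.
let mut det_config = QuantumVisionConfig::object_detection(80); // 80 classes (COCO-like)
det_config.preprocessing = PreprocessingConfig::detection_default();

let mut seg_config = QuantumVisionConfig::segmentation(21); // 21 classes (Pascal VOC-like)
seg_config.preprocessing = PreprocessingConfig::segmentation_default();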

Trait Implementations

impl Clone for PreprocessingConfig

fn clone(&self) -> PreprocessingConfig

Returns a duplicate of the value.

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source.

impl Debug for PreprocessingConfig

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter.

Auto Trait Implementations

Blanket Implementations

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self.

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.
impl<T> CloneToUninit for T
where T: Clone,

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬 This is a nightly-only experimental API (clone_to_uninit). Performs copy-assignment from self to dest.

impl<T> DynClone for T
where T: Clone,

fn __clone_box(&self, _: Private) -> *mut ()

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T> IntoEither for T

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise.

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
where F: FnOnce(&Self) -> bool,

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise.
impl<T> Pointable for T

const ALIGN: usize

The alignment of the pointer.

type Init = T

The type for initializers.

unsafe fn init(init: <T as Pointable>::Init) -> usize

Initializes a pointer with the given initializer.

unsafe fn deref<'a>(ptr: usize) -> &'a T

Dereferences the given pointer.

unsafe fn deref_mut<'a>(ptr: usize) -> &'a mut T

Mutably dereferences the given pointer.

unsafe fn drop(ptr: usize)

Drops the object pointed to by the given pointer.

impl<T> Same for T

type Output = T

Should always be Self.
impl<SS, SP> SupersetOf<SS> for SP
where SS: SubsetOf<SP>,

fn to_subset(&self) -> Option<SS>

The inverse inclusion map: attempts to construct self from the equivalent element of its superset.

fn is_in_subset(&self) -> bool

Checks if self is actually part of its subset SS (and can be converted to it).

fn to_subset_unchecked(&self) -> SS

Use with care! Same as self.to_subset but without any property checks. Always succeeds.

fn from_subset(element: &SS) -> SP

The inclusion map: converts self to the equivalent element of its superset.
impl<T> ToOwned for T
where T: Clone,

type Owned = T

The resulting type after obtaining ownership.

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning.

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning.

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

fn vzip(self) -> V