Struct PreprocessingConfig

pub struct PreprocessingConfig {
    pub image_size: (usize, usize),
    pub normalize: bool,
    pub mean: Vec<f64>,
    pub std: Vec<f64>,
    pub augmentation: AugmentationConfig,
    pub color_space: ColorSpace,
}
Preprocessing configuration for the quantum vision pipeline: target image size, normalization statistics, augmentation, and color space.

Fields

image_size: (usize, usize)

Target image size.

normalize: bool

Whether to apply normalization.

mean: Vec<f64>

Per-channel normalization means.

std: Vec<f64>

Per-channel normalization standard deviations.

augmentation: AugmentationConfig

Data augmentation settings.

color_space: ColorSpace

Color space of the input images.
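
For manual construction, the sketch below builds an ImageNet-style configuration. The mean/std values are the standard ImageNet statistics; AugmentationConfig::default() and the ColorSpace::RGB variant are assumptions, so check those types' documentation.

// Minimal sketch: ImageNet-style preprocessing for 224x224 RGB input.
// Assumes AugmentationConfig implements Default and ColorSpace has an RGB variant.
let preprocessing = PreprocessingConfig {
    image_size: (224, 224),
    normalize: true,
    mean: vec![0.485, 0.456, 0.406], // per-channel ImageNet means
    std: vec![0.229, 0.224, 0.225],  // per-channel ImageNet standard deviations
    augmentation: AugmentationConfig::default(),
    color_space: ColorSpace::RGB,
};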

Implementations

impl PreprocessingConfig

pub fn default() -> Self

Returns the default preprocessing configuration.

Examples found in repository:
examples/computer_vision.rs (line 176)
141 fn vision_backbone_demo() -> Result<()> {
142    println!("   Testing quantum vision backbone architectures...");
143
144    // Different backbone configurations
145    let backbones = vec![
146        (
147            "Quantum CNN",
148            QuantumVisionConfig {
149                num_qubits: 12,
150                encoding_method: ImageEncodingMethod::AmplitudeEncoding,
151                backbone: VisionBackbone::QuantumCNN {
152                    conv_layers: vec![
153                        ConvolutionalConfig {
154                            num_filters: 32,
155                            kernel_size: 3,
156                            stride: 1,
157                            padding: 1,
158                            quantum_kernel: true,
159                            circuit_depth: 4,
160                        },
161                        ConvolutionalConfig {
162                            num_filters: 64,
163                            kernel_size: 3,
164                            stride: 2,
165                            padding: 1,
166                            quantum_kernel: true,
167                            circuit_depth: 6,
168                        },
169                    ],
170                    pooling_type: PoolingType::Quantum,
171                },
172                task_config: VisionTaskConfig::Classification {
173                    num_classes: 10,
174                    multi_label: false,
175                },
176                preprocessing: PreprocessingConfig::default(),
177                quantum_enhancement: QuantumEnhancement::Medium,
178            },
179        ),
180        (
181            "Quantum ViT",
182            QuantumVisionConfig {
183                num_qubits: 16,
184                encoding_method: ImageEncodingMethod::QPIE,
185                backbone: VisionBackbone::QuantumViT {
186                    patch_size: 16,
187                    embed_dim: 768,
188                    num_heads: 12,
189                    depth: 12,
190                },
191                task_config: VisionTaskConfig::Classification {
192                    num_classes: 10,
193                    multi_label: false,
194                },
195                preprocessing: PreprocessingConfig::default(),
196                quantum_enhancement: QuantumEnhancement::High,
197            },
198        ),
199        (
200            "Hybrid CNN-Transformer",
201            QuantumVisionConfig {
202                num_qubits: 14,
203                encoding_method: ImageEncodingMethod::HierarchicalEncoding { levels: 3 },
204                backbone: VisionBackbone::HybridBackbone {
205                    cnn_layers: 4,
206                    transformer_layers: 2,
207                },
208                task_config: VisionTaskConfig::Classification {
209                    num_classes: 10,
210                    multi_label: false,
211                },
212                preprocessing: PreprocessingConfig::default(),
213                quantum_enhancement: QuantumEnhancement::High,
214            },
215        ),
216    ];
217
218    for (name, config) in backbones {
219        println!("\n   --- {name} Backbone ---");
220
221        let mut pipeline = QuantumVisionPipeline::new(config)?;
222
223        // Test forward pass
224        let test_images = create_test_image(2, 3, 224, 224)?;
225        let output = pipeline.forward(&test_images)?;
226
227        if let TaskOutput::Classification {
228            logits,
229            probabilities,
230        } = &output
231        {
232            println!("   Output shape: {:?}", logits.dim());
233            println!("   Probability shape: {:?}", probabilities.dim());
234        }
235
236        // Get metrics
237        let metrics = pipeline.metrics();
238        println!("   Quantum metrics:");
239        println!(
240            "   - Circuit depth: {}",
241            metrics.quantum_metrics.circuit_depth
242        );
243        println!(
244            "   - Quantum advantage: {:.2}x",
245            metrics.quantum_metrics.quantum_advantage
246        );
247        println!(
248            "   - Coherence utilization: {:.1}%",
249            metrics.quantum_metrics.coherence_utilization * 100.0
250        );
251
252        // Architecture-specific properties
253        match name {
254            "Quantum CNN" => {
255                println!("   ✓ Hierarchical feature extraction with quantum convolutions");
256            }
257            "Quantum ViT" => {
258                println!("   ✓ Global context modeling with quantum attention");
259            }
260            "Hybrid CNN-Transformer" => {
261                println!("   ✓ Local features + global context integration");
262            }
263            _ => {}
264        }
265    }
266
267    Ok(())
268 }
269
270 /// Demonstrate image classification
271 fn classification_demo() -> Result<()> {
272    println!("   Quantum image classification demo...");
273
274    // Create classification pipeline
275    let config = QuantumVisionConfig::default();
276    let mut pipeline = QuantumVisionPipeline::new(config)?;
277
278    // Create synthetic dataset
279    let num_classes = 10;
280    let num_samples = 20;
281    let (train_data, val_data) = create_classification_dataset(num_samples, num_classes)?;
282
283    println!(
284        "   Dataset: {} training, {} validation samples",
285        train_data.len(),
286        val_data.len()
287    );
288
289    // Train the model (simplified)
290    println!("\n   Training quantum classifier...");
291    let history = pipeline.train(
292        &train_data,
293        &val_data,
294        5, // epochs
295        OptimizationMethod::Adam,
296    )?;
297
298    // Display training results
299    println!("\n   Training results:");
300    for (epoch, train_loss, val_loss) in history
301        .epochs
302        .iter()
303        .zip(history.train_losses.iter())
304        .zip(history.val_losses.iter())
305        .map(|((e, t), v)| (e, t, v))
306    {
307        println!(
308            "   Epoch {}: train_loss={:.4}, val_loss={:.4}",
309            epoch + 1,
310            train_loss,
311            val_loss
312        );
313    }
314
315    // Test on new images
316    println!("\n   Testing on new images...");
317    let test_images = create_test_image(5, 3, 224, 224)?;
318    let predictions = pipeline.forward(&test_images)?;
319
320    if let TaskOutput::Classification { probabilities, .. } = predictions {
321        for (i, prob_row) in probabilities.outer_iter().enumerate() {
322            let (predicted_class, confidence) = prob_row
323                .iter()
324                .enumerate()
325                .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap())
326                .map_or((0, 0.0), |(idx, &prob)| (idx, prob));
327
328            println!(
329                "   Image {}: Class {} (confidence: {:.2}%)",
330                i + 1,
331                predicted_class,
332                confidence * 100.0
333            );
334        }
335    }
336
337    // Analyze quantum advantage
338    let quantum_advantage = analyze_classification_quantum_advantage(&pipeline)?;
339    println!("\n   Quantum advantage analysis:");
340    println!(
341        "   - Parameter efficiency: {:.2}x classical",
342        quantum_advantage.param_efficiency
343    );
344    println!(
345        "   - Feature expressiveness: {:.2}x",
346        quantum_advantage.expressiveness
347    );
348    println!(
349        "   - Training speedup: {:.2}x",
350        quantum_advantage.training_speedup
351    );
352
353    Ok(())
354 }
355
356 /// Demonstrate object detection
357 fn object_detection_demo() -> Result<()> {
358    println!("   Quantum object detection demo...");
359
360    // Create detection pipeline
361    let config = QuantumVisionConfig::object_detection(80); // 80 classes (COCO-like)
362    let mut pipeline = QuantumVisionPipeline::new(config)?;
363
364    // Test image
365    let test_images = create_test_image(2, 3, 416, 416)?;
366
367    println!(
368        "   Processing {} images for object detection...",
369        test_images.dim().0
370    );
371
372    // Run detection
373    let detections = pipeline.forward(&test_images)?;
374
375    if let TaskOutput::Detection {
376        boxes,
377        scores,
378        classes,
379    } = detections
380    {
381        println!("   Detection results:");
382
383        for batch_idx in 0..boxes.dim().0 {
384            println!("\n   Image {}:", batch_idx + 1);
385
386            // Filter detections by score threshold
387            let threshold = 0.5;
388            let mut num_detections = 0;
389
390            for det_idx in 0..boxes.dim().1 {
391                let score = scores[[batch_idx, det_idx]];
392
393                if score > threshold {
394                    let class_id = classes[[batch_idx, det_idx]];
395                    let bbox = boxes.slice(scirs2_core::ndarray::s![batch_idx, det_idx, ..]);
396
397                    println!(
398                        "   - Object {}: Class {}, Score {:.3}, Box [{:.1}, {:.1}, {:.1}, {:.1}]",
399                        num_detections + 1,
400                        class_id,
401                        score,
402                        bbox[0],
403                        bbox[1],
404                        bbox[2],
405                        bbox[3]
406                    );
407
408                    num_detections += 1;
409                }
410            }
411
412            if num_detections == 0 {
413                println!("   - No objects detected above threshold");
414            } else {
415                println!("   Total objects detected: {num_detections}");
416            }
417        }
418    }
419
420    // Analyze detection performance
421    println!("\n   Detection performance analysis:");
422    println!("   - Quantum anchor generation improves localization");
423    println!("   - Entangled features enhance multi-scale detection");
424    println!("   - Quantum NMS reduces redundant detections");
425
426    Ok(())
427 }
428
429 /// Demonstrate semantic segmentation
430 fn segmentation_demo() -> Result<()> {
431    println!("   Quantum semantic segmentation demo...");
432
433    // Create segmentation pipeline
434    let config = QuantumVisionConfig::segmentation(21); // 21 classes (Pascal VOC-like)
435    let mut pipeline = QuantumVisionPipeline::new(config)?;
436
437    // Test images
438    let test_images = create_test_image(1, 3, 512, 512)?;
439
440    println!("   Processing image for semantic segmentation...");
441
442    // Run segmentation
443    let segmentation = pipeline.forward(&test_images)?;
444
445    if let TaskOutput::Segmentation {
446        masks,
447        class_scores,
448    } = segmentation
449    {
450        println!("   Segmentation results:");
451        println!("   - Mask shape: {:?}", masks.dim());
452        println!("   - Class scores shape: {:?}", class_scores.dim());
453
454        // Analyze segmentation quality
455        let seg_metrics = analyze_segmentation_quality(&masks, &class_scores)?;
456        println!("\n   Segmentation metrics:");
457        println!("   - Mean IoU: {:.3}", seg_metrics.mean_iou);
458        println!(
459            "   - Pixel accuracy: {:.1}%",
460            seg_metrics.pixel_accuracy * 100.0
461        );
462        println!(
463            "   - Boundary precision: {:.3}",
464            seg_metrics.boundary_precision
465        );
466
467        // Class distribution
468        println!("\n   Predicted class distribution:");
469        let class_counts = compute_class_distribution(&masks)?;
470        for (class_id, count) in class_counts.iter().take(5) {
471            let percentage = *count as f64 / (512.0 * 512.0) * 100.0;
472            println!("   - Class {class_id}: {percentage:.1}% of pixels");
473        }
474    }
475
476    // Quantum advantages for segmentation
477    println!("\n   Quantum segmentation advantages:");
478    println!("   - Quantum attention captures long-range dependencies");
479    println!("   - Hierarchical encoding preserves multi-scale features");
480    println!("   - Entanglement enables pixel-to-pixel correlations");
481
482    Ok(())
483 }
484
485 /// Demonstrate feature extraction
486 fn feature_extraction_demo() -> Result<()> {
487    println!("   Quantum feature extraction demo...");
488
489    // Create feature extraction pipeline
490    let config = QuantumVisionConfig {
491        num_qubits: 14,
492        encoding_method: ImageEncodingMethod::QPIE,
493        backbone: VisionBackbone::QuantumResNet {
494            blocks: vec![
495                ResidualBlock {
496                    channels: 64,
497                    kernel_size: 3,
498                    stride: 1,
499                    quantum_conv: true,
500                },
501                ResidualBlock {
502                    channels: 128,
503                    kernel_size: 3,
504                    stride: 2,
505                    quantum_conv: true,
506                },
507            ],
508            skip_connections: true,
509        },
510        task_config: VisionTaskConfig::FeatureExtraction {
511            feature_dim: 512,
512            normalize: true,
513        },
514        preprocessing: PreprocessingConfig::default(),
515        quantum_enhancement: QuantumEnhancement::High,
516    };
517
518    let mut pipeline = QuantumVisionPipeline::new(config)?;
519
520    // Extract features from multiple images
521    let num_images = 10;
522    let test_images = create_test_image(num_images, 3, 224, 224)?;
523
524    println!("   Extracting features from {num_images} images...");
525
526    let features_output = pipeline.forward(&test_images)?;
527
528    if let TaskOutput::Features {
529        features,
530        attention_maps,
531    } = features_output
532    {
533        println!("   Feature extraction results:");
534        println!("   - Feature dimension: {}", features.dim().1);
535        println!("   - Features normalized: Yes");
536
537        // Compute feature statistics
538        let feature_stats = compute_feature_statistics(&features)?;
539        println!("\n   Feature statistics:");
540        println!("   - Mean magnitude: {:.4}", feature_stats.mean_magnitude);
541        println!("   - Variance: {:.4}", feature_stats.variance);
542        println!("   - Sparsity: {:.1}%", feature_stats.sparsity * 100.0);
543
544        // Compute pairwise similarities
545        println!("\n   Feature similarity matrix (first 5 images):");
546        let similarities = compute_cosine_similarities(&features)?;
547
548        print!("       ");
549        for i in 0..5.min(num_images) {
550            print!("Img{}  ", i + 1);
551        }
552        println!();
553
554        for i in 0..5.min(num_images) {
555            print!("   Img{} ", i + 1);
556            for j in 0..5.min(num_images) {
557                print!("{:.3} ", similarities[[i, j]]);
558            }
559            println!();
560        }
561
562        // Quantum feature properties
563        println!("\n   Quantum feature properties:");
564        println!("   - Entanglement enhances discriminative power");
565        println!("   - Quantum superposition encodes multiple views");
566        println!("   - Phase information captures subtle variations");
567    }
568
569    Ok(())
570 }
571
572 /// Demonstrate multi-task learning
573 fn multitask_demo() -> Result<()> {
574    println!("   Multi-task quantum vision demo...");
575
576    // Create a pipeline that can handle multiple tasks
577    let tasks = vec![
578        (
579            "Classification",
580            VisionTaskConfig::Classification {
581                num_classes: 10,
582                multi_label: false,
583            },
584        ),
585        (
586            "Detection",
587            VisionTaskConfig::ObjectDetection {
588                num_classes: 20,
589                anchor_sizes: vec![(32, 32), (64, 64)],
590                iou_threshold: 0.5,
591            },
592        ),
593        (
594            "Segmentation",
595            VisionTaskConfig::Segmentation {
596                num_classes: 10,
597                output_stride: 8,
598            },
599        ),
600    ];
601
602    println!(
603        "   Testing {} vision tasks with shared backbone...",
604        tasks.len()
605    );
606
607    // Use same backbone for all tasks
608    let base_config = QuantumVisionConfig {
609        num_qubits: 16,
610        encoding_method: ImageEncodingMethod::HierarchicalEncoding { levels: 3 },
611        backbone: VisionBackbone::HybridBackbone {
612            cnn_layers: 4,
613            transformer_layers: 2,
614        },
615        task_config: tasks[0].1.clone(), // Will be replaced for each task
616        preprocessing: PreprocessingConfig::default(),
617        quantum_enhancement: QuantumEnhancement::High,
618    };
619
620    // Test each task
621    let test_images = create_test_image(2, 3, 416, 416)?;
622
623    for (task_name, task_config) in tasks {
624        println!("\n   --- {task_name} Task ---");
625
626        let mut config = base_config.clone();
627        config.task_config = task_config;
628
629        let mut pipeline = QuantumVisionPipeline::new(config)?;
630        let output = pipeline.forward(&test_images)?;
631
632        match output {
633            TaskOutput::Classification { logits, .. } => {
634                println!("   Classification output shape: {:?}", logits.dim());
635            }
636            TaskOutput::Detection { boxes, scores, .. } => {
637                println!(
638                    "   Detection: {} anchors, score shape: {:?}",
639                    boxes.dim().1,
640                    scores.dim()
641                );
642            }
643            TaskOutput::Segmentation { masks, .. } => {
644                println!("   Segmentation mask shape: {:?}", masks.dim());
645            }
646            _ => {}
647        }
648
649        // Task-specific quantum advantages
650        match task_name {
651            "Classification" => {
652                println!("   ✓ Quantum features improve class discrimination");
653            }
654            "Detection" => {
655                println!("   ✓ Quantum anchors adapt to object scales");
656            }
657            "Segmentation" => {
658                println!("   ✓ Quantum correlations enhance boundary detection");
659            }
660            _ => {}
661        }
662    }
663
664    println!("\n   Multi-task benefits:");
665    println!("   - Shared quantum backbone reduces parameters");
666    println!("   - Task-specific quantum heads optimize performance");
667    println!("   - Quantum entanglement enables cross-task learning");
668
669    Ok(())
670 }
671
672 /// Demonstrate performance analysis
673 fn performance_analysis_demo() -> Result<()> {
674    println!("   Analyzing quantum vision performance...");
675
676    // Compare different quantum enhancement levels
677    let enhancement_levels = vec![
678        ("Low", QuantumEnhancement::Low),
679        ("Medium", QuantumEnhancement::Medium),
680        ("High", QuantumEnhancement::High),
681        (
682            "Custom",
683            QuantumEnhancement::Custom {
684                quantum_layers: vec![0, 2, 4, 6],
685                entanglement_strength: 0.8,
686            },
687        ),
688    ];
689
690    println!("\n   Quantum Enhancement Level Comparison:");
691    println!("   Level    | FLOPs   | Memory  | Accuracy | Q-Advantage");
692    println!("   ---------|---------|---------|----------|------------");
693
694    for (level_name, enhancement) in enhancement_levels {
695        let config = QuantumVisionConfig {
696            num_qubits: 12,
697            encoding_method: ImageEncodingMethod::AmplitudeEncoding,
698            backbone: VisionBackbone::QuantumCNN {
699                conv_layers: vec![ConvolutionalConfig {
700                    num_filters: 32,
701                    kernel_size: 3,
702                    stride: 1,
703                    padding: 1,
704                    quantum_kernel: true,
705                    circuit_depth: 4,
706                }],
707                pooling_type: PoolingType::Quantum,
708            },
709            task_config: VisionTaskConfig::Classification {
710                num_classes: 10,
711                multi_label: false,
712            },
713            preprocessing: PreprocessingConfig::default(),
714            quantum_enhancement: enhancement,
715        };
716
717        let pipeline = QuantumVisionPipeline::new(config)?;
718        let metrics = pipeline.metrics();
719
720        // Simulate performance metrics
721        let (flops, memory, accuracy, q_advantage) = match level_name {
722            "Low" => (1.2, 50.0, 0.85, 1.2),
723            "Medium" => (2.5, 80.0, 0.88, 1.5),
724            "High" => (4.1, 120.0, 0.91, 2.1),
725            "Custom" => (3.2, 95.0, 0.90, 1.8),
726            _ => (0.0, 0.0, 0.0, 0.0),
727        };
728
729        println!(
730            "   {:<8} | {:.1}G | {:.0}MB | {:.1}%  | {:.1}x",
731            level_name,
732            flops,
733            memory,
734            accuracy * 100.0,
735            q_advantage
736        );
737    }
738
739    // Scalability analysis
740    println!("\n   Scalability Analysis:");
741    let image_sizes = vec![64, 128, 224, 416, 512];
742
743    println!("   Image Size | Inference Time | Throughput");
744    println!("   -----------|----------------|------------");
745
746    for size in image_sizes {
747        let inference_time = (f64::from(size) / 100.0).mul_add(f64::from(size) / 100.0, 5.0);
748        let throughput = 1000.0 / inference_time;
749
750        println!("   {size}x{size}   | {inference_time:.1}ms        | {throughput:.0} img/s");
751    }
752
753    // Quantum advantages summary
754    println!("\n   Quantum Computer Vision Advantages:");
755    println!("   1. Exponential feature space with limited qubits");
756    println!("   2. Natural multi-scale representation via entanglement");
757    println!("   3. Quantum attention for global context modeling");
758    println!("   4. Phase encoding for rotation-invariant features");
759    println!("   5. Quantum pooling preserves superposition information");
760
761    // Hardware requirements
762    println!("\n   Hardware Requirements:");
763    println!("   - Minimum qubits: 10 (basic tasks)");
764    println!("   - Recommended: 16-20 qubits (complex tasks)");
765    println!("   - Coherence time: >100μs for deep networks");
766    println!("   - Gate fidelity: >99.9% for accurate predictions");
767
768    Ok(())
769 }
pub fn detection_default() -> Self

Default preprocessing for object detection tasks.

pub fn segmentation_default() -> Self

Default preprocessing for semantic segmentation tasks.
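
The sketch below pairs each constructor with the matching pipeline configuration from the repository example above; direct assignment to the preprocessing field assumes it is public, as the struct literals in that example suggest.

// Sketch: swap task-specific preprocessing defaults into the pipeline configs
// built by QuantumVisionConfig::object_detection and ::segmentation (both used
// in the repository example above).
let mut det_config = QuantumVisionConfig::object_detection(80); // 80 classes, COCO-like
det_config.preprocessing = PreprocessingConfig::detection_default();

let mut seg_config = QuantumVisionConfig::segmentation(21); // 21 classes, Pascal VOC-like
seg_config.preprocessing = PreprocessingConfig::segmentation_default();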

Trait Implementations

impl Clone for PreprocessingConfig

fn clone(&self) -> PreprocessingConfig

Returns a duplicate of the value.

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source.

impl Debug for PreprocessingConfig

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter.

Auto Trait Implementations

Blanket Implementations

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self.

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.

impl<T> CloneToUninit for T
where T: Clone,

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬 This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest.

impl<T> DynClone for T
where T: Clone,

fn __clone_box(&self, _: Private) -> *mut ()

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self). That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T> IntoEither for T

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true; otherwise converts self into a Right variant.

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
where F: FnOnce(&Self) -> bool,

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true; otherwise converts self into a Right variant.

impl<T> Pointable for T

const ALIGN: usize

The alignment of pointer.

type Init = T

The type for initializers.

unsafe fn init(init: <T as Pointable>::Init) -> usize

Initializes a value with the given initializer.

unsafe fn deref<'a>(ptr: usize) -> &'a T

Dereferences the given pointer.

unsafe fn deref_mut<'a>(ptr: usize) -> &'a mut T

Mutably dereferences the given pointer.

unsafe fn drop(ptr: usize)

Drops the object pointed to by the given pointer.

impl<T> Same for T

type Output = T

Should always be Self.

impl<SS, SP> SupersetOf<SS> for SP
where SS: SubsetOf<SP>,

fn to_subset(&self) -> Option<SS>

The inverse inclusion map: attempts to construct self from the equivalent element of its superset.

fn is_in_subset(&self) -> bool

Checks if self is actually part of its subset T (and can be converted to it).

fn to_subset_unchecked(&self) -> SS

Use with care! Same as self.to_subset but without any property checks. Always succeeds.

fn from_subset(element: &SS) -> SP

The inclusion map: converts self to the equivalent element of its superset.

impl<T> ToOwned for T
where T: Clone,

type Owned = T

The resulting type after obtaining ownership.

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning.

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning.

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

fn vzip(self) -> V