Struct PreprocessingConfig
pub struct PreprocessingConfig {
    pub image_size: (usize, usize),
    pub normalize: bool,
    pub mean: Vec<f64>,
    pub std: Vec<f64>,
    pub augmentation: AugmentationConfig,
    pub color_space: ColorSpace,
}
Configuration for image preprocessing: target size, normalization parameters, data augmentation, and color space.

Fields

image_size: (usize, usize)

Target image size

normalize: bool

Whether to apply normalization

mean: Vec<f64>

Normalization mean

std: Vec<f64>

Normalization standard deviation

augmentation: AugmentationConfig

Data augmentation

color_space: ColorSpace

Color space
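
A construction sketch, assuming only what this page shows (the public fields and the inherent default() constructor). The ImageNet-style mean/std values are illustrative, not necessarily the crate's defaults.

let mut prep = PreprocessingConfig::default();
prep.image_size = (224, 224);
prep.normalize = true;
// With normalization enabled, preprocessing pipelines conventionally map
// each pixel to (x - mean[c]) / std[c]; the per-channel RGB statistics
// below are the common ImageNet values, used here purely as an example.
prep.mean = vec![0.485, 0.456, 0.406];
prep.std = vec![0.229, 0.224, 0.225];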

Implementations

impl PreprocessingConfig

pub fn default() -> Self

Default preprocessing

Examples found in repository
examples/computer_vision.rs (line 165)
130fn vision_backbone_demo() -> Result<()> {
131    println!("   Testing quantum vision backbone architectures...");
132
133    // Different backbone configurations
134    let backbones = vec![
135        (
136            "Quantum CNN",
137            QuantumVisionConfig {
138                num_qubits: 12,
139                encoding_method: ImageEncodingMethod::AmplitudeEncoding,
140                backbone: VisionBackbone::QuantumCNN {
141                    conv_layers: vec![
142                        ConvolutionalConfig {
143                            num_filters: 32,
144                            kernel_size: 3,
145                            stride: 1,
146                            padding: 1,
147                            quantum_kernel: true,
148                            circuit_depth: 4,
149                        },
150                        ConvolutionalConfig {
151                            num_filters: 64,
152                            kernel_size: 3,
153                            stride: 2,
154                            padding: 1,
155                            quantum_kernel: true,
156                            circuit_depth: 6,
157                        },
158                    ],
159                    pooling_type: PoolingType::Quantum,
160                },
161                task_config: VisionTaskConfig::Classification {
162                    num_classes: 10,
163                    multi_label: false,
164                },
165                preprocessing: PreprocessingConfig::default(),
166                quantum_enhancement: QuantumEnhancement::Medium,
167            },
168        ),
169        (
170            "Quantum ViT",
171            QuantumVisionConfig {
172                num_qubits: 16,
173                encoding_method: ImageEncodingMethod::QPIE,
174                backbone: VisionBackbone::QuantumViT {
175                    patch_size: 16,
176                    embed_dim: 768,
177                    num_heads: 12,
178                    depth: 12,
179                },
180                task_config: VisionTaskConfig::Classification {
181                    num_classes: 10,
182                    multi_label: false,
183                },
184                preprocessing: PreprocessingConfig::default(),
185                quantum_enhancement: QuantumEnhancement::High,
186            },
187        ),
188        (
189            "Hybrid CNN-Transformer",
190            QuantumVisionConfig {
191                num_qubits: 14,
192                encoding_method: ImageEncodingMethod::HierarchicalEncoding { levels: 3 },
193                backbone: VisionBackbone::HybridBackbone {
194                    cnn_layers: 4,
195                    transformer_layers: 2,
196                },
197                task_config: VisionTaskConfig::Classification {
198                    num_classes: 10,
199                    multi_label: false,
200                },
201                preprocessing: PreprocessingConfig::default(),
202                quantum_enhancement: QuantumEnhancement::High,
203            },
204        ),
205    ];
206
207    for (name, config) in backbones {
208        println!("\n   --- {} Backbone ---", name);
209
210        let mut pipeline = QuantumVisionPipeline::new(config)?;
211
212        // Test forward pass
213        let test_images = create_test_image(2, 3, 224, 224)?;
214        let output = pipeline.forward(&test_images)?;
215
216        match &output {
217            TaskOutput::Classification {
218                logits,
219                probabilities,
220            } => {
221                println!("   Output shape: {:?}", logits.dim());
222                println!("   Probability shape: {:?}", probabilities.dim());
223            }
224            _ => {}
225        }
226
227        // Get metrics
228        let metrics = pipeline.metrics();
229        println!("   Quantum metrics:");
230        println!(
231            "   - Circuit depth: {}",
232            metrics.quantum_metrics.circuit_depth
233        );
234        println!(
235            "   - Quantum advantage: {:.2}x",
236            metrics.quantum_metrics.quantum_advantage
237        );
238        println!(
239            "   - Coherence utilization: {:.1}%",
240            metrics.quantum_metrics.coherence_utilization * 100.0
241        );
242
243        // Architecture-specific properties
244        match name {
245            "Quantum CNN" => {
246                println!("   ✓ Hierarchical feature extraction with quantum convolutions");
247            }
248            "Quantum ViT" => {
249                println!("   ✓ Global context modeling with quantum attention");
250            }
251            "Hybrid CNN-Transformer" => {
252                println!("   ✓ Local features + global context integration");
253            }
254            _ => {}
255        }
256    }
257
258    Ok(())
259}
260
261/// Demonstrate image classification
262fn classification_demo() -> Result<()> {
263    println!("   Quantum image classification demo...");
264
265    // Create classification pipeline
266    let config = QuantumVisionConfig::default();
267    let mut pipeline = QuantumVisionPipeline::new(config)?;
268
269    // Create synthetic dataset
270    let num_classes = 10;
271    let num_samples = 20;
272    let (train_data, val_data) = create_classification_dataset(num_samples, num_classes)?;
273
274    println!(
275        "   Dataset: {} training, {} validation samples",
276        train_data.len(),
277        val_data.len()
278    );
279
280    // Train the model (simplified)
281    println!("\n   Training quantum classifier...");
282    let history = pipeline.train(
283        &train_data,
284        &val_data,
285        5, // epochs
286        OptimizationMethod::Adam,
287    )?;
288
289    // Display training results
290    println!("\n   Training results:");
291    for (epoch, train_loss, val_loss) in history
292        .epochs
293        .iter()
294        .zip(history.train_losses.iter())
295        .zip(history.val_losses.iter())
296        .map(|((e, t), v)| (e, t, v))
297    {
298        println!(
299            "   Epoch {}: train_loss={:.4}, val_loss={:.4}",
300            epoch + 1,
301            train_loss,
302            val_loss
303        );
304    }
305
306    // Test on new images
307    println!("\n   Testing on new images...");
308    let test_images = create_test_image(5, 3, 224, 224)?;
309    let predictions = pipeline.forward(&test_images)?;
310
311    match predictions {
312        TaskOutput::Classification { probabilities, .. } => {
313            for (i, prob_row) in probabilities.outer_iter().enumerate() {
314                let (predicted_class, confidence) = prob_row
315                    .iter()
316                    .enumerate()
317                    .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap())
318                    .map(|(idx, &prob)| (idx, prob))
319                    .unwrap_or((0, 0.0));
320
321                println!(
322                    "   Image {}: Class {} (confidence: {:.2}%)",
323                    i + 1,
324                    predicted_class,
325                    confidence * 100.0
326                );
327            }
328        }
329        _ => {}
330    }
331
332    // Analyze quantum advantage
333    let quantum_advantage = analyze_classification_quantum_advantage(&pipeline)?;
334    println!("\n   Quantum advantage analysis:");
335    println!(
336        "   - Parameter efficiency: {:.2}x classical",
337        quantum_advantage.param_efficiency
338    );
339    println!(
340        "   - Feature expressiveness: {:.2}x",
341        quantum_advantage.expressiveness
342    );
343    println!(
344        "   - Training speedup: {:.2}x",
345        quantum_advantage.training_speedup
346    );
347
348    Ok(())
349}
350
351/// Demonstrate object detection
352fn object_detection_demo() -> Result<()> {
353    println!("   Quantum object detection demo...");
354
355    // Create detection pipeline
356    let config = QuantumVisionConfig::object_detection(80); // 80 classes (COCO-like)
357    let mut pipeline = QuantumVisionPipeline::new(config)?;
358
359    // Test image
360    let test_images = create_test_image(2, 3, 416, 416)?;
361
362    println!(
363        "   Processing {} images for object detection...",
364        test_images.dim().0
365    );
366
367    // Run detection
368    let detections = pipeline.forward(&test_images)?;
369
370    match detections {
371        TaskOutput::Detection {
372            boxes,
373            scores,
374            classes,
375        } => {
376            println!("   Detection results:");
377
378            for batch_idx in 0..boxes.dim().0 {
379                println!("\n   Image {}:", batch_idx + 1);
380
381                // Filter detections by score threshold
382                let threshold = 0.5;
383                let mut num_detections = 0;
384
385                for det_idx in 0..boxes.dim().1 {
386                    let score = scores[[batch_idx, det_idx]];
387
388                    if score > threshold {
389                        let class_id = classes[[batch_idx, det_idx]];
390                        let bbox = boxes.slice(ndarray::s![batch_idx, det_idx, ..]);
391
392                        println!("   - Object {}: Class {}, Score {:.3}, Box [{:.1}, {:.1}, {:.1}, {:.1}]",
393                            num_detections + 1, class_id, score,
394                            bbox[0], bbox[1], bbox[2], bbox[3]);
395
396                        num_detections += 1;
397                    }
398                }
399
400                if num_detections == 0 {
401                    println!("   - No objects detected above threshold");
402                } else {
403                    println!("   Total objects detected: {}", num_detections);
404                }
405            }
406        }
407        _ => {}
408    }
409
410    // Analyze detection performance
411    println!("\n   Detection performance analysis:");
412    println!("   - Quantum anchor generation improves localization");
413    println!("   - Entangled features enhance multi-scale detection");
414    println!("   - Quantum NMS reduces redundant detections");
415
416    Ok(())
417}
418
419/// Demonstrate semantic segmentation
420fn segmentation_demo() -> Result<()> {
421    println!("   Quantum semantic segmentation demo...");
422
423    // Create segmentation pipeline
424    let config = QuantumVisionConfig::segmentation(21); // 21 classes (Pascal VOC-like)
425    let mut pipeline = QuantumVisionPipeline::new(config)?;
426
427    // Test images
428    let test_images = create_test_image(1, 3, 512, 512)?;
429
430    println!("   Processing image for semantic segmentation...");
431
432    // Run segmentation
433    let segmentation = pipeline.forward(&test_images)?;
434
435    match segmentation {
436        TaskOutput::Segmentation {
437            masks,
438            class_scores,
439        } => {
440            println!("   Segmentation results:");
441            println!("   - Mask shape: {:?}", masks.dim());
442            println!("   - Class scores shape: {:?}", class_scores.dim());
443
444            // Analyze segmentation quality
445            let seg_metrics = analyze_segmentation_quality(&masks, &class_scores)?;
446            println!("\n   Segmentation metrics:");
447            println!("   - Mean IoU: {:.3}", seg_metrics.mean_iou);
448            println!(
449                "   - Pixel accuracy: {:.1}%",
450                seg_metrics.pixel_accuracy * 100.0
451            );
452            println!(
453                "   - Boundary precision: {:.3}",
454                seg_metrics.boundary_precision
455            );
456
457            // Class distribution
458            println!("\n   Predicted class distribution:");
459            let class_counts = compute_class_distribution(&masks)?;
460            for (class_id, count) in class_counts.iter().take(5) {
461                let percentage = *count as f64 / (512.0 * 512.0) * 100.0;
462                println!("   - Class {}: {:.1}% of pixels", class_id, percentage);
463            }
464        }
465        _ => {}
466    }
467
468    // Quantum advantages for segmentation
469    println!("\n   Quantum segmentation advantages:");
470    println!("   - Quantum attention captures long-range dependencies");
471    println!("   - Hierarchical encoding preserves multi-scale features");
472    println!("   - Entanglement enables pixel-to-pixel correlations");
473
474    Ok(())
475}
476
477/// Demonstrate feature extraction
478fn feature_extraction_demo() -> Result<()> {
479    println!("   Quantum feature extraction demo...");
480
481    // Create feature extraction pipeline
482    let config = QuantumVisionConfig {
483        num_qubits: 14,
484        encoding_method: ImageEncodingMethod::QPIE,
485        backbone: VisionBackbone::QuantumResNet {
486            blocks: vec![
487                ResidualBlock {
488                    channels: 64,
489                    kernel_size: 3,
490                    stride: 1,
491                    quantum_conv: true,
492                },
493                ResidualBlock {
494                    channels: 128,
495                    kernel_size: 3,
496                    stride: 2,
497                    quantum_conv: true,
498                },
499            ],
500            skip_connections: true,
501        },
502        task_config: VisionTaskConfig::FeatureExtraction {
503            feature_dim: 512,
504            normalize: true,
505        },
506        preprocessing: PreprocessingConfig::default(),
507        quantum_enhancement: QuantumEnhancement::High,
508    };
509
510    let mut pipeline = QuantumVisionPipeline::new(config)?;
511
512    // Extract features from multiple images
513    let num_images = 10;
514    let test_images = create_test_image(num_images, 3, 224, 224)?;
515
516    println!("   Extracting features from {} images...", num_images);
517
518    let features_output = pipeline.forward(&test_images)?;
519
520    match features_output {
521        TaskOutput::Features {
522            features,
523            attention_maps,
524        } => {
525            println!("   Feature extraction results:");
526            println!("   - Feature dimension: {}", features.dim().1);
527            println!("   - Features normalized: Yes");
528
529            // Compute feature statistics
530            let feature_stats = compute_feature_statistics(&features)?;
531            println!("\n   Feature statistics:");
532            println!("   - Mean magnitude: {:.4}", feature_stats.mean_magnitude);
533            println!("   - Variance: {:.4}", feature_stats.variance);
534            println!("   - Sparsity: {:.1}%", feature_stats.sparsity * 100.0);
535
536            // Compute pairwise similarities
537            println!("\n   Feature similarity matrix (first 5 images):");
538            let similarities = compute_cosine_similarities(&features)?;
539
540            print!("       ");
541            for i in 0..5.min(num_images) {
542                print!("Img{}  ", i + 1);
543            }
544            println!();
545
546            for i in 0..5.min(num_images) {
547                print!("   Img{} ", i + 1);
548                for j in 0..5.min(num_images) {
549                    print!("{:.3} ", similarities[[i, j]]);
550                }
551                println!();
552            }
553
554            // Quantum feature properties
555            println!("\n   Quantum feature properties:");
556            println!("   - Entanglement enhances discriminative power");
557            println!("   - Quantum superposition encodes multiple views");
558            println!("   - Phase information captures subtle variations");
559        }
560        _ => {}
561    }
562
563    Ok(())
564}
565
566/// Demonstrate multi-task learning
567fn multitask_demo() -> Result<()> {
568    println!("   Multi-task quantum vision demo...");
569
570    // Create a pipeline that can handle multiple tasks
571    let tasks = vec![
572        (
573            "Classification",
574            VisionTaskConfig::Classification {
575                num_classes: 10,
576                multi_label: false,
577            },
578        ),
579        (
580            "Detection",
581            VisionTaskConfig::ObjectDetection {
582                num_classes: 20,
583                anchor_sizes: vec![(32, 32), (64, 64)],
584                iou_threshold: 0.5,
585            },
586        ),
587        (
588            "Segmentation",
589            VisionTaskConfig::Segmentation {
590                num_classes: 10,
591                output_stride: 8,
592            },
593        ),
594    ];
595
596    println!(
597        "   Testing {} vision tasks with shared backbone...",
598        tasks.len()
599    );
600
601    // Use same backbone for all tasks
602    let base_config = QuantumVisionConfig {
603        num_qubits: 16,
604        encoding_method: ImageEncodingMethod::HierarchicalEncoding { levels: 3 },
605        backbone: VisionBackbone::HybridBackbone {
606            cnn_layers: 4,
607            transformer_layers: 2,
608        },
609        task_config: tasks[0].1.clone(), // Will be replaced for each task
610        preprocessing: PreprocessingConfig::default(),
611        quantum_enhancement: QuantumEnhancement::High,
612    };
613
614    // Test each task
615    let test_images = create_test_image(2, 3, 416, 416)?;
616
617    for (task_name, task_config) in tasks {
618        println!("\n   --- {} Task ---", task_name);
619
620        let mut config = base_config.clone();
621        config.task_config = task_config;
622
623        let mut pipeline = QuantumVisionPipeline::new(config)?;
624        let output = pipeline.forward(&test_images)?;
625
626        match output {
627            TaskOutput::Classification { logits, .. } => {
628                println!("   Classification output shape: {:?}", logits.dim());
629            }
630            TaskOutput::Detection { boxes, scores, .. } => {
631                println!(
632                    "   Detection: {} anchors, score shape: {:?}",
633                    boxes.dim().1,
634                    scores.dim()
635                );
636            }
637            TaskOutput::Segmentation { masks, .. } => {
638                println!("   Segmentation mask shape: {:?}", masks.dim());
639            }
640            _ => {}
641        }
642
643        // Task-specific quantum advantages
644        match task_name {
645            "Classification" => {
646                println!("   ✓ Quantum features improve class discrimination");
647            }
648            "Detection" => {
649                println!("   ✓ Quantum anchors adapt to object scales");
650            }
651            "Segmentation" => {
652                println!("   ✓ Quantum correlations enhance boundary detection");
653            }
654            _ => {}
655        }
656    }
657
658    println!("\n   Multi-task benefits:");
659    println!("   - Shared quantum backbone reduces parameters");
660    println!("   - Task-specific quantum heads optimize performance");
661    println!("   - Quantum entanglement enables cross-task learning");
662
663    Ok(())
664}
665
666/// Demonstrate performance analysis
667fn performance_analysis_demo() -> Result<()> {
668    println!("   Analyzing quantum vision performance...");
669
670    // Compare different quantum enhancement levels
671    let enhancement_levels = vec![
672        ("Low", QuantumEnhancement::Low),
673        ("Medium", QuantumEnhancement::Medium),
674        ("High", QuantumEnhancement::High),
675        (
676            "Custom",
677            QuantumEnhancement::Custom {
678                quantum_layers: vec![0, 2, 4, 6],
679                entanglement_strength: 0.8,
680            },
681        ),
682    ];
683
684    println!("\n   Quantum Enhancement Level Comparison:");
685    println!("   Level    | FLOPs   | Memory  | Accuracy | Q-Advantage");
686    println!("   ---------|---------|---------|----------|------------");
687
688    for (level_name, enhancement) in enhancement_levels {
689        let config = QuantumVisionConfig {
690            num_qubits: 12,
691            encoding_method: ImageEncodingMethod::AmplitudeEncoding,
692            backbone: VisionBackbone::QuantumCNN {
693                conv_layers: vec![ConvolutionalConfig {
694                    num_filters: 32,
695                    kernel_size: 3,
696                    stride: 1,
697                    padding: 1,
698                    quantum_kernel: true,
699                    circuit_depth: 4,
700                }],
701                pooling_type: PoolingType::Quantum,
702            },
703            task_config: VisionTaskConfig::Classification {
704                num_classes: 10,
705                multi_label: false,
706            },
707            preprocessing: PreprocessingConfig::default(),
708            quantum_enhancement: enhancement,
709        };
710
711        let pipeline = QuantumVisionPipeline::new(config)?;
712        let metrics = pipeline.metrics();
713
714        // Simulate performance metrics
715        let (flops, memory, accuracy, q_advantage) = match level_name {
716            "Low" => (1.2, 50.0, 0.85, 1.2),
717            "Medium" => (2.5, 80.0, 0.88, 1.5),
718            "High" => (4.1, 120.0, 0.91, 2.1),
719            "Custom" => (3.2, 95.0, 0.90, 1.8),
720            _ => (0.0, 0.0, 0.0, 0.0),
721        };
722
723        println!(
724            "   {:<8} | {:.1}G | {:.0}MB | {:.1}%  | {:.1}x",
725            level_name,
726            flops,
727            memory,
728            accuracy * 100.0,
729            q_advantage
730        );
731    }
732
733    // Scalability analysis
734    println!("\n   Scalability Analysis:");
735    let image_sizes = vec![64, 128, 224, 416, 512];
736
737    println!("   Image Size | Inference Time | Throughput");
738    println!("   -----------|----------------|------------");
739
740    for size in image_sizes {
741        let inference_time = 5.0 + (size as f64 / 100.0).powi(2);
742        let throughput = 1000.0 / inference_time;
743
744        println!(
745            "   {}x{}   | {:.1}ms        | {:.0} img/s",
746            size, size, inference_time, throughput
747        );
748    }
749
750    // Quantum advantages summary
751    println!("\n   Quantum Computer Vision Advantages:");
752    println!("   1. Exponential feature space with limited qubits");
753    println!("   2. Natural multi-scale representation via entanglement");
754    println!("   3. Quantum attention for global context modeling");
755    println!("   4. Phase encoding for rotation-invariant features");
756    println!("   5. Quantum pooling preserves superposition information");
757
758    // Hardware requirements
759    println!("\n   Hardware Requirements:");
760    println!("   - Minimum qubits: 10 (basic tasks)");
761    println!("   - Recommended: 16-20 qubits (complex tasks)");
762    println!("   - Coherence time: >100μs for deep networks");
763    println!("   - Gate fidelity: >99.9% for accurate predictions");
764
765    Ok(())
766}

pub fn detection_default() -> Self

Detection preprocessing
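
A usage sketch, assuming the QuantumVisionConfig::object_detection constructor and public preprocessing field shown in the repository example above; the class count 80 is illustrative:

// Inside a function returning Result<()>.
let mut config = QuantumVisionConfig::object_detection(80);
// object_detection(..) may already choose suitable preprocessing; the
// explicit assignment below just demonstrates applying the preset.
config.preprocessing = PreprocessingConfig::detection_default();
let mut pipeline = QuantumVisionPipeline::new(config)?;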


pub fn segmentation_default() -> Self

Segmentation preprocessing
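
Similarly for segmentation, assuming the QuantumVisionConfig::segmentation constructor from the repository example above (21 classes, Pascal VOC-like):

let mut config = QuantumVisionConfig::segmentation(21);
config.preprocessing = PreprocessingConfig::segmentation_default();
let mut pipeline = QuantumVisionPipeline::new(config)?;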

Trait Implementations


impl Clone for PreprocessingConfig


fn clone(&self) -> PreprocessingConfig

Returns a copy of the value.

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source.

impl Debug for PreprocessingConfig


fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter.
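
Because the struct implements Clone and Debug (per the impls above), a base configuration can be copied and tweaked per task; a brief sketch:

let base = PreprocessingConfig::default();
let mut no_norm = base.clone(); // independent copy via Clone
no_norm.normalize = false;
println!("{no_norm:?}"); // human-readable dump via Debug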

Auto Trait Implementations

Blanket Implementations

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self.

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.

impl<T> CloneToUninit for T
where T: Clone,

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬 This is a nightly-only experimental API (clone_to_uninit).
Performs copy-assignment from self to dest.

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self). That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T> IntoEither for T

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true; otherwise converts self into a Right variant.

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
where F: FnOnce(&Self) -> bool,

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true; otherwise converts self into a Right variant.

impl<T> Pointable for T

const ALIGN: usize

The alignment of the pointer.

type Init = T

The type for initializers.

unsafe fn init(init: <T as Pointable>::Init) -> usize

Initializes a value with the given initializer.

unsafe fn deref<'a>(ptr: usize) -> &'a T

Dereferences the given pointer.

unsafe fn deref_mut<'a>(ptr: usize) -> &'a mut T

Mutably dereferences the given pointer.

unsafe fn drop(ptr: usize)

Drops the object pointed to by the given pointer.

impl<T> Same for T

type Output = T

Should always be Self.

impl<SS, SP> SupersetOf<SS> for SP
where SS: SubsetOf<SP>,

fn to_subset(&self) -> Option<SS>

The inverse inclusion map: attempts to construct self from the equivalent element of its superset.

fn is_in_subset(&self) -> bool

Checks if self is actually part of its subset (and can be converted to it).

fn to_subset_unchecked(&self) -> SS

Use with care! Same as self.to_subset but without any property checks. Always succeeds.

fn from_subset(element: &SS) -> SP

The inclusion map: converts self to the equivalent element of its superset.

impl<T> ToOwned for T
where T: Clone,

type Owned = T

The resulting type after obtaining ownership.

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning.

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning.

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

fn vzip(self) -> V

impl<T> Ungil for T
where T: Send,