pub struct QuantumVisionConfig {
pub num_qubits: usize,
pub encoding_method: ImageEncodingMethod,
pub backbone: VisionBackbone,
pub task_config: VisionTaskConfig,
pub preprocessing: PreprocessingConfig,
pub quantum_enhancement: QuantumEnhancement,
}

Expand description

Quantum computer vision pipeline configuration
Fields§
num_qubits: usize — Number of qubits for encoding
encoding_method: ImageEncodingMethod — Image encoding method
backbone: VisionBackbone — Vision backbone type
task_config: VisionTaskConfig — Task-specific configuration
preprocessing: PreprocessingConfig — Preprocessing configuration
quantum_enhancement: QuantumEnhancement — Quantum enhancement level
Implementations§
Source§impl QuantumVisionConfig
impl QuantumVisionConfig
Source§
pub fn default() -> Self
pub fn default() -> Self
Create default configuration
Examples found in repository?
examples/computer_vision.rs (line 275)
271fn classification_demo() -> Result<()> {
272 println!(" Quantum image classification demo...");
273
274 // Create classification pipeline
275 let config = QuantumVisionConfig::default();
276 let mut pipeline = QuantumVisionPipeline::new(config)?;
277
278 // Create synthetic dataset
279 let num_classes = 10;
280 let num_samples = 20;
281 let (train_data, val_data) = create_classification_dataset(num_samples, num_classes)?;
282
283 println!(
284 " Dataset: {} training, {} validation samples",
285 train_data.len(),
286 val_data.len()
287 );
288
289 // Train the model (simplified)
290 println!("\n Training quantum classifier...");
291 let history = pipeline.train(
292 &train_data,
293 &val_data,
294 5, // epochs
295 OptimizationMethod::Adam,
296 )?;
297
298 // Display training results
299 println!("\n Training results:");
300 for (epoch, train_loss, val_loss) in history
301 .epochs
302 .iter()
303 .zip(history.train_losses.iter())
304 .zip(history.val_losses.iter())
305 .map(|((e, t), v)| (e, t, v))
306 {
307 println!(
308 " Epoch {}: train_loss={:.4}, val_loss={:.4}",
309 epoch + 1,
310 train_loss,
311 val_loss
312 );
313 }
314
315 // Test on new images
316 println!("\n Testing on new images...");
317 let test_images = create_test_image(5, 3, 224, 224)?;
318 let predictions = pipeline.forward(&test_images)?;
319
320 if let TaskOutput::Classification { probabilities, .. } = predictions {
321 for (i, prob_row) in probabilities.outer_iter().enumerate() {
322 let (predicted_class, confidence) = prob_row
323 .iter()
324 .enumerate()
325 .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap())
326 .map_or((0, 0.0), |(idx, &prob)| (idx, prob));
327
328 println!(
329 " Image {}: Class {} (confidence: {:.2}%)",
330 i + 1,
331 predicted_class,
332 confidence * 100.0
333 );
334 }
335 }
336
337 // Analyze quantum advantage
338 let quantum_advantage = analyze_classification_quantum_advantage(&pipeline)?;
339 println!("\n Quantum advantage analysis:");
340 println!(
341 " - Parameter efficiency: {:.2}x classical",
342 quantum_advantage.param_efficiency
343 );
344 println!(
345 " - Feature expressiveness: {:.2}x",
346 quantum_advantage.expressiveness
347 );
348 println!(
349 " - Training speedup: {:.2}x",
350 quantum_advantage.training_speedup
351 );
352
353 Ok(())
354}

Source§
pub fn object_detection(num_classes: usize) -> Self
pub fn object_detection(num_classes: usize) -> Self
Create configuration for object detection
Examples found in repository?
examples/computer_vision.rs (line 361)
357fn object_detection_demo() -> Result<()> {
358 println!(" Quantum object detection demo...");
359
360 // Create detection pipeline
361 let config = QuantumVisionConfig::object_detection(80); // 80 classes (COCO-like)
362 let mut pipeline = QuantumVisionPipeline::new(config)?;
363
364 // Test image
365 let test_images = create_test_image(2, 3, 416, 416)?;
366
367 println!(
368 " Processing {} images for object detection...",
369 test_images.dim().0
370 );
371
372 // Run detection
373 let detections = pipeline.forward(&test_images)?;
374
375 if let TaskOutput::Detection {
376 boxes,
377 scores,
378 classes,
379 } = detections
380 {
381 println!(" Detection results:");
382
383 for batch_idx in 0..boxes.dim().0 {
384 println!("\n Image {}:", batch_idx + 1);
385
386 // Filter detections by score threshold
387 let threshold = 0.5;
388 let mut num_detections = 0;
389
390 for det_idx in 0..boxes.dim().1 {
391 let score = scores[[batch_idx, det_idx]];
392
393 if score > threshold {
394 let class_id = classes[[batch_idx, det_idx]];
395 let bbox = boxes.slice(scirs2_core::ndarray::s![batch_idx, det_idx, ..]);
396
397 println!(
398 " - Object {}: Class {}, Score {:.3}, Box [{:.1}, {:.1}, {:.1}, {:.1}]",
399 num_detections + 1,
400 class_id,
401 score,
402 bbox[0],
403 bbox[1],
404 bbox[2],
405 bbox[3]
406 );
407
408 num_detections += 1;
409 }
410 }
411
412 if num_detections == 0 {
413 println!(" - No objects detected above threshold");
414 } else {
415 println!(" Total objects detected: {num_detections}");
416 }
417 }
418 }
419
420 // Analyze detection performance
421 println!("\n Detection performance analysis:");
422 println!(" - Quantum anchor generation improves localization");
423 println!(" - Entangled features enhance multi-scale detection");
424 println!(" - Quantum NMS reduces redundant detections");
425
426 Ok(())
427}

Source§
pub fn segmentation(num_classes: usize) -> Self
pub fn segmentation(num_classes: usize) -> Self
Create configuration for segmentation
Examples found in repository?
examples/computer_vision.rs (line 434)
430fn segmentation_demo() -> Result<()> {
431 println!(" Quantum semantic segmentation demo...");
432
433 // Create segmentation pipeline
434 let config = QuantumVisionConfig::segmentation(21); // 21 classes (Pascal VOC-like)
435 let mut pipeline = QuantumVisionPipeline::new(config)?;
436
437 // Test images
438 let test_images = create_test_image(1, 3, 512, 512)?;
439
440 println!(" Processing image for semantic segmentation...");
441
442 // Run segmentation
443 let segmentation = pipeline.forward(&test_images)?;
444
445 if let TaskOutput::Segmentation {
446 masks,
447 class_scores,
448 } = segmentation
449 {
450 println!(" Segmentation results:");
451 println!(" - Mask shape: {:?}", masks.dim());
452 println!(" - Class scores shape: {:?}", class_scores.dim());
453
454 // Analyze segmentation quality
455 let seg_metrics = analyze_segmentation_quality(&masks, &class_scores)?;
456 println!("\n Segmentation metrics:");
457 println!(" - Mean IoU: {:.3}", seg_metrics.mean_iou);
458 println!(
459 " - Pixel accuracy: {:.1}%",
460 seg_metrics.pixel_accuracy * 100.0
461 );
462 println!(
463 " - Boundary precision: {:.3}",
464 seg_metrics.boundary_precision
465 );
466
467 // Class distribution
468 println!("\n Predicted class distribution:");
469 let class_counts = compute_class_distribution(&masks)?;
470 for (class_id, count) in class_counts.iter().take(5) {
471 let percentage = *count as f64 / (512.0 * 512.0) * 100.0;
472 println!(" - Class {class_id}: {percentage:.1}% of pixels");
473 }
474 }
475
476 // Quantum advantages for segmentation
477 println!("\n Quantum segmentation advantages:");
478 println!(" - Quantum attention captures long-range dependencies");
479 println!(" - Hierarchical encoding preserves multi-scale features");
480 println!(" - Entanglement enables pixel-to-pixel correlations");
481
482 Ok(())
483}

Trait Implementations§
Source§impl Clone for QuantumVisionConfig
impl Clone for QuantumVisionConfig
Source§fn clone(&self) -> QuantumVisionConfig
fn clone(&self) -> QuantumVisionConfig
Returns a duplicate of the value. Read more
1.0.0 · Source§fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source. Read more

Auto Trait Implementations§
impl Freeze for QuantumVisionConfig
impl RefUnwindSafe for QuantumVisionConfig
impl Send for QuantumVisionConfig
impl Sync for QuantumVisionConfig
impl Unpin for QuantumVisionConfig
impl UnwindSafe for QuantumVisionConfig
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where T: ?Sized,
impl<T> BorrowMut<T> for T
where T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> CloneToUninit for T
where T: Clone,
impl<T> CloneToUninit for T
where T: Clone,
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more

Source§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more

Source§impl<T> Pointable for T
impl<T> Pointable for T
Source§impl<SS, SP> SupersetOf<SS> for SP
where SS: SubsetOf<SP>,
impl<SS, SP> SupersetOf<SS> for SP
where SS: SubsetOf<SP>,
Source§fn to_subset(&self) -> Option<SS>
fn to_subset(&self) -> Option<SS>
The inverse inclusion map: attempts to construct self from the equivalent element of its superset. Read more

Source§fn is_in_subset(&self) -> bool
fn is_in_subset(&self) -> bool
Checks if self is actually part of its subset T (and can be converted to it).

Source§fn to_subset_unchecked(&self) -> SS
fn to_subset_unchecked(&self) -> SS
Use with care! Same as self.to_subset but without any property checks. Always succeeds.

Source§fn from_subset(element: &SS) -> SP
fn from_subset(element: &SS) -> SP
The inclusion map: converts self to the equivalent element of its superset.