pub struct QuantumVisionConfig {
pub num_qubits: usize,
pub encoding_method: ImageEncodingMethod,
pub backbone: VisionBackbone,
pub task_config: VisionTaskConfig,
pub preprocessing: PreprocessingConfig,
pub quantum_enhancement: QuantumEnhancement,
}

Expand description
Quantum computer vision pipeline configuration
Fields§
num_qubits: usize — Number of qubits for encoding
encoding_method: ImageEncodingMethod — Image encoding method
backbone: VisionBackbone — Vision backbone type
task_config: VisionTaskConfig — Task-specific configuration
preprocessing: PreprocessingConfig — Preprocessing configuration
quantum_enhancement: QuantumEnhancement — Quantum enhancement level
Implementations§
Source§impl QuantumVisionConfig
impl QuantumVisionConfig
Source§ pub fn default() -> Self
pub fn default() -> Self
Create default configuration
Examples found in repository?
examples/computer_vision.rs (line 266)
262fn classification_demo() -> Result<()> {
263 println!(" Quantum image classification demo...");
264
265 // Create classification pipeline
266 let config = QuantumVisionConfig::default();
267 let mut pipeline = QuantumVisionPipeline::new(config)?;
268
269 // Create synthetic dataset
270 let num_classes = 10;
271 let num_samples = 20;
272 let (train_data, val_data) = create_classification_dataset(num_samples, num_classes)?;
273
274 println!(
275 " Dataset: {} training, {} validation samples",
276 train_data.len(),
277 val_data.len()
278 );
279
280 // Train the model (simplified)
281 println!("\n Training quantum classifier...");
282 let history = pipeline.train(
283 &train_data,
284 &val_data,
285 5, // epochs
286 OptimizationMethod::Adam,
287 )?;
288
289 // Display training results
290 println!("\n Training results:");
291 for (epoch, train_loss, val_loss) in history
292 .epochs
293 .iter()
294 .zip(history.train_losses.iter())
295 .zip(history.val_losses.iter())
296 .map(|((e, t), v)| (e, t, v))
297 {
298 println!(
299 " Epoch {}: train_loss={:.4}, val_loss={:.4}",
300 epoch + 1,
301 train_loss,
302 val_loss
303 );
304 }
305
306 // Test on new images
307 println!("\n Testing on new images...");
308 let test_images = create_test_image(5, 3, 224, 224)?;
309 let predictions = pipeline.forward(&test_images)?;
310
311 if let TaskOutput::Classification { probabilities, .. } = predictions {
312 for (i, prob_row) in probabilities.outer_iter().enumerate() {
313 let (predicted_class, confidence) = prob_row
314 .iter()
315 .enumerate()
316 .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap())
317 .map_or((0, 0.0), |(idx, &prob)| (idx, prob));
318
319 println!(
320 " Image {}: Class {} (confidence: {:.2}%)",
321 i + 1,
322 predicted_class,
323 confidence * 100.0
324 );
325 }
326 }
327
328 // Analyze quantum advantage
329 let quantum_advantage = analyze_classification_quantum_advantage(&pipeline)?;
330 println!("\n Quantum advantage analysis:");
331 println!(
332 " - Parameter efficiency: {:.2}x classical",
333 quantum_advantage.param_efficiency
334 );
335 println!(
336 " - Feature expressiveness: {:.2}x",
337 quantum_advantage.expressiveness
338 );
339 println!(
340 " - Training speedup: {:.2}x",
341 quantum_advantage.training_speedup
342 );
343
344 Ok(())
345}

Source§ pub fn object_detection(num_classes: usize) -> Self
pub fn object_detection(num_classes: usize) -> Self
Create configuration for object detection
Examples found in repository?
examples/computer_vision.rs (line 352)
348fn object_detection_demo() -> Result<()> {
349 println!(" Quantum object detection demo...");
350
351 // Create detection pipeline
352 let config = QuantumVisionConfig::object_detection(80); // 80 classes (COCO-like)
353 let mut pipeline = QuantumVisionPipeline::new(config)?;
354
355 // Test image
356 let test_images = create_test_image(2, 3, 416, 416)?;
357
358 println!(
359 " Processing {} images for object detection...",
360 test_images.dim().0
361 );
362
363 // Run detection
364 let detections = pipeline.forward(&test_images)?;
365
366 if let TaskOutput::Detection {
367 boxes,
368 scores,
369 classes,
370 } = detections
371 {
372 println!(" Detection results:");
373
374 for batch_idx in 0..boxes.dim().0 {
375 println!("\n Image {}:", batch_idx + 1);
376
377 // Filter detections by score threshold
378 let threshold = 0.5;
379 let mut num_detections = 0;
380
381 for det_idx in 0..boxes.dim().1 {
382 let score = scores[[batch_idx, det_idx]];
383
384 if score > threshold {
385 let class_id = classes[[batch_idx, det_idx]];
386 let bbox = boxes.slice(scirs2_core::ndarray::s![batch_idx, det_idx, ..]);
387
388 println!(
389 " - Object {}: Class {}, Score {:.3}, Box [{:.1}, {:.1}, {:.1}, {:.1}]",
390 num_detections + 1,
391 class_id,
392 score,
393 bbox[0],
394 bbox[1],
395 bbox[2],
396 bbox[3]
397 );
398
399 num_detections += 1;
400 }
401 }
402
403 if num_detections == 0 {
404 println!(" - No objects detected above threshold");
405 } else {
406 println!(" Total objects detected: {num_detections}");
407 }
408 }
409 }
410
411 // Analyze detection performance
412 println!("\n Detection performance analysis:");
413 println!(" - Quantum anchor generation improves localization");
414 println!(" - Entangled features enhance multi-scale detection");
415 println!(" - Quantum NMS reduces redundant detections");
416
417 Ok(())
418}

Source§ pub fn segmentation(num_classes: usize) -> Self
pub fn segmentation(num_classes: usize) -> Self
Create configuration for segmentation
Examples found in repository?
examples/computer_vision.rs (line 425)
421fn segmentation_demo() -> Result<()> {
422 println!(" Quantum semantic segmentation demo...");
423
424 // Create segmentation pipeline
425 let config = QuantumVisionConfig::segmentation(21); // 21 classes (Pascal VOC-like)
426 let mut pipeline = QuantumVisionPipeline::new(config)?;
427
428 // Test images
429 let test_images = create_test_image(1, 3, 512, 512)?;
430
431 println!(" Processing image for semantic segmentation...");
432
433 // Run segmentation
434 let segmentation = pipeline.forward(&test_images)?;
435
436 if let TaskOutput::Segmentation {
437 masks,
438 class_scores,
439 } = segmentation
440 {
441 println!(" Segmentation results:");
442 println!(" - Mask shape: {:?}", masks.dim());
443 println!(" - Class scores shape: {:?}", class_scores.dim());
444
445 // Analyze segmentation quality
446 let seg_metrics = analyze_segmentation_quality(&masks, &class_scores)?;
447 println!("\n Segmentation metrics:");
448 println!(" - Mean IoU: {:.3}", seg_metrics.mean_iou);
449 println!(
450 " - Pixel accuracy: {:.1}%",
451 seg_metrics.pixel_accuracy * 100.0
452 );
453 println!(
454 " - Boundary precision: {:.3}",
455 seg_metrics.boundary_precision
456 );
457
458 // Class distribution
459 println!("\n Predicted class distribution:");
460 let class_counts = compute_class_distribution(&masks)?;
461 for (class_id, count) in class_counts.iter().take(5) {
462 let percentage = *count as f64 / (512.0 * 512.0) * 100.0;
463 println!(" - Class {class_id}: {percentage:.1}% of pixels");
464 }
465 }
466
467 // Quantum advantages for segmentation
468 println!("\n Quantum segmentation advantages:");
469 println!(" - Quantum attention captures long-range dependencies");
470 println!(" - Hierarchical encoding preserves multi-scale features");
471 println!(" - Entanglement enables pixel-to-pixel correlations");
472
473 Ok(())
474}

Trait Implementations§
Source§impl Clone for QuantumVisionConfig
impl Clone for QuantumVisionConfig
Source§fn clone(&self) -> QuantumVisionConfig
fn clone(&self) -> QuantumVisionConfig
Returns a duplicate of the value. Read more
1.0.0 · Source§fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source. Read more

Auto Trait Implementations§
impl Freeze for QuantumVisionConfig
impl RefUnwindSafe for QuantumVisionConfig
impl Send for QuantumVisionConfig
impl Sync for QuantumVisionConfig
impl Unpin for QuantumVisionConfig
impl UnwindSafe for QuantumVisionConfig
Blanket Implementations§
Source§ impl<T> BorrowMut<T> for T where
T: ?Sized,
impl<T> BorrowMut<T> for T where
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§ impl<T> CloneToUninit for T where
T: Clone,
impl<T> CloneToUninit for T where
T: Clone,
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts
self into a Left variant of Either<Self, Self>
if into_left is true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more

Source§ fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts
self into a Left variant of Either<Self, Self>
if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more

Source§ impl<T> Pointable for T
impl<T> Pointable for T
Source§ impl<SS, SP> SupersetOf<SS> for SP where
SS: SubsetOf<SP>,
impl<SS, SP> SupersetOf<SS> for SP where
SS: SubsetOf<SP>,
Source§fn to_subset(&self) -> Option<SS>
fn to_subset(&self) -> Option<SS>
The inverse inclusion map: attempts to construct
self from the equivalent element of its
superset. Read more

Source§ fn is_in_subset(&self) -> bool
fn is_in_subset(&self) -> bool
Checks if
self is actually part of its subset T (and can be converted to it).

Source§ fn to_subset_unchecked(&self) -> SS
fn to_subset_unchecked(&self) -> SS
Use with care! Same as
self.to_subset but without any property checks. Always succeeds.

Source§ fn from_subset(element: &SS) -> SP
fn from_subset(element: &SS) -> SP
The inclusion map: converts
self to the equivalent element of its superset.