pub struct QuantumVisionConfig {
pub num_qubits: usize,
pub encoding_method: ImageEncodingMethod,
pub backbone: VisionBackbone,
pub task_config: VisionTaskConfig,
pub preprocessing: PreprocessingConfig,
pub quantum_enhancement: QuantumEnhancement,
}

Expand description

Quantum computer vision pipeline configuration
Fields§

num_qubits: usize — Number of qubits for encoding
encoding_method: ImageEncodingMethod — Image encoding method
backbone: VisionBackbone — Vision backbone type
task_config: VisionTaskConfig — Task-specific configuration
preprocessing: PreprocessingConfig — Preprocessing configuration
quantum_enhancement: QuantumEnhancement — Quantum enhancement level
Implementations§
Source§impl QuantumVisionConfig
impl QuantumVisionConfig
pub fn default() -> Self
pub fn default() -> Self
Create default configuration
Examples found in repository?
examples/computer_vision.rs (line 265)
261fn classification_demo() -> Result<()> {
262 println!(" Quantum image classification demo...");
263
264 // Create classification pipeline
265 let config = QuantumVisionConfig::default();
266 let mut pipeline = QuantumVisionPipeline::new(config)?;
267
268 // Create synthetic dataset
269 let num_classes = 10;
270 let num_samples = 20;
271 let (train_data, val_data) = create_classification_dataset(num_samples, num_classes)?;
272
273 println!(
274 " Dataset: {} training, {} validation samples",
275 train_data.len(),
276 val_data.len()
277 );
278
279 // Train the model (simplified)
280 println!("\n Training quantum classifier...");
281 let history = pipeline.train(
282 &train_data,
283 &val_data,
284 5, // epochs
285 OptimizationMethod::Adam,
286 )?;
287
288 // Display training results
289 println!("\n Training results:");
290 for (epoch, train_loss, val_loss) in history
291 .epochs
292 .iter()
293 .zip(history.train_losses.iter())
294 .zip(history.val_losses.iter())
295 .map(|((e, t), v)| (e, t, v))
296 {
297 println!(
298 " Epoch {}: train_loss={:.4}, val_loss={:.4}",
299 epoch + 1,
300 train_loss,
301 val_loss
302 );
303 }
304
305 // Test on new images
306 println!("\n Testing on new images...");
307 let test_images = create_test_image(5, 3, 224, 224)?;
308 let predictions = pipeline.forward(&test_images)?;
309
310 if let TaskOutput::Classification { probabilities, .. } = predictions {
311 for (i, prob_row) in probabilities.outer_iter().enumerate() {
312 let (predicted_class, confidence) = prob_row
313 .iter()
314 .enumerate()
315 .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap())
316 .map_or((0, 0.0), |(idx, &prob)| (idx, prob));
317
318 println!(
319 " Image {}: Class {} (confidence: {:.2}%)",
320 i + 1,
321 predicted_class,
322 confidence * 100.0
323 );
324 }
325 }
326
327 // Analyze quantum advantage
328 let quantum_advantage = analyze_classification_quantum_advantage(&pipeline)?;
329 println!("\n Quantum advantage analysis:");
330 println!(
331 " - Parameter efficiency: {:.2}x classical",
332 quantum_advantage.param_efficiency
333 );
334 println!(
335 " - Feature expressiveness: {:.2}x",
336 quantum_advantage.expressiveness
337 );
338 println!(
339 " - Training speedup: {:.2}x",
340 quantum_advantage.training_speedup
341 );
342
343 Ok(())
344}

pub fn object_detection(num_classes: usize) -> Self
pub fn object_detection(num_classes: usize) -> Self
Create configuration for object detection
Examples found in repository?
examples/computer_vision.rs (line 351)
347fn object_detection_demo() -> Result<()> {
348 println!(" Quantum object detection demo...");
349
350 // Create detection pipeline
351 let config = QuantumVisionConfig::object_detection(80); // 80 classes (COCO-like)
352 let mut pipeline = QuantumVisionPipeline::new(config)?;
353
354 // Test image
355 let test_images = create_test_image(2, 3, 416, 416)?;
356
357 println!(
358 " Processing {} images for object detection...",
359 test_images.dim().0
360 );
361
362 // Run detection
363 let detections = pipeline.forward(&test_images)?;
364
365 if let TaskOutput::Detection {
366 boxes,
367 scores,
368 classes,
369 } = detections
370 {
371 println!(" Detection results:");
372
373 for batch_idx in 0..boxes.dim().0 {
374 println!("\n Image {}:", batch_idx + 1);
375
376 // Filter detections by score threshold
377 let threshold = 0.5;
378 let mut num_detections = 0;
379
380 for det_idx in 0..boxes.dim().1 {
381 let score = scores[[batch_idx, det_idx]];
382
383 if score > threshold {
384 let class_id = classes[[batch_idx, det_idx]];
385 let bbox = boxes.slice(scirs2_core::ndarray::s![batch_idx, det_idx, ..]);
386
387 println!(
388 " - Object {}: Class {}, Score {:.3}, Box [{:.1}, {:.1}, {:.1}, {:.1}]",
389 num_detections + 1,
390 class_id,
391 score,
392 bbox[0],
393 bbox[1],
394 bbox[2],
395 bbox[3]
396 );
397
398 num_detections += 1;
399 }
400 }
401
402 if num_detections == 0 {
403 println!(" - No objects detected above threshold");
404 } else {
405 println!(" Total objects detected: {num_detections}");
406 }
407 }
408 }
409
410 // Analyze detection performance
411 println!("\n Detection performance analysis:");
412 println!(" - Quantum anchor generation improves localization");
413 println!(" - Entangled features enhance multi-scale detection");
414 println!(" - Quantum NMS reduces redundant detections");
415
416 Ok(())
417}

pub fn segmentation(num_classes: usize) -> Self
pub fn segmentation(num_classes: usize) -> Self
Create configuration for segmentation
Examples found in repository?
examples/computer_vision.rs (line 424)
420fn segmentation_demo() -> Result<()> {
421 println!(" Quantum semantic segmentation demo...");
422
423 // Create segmentation pipeline
424 let config = QuantumVisionConfig::segmentation(21); // 21 classes (Pascal VOC-like)
425 let mut pipeline = QuantumVisionPipeline::new(config)?;
426
427 // Test images
428 let test_images = create_test_image(1, 3, 512, 512)?;
429
430 println!(" Processing image for semantic segmentation...");
431
432 // Run segmentation
433 let segmentation = pipeline.forward(&test_images)?;
434
435 if let TaskOutput::Segmentation {
436 masks,
437 class_scores,
438 } = segmentation
439 {
440 println!(" Segmentation results:");
441 println!(" - Mask shape: {:?}", masks.dim());
442 println!(" - Class scores shape: {:?}", class_scores.dim());
443
444 // Analyze segmentation quality
445 let seg_metrics = analyze_segmentation_quality(&masks, &class_scores)?;
446 println!("\n Segmentation metrics:");
447 println!(" - Mean IoU: {:.3}", seg_metrics.mean_iou);
448 println!(
449 " - Pixel accuracy: {:.1}%",
450 seg_metrics.pixel_accuracy * 100.0
451 );
452 println!(
453 " - Boundary precision: {:.3}",
454 seg_metrics.boundary_precision
455 );
456
457 // Class distribution
458 println!("\n Predicted class distribution:");
459 let class_counts = compute_class_distribution(&masks)?;
460 for (class_id, count) in class_counts.iter().take(5) {
461 let percentage = *count as f64 / (512.0 * 512.0) * 100.0;
462 println!(" - Class {class_id}: {percentage:.1}% of pixels");
463 }
464 }
465
466 // Quantum advantages for segmentation
467 println!("\n Quantum segmentation advantages:");
468 println!(" - Quantum attention captures long-range dependencies");
469 println!(" - Hierarchical encoding preserves multi-scale features");
470 println!(" - Entanglement enables pixel-to-pixel correlations");
471
472 Ok(())
473}

Trait Implementations§
Source§impl Clone for QuantumVisionConfig
impl Clone for QuantumVisionConfig
Source§fn clone(&self) -> QuantumVisionConfig
fn clone(&self) -> QuantumVisionConfig
Returns a duplicate of the value. Read more
1.0.0 · Source§fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source. Read more

Auto Trait Implementations§
impl Freeze for QuantumVisionConfig
impl RefUnwindSafe for QuantumVisionConfig
impl Send for QuantumVisionConfig
impl Sync for QuantumVisionConfig
impl Unpin for QuantumVisionConfig
impl UnwindSafe for QuantumVisionConfig
Blanket Implementations§
Source§impl<T> BorrowMut<T> for Twhere
T: ?Sized,
impl<T> BorrowMut<T> for Twhere
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> CloneToUninit for Twhere
T: Clone,
impl<T> CloneToUninit for Twhere
T: Clone,
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts
self into a Left variant of Either<Self, Self>
if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more

impl<T> Pointable for T
impl<T> Pointable for T
Source§impl<SS, SP> SupersetOf<SS> for SPwhere
SS: SubsetOf<SP>,
impl<SS, SP> SupersetOf<SS> for SPwhere
SS: SubsetOf<SP>,
Source§fn to_subset(&self) -> Option<SS>
fn to_subset(&self) -> Option<SS>
The inverse inclusion map: attempts to construct self from the equivalent element of its superset. Read more

fn is_in_subset(&self) -> bool
fn is_in_subset(&self) -> bool
Checks if self is actually part of its subset T (and can be converted to it).

fn to_subset_unchecked(&self) -> SS
fn to_subset_unchecked(&self) -> SS
Use with care! Same as self.to_subset but without any property checks. Always succeeds.

fn from_subset(element: &SS) -> SP
fn from_subset(element: &SS) -> SP
The inclusion map: converts
self to the equivalent element of its superset.