pub struct QuantumVisionConfig {
pub num_qubits: usize,
pub encoding_method: ImageEncodingMethod,
pub backbone: VisionBackbone,
pub task_config: VisionTaskConfig,
pub preprocessing: PreprocessingConfig,
pub quantum_enhancement: QuantumEnhancement,
}
Quantum computer vision pipeline configuration
Fields
num_qubits: usize
Number of qubits for encoding
encoding_method: ImageEncodingMethod
Image encoding method
backbone: VisionBackbone
Vision backbone type
task_config: VisionTaskConfig
Task-specific configuration
preprocessing: PreprocessingConfig
Preprocessing configuration
quantum_enhancement: QuantumEnhancement
Quantum enhancement level
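All fields are public, so a configuration can also be built or adjusted by hand. A minimal sketch, starting from the default() constructor documented below and overriding only the qubit budget (the value 12 is illustrative):

// Struct-update syntax: take every field from the default configuration
// except num_qubits, which is a plain usize.
let config = QuantumVisionConfig {
    num_qubits: 12,
    ..QuantumVisionConfig::default()
};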
Implementations
impl QuantumVisionConfig
pub fn default() -> Self
Create default configuration
Examples found in repository:
examples/computer_vision.rs (line 267)
fn classification_demo() -> Result<()> {
    println!(" Quantum image classification demo...");

    // Create classification pipeline
    let config = QuantumVisionConfig::default();
    let mut pipeline = QuantumVisionPipeline::new(config)?;

    // Create synthetic dataset
    let num_classes = 10;
    let num_samples = 20;
    let (train_data, val_data) = create_classification_dataset(num_samples, num_classes)?;

    println!(
        " Dataset: {} training, {} validation samples",
        train_data.len(),
        val_data.len()
    );

    // Train the model (simplified)
    println!("\n Training quantum classifier...");
    let history = pipeline.train(
        &train_data,
        &val_data,
        5, // epochs
        OptimizationMethod::Adam,
    )?;

    // Display training results
    println!("\n Training results:");
    for (epoch, train_loss, val_loss) in history
        .epochs
        .iter()
        .zip(history.train_losses.iter())
        .zip(history.val_losses.iter())
        .map(|((e, t), v)| (e, t, v))
    {
        println!(
            " Epoch {}: train_loss={:.4}, val_loss={:.4}",
            epoch + 1,
            train_loss,
            val_loss
        );
    }

    // Test on new images
    println!("\n Testing on new images...");
    let test_images = create_test_image(5, 3, 224, 224)?;
    let predictions = pipeline.forward(&test_images)?;

    match predictions {
        TaskOutput::Classification { probabilities, .. } => {
            for (i, prob_row) in probabilities.outer_iter().enumerate() {
                let (predicted_class, confidence) = prob_row
                    .iter()
                    .enumerate()
                    .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap())
                    .map(|(idx, &prob)| (idx, prob))
                    .unwrap_or((0, 0.0));

                println!(
                    " Image {}: Class {} (confidence: {:.2}%)",
                    i + 1,
                    predicted_class,
                    confidence * 100.0
                );
            }
        }
        _ => {}
    }

    // Analyze quantum advantage
    let quantum_advantage = analyze_classification_quantum_advantage(&pipeline)?;
    println!("\n Quantum advantage analysis:");
    println!(
        " - Parameter efficiency: {:.2}x classical",
        quantum_advantage.param_efficiency
    );
    println!(
        " - Feature expressiveness: {:.2}x",
        quantum_advantage.expressiveness
    );
    println!(
        " - Training speedup: {:.2}x",
        quantum_advantage.training_speedup
    );

    Ok(())
}
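Distilled from the example above, the typical flow is: build a configuration, hand it to QuantumVisionPipeline::new, then call train and forward. A minimal sketch with error handling elided; every name and argument value is taken from the example:

// Build a pipeline from the default configuration, train briefly, then run inference.
let config = QuantumVisionConfig::default();
let mut pipeline = QuantumVisionPipeline::new(config)?;
let (train_data, val_data) = create_classification_dataset(20, 10)?;
let _history = pipeline.train(&train_data, &val_data, 5, OptimizationMethod::Adam)?;
let test_images = create_test_image(5, 3, 224, 224)?;
let _predictions = pipeline.forward(&test_images)?;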
pub fn object_detection(num_classes: usize) -> Self
Create configuration for object detection
Examples found in repository:
examples/computer_vision.rs (line 357)
fn object_detection_demo() -> Result<()> {
    println!(" Quantum object detection demo...");

    // Create detection pipeline
    let config = QuantumVisionConfig::object_detection(80); // 80 classes (COCO-like)
    let mut pipeline = QuantumVisionPipeline::new(config)?;

    // Test image
    let test_images = create_test_image(2, 3, 416, 416)?;

    println!(
        " Processing {} images for object detection...",
        test_images.dim().0
    );

    // Run detection
    let detections = pipeline.forward(&test_images)?;

    match detections {
        TaskOutput::Detection {
            boxes,
            scores,
            classes,
        } => {
            println!(" Detection results:");

            for batch_idx in 0..boxes.dim().0 {
                println!("\n Image {}:", batch_idx + 1);

                // Filter detections by score threshold
                let threshold = 0.5;
                let mut num_detections = 0;

                for det_idx in 0..boxes.dim().1 {
                    let score = scores[[batch_idx, det_idx]];

                    if score > threshold {
                        let class_id = classes[[batch_idx, det_idx]];
                        let bbox = boxes.slice(scirs2_core::ndarray::s![batch_idx, det_idx, ..]);

                        println!(" - Object {}: Class {}, Score {:.3}, Box [{:.1}, {:.1}, {:.1}, {:.1}]",
                            num_detections + 1, class_id, score,
                            bbox[0], bbox[1], bbox[2], bbox[3]);

                        num_detections += 1;
                    }
                }

                if num_detections == 0 {
                    println!(" - No objects detected above threshold");
                } else {
                    println!(" Total objects detected: {}", num_detections);
                }
            }
        }
        _ => {}
    }

    // Analyze detection performance
    println!("\n Detection performance analysis:");
    println!(" - Quantum anchor generation improves localization");
    println!(" - Entangled features enhance multi-scale detection");
    println!(" - Quantum NMS reduces redundant detections");

    Ok(())
}
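The num_classes argument fixes the size of the detector's class space. A minimal sketch assuming a Pascal VOC-style label set of 20 classes instead of the COCO-like 80 used above (the class count is illustrative):

// 20 foreground classes, Pascal VOC-style; everything else follows the detection defaults.
let config = QuantumVisionConfig::object_detection(20);
let mut pipeline = QuantumVisionPipeline::new(config)?;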
pub fn segmentation(num_classes: usize) -> Self
Create configuration for segmentation
Examples found in repository:
examples/computer_vision.rs (line 425)
fn segmentation_demo() -> Result<()> {
    println!(" Quantum semantic segmentation demo...");

    // Create segmentation pipeline
    let config = QuantumVisionConfig::segmentation(21); // 21 classes (Pascal VOC-like)
    let mut pipeline = QuantumVisionPipeline::new(config)?;

    // Test images
    let test_images = create_test_image(1, 3, 512, 512)?;

    println!(" Processing image for semantic segmentation...");

    // Run segmentation
    let segmentation = pipeline.forward(&test_images)?;

    match segmentation {
        TaskOutput::Segmentation {
            masks,
            class_scores,
        } => {
            println!(" Segmentation results:");
            println!(" - Mask shape: {:?}", masks.dim());
            println!(" - Class scores shape: {:?}", class_scores.dim());

            // Analyze segmentation quality
            let seg_metrics = analyze_segmentation_quality(&masks, &class_scores)?;
            println!("\n Segmentation metrics:");
            println!(" - Mean IoU: {:.3}", seg_metrics.mean_iou);
            println!(
                " - Pixel accuracy: {:.1}%",
                seg_metrics.pixel_accuracy * 100.0
            );
            println!(
                " - Boundary precision: {:.3}",
                seg_metrics.boundary_precision
            );

            // Class distribution
            println!("\n Predicted class distribution:");
            let class_counts = compute_class_distribution(&masks)?;
            for (class_id, count) in class_counts.iter().take(5) {
                let percentage = *count as f64 / (512.0 * 512.0) * 100.0;
                println!(" - Class {}: {:.1}% of pixels", class_id, percentage);
            }
        }
        _ => {}
    }

    // Quantum advantages for segmentation
    println!("\n Quantum segmentation advantages:");
    println!(" - Quantum attention captures long-range dependencies");
    println!(" - Hierarchical encoding preserves multi-scale features");
    println!(" - Entanglement enables pixel-to-pixel correlations");

    Ok(())
}
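As with detection, num_classes is the only argument. A minimal sketch assuming a Cityscapes-style label set of 19 classes instead of the Pascal VOC-like 21 used above (the class count is illustrative):

// 19 semantic classes, Cityscapes-style; everything else follows the segmentation defaults.
let config = QuantumVisionConfig::segmentation(19);
let mut pipeline = QuantumVisionPipeline::new(config)?;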
Trait Implementations
impl Clone for QuantumVisionConfig
fn clone(&self) -> QuantumVisionConfig
Returns a duplicate of the value.
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source.
Auto Trait Implementations
impl Freeze for QuantumVisionConfig
impl RefUnwindSafe for QuantumVisionConfig
impl Send for QuantumVisionConfig
impl Sync for QuantumVisionConfig
impl Unpin for QuantumVisionConfig
impl UnwindSafe for QuantumVisionConfig
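Because the configuration is Clone and Send, it can be cloned and moved into worker threads, for example to build one pipeline per thread. A minimal sketch (QuantumVisionPipeline::new is taken from the examples above; the thread count is illustrative):

use std::thread;

let config = QuantumVisionConfig::default();
let handles: Vec<_> = (0..2)
    .map(|_| {
        // Each worker gets its own copy of the configuration.
        let cfg = config.clone();
        thread::spawn(move || {
            let _pipeline = QuantumVisionPipeline::new(cfg);
            // ... run the task-specific workload here ...
        })
    })
    .collect();
for handle in handles {
    handle.join().expect("worker thread panicked");
}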
Blanket Implementations
impl<T> BorrowMut<T> for T where T: ?Sized
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.
impl<T> CloneToUninit for T where T: Clone
impl<T> IntoEither for T
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true; otherwise converts self into a Right variant.
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true; otherwise converts self into a Right variant.
impl<T> Pointable for T
impl<SS, SP> SupersetOf<SS> for SP where SS: SubsetOf<SP>
fn to_subset(&self) -> Option<SS>
The inverse inclusion map: attempts to construct self from the equivalent element of its superset.
fn is_in_subset(&self) -> bool
Checks if self is actually part of its subset T (and can be converted to it).
fn to_subset_unchecked(&self) -> SS
Use with care! Same as self.to_subset but without any property checks. Always succeeds.
fn from_subset(element: &SS) -> SP
The inclusion map: converts self to the equivalent element of its superset.