// bevy_sensor/lib.rs

1//! bevy-sensor: Multi-view rendering for YCB object dataset
2//!
3//! This library provides Bevy-based rendering of 3D objects from multiple viewpoints,
4//! designed to match TBP (Thousand Brains Project) habitat sensor conventions for
5//! use in neocortx sensorimotor learning experiments.
6//!
7//! # Headless Rendering (NEW)
8//!
9//! Render directly to memory buffers for use in sensorimotor learning:
10//!
11//! ```ignore
12//! use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
13//! use std::path::Path;
14//!
15//! let config = RenderConfig::tbp_default(); // 64x64, RGBD
16//! let viewpoint = bevy_sensor::generate_viewpoints(&ViewpointConfig::default())[0];
17//! let rotation = ObjectRotation::identity();
18//!
19//! let output = render_to_buffer(
20//!     Path::new("/tmp/ycb/003_cracker_box"),
21//!     &viewpoint,
22//!     &rotation,
23//!     &config,
24//! )?;
25//!
26//! // output.rgba: Vec<u8> - RGBA pixels (64*64*4 bytes)
//! // output.depth: Vec<f64> - Depth values (64*64 floats)
28//! ```
29//!
30//! # File-based Capture (Legacy)
31//!
32//! ```ignore
33//! use bevy_sensor::{SensorConfig, ViewpointConfig, ObjectRotation};
34//!
35//! let config = SensorConfig {
36//!     viewpoints: ViewpointConfig::default(),
37//!     object_rotations: ObjectRotation::tbp_benchmark_rotations(),
38//!     ..Default::default()
39//! };
40//! ```
41//!
42//! # YCB Dataset
43//!
44//! Download YCB models programmatically:
45//!
46//! ```ignore
47//! use bevy_sensor::ycb::{download_models, Subset};
48//!
49//! // Download representative subset (3 objects)
50//! download_models("/tmp/ycb", Subset::Representative).await?;
51//! ```
52
53use bevy::prelude::*;
54use std::f32::consts::PI;
55use std::path::Path;
56
57// Headless rendering implementation
58// Full GPU rendering requires a display - see render module for details
59mod render;
60
61// Batch rendering API for efficient multi-viewpoint rendering
62pub mod batch;
63
64// WebGPU and cross-platform backend support
65pub mod backend;
66
67// Model caching system for efficient multi-viewpoint rendering
68pub mod cache;
69
70// Test fixtures for pre-rendered images (CI/CD support)
71pub mod fixtures;
72
73// Re-export ycbust types for convenience
74pub use ycbust::{self, DownloadOptions, Subset as YcbSubset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
75
76/// YCB dataset utilities
77pub mod ycb {
78    pub use ycbust::{download_ycb, DownloadOptions, Subset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
79
80    use std::path::Path;
81
82    /// Download YCB models to the specified directory.
83    ///
84    /// # Arguments
85    /// * `output_dir` - Directory to download models to
86    /// * `subset` - Which subset of objects to download
87    ///
88    /// # Example
89    /// ```ignore
90    /// use bevy_sensor::ycb::{download_models, Subset};
91    ///
92    /// download_models("/tmp/ycb", Subset::Representative).await?;
93    /// ```
94    pub async fn download_models<P: AsRef<Path>>(
95        output_dir: P,
96        subset: Subset,
97    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
98        let options = DownloadOptions {
99            overwrite: false,
100            full: false,
101            show_progress: true,
102            delete_archives: true,
103        };
104        download_ycb(subset, output_dir.as_ref(), options).await?;
105        Ok(())
106    }
107
108    /// Download YCB models with custom options.
109    pub async fn download_models_with_options<P: AsRef<Path>>(
110        output_dir: P,
111        subset: Subset,
112        options: DownloadOptions,
113    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
114        download_ycb(subset, output_dir.as_ref(), options).await?;
115        Ok(())
116    }
117
118    /// Check if YCB models exist at the given path
119    pub fn models_exist<P: AsRef<Path>>(output_dir: P) -> bool {
120        let path = output_dir.as_ref();
121        // Check for at least one representative object
122        path.join("003_cracker_box/google_16k/textured.obj")
123            .exists()
124    }
125
126    /// Get the path to a specific YCB object's OBJ file
127    pub fn object_mesh_path<P: AsRef<Path>>(output_dir: P, object_id: &str) -> std::path::PathBuf {
128        output_dir
129            .as_ref()
130            .join(object_id)
131            .join("google_16k")
132            .join("textured.obj")
133    }
134
135    /// Get the path to a specific YCB object's texture file
136    pub fn object_texture_path<P: AsRef<Path>>(
137        output_dir: P,
138        object_id: &str,
139    ) -> std::path::PathBuf {
140        output_dir
141            .as_ref()
142            .join(object_id)
143            .join("google_16k")
144            .join("texture_map.png")
145    }
146}
147
/// Object rotation in Euler angles (degrees), matching TBP benchmark format.
/// Format: [pitch, yaw, roll] or [x, y, z] rotation.
///
/// Angles are stored in degrees as `f64`; they are converted to `f32`
/// radians only when handed to Bevy (see `to_quat`).
#[derive(Clone, Debug, PartialEq)]
pub struct ObjectRotation {
    /// Rotation around X-axis (pitch) in degrees
    pub pitch: f64,
    /// Rotation around Y-axis (yaw) in degrees
    pub yaw: f64,
    /// Rotation around Z-axis (roll) in degrees
    pub roll: f64,
}
159
160impl ObjectRotation {
161    /// Create a new rotation from Euler angles in degrees
162    pub fn new(pitch: f64, yaw: f64, roll: f64) -> Self {
163        Self { pitch, yaw, roll }
164    }
165
166    /// Create from TBP-style array [pitch, yaw, roll] in degrees
167    pub fn from_array(arr: [f64; 3]) -> Self {
168        Self {
169            pitch: arr[0],
170            yaw: arr[1],
171            roll: arr[2],
172        }
173    }
174
175    /// Identity rotation (no rotation)
176    pub fn identity() -> Self {
177        Self::new(0.0, 0.0, 0.0)
178    }
179
180    /// TBP benchmark rotations: [0,0,0], [0,90,0], [0,180,0]
181    /// Used in shorter YCB experiments to reduce computational load.
182    pub fn tbp_benchmark_rotations() -> Vec<Self> {
183        vec![
184            Self::from_array([0.0, 0.0, 0.0]),
185            Self::from_array([0.0, 90.0, 0.0]),
186            Self::from_array([0.0, 180.0, 0.0]),
187        ]
188    }
189
190    /// TBP 14 known orientations (cube faces and corners)
191    /// These are the orientations objects are learned in during training.
192    pub fn tbp_known_orientations() -> Vec<Self> {
193        vec![
194            // 6 cube faces (90° rotations around each axis)
195            Self::from_array([0.0, 0.0, 0.0]),   // Front
196            Self::from_array([0.0, 90.0, 0.0]),  // Right
197            Self::from_array([0.0, 180.0, 0.0]), // Back
198            Self::from_array([0.0, 270.0, 0.0]), // Left
199            Self::from_array([90.0, 0.0, 0.0]),  // Top
200            Self::from_array([-90.0, 0.0, 0.0]), // Bottom
201            // 8 cube corners (45° rotations)
202            Self::from_array([45.0, 45.0, 0.0]),
203            Self::from_array([45.0, 135.0, 0.0]),
204            Self::from_array([45.0, 225.0, 0.0]),
205            Self::from_array([45.0, 315.0, 0.0]),
206            Self::from_array([-45.0, 45.0, 0.0]),
207            Self::from_array([-45.0, 135.0, 0.0]),
208            Self::from_array([-45.0, 225.0, 0.0]),
209            Self::from_array([-45.0, 315.0, 0.0]),
210        ]
211    }
212
213    /// Convert to Bevy Quat (converts f64 to f32 for Bevy compatibility)
214    pub fn to_quat(&self) -> Quat {
215        Quat::from_euler(
216            EulerRot::XYZ,
217            (self.pitch as f32).to_radians(),
218            (self.yaw as f32).to_radians(),
219            (self.roll as f32).to_radians(),
220        )
221    }
222
223    /// Convert to Bevy Transform (rotation only, no translation)
224    pub fn to_transform(&self) -> Transform {
225        Transform::from_rotation(self.to_quat())
226    }
227}
228
229impl Default for ObjectRotation {
230    fn default() -> Self {
231        Self::identity()
232    }
233}
234
/// Configuration for viewpoint generation matching TBP habitat sensor behavior.
/// Uses spherical coordinates to capture objects from multiple elevations.
///
/// The total viewpoint count is `yaw_count * pitch_angles_deg.len()`
/// (see `viewpoint_count`).
#[derive(Clone, Debug)]
pub struct ViewpointConfig {
    /// Distance from camera to object center (meters)
    pub radius: f32,
    /// Number of horizontal positions (yaw angles) around the object
    pub yaw_count: usize,
    /// Elevation angles in degrees (pitch). Positive = above, negative = below.
    pub pitch_angles_deg: Vec<f32>,
}
246
247impl Default for ViewpointConfig {
248    fn default() -> Self {
249        Self {
250            radius: 0.5,
251            yaw_count: 8,
252            // Three elevations: below (-30°), level (0°), above (+30°)
253            // This matches TBP's look_up/look_down capability
254            pitch_angles_deg: vec![-30.0, 0.0, 30.0],
255        }
256    }
257}
258
259impl ViewpointConfig {
260    /// Total number of viewpoints this config will generate
261    pub fn viewpoint_count(&self) -> usize {
262        self.yaw_count * self.pitch_angles_deg.len()
263    }
264}
265
/// Full sensor configuration for capture sessions.
///
/// Derives Bevy's `Resource` so it can be inserted into an app's world
/// and read by capture systems.
#[derive(Clone, Debug, Resource)]
pub struct SensorConfig {
    /// Viewpoint configuration (camera positions)
    pub viewpoints: ViewpointConfig,
    /// Object rotations to capture (each rotation generates a full viewpoint set)
    pub object_rotations: Vec<ObjectRotation>,
    /// Output directory for captures
    pub output_dir: String,
    /// Filename pattern (use {view} for view index, {rot} for rotation index)
    pub filename_pattern: String,
}
278
279impl Default for SensorConfig {
280    fn default() -> Self {
281        Self {
282            viewpoints: ViewpointConfig::default(),
283            object_rotations: vec![ObjectRotation::identity()],
284            output_dir: ".".to_string(),
285            filename_pattern: "capture_{rot}_{view}.png".to_string(),
286        }
287    }
288}
289
290impl SensorConfig {
291    /// Create config for TBP benchmark comparison (3 rotations × 24 viewpoints = 72 captures)
292    pub fn tbp_benchmark() -> Self {
293        Self {
294            viewpoints: ViewpointConfig::default(),
295            object_rotations: ObjectRotation::tbp_benchmark_rotations(),
296            output_dir: ".".to_string(),
297            filename_pattern: "capture_{rot}_{view}.png".to_string(),
298        }
299    }
300
301    /// Create config for full TBP training (14 rotations × 24 viewpoints = 336 captures)
302    pub fn tbp_full_training() -> Self {
303        Self {
304            viewpoints: ViewpointConfig::default(),
305            object_rotations: ObjectRotation::tbp_known_orientations(),
306            output_dir: ".".to_string(),
307            filename_pattern: "capture_{rot}_{view}.png".to_string(),
308        }
309    }
310
311    /// Total number of captures this config will generate
312    pub fn total_captures(&self) -> usize {
313        self.viewpoints.viewpoint_count() * self.object_rotations.len()
314    }
315}
316
317/// Generate camera viewpoints using spherical coordinates.
318///
319/// Spherical coordinate system (matching TBP habitat sensor conventions):
320/// - Yaw: horizontal rotation around Y-axis (0° to 360°)
321/// - Pitch: elevation angle from horizontal plane (-90° to +90°)
322/// - Radius: distance from origin (object center)
323pub fn generate_viewpoints(config: &ViewpointConfig) -> Vec<Transform> {
324    let mut views = Vec::with_capacity(config.viewpoint_count());
325
326    for pitch_deg in &config.pitch_angles_deg {
327        let pitch = pitch_deg.to_radians();
328
329        for i in 0..config.yaw_count {
330            let yaw = (i as f32) * 2.0 * PI / (config.yaw_count as f32);
331
332            // Spherical to Cartesian conversion (Y-up coordinate system)
333            // x = r * cos(pitch) * sin(yaw)
334            // y = r * sin(pitch)
335            // z = r * cos(pitch) * cos(yaw)
336            let x = config.radius * pitch.cos() * yaw.sin();
337            let y = config.radius * pitch.sin();
338            let z = config.radius * pitch.cos() * yaw.cos();
339
340            let transform = Transform::from_xyz(x, y, z).looking_at(Vec3::ZERO, Vec3::Y);
341            views.push(transform);
342        }
343    }
344    views
345}
346
/// Marker component for the target object being captured,
/// so capture systems can identify the object entity.
#[derive(Component)]
pub struct CaptureTarget;
350
/// Marker component for the capture camera,
/// so capture systems can identify the camera entity.
#[derive(Component)]
pub struct CaptureCamera;
354
355// ============================================================================
356// Headless Rendering API (NEW)
357// ============================================================================
358
/// Configuration for headless rendering.
///
/// Matches TBP habitat sensor defaults: 64x64 resolution with RGBD output.
/// The `Default` impl delegates to `RenderConfig::tbp_default`.
#[derive(Clone, Debug)]
pub struct RenderConfig {
    /// Image width in pixels (default: 64)
    pub width: u32,
    /// Image height in pixels (default: 64)
    pub height: u32,
    /// Zoom factor affecting field of view (default: 1.0)
    /// Use >1 to zoom in (narrower FOV), <1 to zoom out (wider FOV)
    pub zoom: f32,
    /// Near clipping plane in meters (default: 0.01)
    pub near_plane: f32,
    /// Far clipping plane in meters (default: 10.0)
    pub far_plane: f32,
    /// Lighting configuration
    pub lighting: LightingConfig,
}
378
/// Lighting configuration for rendering.
///
/// Controls ambient light and two point lights (key + fill) in the scene.
#[derive(Clone, Debug)]
pub struct LightingConfig {
    /// Ambient light brightness (0.0 - 1.0, default: 0.3)
    pub ambient_brightness: f32,
    /// Key light intensity in lumens (default: 1500.0)
    pub key_light_intensity: f32,
    /// Key light position [x, y, z] (default: [4.0, 8.0, 4.0])
    pub key_light_position: [f32; 3],
    /// Fill light intensity in lumens (default: 500.0)
    pub fill_light_intensity: f32,
    /// Fill light position [x, y, z] (default: [-4.0, 2.0, -4.0])
    pub fill_light_position: [f32; 3],
    /// Enable shadows (default: false for performance)
    pub shadows_enabled: bool,
}

impl Default for LightingConfig {
    fn default() -> Self {
        Self {
            ambient_brightness: 0.3,
            key_light_intensity: 1500.0,
            key_light_position: [4.0, 8.0, 4.0],
            fill_light_intensity: 500.0,
            fill_light_position: [-4.0, 2.0, -4.0],
            shadows_enabled: false,
        }
    }
}

impl LightingConfig {
    /// Bright lighting for clear visibility.
    ///
    /// Same light positions as the default rig, with stronger ambient,
    /// key, and fill intensities. Struct-update syntax keeps the shared
    /// values defined once, in `Default`.
    pub fn bright() -> Self {
        Self {
            ambient_brightness: 0.5,
            key_light_intensity: 2000.0,
            fill_light_intensity: 800.0,
            ..Self::default()
        }
    }

    /// Soft lighting with minimal shadows: lights moved closer and dimmed.
    pub fn soft() -> Self {
        Self {
            ambient_brightness: 0.4,
            key_light_intensity: 1000.0,
            key_light_position: [3.0, 6.0, 3.0],
            fill_light_intensity: 600.0,
            fill_light_position: [-3.0, 3.0, -3.0],
            ..Self::default()
        }
    }

    /// Unlit mode - full ambient only, both point lights disabled.
    pub fn unlit() -> Self {
        Self {
            ambient_brightness: 1.0,
            key_light_intensity: 0.0,
            key_light_position: [0.0, 0.0, 0.0],
            fill_light_intensity: 0.0,
            fill_light_position: [0.0, 0.0, 0.0],
            ..Self::default()
        }
    }
}
448
impl Default for RenderConfig {
    /// Defaults to the TBP-compatible 64x64 RGBD configuration.
    fn default() -> Self {
        Self::tbp_default()
    }
}
454
455impl RenderConfig {
456    /// TBP-compatible 64x64 RGBD sensor configuration.
457    ///
458    /// This matches the default resolution used in TBP's habitat sensor.
459    pub fn tbp_default() -> Self {
460        Self {
461            width: 64,
462            height: 64,
463            zoom: 1.0,
464            near_plane: 0.01,
465            far_plane: 10.0,
466            lighting: LightingConfig::default(),
467        }
468    }
469
470    /// Higher resolution configuration for debugging and visualization.
471    pub fn preview() -> Self {
472        Self {
473            width: 256,
474            height: 256,
475            zoom: 1.0,
476            near_plane: 0.01,
477            far_plane: 10.0,
478            lighting: LightingConfig::default(),
479        }
480    }
481
482    /// High resolution configuration for detailed captures.
483    pub fn high_res() -> Self {
484        Self {
485            width: 512,
486            height: 512,
487            zoom: 1.0,
488            near_plane: 0.01,
489            far_plane: 10.0,
490            lighting: LightingConfig::default(),
491        }
492    }
493
494    /// Calculate vertical field of view in radians based on zoom.
495    ///
496    /// Base FOV is 60 degrees, adjusted by zoom factor.
497    pub fn fov_radians(&self) -> f32 {
498        let base_fov_deg = 60.0_f32;
499        (base_fov_deg / self.zoom).to_radians()
500    }
501
502    /// Compute camera intrinsics for use with neocortx.
503    ///
504    /// Returns focal length and principal point based on resolution and FOV.
505    /// Uses f64 for TBP numerical precision compatibility.
506    pub fn intrinsics(&self) -> CameraIntrinsics {
507        let fov = self.fov_radians() as f64;
508        // focal_length = (height/2) / tan(fov/2)
509        let fy = (self.height as f64 / 2.0) / (fov / 2.0).tan();
510        let fx = fy; // Assuming square pixels
511
512        CameraIntrinsics {
513            focal_length: [fx, fy],
514            principal_point: [self.width as f64 / 2.0, self.height as f64 / 2.0],
515            image_size: [self.width, self.height],
516        }
517    }
518}
519
/// Camera intrinsic parameters for 3D reconstruction.
///
/// Compatible with neocortx's VisionIntrinsics format.
/// Uses f64 for TBP numerical precision compatibility.
/// Instances are typically produced by `RenderConfig::intrinsics`.
#[derive(Clone, Debug, PartialEq)]
pub struct CameraIntrinsics {
    /// Focal length in pixels (fx, fy)
    pub focal_length: [f64; 2],
    /// Principal point (cx, cy) - typically image center
    pub principal_point: [f64; 2],
    /// Image dimensions (width, height)
    pub image_size: [u32; 2],
}
533
534impl CameraIntrinsics {
535    /// Project a 3D point to 2D pixel coordinates.
536    pub fn project(&self, point: Vec3) -> Option<[f64; 2]> {
537        if point.z <= 0.0 {
538            return None;
539        }
540        let x = (point.x as f64 / point.z as f64) * self.focal_length[0] + self.principal_point[0];
541        let y = (point.y as f64 / point.z as f64) * self.focal_length[1] + self.principal_point[1];
542        Some([x, y])
543    }
544
545    /// Unproject a 2D pixel to a 3D point at given depth.
546    pub fn unproject(&self, pixel: [f64; 2], depth: f64) -> [f64; 3] {
547        let x = (pixel[0] - self.principal_point[0]) / self.focal_length[0] * depth;
548        let y = (pixel[1] - self.principal_point[1]) / self.focal_length[1] * depth;
549        [x, y, depth]
550    }
551}
552
/// Output from headless rendering containing RGBA and depth data.
///
/// Pixel buffers are row-major; use the `get_*` accessors for bounds-checked
/// per-pixel access.
#[derive(Clone, Debug)]
pub struct RenderOutput {
    /// RGBA pixel data in row-major order (width * height * 4 bytes)
    pub rgba: Vec<u8>,
    /// Depth values in meters, row-major order (width * height f64s)
    /// Values are linear depth from camera, not normalized.
    /// Uses f64 for TBP numerical precision compatibility.
    pub depth: Vec<f64>,
    /// Image width in pixels
    pub width: u32,
    /// Image height in pixels
    pub height: u32,
    /// Camera intrinsics used for this render
    pub intrinsics: CameraIntrinsics,
    /// Camera transform (world position and orientation)
    pub camera_transform: Transform,
    /// Object rotation applied during render
    pub object_rotation: ObjectRotation,
}
573
574impl RenderOutput {
575    /// Get RGBA pixel at (x, y). Returns None if out of bounds.
576    pub fn get_rgba(&self, x: u32, y: u32) -> Option<[u8; 4]> {
577        if x >= self.width || y >= self.height {
578            return None;
579        }
580        let idx = ((y * self.width + x) * 4) as usize;
581        Some([
582            self.rgba[idx],
583            self.rgba[idx + 1],
584            self.rgba[idx + 2],
585            self.rgba[idx + 3],
586        ])
587    }
588
589    /// Get depth value at (x, y) in meters. Returns None if out of bounds.
590    pub fn get_depth(&self, x: u32, y: u32) -> Option<f64> {
591        if x >= self.width || y >= self.height {
592            return None;
593        }
594        let idx = (y * self.width + x) as usize;
595        Some(self.depth[idx])
596    }
597
598    /// Get RGB pixel (without alpha) at (x, y).
599    pub fn get_rgb(&self, x: u32, y: u32) -> Option<[u8; 3]> {
600        self.get_rgba(x, y).map(|rgba| [rgba[0], rgba[1], rgba[2]])
601    }
602
603    /// Convert to neocortx-compatible image format: Vec<Vec<[u8; 3]>>
604    pub fn to_rgb_image(&self) -> Vec<Vec<[u8; 3]>> {
605        let mut image = Vec::with_capacity(self.height as usize);
606        for y in 0..self.height {
607            let mut row = Vec::with_capacity(self.width as usize);
608            for x in 0..self.width {
609                row.push(self.get_rgb(x, y).unwrap_or([0, 0, 0]));
610            }
611            image.push(row);
612        }
613        image
614    }
615
616    /// Convert depth to neocortx-compatible format: Vec<Vec<f64>>
617    pub fn to_depth_image(&self) -> Vec<Vec<f64>> {
618        let mut image = Vec::with_capacity(self.height as usize);
619        for y in 0..self.height {
620            let mut row = Vec::with_capacity(self.width as usize);
621            for x in 0..self.width {
622                row.push(self.get_depth(x, y).unwrap_or(0.0));
623            }
624            image.push(row);
625        }
626        image
627    }
628}
629
/// Errors that can occur during rendering and file operations.
#[derive(Debug, Clone)]
pub enum RenderError {
    /// Object mesh file not found
    MeshNotFound(String),
    /// Object texture file not found
    TextureNotFound(String),
    /// Generic file not found error
    FileNotFound { path: String, reason: String },
    /// File write failed
    FileWriteFailed { path: String, reason: String },
    /// Directory creation failed
    DirectoryCreationFailed { path: String, reason: String },
    /// Bevy rendering failed
    RenderFailed(String),
    /// Invalid configuration
    InvalidConfig(String),
    /// Invalid input parameters
    InvalidInput(String),
    /// JSON serialization/deserialization error
    SerializationError(String),
    /// Binary data parsing error
    DataParsingError(String),
    /// Render timeout
    RenderTimeout { duration_secs: u64 },
}

impl std::fmt::Display for RenderError {
    /// Human-readable, single-line description of the error.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::MeshNotFound(path) => write!(f, "Mesh not found: {}", path),
            Self::TextureNotFound(path) => write!(f, "Texture not found: {}", path),
            Self::FileNotFound { path, reason } => write!(f, "File not found at {}: {}", path, reason),
            Self::FileWriteFailed { path, reason } => write!(f, "Failed to write file {}: {}", path, reason),
            Self::DirectoryCreationFailed { path, reason } => write!(f, "Failed to create directory {}: {}", path, reason),
            Self::RenderFailed(msg) => write!(f, "Render failed: {}", msg),
            Self::InvalidConfig(msg) => write!(f, "Invalid config: {}", msg),
            Self::InvalidInput(msg) => write!(f, "Invalid input: {}", msg),
            Self::SerializationError(msg) => write!(f, "Serialization error: {}", msg),
            Self::DataParsingError(msg) => write!(f, "Data parsing error: {}", msg),
            Self::RenderTimeout { duration_secs } => write!(f, "Render timeout after {} seconds", duration_secs),
        }
    }
}

impl std::error::Error for RenderError {}
684
/// Render a YCB object to an in-memory buffer.
///
/// This is the primary API for headless rendering. It spawns a minimal Bevy app,
/// renders a single frame, extracts the RGBA and depth data, and shuts down.
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory (e.g., "/tmp/ycb/003_cracker_box")
/// * `camera_transform` - Camera position and orientation (use `generate_viewpoints`)
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration (resolution, depth range, etc.)
///
/// # Errors
/// Propagates any `RenderError` reported by the underlying `render` module.
///
/// # Example
/// ```ignore
/// use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
/// use std::path::Path;
///
/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
/// let output = render_to_buffer(
///     Path::new("/tmp/ycb/003_cracker_box"),
///     &viewpoints[0],
///     &ObjectRotation::identity(),
///     &RenderConfig::tbp_default(),
/// )?;
/// ```
pub fn render_to_buffer(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
) -> Result<RenderOutput, RenderError> {
    // Thin wrapper: all actual work happens in the render module.
    render::render_headless(object_dir, camera_transform, object_rotation, config)
}
718
719/// Render all viewpoints and rotations for a YCB object.
720///
721/// Convenience function that renders all combinations of viewpoints and rotations.
722///
723/// # Arguments
724/// * `object_dir` - Path to YCB object directory
725/// * `viewpoint_config` - Viewpoint configuration (camera positions)
726/// * `rotations` - Object rotations to render
727/// * `render_config` - Render configuration
728///
729/// # Returns
730/// Vector of RenderOutput, one per viewpoint × rotation combination.
731pub fn render_all_viewpoints(
732    object_dir: &Path,
733    viewpoint_config: &ViewpointConfig,
734    rotations: &[ObjectRotation],
735    render_config: &RenderConfig,
736) -> Result<Vec<RenderOutput>, RenderError> {
737    let viewpoints = generate_viewpoints(viewpoint_config);
738    let mut outputs = Vec::with_capacity(viewpoints.len() * rotations.len());
739
740    for rotation in rotations {
741        for viewpoint in &viewpoints {
742            let output = render_to_buffer(object_dir, viewpoint, rotation, render_config)?;
743            outputs.push(output);
744        }
745    }
746
747    Ok(outputs)
748}
749
/// Render with model caching support for efficient multi-viewpoint rendering.
///
/// This function tracks which models have been loaded and provides performance
/// insights. For maximum efficiency when rendering many viewpoints of the same
/// object, use the batch rendering API (`create_batch_renderer`, `render_batch`).
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory
/// * `camera_transform` - Camera position and orientation
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration
/// * `cache` - Model cache to track loaded assets
///
/// # Returns
/// RenderOutput with rendered RGBA and depth data
///
/// # Example
/// ```ignore
/// use bevy_sensor::{render_to_buffer_cached, cache::ModelCache, RenderConfig, ObjectRotation};
/// use std::path::PathBuf;
///
/// let mut cache = ModelCache::new();
/// let object_dir = PathBuf::from("/tmp/ycb/003_cracker_box");
/// let config = RenderConfig::tbp_default();
/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
///
/// // First render: loads from disk and caches
/// let output1 = render_to_buffer_cached(
///     &object_dir,
///     &viewpoints[0],
///     &ObjectRotation::identity(),
///     &config,
///     &mut cache,
/// )?;
///
/// // Subsequent renders: tracks in cache (actual speedup comes from batch API)
/// for viewpoint in &viewpoints[1..] {
///     let output = render_to_buffer_cached(
///         &object_dir,
///         viewpoint,
///         &ObjectRotation::identity(),
///         &config,
///         &mut cache,
///     )?;
/// }
/// ```
///
/// # Note
/// This function uses the same rendering engine as `render_to_buffer()`. For true
/// asset caching performance gains (2-3x speedup), combine with batch rendering:
///
/// ```ignore
/// use bevy_sensor::{render_batch, batch::BatchRenderRequest, BatchRenderConfig, RenderConfig, ObjectRotation};
///
/// let requests: Vec<_> = viewpoints.iter().map(|vp| {
///     BatchRenderRequest {
///         object_dir: object_dir.clone(),
///         viewpoint: *vp,
///         object_rotation: ObjectRotation::identity(),
///         render_config: RenderConfig::tbp_default(),
///     }
/// }).collect();
///
/// let outputs = render_batch(requests, &BatchRenderConfig::default())?;
/// ```
pub fn render_to_buffer_cached(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
    cache: &mut cache::ModelCache,
) -> Result<RenderOutput, RenderError> {
    // NOTE(review): these hard-coded paths mirror ycb::object_mesh_path /
    // ycb::object_texture_path — keep them in sync.
    let mesh_path = object_dir.join("google_16k/textured.obj");
    let texture_path = object_dir.join("google_16k/texture_map.png");

    // Record the assets in the cache; this only tracks usage — the render
    // below still goes through the standard pipeline.
    cache.cache_scene(mesh_path.clone());
    cache.cache_texture(texture_path.clone());

    // Render using standard pipeline
    render::render_headless(object_dir, camera_transform, object_rotation, config)
}
832
/// Render directly to files (for subprocess mode).
///
/// This function is designed for subprocess rendering where the process will exit
/// after rendering. It saves RGBA and depth data directly to the specified files
/// before the process terminates.
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory
/// * `camera_transform` - Camera position and orientation
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration
/// * `rgba_path` - Output path for RGBA PNG
/// * `depth_path` - Output path for depth data (raw f32 bytes)
///
/// # Errors
/// Propagates any `RenderError` reported by the `render` module
/// (file-write failures, render failures, ...).
///
/// # Note
/// This function may call `std::process::exit(0)` and not return.
pub fn render_to_files(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
    rgba_path: &Path,
    depth_path: &Path,
) -> Result<(), RenderError> {
    // Thin wrapper: the render module owns file output and process lifetime.
    render::render_to_files(
        object_dir,
        camera_transform,
        object_rotation,
        config,
        rgba_path,
        depth_path,
    )
}
866
867// Re-export batch types for convenient API access
868pub use batch::{
869    BatchRenderConfig, BatchRenderError, BatchRenderOutput, BatchRenderRequest, BatchRenderer,
870    BatchState, RenderStatus,
871};
872
873/// Create a new batch renderer for efficient multi-viewpoint rendering.
874///
875/// This creates a persistent Bevy app that can render multiple viewpoints without
876/// subprocess spawning overhead. Achieves 10-100x speedup vs individual render_to_buffer calls.
877///
878/// # Arguments
879/// * `config` - Batch rendering configuration
880///
881/// # Returns
882/// A BatchRenderer instance ready to queue render requests
883///
884/// # Example
885/// ```ignore
886/// use bevy_sensor::{create_batch_renderer, queue_render_request, render_next_in_batch, BatchRenderConfig};
887///
888/// let mut renderer = create_batch_renderer(&BatchRenderConfig::default())?;
889/// ```
890pub fn create_batch_renderer(config: &BatchRenderConfig) -> Result<BatchRenderer, RenderError> {
891    // For now, just create an empty renderer that will need a Bevy app
892    // The actual app creation happens when rendering starts
893    Ok(BatchRenderer::new(config.clone()))
894}
895
896/// Queue a render request for batch processing.
897///
898/// Adds a render request to the batch queue. Requests are processed in order
899/// when you call render_next_in_batch().
900///
901/// # Arguments
902/// * `renderer` - The batch renderer instance
903/// * `request` - The render request
904///
905/// # Returns
906/// Ok if queued successfully, Err if queue is full
907///
908/// # Example
909/// ```ignore
910/// use bevy_sensor::{batch::BatchRenderRequest, RenderConfig, ObjectRotation};
911/// use std::path::PathBuf;
912///
913/// queue_render_request(&mut renderer, BatchRenderRequest {
914///     object_dir: PathBuf::from("/tmp/ycb/003_cracker_box"),
915///     viewpoint: camera_transform,
916///     object_rotation: ObjectRotation::identity(),
917///     render_config: RenderConfig::tbp_default(),
918/// })?;
919/// ```
920pub fn queue_render_request(
921    renderer: &mut BatchRenderer,
922    request: BatchRenderRequest,
923) -> Result<(), RenderError> {
924    renderer
925        .queue_request(request)
926        .map_err(|e| RenderError::RenderFailed(e.to_string()))
927}
928
929/// Process and execute the next render in the batch queue.
930///
931/// Executes a single render from the queued requests. Returns None when the queue is empty.
932/// Use this in a loop to process all queued renders.
933///
934/// # Arguments
935/// * `renderer` - The batch renderer instance
936/// * `timeout_ms` - Timeout in milliseconds for this render
937///
938/// # Returns
939/// Some(output) if a render completed, None if queue is empty
940///
941/// # Example
942/// ```ignore
943/// loop {
944///     match render_next_in_batch(&mut renderer, 500)? {
945///         Some(output) => println!("Render complete: {:?}", output.status),
946///         None => break, // All renders done
947///     }
948/// }
949/// ```
950pub fn render_next_in_batch(
951    renderer: &mut BatchRenderer,
952    _timeout_ms: u32,
953) -> Result<Option<BatchRenderOutput>, RenderError> {
954    // This is a stub - the actual implementation will require a running Bevy app
955    // For now, just render single batches immediately using render_to_buffer
956    if let Some(request) = renderer.pending_requests.pop_front() {
957        let output = render_to_buffer(
958            &request.object_dir,
959            &request.viewpoint,
960            &request.object_rotation,
961            &request.render_config,
962        )?;
963        let batch_output = BatchRenderOutput::from_render_output(request, output);
964        renderer.completed_results.push(batch_output.clone());
965        renderer.renders_processed += 1;
966        Ok(Some(batch_output))
967    } else {
968        Ok(None)
969    }
970}
971
972/// Render multiple requests in batch (convenience function).
973///
974/// Queues all requests and executes them in batch, returning all results.
975/// Simpler than manage queue + loop for one-off batches.
976///
977/// # Arguments
978/// * `requests` - Vector of render requests
979/// * `config` - Batch rendering configuration
980///
981/// # Returns
982/// Vector of BatchRenderOutput results in same order as input
983///
984/// # Example
985/// ```ignore
986/// use bevy_sensor::{render_batch, batch::BatchRenderRequest, BatchRenderConfig};
987///
988/// let results = render_batch(requests, &BatchRenderConfig::default())?;
989/// ```
990pub fn render_batch(
991    requests: Vec<BatchRenderRequest>,
992    config: &BatchRenderConfig,
993) -> Result<Vec<BatchRenderOutput>, RenderError> {
994    let mut renderer = create_batch_renderer(config)?;
995
996    // Queue all requests
997    for request in requests {
998        queue_render_request(&mut renderer, request)?;
999    }
1000
1001    // Execute all and collect results
1002    let mut results = Vec::new();
1003    while let Some(output) = render_next_in_batch(&mut renderer, config.frame_timeout_ms)? {
1004        results.push(output);
1005    }
1006
1007    Ok(results)
1008}
1009
1010// Re-export bevy types that consumers will need
1011pub use bevy::prelude::{Quat, Transform, Vec3};
1012
// Unit tests. Everything below exercises pure CPU-side logic (rotations,
// viewpoint generation, config accessors, intrinsics math, output indexing);
// no test here constructs a Bevy app or requires a GPU.
#[cfg(test)]
mod tests {
    use super::*;

    // =========================================================================
    // Rotation, Viewpoint & Sensor Config Tests
    // =========================================================================

    #[test]
    fn test_object_rotation_identity() {
        let rot = ObjectRotation::identity();
        assert_eq!(rot.pitch, 0.0);
        assert_eq!(rot.yaw, 0.0);
        assert_eq!(rot.roll, 0.0);
    }

    #[test]
    fn test_object_rotation_from_array() {
        // Array order is [pitch, yaw, roll] in degrees.
        let rot = ObjectRotation::from_array([10.0, 20.0, 30.0]);
        assert_eq!(rot.pitch, 10.0);
        assert_eq!(rot.yaw, 20.0);
        assert_eq!(rot.roll, 30.0);
    }

    #[test]
    fn test_tbp_benchmark_rotations() {
        let rotations = ObjectRotation::tbp_benchmark_rotations();
        assert_eq!(rotations.len(), 3);
        assert_eq!(rotations[0], ObjectRotation::from_array([0.0, 0.0, 0.0]));
        assert_eq!(rotations[1], ObjectRotation::from_array([0.0, 90.0, 0.0]));
        assert_eq!(rotations[2], ObjectRotation::from_array([0.0, 180.0, 0.0]));
    }

    #[test]
    fn test_tbp_known_orientations_count() {
        let orientations = ObjectRotation::tbp_known_orientations();
        assert_eq!(orientations.len(), 14);
    }

    #[test]
    fn test_rotation_to_quat() {
        let rot = ObjectRotation::identity();
        let quat = rot.to_quat();
        // Identity quaternion should be approximately (1, 0, 0, 0)
        assert!((quat.w - 1.0).abs() < 0.001);
        assert!(quat.x.abs() < 0.001);
        assert!(quat.y.abs() < 0.001);
        assert!(quat.z.abs() < 0.001);
    }

    #[test]
    fn test_rotation_90_yaw() {
        let rot = ObjectRotation::new(0.0, 90.0, 0.0);
        let quat = rot.to_quat();
        // 90° Y rotation: w ≈ 0.707, y ≈ 0.707
        assert!((quat.w - 0.707).abs() < 0.01);
        assert!((quat.y - 0.707).abs() < 0.01);
    }

    #[test]
    fn test_viewpoint_config_default() {
        let config = ViewpointConfig::default();
        assert_eq!(config.radius, 0.5);
        assert_eq!(config.yaw_count, 8);
        assert_eq!(config.pitch_angles_deg.len(), 3);
    }

    #[test]
    fn test_viewpoint_count() {
        let config = ViewpointConfig::default();
        assert_eq!(config.viewpoint_count(), 24); // 8 × 3
    }

    #[test]
    fn test_generate_viewpoints_count() {
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 24);
    }

    #[test]
    fn test_viewpoints_spherical_radius() {
        // Every generated camera must sit on the sphere of the configured radius.
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let actual_radius = transform.translation.length();
            assert!(
                (actual_radius - config.radius).abs() < 0.001,
                "Viewpoint {} has incorrect radius: {} (expected {})",
                i,
                actual_radius,
                config.radius
            );
        }
    }

    #[test]
    fn test_viewpoints_looking_at_origin() {
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let forward = transform.forward();
            let to_origin = (Vec3::ZERO - transform.translation).normalize();
            // Forward vector and direction-to-origin should be near-parallel.
            let dot = forward.dot(to_origin);
            assert!(
                dot > 0.99,
                "Viewpoint {} not looking at origin, dot product: {}",
                i,
                dot
            );
        }
    }

    #[test]
    fn test_sensor_config_default() {
        let config = SensorConfig::default();
        assert_eq!(config.object_rotations.len(), 1);
        assert_eq!(config.total_captures(), 24);
    }

    #[test]
    fn test_sensor_config_tbp_benchmark() {
        let config = SensorConfig::tbp_benchmark();
        assert_eq!(config.object_rotations.len(), 3);
        assert_eq!(config.total_captures(), 72); // 3 rotations × 24 viewpoints
    }

    #[test]
    fn test_sensor_config_tbp_full() {
        let config = SensorConfig::tbp_full_training();
        assert_eq!(config.object_rotations.len(), 14);
        assert_eq!(config.total_captures(), 336); // 14 rotations × 24 viewpoints
    }

    #[test]
    fn test_ycb_representative_objects() {
        // Verify representative objects are defined
        assert_eq!(crate::ycb::REPRESENTATIVE_OBJECTS.len(), 3);
        assert!(crate::ycb::REPRESENTATIVE_OBJECTS.contains(&"003_cracker_box"));
    }

    #[test]
    fn test_ycb_ten_objects() {
        // Verify ten objects subset is defined
        assert_eq!(crate::ycb::TEN_OBJECTS.len(), 10);
    }

    #[test]
    fn test_ycb_object_mesh_path() {
        let path = crate::ycb::object_mesh_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path.to_string_lossy(),
            "/tmp/ycb/003_cracker_box/google_16k/textured.obj"
        );
    }

    #[test]
    fn test_ycb_object_texture_path() {
        let path = crate::ycb::object_texture_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path.to_string_lossy(),
            "/tmp/ycb/003_cracker_box/google_16k/texture_map.png"
        );
    }

    // =========================================================================
    // Headless Rendering API Tests
    // =========================================================================

    #[test]
    fn test_render_config_tbp_default() {
        let config = RenderConfig::tbp_default();
        assert_eq!(config.width, 64);
        assert_eq!(config.height, 64);
        assert_eq!(config.zoom, 1.0);
        assert_eq!(config.near_plane, 0.01);
        assert_eq!(config.far_plane, 10.0);
    }

    #[test]
    fn test_render_config_preview() {
        let config = RenderConfig::preview();
        assert_eq!(config.width, 256);
        assert_eq!(config.height, 256);
    }

    #[test]
    fn test_render_config_default_is_tbp() {
        let default = RenderConfig::default();
        let tbp = RenderConfig::tbp_default();
        assert_eq!(default.width, tbp.width);
        assert_eq!(default.height, tbp.height);
    }

    #[test]
    fn test_render_config_fov() {
        let config = RenderConfig::tbp_default();
        let fov = config.fov_radians();
        // Base FOV is 60 degrees = ~1.047 radians
        assert!((fov - 1.047).abs() < 0.01);

        // Zoom in should reduce FOV
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..config
        };
        assert!(zoomed.fov_radians() < fov);
    }

    #[test]
    fn test_render_config_intrinsics() {
        let config = RenderConfig::tbp_default();
        let intrinsics = config.intrinsics();

        assert_eq!(intrinsics.image_size, [64, 64]);
        assert_eq!(intrinsics.principal_point, [32.0, 32.0]);
        // Focal length should be positive and reasonable
        assert!(intrinsics.focal_length[0] > 0.0);
        assert!(intrinsics.focal_length[1] > 0.0);
        // For 64x64 with 60° FOV, focal length ≈ 55.4 pixels
        assert!((intrinsics.focal_length[0] - 55.4).abs() < 1.0);
    }

    #[test]
    fn test_camera_intrinsics_project() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Point at origin of camera frame projects to principal point
        let center = intrinsics.project(Vec3::new(0.0, 0.0, 1.0));
        assert!(center.is_some());
        let [x, y] = center.unwrap();
        assert!((x - 32.0).abs() < 0.001);
        assert!((y - 32.0).abs() < 0.001);

        // Point behind camera returns None
        let behind = intrinsics.project(Vec3::new(0.0, 0.0, -1.0));
        assert!(behind.is_none());
    }

    #[test]
    fn test_camera_intrinsics_unproject() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Unproject principal point at depth 1.0
        let point = intrinsics.unproject([32.0, 32.0], 1.0);
        assert!((point[0]).abs() < 0.001); // x
        assert!((point[1]).abs() < 0.001); // y
        assert!((point[2] - 1.0).abs() < 0.001); // z
    }

    #[test]
    fn test_render_output_get_rgba() {
        // 2x2 image, one solid color per pixel, row-major RGBA layout.
        let output = RenderOutput {
            rgba: vec![
                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
            ],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        // Top-left: red
        assert_eq!(output.get_rgba(0, 0), Some([255, 0, 0, 255]));
        // Top-right: green
        assert_eq!(output.get_rgba(1, 0), Some([0, 255, 0, 255]));
        // Bottom-left: blue
        assert_eq!(output.get_rgba(0, 1), Some([0, 0, 255, 255]));
        // Bottom-right: white
        assert_eq!(output.get_rgba(1, 1), Some([255, 255, 255, 255]));
        // Out of bounds
        assert_eq!(output.get_rgba(2, 0), None);
    }

    #[test]
    fn test_render_output_get_depth() {
        let output = RenderOutput {
            rgba: vec![0u8; 16],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        assert_eq!(output.get_depth(0, 0), Some(1.0));
        assert_eq!(output.get_depth(1, 0), Some(2.0));
        assert_eq!(output.get_depth(0, 1), Some(3.0));
        assert_eq!(output.get_depth(1, 1), Some(4.0));
        assert_eq!(output.get_depth(2, 0), None);
    }

    #[test]
    fn test_render_output_to_rgb_image() {
        let output = RenderOutput {
            rgba: vec![
                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
            ],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        let image = output.to_rgb_image();
        assert_eq!(image.len(), 2); // 2 rows
        assert_eq!(image[0].len(), 2); // 2 columns
        assert_eq!(image[0][0], [255, 0, 0]); // Red
        assert_eq!(image[0][1], [0, 255, 0]); // Green
        assert_eq!(image[1][0], [0, 0, 255]); // Blue
        assert_eq!(image[1][1], [255, 255, 255]); // White
    }

    #[test]
    fn test_render_output_to_depth_image() {
        let output = RenderOutput {
            rgba: vec![0u8; 16],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        let depth_image = output.to_depth_image();
        assert_eq!(depth_image.len(), 2);
        assert_eq!(depth_image[0], vec![1.0, 2.0]);
        assert_eq!(depth_image[1], vec![3.0, 4.0]);
    }

    #[test]
    fn test_render_error_display() {
        let err = RenderError::MeshNotFound("/path/to/mesh.obj".to_string());
        assert!(err.to_string().contains("Mesh not found"));
        assert!(err.to_string().contains("/path/to/mesh.obj"));
    }

    // =========================================================================
    // Edge Case Tests
    // =========================================================================

    #[test]
    fn test_object_rotation_extreme_angles() {
        // Test angles beyond 360 degrees
        let rot = ObjectRotation::new(450.0, -720.0, 1080.0);
        let quat = rot.to_quat();
        // Quaternion should still be valid (normalized)
        assert!((quat.length() - 1.0).abs() < 0.001);
    }

    #[test]
    fn test_object_rotation_to_transform() {
        let rot = ObjectRotation::new(45.0, 90.0, 0.0);
        let transform = rot.to_transform();
        // Transform should have no translation
        assert_eq!(transform.translation, Vec3::ZERO);
        // Should have rotation
        assert!(transform.rotation != Quat::IDENTITY);
    }

    #[test]
    fn test_viewpoint_config_single_viewpoint() {
        let config = ViewpointConfig {
            radius: 1.0,
            yaw_count: 1,
            pitch_angles_deg: vec![0.0],
        };
        assert_eq!(config.viewpoint_count(), 1);
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 1);
        // Single viewpoint at yaw=0, pitch=0 should be at (0, 0, radius)
        let pos = viewpoints[0].translation;
        assert!((pos.x).abs() < 0.001);
        assert!((pos.y).abs() < 0.001);
        assert!((pos.z - 1.0).abs() < 0.001);
    }

    #[test]
    fn test_viewpoint_radius_scaling() {
        let config1 = ViewpointConfig {
            radius: 0.5,
            yaw_count: 4,
            pitch_angles_deg: vec![0.0],
        };
        let config2 = ViewpointConfig {
            radius: 2.0,
            yaw_count: 4,
            pitch_angles_deg: vec![0.0],
        };

        let v1 = generate_viewpoints(&config1);
        let v2 = generate_viewpoints(&config2);

        // Viewpoints should scale proportionally
        for (vp1, vp2) in v1.iter().zip(v2.iter()) {
            let ratio = vp2.translation.length() / vp1.translation.length();
            assert!((ratio - 4.0).abs() < 0.01); // 2.0 / 0.5 = 4.0
        }
    }

    #[test]
    fn test_camera_intrinsics_project_at_z_zero() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Point at z=0 should return None (division by zero protection)
        let result = intrinsics.project(Vec3::new(1.0, 1.0, 0.0));
        assert!(result.is_none());
    }

    #[test]
    fn test_camera_intrinsics_roundtrip() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Project a 3D point
        let original = Vec3::new(0.5, -0.3, 2.0);
        let projected = intrinsics.project(original).unwrap();

        // Unproject back with the same depth (convert f32 to f64)
        let unprojected = intrinsics.unproject(projected, original.z as f64);

        // Should get back approximately the same point
        assert!((unprojected[0] - original.x as f64).abs() < 0.001); // x
        assert!((unprojected[1] - original.y as f64).abs() < 0.001); // y
        assert!((unprojected[2] - original.z as f64).abs() < 0.001); // z
    }

    #[test]
    fn test_render_output_empty() {
        let output = RenderOutput {
            rgba: vec![],
            depth: vec![],
            width: 0,
            height: 0,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        // Should handle empty gracefully
        assert_eq!(output.get_rgba(0, 0), None);
        assert_eq!(output.get_depth(0, 0), None);
        assert!(output.to_rgb_image().is_empty());
        assert!(output.to_depth_image().is_empty());
    }

    #[test]
    fn test_render_output_1x1() {
        let output = RenderOutput {
            rgba: vec![128, 64, 32, 255],
            depth: vec![0.5],
            width: 1,
            height: 1,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        assert_eq!(output.get_rgba(0, 0), Some([128, 64, 32, 255]));
        assert_eq!(output.get_depth(0, 0), Some(0.5));
        assert_eq!(output.get_rgb(0, 0), Some([128, 64, 32]));

        let rgb_img = output.to_rgb_image();
        assert_eq!(rgb_img.len(), 1);
        assert_eq!(rgb_img[0].len(), 1);
        assert_eq!(rgb_img[0][0], [128, 64, 32]);
    }

    #[test]
    fn test_render_config_high_res() {
        let config = RenderConfig::high_res();
        assert_eq!(config.width, 512);
        assert_eq!(config.height, 512);

        let intrinsics = config.intrinsics();
        assert_eq!(intrinsics.image_size, [512, 512]);
        assert_eq!(intrinsics.principal_point, [256.0, 256.0]);
    }

    #[test]
    fn test_render_config_zoom_affects_fov() {
        let base = RenderConfig::tbp_default();
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..base.clone()
        };

        // Higher zoom = lower FOV
        assert!(zoomed.fov_radians() < base.fov_radians());
        // Specifically, 2x zoom = half FOV
        assert!((zoomed.fov_radians() - base.fov_radians() / 2.0).abs() < 0.01);
    }

    #[test]
    fn test_render_config_zoom_affects_intrinsics() {
        let base = RenderConfig::tbp_default();
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..base.clone()
        };

        // Higher zoom = higher focal length
        let base_intrinsics = base.intrinsics();
        let zoomed_intrinsics = zoomed.intrinsics();

        assert!(zoomed_intrinsics.focal_length[0] > base_intrinsics.focal_length[0]);
    }

    #[test]
    fn test_lighting_config_variants() {
        let default = LightingConfig::default();
        let bright = LightingConfig::bright();
        let soft = LightingConfig::soft();
        let unlit = LightingConfig::unlit();

        // Bright should have higher intensity than default
        assert!(bright.key_light_intensity > default.key_light_intensity);

        // Unlit should have no point lights
        assert_eq!(unlit.key_light_intensity, 0.0);
        assert_eq!(unlit.fill_light_intensity, 0.0);
        assert_eq!(unlit.ambient_brightness, 1.0);

        // Soft should have lower intensity
        assert!(soft.key_light_intensity < default.key_light_intensity);
    }

    #[test]
    fn test_all_render_error_variants() {
        let errors = vec![
            RenderError::MeshNotFound("mesh.obj".to_string()),
            RenderError::TextureNotFound("texture.png".to_string()),
            RenderError::RenderFailed("GPU error".to_string()),
            RenderError::InvalidConfig("bad config".to_string()),
        ];

        for err in errors {
            // All variants should have Display impl
            let msg = err.to_string();
            assert!(!msg.is_empty());
        }
    }

    #[test]
    fn test_tbp_known_orientations_unique() {
        let orientations = ObjectRotation::tbp_known_orientations();

        // All 14 orientations should produce unique quaternions
        let quats: Vec<Quat> = orientations.iter().map(|r| r.to_quat()).collect();

        for (i, q1) in quats.iter().enumerate() {
            for (j, q2) in quats.iter().enumerate() {
                if i != j {
                    // Quaternions should be different (accounting for q == -q equivalence)
                    let dot = q1.dot(*q2).abs();
                    assert!(
                        dot < 0.999,
                        "Orientations {} and {} produce same quaternion",
                        i,
                        j
                    );
                }
            }
        }
    }
}
1597}