bevy_sensor/
lib.rs

1//! bevy-sensor: Multi-view rendering for YCB object dataset
2//!
3//! This library provides Bevy-based rendering of 3D objects from multiple viewpoints,
4//! designed to match TBP (Thousand Brains Project) habitat sensor conventions for
5//! use in neocortx sensorimotor learning experiments.
6//!
7//! # Headless Rendering (NEW)
8//!
9//! Render directly to memory buffers for use in sensorimotor learning:
10//!
11//! ```ignore
12//! use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
13//! use std::path::Path;
14//!
15//! let config = RenderConfig::tbp_default(); // 64x64, RGBD
16//! let viewpoint = bevy_sensor::generate_viewpoints(&ViewpointConfig::default())[0];
17//! let rotation = ObjectRotation::identity();
18//!
19//! let output = render_to_buffer(
20//!     Path::new("/tmp/ycb/003_cracker_box"),
21//!     &viewpoint,
22//!     &rotation,
23//!     &config,
24//! )?;
25//!
26//! // output.rgba: Vec<u8> - RGBA pixels (64*64*4 bytes)
//! // output.depth: Vec<f64> - Depth values (64*64 floats)
28//! ```
29//!
30//! # File-based Capture (Legacy)
31//!
32//! ```ignore
33//! use bevy_sensor::{SensorConfig, ViewpointConfig, ObjectRotation};
34//!
35//! let config = SensorConfig {
36//!     viewpoints: ViewpointConfig::default(),
37//!     object_rotations: ObjectRotation::tbp_benchmark_rotations(),
38//!     ..Default::default()
39//! };
40//! ```
41//!
42//! # YCB Dataset
43//!
44//! Download YCB models programmatically:
45//!
46//! ```ignore
47//! use bevy_sensor::ycb::{download_models, Subset};
48//!
49//! // Download representative subset (3 objects)
50//! download_models("/tmp/ycb", Subset::Representative).await?;
51//! ```
52
53use bevy::prelude::*;
54use std::f32::consts::PI;
55use std::path::Path;
56
57// Headless rendering implementation
58// Full GPU rendering requires a display - see render module for details
59mod render;
60
61// Batch rendering API for efficient multi-viewpoint rendering
62pub mod batch;
63
64// Test fixtures for pre-rendered images (CI/CD support)
65pub mod fixtures;
66
67// Re-export ycbust types for convenience
68pub use ycbust::{self, DownloadOptions, Subset as YcbSubset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
69
70/// YCB dataset utilities
71pub mod ycb {
72    pub use ycbust::{download_ycb, DownloadOptions, Subset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
73
74    use std::path::Path;
75
76    /// Download YCB models to the specified directory.
77    ///
78    /// # Arguments
79    /// * `output_dir` - Directory to download models to
80    /// * `subset` - Which subset of objects to download
81    ///
82    /// # Example
83    /// ```ignore
84    /// use bevy_sensor::ycb::{download_models, Subset};
85    ///
86    /// download_models("/tmp/ycb", Subset::Representative).await?;
87    /// ```
88    pub async fn download_models<P: AsRef<Path>>(
89        output_dir: P,
90        subset: Subset,
91    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
92        let options = DownloadOptions {
93            overwrite: false,
94            full: false,
95            show_progress: true,
96            delete_archives: true,
97        };
98        download_ycb(subset, output_dir.as_ref(), options).await?;
99        Ok(())
100    }
101
102    /// Download YCB models with custom options.
103    pub async fn download_models_with_options<P: AsRef<Path>>(
104        output_dir: P,
105        subset: Subset,
106        options: DownloadOptions,
107    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
108        download_ycb(subset, output_dir.as_ref(), options).await?;
109        Ok(())
110    }
111
112    /// Check if YCB models exist at the given path
113    pub fn models_exist<P: AsRef<Path>>(output_dir: P) -> bool {
114        let path = output_dir.as_ref();
115        // Check for at least one representative object
116        path.join("003_cracker_box/google_16k/textured.obj")
117            .exists()
118    }
119
120    /// Get the path to a specific YCB object's OBJ file
121    pub fn object_mesh_path<P: AsRef<Path>>(output_dir: P, object_id: &str) -> std::path::PathBuf {
122        output_dir
123            .as_ref()
124            .join(object_id)
125            .join("google_16k")
126            .join("textured.obj")
127    }
128
129    /// Get the path to a specific YCB object's texture file
130    pub fn object_texture_path<P: AsRef<Path>>(
131        output_dir: P,
132        object_id: &str,
133    ) -> std::path::PathBuf {
134        output_dir
135            .as_ref()
136            .join(object_id)
137            .join("google_16k")
138            .join("texture_map.png")
139    }
140}
141
/// Object rotation in Euler angles (degrees), matching TBP benchmark format.
/// Format: [pitch, yaw, roll] or [x, y, z] rotation.
///
/// Angles are stored as f64 for TBP numerical-precision compatibility and are
/// narrowed to f32 only when converted to Bevy types (see `to_quat`).
#[derive(Clone, Debug, PartialEq)]
pub struct ObjectRotation {
    /// Rotation around X-axis (pitch) in degrees
    pub pitch: f64,
    /// Rotation around Y-axis (yaw) in degrees
    pub yaw: f64,
    /// Rotation around Z-axis (roll) in degrees
    pub roll: f64,
}
153
154impl ObjectRotation {
155    /// Create a new rotation from Euler angles in degrees
156    pub fn new(pitch: f64, yaw: f64, roll: f64) -> Self {
157        Self { pitch, yaw, roll }
158    }
159
160    /// Create from TBP-style array [pitch, yaw, roll] in degrees
161    pub fn from_array(arr: [f64; 3]) -> Self {
162        Self {
163            pitch: arr[0],
164            yaw: arr[1],
165            roll: arr[2],
166        }
167    }
168
169    /// Identity rotation (no rotation)
170    pub fn identity() -> Self {
171        Self::new(0.0, 0.0, 0.0)
172    }
173
174    /// TBP benchmark rotations: [0,0,0], [0,90,0], [0,180,0]
175    /// Used in shorter YCB experiments to reduce computational load.
176    pub fn tbp_benchmark_rotations() -> Vec<Self> {
177        vec![
178            Self::from_array([0.0, 0.0, 0.0]),
179            Self::from_array([0.0, 90.0, 0.0]),
180            Self::from_array([0.0, 180.0, 0.0]),
181        ]
182    }
183
184    /// TBP 14 known orientations (cube faces and corners)
185    /// These are the orientations objects are learned in during training.
186    pub fn tbp_known_orientations() -> Vec<Self> {
187        vec![
188            // 6 cube faces (90° rotations around each axis)
189            Self::from_array([0.0, 0.0, 0.0]),   // Front
190            Self::from_array([0.0, 90.0, 0.0]),  // Right
191            Self::from_array([0.0, 180.0, 0.0]), // Back
192            Self::from_array([0.0, 270.0, 0.0]), // Left
193            Self::from_array([90.0, 0.0, 0.0]),  // Top
194            Self::from_array([-90.0, 0.0, 0.0]), // Bottom
195            // 8 cube corners (45° rotations)
196            Self::from_array([45.0, 45.0, 0.0]),
197            Self::from_array([45.0, 135.0, 0.0]),
198            Self::from_array([45.0, 225.0, 0.0]),
199            Self::from_array([45.0, 315.0, 0.0]),
200            Self::from_array([-45.0, 45.0, 0.0]),
201            Self::from_array([-45.0, 135.0, 0.0]),
202            Self::from_array([-45.0, 225.0, 0.0]),
203            Self::from_array([-45.0, 315.0, 0.0]),
204        ]
205    }
206
207    /// Convert to Bevy Quat (converts f64 to f32 for Bevy compatibility)
208    pub fn to_quat(&self) -> Quat {
209        Quat::from_euler(
210            EulerRot::XYZ,
211            (self.pitch as f32).to_radians(),
212            (self.yaw as f32).to_radians(),
213            (self.roll as f32).to_radians(),
214        )
215    }
216
217    /// Convert to Bevy Transform (rotation only, no translation)
218    pub fn to_transform(&self) -> Transform {
219        Transform::from_rotation(self.to_quat())
220    }
221}
222
223impl Default for ObjectRotation {
224    fn default() -> Self {
225        Self::identity()
226    }
227}
228
/// Configuration for viewpoint generation matching TBP habitat sensor behavior.
/// Uses spherical coordinates to capture objects from multiple elevations.
#[derive(Clone, Debug)]
pub struct ViewpointConfig {
    /// Distance from camera to object center (meters)
    pub radius: f32,
    /// Number of horizontal positions (yaw angles) around the object
    pub yaw_count: usize,
    /// Elevation angles in degrees (pitch). Positive = above, negative = below.
    pub pitch_angles_deg: Vec<f32>,
}

impl Default for ViewpointConfig {
    /// Default: 0.5 m radius, 8 yaw positions, and three elevations
    /// (-30, 0, +30 degrees) matching TBP's look_up/look_down capability,
    /// for 24 viewpoints total.
    fn default() -> Self {
        Self {
            radius: 0.5,
            yaw_count: 8,
            pitch_angles_deg: vec![-30.0, 0.0, 30.0],
        }
    }
}

impl ViewpointConfig {
    /// Total number of viewpoints this config will generate
    /// (yaw positions times elevation angles).
    pub fn viewpoint_count(&self) -> usize {
        self.pitch_angles_deg.len() * self.yaw_count
    }
}
259
/// Full sensor configuration for capture sessions.
///
/// Derives Bevy's `Resource` so it can be inserted into and read from the
/// ECS world by capture systems.
#[derive(Clone, Debug, Resource)]
pub struct SensorConfig {
    /// Viewpoint configuration (camera positions)
    pub viewpoints: ViewpointConfig,
    /// Object rotations to capture (each rotation generates a full viewpoint set)
    pub object_rotations: Vec<ObjectRotation>,
    /// Output directory for captures
    pub output_dir: String,
    /// Filename pattern (use {view} for view index, {rot} for rotation index)
    pub filename_pattern: String,
}
272
273impl Default for SensorConfig {
274    fn default() -> Self {
275        Self {
276            viewpoints: ViewpointConfig::default(),
277            object_rotations: vec![ObjectRotation::identity()],
278            output_dir: ".".to_string(),
279            filename_pattern: "capture_{rot}_{view}.png".to_string(),
280        }
281    }
282}
283
284impl SensorConfig {
285    /// Create config for TBP benchmark comparison (3 rotations × 24 viewpoints = 72 captures)
286    pub fn tbp_benchmark() -> Self {
287        Self {
288            viewpoints: ViewpointConfig::default(),
289            object_rotations: ObjectRotation::tbp_benchmark_rotations(),
290            output_dir: ".".to_string(),
291            filename_pattern: "capture_{rot}_{view}.png".to_string(),
292        }
293    }
294
295    /// Create config for full TBP training (14 rotations × 24 viewpoints = 336 captures)
296    pub fn tbp_full_training() -> Self {
297        Self {
298            viewpoints: ViewpointConfig::default(),
299            object_rotations: ObjectRotation::tbp_known_orientations(),
300            output_dir: ".".to_string(),
301            filename_pattern: "capture_{rot}_{view}.png".to_string(),
302        }
303    }
304
305    /// Total number of captures this config will generate
306    pub fn total_captures(&self) -> usize {
307        self.viewpoints.viewpoint_count() * self.object_rotations.len()
308    }
309}
310
311/// Generate camera viewpoints using spherical coordinates.
312///
313/// Spherical coordinate system (matching TBP habitat sensor conventions):
314/// - Yaw: horizontal rotation around Y-axis (0° to 360°)
315/// - Pitch: elevation angle from horizontal plane (-90° to +90°)
316/// - Radius: distance from origin (object center)
317pub fn generate_viewpoints(config: &ViewpointConfig) -> Vec<Transform> {
318    let mut views = Vec::with_capacity(config.viewpoint_count());
319
320    for pitch_deg in &config.pitch_angles_deg {
321        let pitch = pitch_deg.to_radians();
322
323        for i in 0..config.yaw_count {
324            let yaw = (i as f32) * 2.0 * PI / (config.yaw_count as f32);
325
326            // Spherical to Cartesian conversion (Y-up coordinate system)
327            // x = r * cos(pitch) * sin(yaw)
328            // y = r * sin(pitch)
329            // z = r * cos(pitch) * cos(yaw)
330            let x = config.radius * pitch.cos() * yaw.sin();
331            let y = config.radius * pitch.sin();
332            let z = config.radius * pitch.cos() * yaw.cos();
333
334            let transform = Transform::from_xyz(x, y, z).looking_at(Vec3::ZERO, Vec3::Y);
335            views.push(transform);
336        }
337    }
338    views
339}
340
/// Marker component (carries no data) identifying the target object being
/// captured in the scene.
#[derive(Component)]
pub struct CaptureTarget;
344
/// Marker component (carries no data) identifying the camera entity used
/// for captures.
#[derive(Component)]
pub struct CaptureCamera;
348
349// ============================================================================
350// Headless Rendering API (NEW)
351// ============================================================================
352
/// Configuration for headless rendering.
///
/// Matches TBP habitat sensor defaults: 64x64 resolution with RGBD output.
/// See [`RenderConfig::tbp_default`] for the canonical values.
#[derive(Clone, Debug)]
pub struct RenderConfig {
    /// Image width in pixels (default: 64)
    pub width: u32,
    /// Image height in pixels (default: 64)
    pub height: u32,
    /// Zoom factor affecting field of view (default: 1.0)
    /// Use >1 to zoom in (narrower FOV), <1 to zoom out (wider FOV)
    pub zoom: f32,
    /// Near clipping plane in meters (default: 0.01)
    pub near_plane: f32,
    /// Far clipping plane in meters (default: 10.0)
    pub far_plane: f32,
    /// Lighting configuration
    pub lighting: LightingConfig,
}
372
/// Lighting configuration for rendering.
///
/// Controls the ambient light level plus a two-point (key + fill) light rig.
#[derive(Clone, Debug)]
pub struct LightingConfig {
    /// Ambient light brightness (0.0 - 1.0, default: 0.3)
    pub ambient_brightness: f32,
    /// Key light intensity in lumens (default: 1500.0)
    pub key_light_intensity: f32,
    /// Key light position [x, y, z] (default: [4.0, 8.0, 4.0])
    pub key_light_position: [f32; 3],
    /// Fill light intensity in lumens (default: 500.0)
    pub fill_light_intensity: f32,
    /// Fill light position [x, y, z] (default: [-4.0, 2.0, -4.0])
    pub fill_light_position: [f32; 3],
    /// Enable shadows (default: false for performance)
    pub shadows_enabled: bool,
}

impl LightingConfig {
    /// Internal helper: build a shadowless key+fill rig.
    fn rig(
        ambient_brightness: f32,
        key_light_intensity: f32,
        key_light_position: [f32; 3],
        fill_light_intensity: f32,
        fill_light_position: [f32; 3],
    ) -> Self {
        Self {
            ambient_brightness,
            key_light_intensity,
            key_light_position,
            fill_light_intensity,
            fill_light_position,
            shadows_enabled: false,
        }
    }

    /// Bright lighting for clear visibility
    pub fn bright() -> Self {
        Self::rig(0.5, 2000.0, [4.0, 8.0, 4.0], 800.0, [-4.0, 2.0, -4.0])
    }

    /// Soft lighting with minimal shadows
    pub fn soft() -> Self {
        Self::rig(0.4, 1000.0, [3.0, 6.0, 3.0], 600.0, [-3.0, 3.0, -3.0])
    }

    /// Unlit mode - ambient only, no point lights
    pub fn unlit() -> Self {
        Self::rig(1.0, 0.0, [0.0; 3], 0.0, [0.0; 3])
    }
}

impl Default for LightingConfig {
    /// Standard rig: 0.3 ambient, 1500 lm key light, 500 lm fill light.
    fn default() -> Self {
        Self::rig(0.3, 1500.0, [4.0, 8.0, 4.0], 500.0, [-4.0, 2.0, -4.0])
    }
}
442
impl Default for RenderConfig {
    /// Defaults to the TBP-compatible 64x64 configuration
    /// (see [`RenderConfig::tbp_default`]).
    fn default() -> Self {
        Self::tbp_default()
    }
}
448
449impl RenderConfig {
450    /// TBP-compatible 64x64 RGBD sensor configuration.
451    ///
452    /// This matches the default resolution used in TBP's habitat sensor.
453    pub fn tbp_default() -> Self {
454        Self {
455            width: 64,
456            height: 64,
457            zoom: 1.0,
458            near_plane: 0.01,
459            far_plane: 10.0,
460            lighting: LightingConfig::default(),
461        }
462    }
463
464    /// Higher resolution configuration for debugging and visualization.
465    pub fn preview() -> Self {
466        Self {
467            width: 256,
468            height: 256,
469            zoom: 1.0,
470            near_plane: 0.01,
471            far_plane: 10.0,
472            lighting: LightingConfig::default(),
473        }
474    }
475
476    /// High resolution configuration for detailed captures.
477    pub fn high_res() -> Self {
478        Self {
479            width: 512,
480            height: 512,
481            zoom: 1.0,
482            near_plane: 0.01,
483            far_plane: 10.0,
484            lighting: LightingConfig::default(),
485        }
486    }
487
488    /// Calculate vertical field of view in radians based on zoom.
489    ///
490    /// Base FOV is 60 degrees, adjusted by zoom factor.
491    pub fn fov_radians(&self) -> f32 {
492        let base_fov_deg = 60.0_f32;
493        (base_fov_deg / self.zoom).to_radians()
494    }
495
496    /// Compute camera intrinsics for use with neocortx.
497    ///
498    /// Returns focal length and principal point based on resolution and FOV.
499    /// Uses f64 for TBP numerical precision compatibility.
500    pub fn intrinsics(&self) -> CameraIntrinsics {
501        let fov = self.fov_radians() as f64;
502        // focal_length = (height/2) / tan(fov/2)
503        let fy = (self.height as f64 / 2.0) / (fov / 2.0).tan();
504        let fx = fy; // Assuming square pixels
505
506        CameraIntrinsics {
507            focal_length: [fx, fy],
508            principal_point: [self.width as f64 / 2.0, self.height as f64 / 2.0],
509            image_size: [self.width, self.height],
510        }
511    }
512}
513
/// Camera intrinsic parameters for 3D reconstruction.
///
/// Compatible with neocortx's VisionIntrinsics format.
/// Uses f64 for TBP numerical precision compatibility.
/// Produced by [`RenderConfig::intrinsics`].
#[derive(Clone, Debug, PartialEq)]
pub struct CameraIntrinsics {
    /// Focal length in pixels (fx, fy)
    pub focal_length: [f64; 2],
    /// Principal point (cx, cy) - typically image center
    pub principal_point: [f64; 2],
    /// Image dimensions (width, height)
    pub image_size: [u32; 2],
}
527
528impl CameraIntrinsics {
529    /// Project a 3D point to 2D pixel coordinates.
530    pub fn project(&self, point: Vec3) -> Option<[f64; 2]> {
531        if point.z <= 0.0 {
532            return None;
533        }
534        let x = (point.x as f64 / point.z as f64) * self.focal_length[0] + self.principal_point[0];
535        let y = (point.y as f64 / point.z as f64) * self.focal_length[1] + self.principal_point[1];
536        Some([x, y])
537    }
538
539    /// Unproject a 2D pixel to a 3D point at given depth.
540    pub fn unproject(&self, pixel: [f64; 2], depth: f64) -> [f64; 3] {
541        let x = (pixel[0] - self.principal_point[0]) / self.focal_length[0] * depth;
542        let y = (pixel[1] - self.principal_point[1]) / self.focal_length[1] * depth;
543        [x, y, depth]
544    }
545}
546
/// Output from headless rendering containing RGBA and depth data.
#[derive(Clone, Debug)]
pub struct RenderOutput {
    /// RGBA pixel data in row-major order (width * height * 4 bytes)
    pub rgba: Vec<u8>,
    /// Depth values in meters, row-major order (width * height f64s)
    /// Values are linear depth from camera, not normalized.
    /// Uses f64 for TBP numerical precision compatibility.
    pub depth: Vec<f64>,
    /// Image width in pixels
    pub width: u32,
    /// Image height in pixels
    pub height: u32,
    /// Camera intrinsics used for this render
    pub intrinsics: CameraIntrinsics,
    /// Camera transform (world position and orientation)
    pub camera_transform: Transform,
    /// Object rotation applied during render
    pub object_rotation: ObjectRotation,
}
567
568impl RenderOutput {
569    /// Get RGBA pixel at (x, y). Returns None if out of bounds.
570    pub fn get_rgba(&self, x: u32, y: u32) -> Option<[u8; 4]> {
571        if x >= self.width || y >= self.height {
572            return None;
573        }
574        let idx = ((y * self.width + x) * 4) as usize;
575        Some([
576            self.rgba[idx],
577            self.rgba[idx + 1],
578            self.rgba[idx + 2],
579            self.rgba[idx + 3],
580        ])
581    }
582
583    /// Get depth value at (x, y) in meters. Returns None if out of bounds.
584    pub fn get_depth(&self, x: u32, y: u32) -> Option<f64> {
585        if x >= self.width || y >= self.height {
586            return None;
587        }
588        let idx = (y * self.width + x) as usize;
589        Some(self.depth[idx])
590    }
591
592    /// Get RGB pixel (without alpha) at (x, y).
593    pub fn get_rgb(&self, x: u32, y: u32) -> Option<[u8; 3]> {
594        self.get_rgba(x, y).map(|rgba| [rgba[0], rgba[1], rgba[2]])
595    }
596
597    /// Convert to neocortx-compatible image format: Vec<Vec<[u8; 3]>>
598    pub fn to_rgb_image(&self) -> Vec<Vec<[u8; 3]>> {
599        let mut image = Vec::with_capacity(self.height as usize);
600        for y in 0..self.height {
601            let mut row = Vec::with_capacity(self.width as usize);
602            for x in 0..self.width {
603                row.push(self.get_rgb(x, y).unwrap_or([0, 0, 0]));
604            }
605            image.push(row);
606        }
607        image
608    }
609
610    /// Convert depth to neocortx-compatible format: Vec<Vec<f64>>
611    pub fn to_depth_image(&self) -> Vec<Vec<f64>> {
612        let mut image = Vec::with_capacity(self.height as usize);
613        for y in 0..self.height {
614            let mut row = Vec::with_capacity(self.width as usize);
615            for x in 0..self.width {
616                row.push(self.get_depth(x, y).unwrap_or(0.0));
617            }
618            image.push(row);
619        }
620        image
621    }
622}
623
/// Errors that can occur during rendering and file operations.
#[derive(Debug, Clone)]
pub enum RenderError {
    /// Object mesh file not found
    MeshNotFound(String),
    /// Object texture file not found
    TextureNotFound(String),
    /// Generic file not found error
    FileNotFound { path: String, reason: String },
    /// File write failed
    FileWriteFailed { path: String, reason: String },
    /// Directory creation failed
    DirectoryCreationFailed { path: String, reason: String },
    /// Bevy rendering failed
    RenderFailed(String),
    /// Invalid configuration
    InvalidConfig(String),
    /// Invalid input parameters
    InvalidInput(String),
    /// JSON serialization/deserialization error
    SerializationError(String),
    /// Binary data parsing error
    DataParsingError(String),
    /// Render timeout
    RenderTimeout { duration_secs: u64 },
}

impl std::fmt::Display for RenderError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use RenderError::*;
        // Message text is kept verbatim; callers may match on these strings.
        match self {
            MeshNotFound(p) => write!(f, "Mesh not found: {}", p),
            TextureNotFound(p) => write!(f, "Texture not found: {}", p),
            FileNotFound { path, reason } => write!(f, "File not found at {}: {}", path, reason),
            FileWriteFailed { path, reason } => {
                write!(f, "Failed to write file {}: {}", path, reason)
            }
            DirectoryCreationFailed { path, reason } => {
                write!(f, "Failed to create directory {}: {}", path, reason)
            }
            RenderFailed(msg) => write!(f, "Render failed: {}", msg),
            InvalidConfig(msg) => write!(f, "Invalid config: {}", msg),
            InvalidInput(msg) => write!(f, "Invalid input: {}", msg),
            SerializationError(msg) => write!(f, "Serialization error: {}", msg),
            DataParsingError(msg) => write!(f, "Data parsing error: {}", msg),
            RenderTimeout { duration_secs } => {
                write!(f, "Render timeout after {} seconds", duration_secs)
            }
        }
    }
}

impl std::error::Error for RenderError {}
678
/// Render a YCB object to an in-memory buffer.
///
/// This is the primary API for headless rendering. It spawns a minimal Bevy app,
/// renders a single frame, extracts the RGBA and depth data, and shuts down.
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory (e.g., "/tmp/ycb/003_cracker_box")
/// * `camera_transform` - Camera position and orientation (use `generate_viewpoints`)
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration (resolution, depth range, etc.)
///
/// # Errors
/// Propagates any [`RenderError`] produced by the underlying headless renderer.
///
/// # Example
/// ```ignore
/// use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
/// use std::path::Path;
///
/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
/// let output = render_to_buffer(
///     Path::new("/tmp/ycb/003_cracker_box"),
///     &viewpoints[0],
///     &ObjectRotation::identity(),
///     &RenderConfig::tbp_default(),
/// )?;
/// ```
pub fn render_to_buffer(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
) -> Result<RenderOutput, RenderError> {
    // Thin delegation to the Bevy-based headless renderer in the private
    // render module.
    render::render_headless(object_dir, camera_transform, object_rotation, config)
}
712
713/// Render all viewpoints and rotations for a YCB object.
714///
715/// Convenience function that renders all combinations of viewpoints and rotations.
716///
717/// # Arguments
718/// * `object_dir` - Path to YCB object directory
719/// * `viewpoint_config` - Viewpoint configuration (camera positions)
720/// * `rotations` - Object rotations to render
721/// * `render_config` - Render configuration
722///
723/// # Returns
724/// Vector of RenderOutput, one per viewpoint × rotation combination.
725pub fn render_all_viewpoints(
726    object_dir: &Path,
727    viewpoint_config: &ViewpointConfig,
728    rotations: &[ObjectRotation],
729    render_config: &RenderConfig,
730) -> Result<Vec<RenderOutput>, RenderError> {
731    let viewpoints = generate_viewpoints(viewpoint_config);
732    let mut outputs = Vec::with_capacity(viewpoints.len() * rotations.len());
733
734    for rotation in rotations {
735        for viewpoint in &viewpoints {
736            let output = render_to_buffer(object_dir, viewpoint, rotation, render_config)?;
737            outputs.push(output);
738        }
739    }
740
741    Ok(outputs)
742}
743
/// Render directly to files (for subprocess mode).
///
/// This function is designed for subprocess rendering where the process will exit
/// after rendering. It saves RGBA and depth data directly to the specified files
/// before the process terminates.
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory
/// * `camera_transform` - Camera position and orientation
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration
/// * `rgba_path` - Output path for RGBA PNG
/// * `depth_path` - Output path for depth data (raw f32 bytes)
///   NOTE(review): `RenderOutput::depth` holds f64 values - confirm with the
///   render module that the on-disk format really is f32.
///
/// # Note
/// This function may call `std::process::exit(0)` and not return.
pub fn render_to_files(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
    rgba_path: &Path,
    depth_path: &Path,
) -> Result<(), RenderError> {
    // Delegation only: all rendering and file I/O happen in the render module.
    render::render_to_files(
        object_dir,
        camera_transform,
        object_rotation,
        config,
        rgba_path,
        depth_path,
    )
}
777
778// Re-export batch types for convenient API access
779pub use batch::{
780    BatchRenderConfig, BatchRenderError, BatchRenderOutput, BatchRenderRequest, BatchRenderer,
781    BatchState, RenderStatus,
782};
783
/// Create a new batch renderer for efficient multi-viewpoint rendering.
///
/// This creates a persistent Bevy app that can render multiple viewpoints without
/// subprocess spawning overhead. Achieves 10-100x speedup vs individual render_to_buffer calls.
///
/// # Arguments
/// * `config` - Batch rendering configuration
///
/// # Returns
/// A BatchRenderer instance ready to queue render requests. This call is
/// currently infallible (the `Result` is kept for API stability).
///
/// # Example
/// ```ignore
/// use bevy_sensor::{create_batch_renderer, queue_render_request, render_next_in_batch, BatchRenderConfig};
///
/// let mut renderer = create_batch_renderer(&BatchRenderConfig::default())?;
/// ```
pub fn create_batch_renderer(config: &BatchRenderConfig) -> Result<BatchRenderer, RenderError> {
    // For now, just create an empty renderer that will need a Bevy app.
    // The actual app creation happens when rendering starts.
    Ok(BatchRenderer::new(config.clone()))
}
806
807/// Queue a render request for batch processing.
808///
809/// Adds a render request to the batch queue. Requests are processed in order
810/// when you call render_next_in_batch().
811///
812/// # Arguments
813/// * `renderer` - The batch renderer instance
814/// * `request` - The render request
815///
816/// # Returns
817/// Ok if queued successfully, Err if queue is full
818///
819/// # Example
820/// ```ignore
821/// use bevy_sensor::{batch::BatchRenderRequest, RenderConfig, ObjectRotation};
822/// use std::path::PathBuf;
823///
824/// queue_render_request(&mut renderer, BatchRenderRequest {
825///     object_dir: PathBuf::from("/tmp/ycb/003_cracker_box"),
826///     viewpoint: camera_transform,
827///     object_rotation: ObjectRotation::identity(),
828///     render_config: RenderConfig::tbp_default(),
829/// })?;
830/// ```
831pub fn queue_render_request(
832    renderer: &mut BatchRenderer,
833    request: BatchRenderRequest,
834) -> Result<(), RenderError> {
835    renderer
836        .queue_request(request)
837        .map_err(|e| RenderError::RenderFailed(e.to_string()))
838}
839
840/// Process and execute the next render in the batch queue.
841///
842/// Executes a single render from the queued requests. Returns None when the queue is empty.
843/// Use this in a loop to process all queued renders.
844///
845/// # Arguments
846/// * `renderer` - The batch renderer instance
847/// * `timeout_ms` - Timeout in milliseconds for this render
848///
849/// # Returns
850/// Some(output) if a render completed, None if queue is empty
851///
852/// # Example
853/// ```ignore
854/// loop {
855///     match render_next_in_batch(&mut renderer, 500)? {
856///         Some(output) => println!("Render complete: {:?}", output.status),
857///         None => break, // All renders done
858///     }
859/// }
860/// ```
861pub fn render_next_in_batch(
862    renderer: &mut BatchRenderer,
863    _timeout_ms: u32,
864) -> Result<Option<BatchRenderOutput>, RenderError> {
865    // This is a stub - the actual implementation will require a running Bevy app
866    // For now, just render single batches immediately using render_to_buffer
867    if let Some(request) = renderer.pending_requests.pop_front() {
868        let output = render_to_buffer(
869            &request.object_dir,
870            &request.viewpoint,
871            &request.object_rotation,
872            &request.render_config,
873        )?;
874        let batch_output = BatchRenderOutput::from_render_output(request, output);
875        renderer.completed_results.push(batch_output.clone());
876        renderer.renders_processed += 1;
877        Ok(Some(batch_output))
878    } else {
879        Ok(None)
880    }
881}
882
883/// Render multiple requests in batch (convenience function).
884///
885/// Queues all requests and executes them in batch, returning all results.
886/// Simpler than manage queue + loop for one-off batches.
887///
888/// # Arguments
889/// * `requests` - Vector of render requests
890/// * `config` - Batch rendering configuration
891///
892/// # Returns
893/// Vector of BatchRenderOutput results in same order as input
894///
895/// # Example
896/// ```ignore
897/// use bevy_sensor::{render_batch, batch::BatchRenderRequest, BatchRenderConfig};
898///
899/// let results = render_batch(requests, &BatchRenderConfig::default())?;
900/// ```
901pub fn render_batch(
902    requests: Vec<BatchRenderRequest>,
903    config: &BatchRenderConfig,
904) -> Result<Vec<BatchRenderOutput>, RenderError> {
905    let mut renderer = create_batch_renderer(config)?;
906
907    // Queue all requests
908    for request in requests {
909        queue_render_request(&mut renderer, request)?;
910    }
911
912    // Execute all and collect results
913    let mut results = Vec::new();
914    while let Some(output) = render_next_in_batch(&mut renderer, config.frame_timeout_ms)? {
915        results.push(output);
916    }
917
918    Ok(results)
919}
920
921// Re-export bevy types that consumers will need
922pub use bevy::prelude::{Quat, Transform, Vec3};
923
#[cfg(test)]
mod tests {
    use super::*;

    // =========================================================================
    // ObjectRotation tests
    // =========================================================================

    #[test]
    fn test_object_rotation_identity() {
        // Identity rotation: all Euler angles are zero.
        let rot = ObjectRotation::identity();
        assert_eq!(rot.pitch, 0.0);
        assert_eq!(rot.yaw, 0.0);
        assert_eq!(rot.roll, 0.0);
    }

    #[test]
    fn test_object_rotation_from_array() {
        // Array order is [pitch, yaw, roll] (degrees).
        let rot = ObjectRotation::from_array([10.0, 20.0, 30.0]);
        assert_eq!(rot.pitch, 10.0);
        assert_eq!(rot.yaw, 20.0);
        assert_eq!(rot.roll, 30.0);
    }

    #[test]
    fn test_tbp_benchmark_rotations() {
        // Benchmark set: identity plus 90° and 180° yaw rotations.
        let rotations = ObjectRotation::tbp_benchmark_rotations();
        assert_eq!(rotations.len(), 3);
        assert_eq!(rotations[0], ObjectRotation::from_array([0.0, 0.0, 0.0]));
        assert_eq!(rotations[1], ObjectRotation::from_array([0.0, 90.0, 0.0]));
        assert_eq!(rotations[2], ObjectRotation::from_array([0.0, 180.0, 0.0]));
    }

    #[test]
    fn test_tbp_known_orientations_count() {
        let orientations = ObjectRotation::tbp_known_orientations();
        assert_eq!(orientations.len(), 14);
    }

    #[test]
    fn test_rotation_to_quat() {
        let rot = ObjectRotation::identity();
        let quat = rot.to_quat();
        // Identity quaternion should be approximately (1, 0, 0, 0)
        assert!((quat.w - 1.0).abs() < 0.001);
        assert!(quat.x.abs() < 0.001);
        assert!(quat.y.abs() < 0.001);
        assert!(quat.z.abs() < 0.001);
    }

    #[test]
    fn test_rotation_90_yaw() {
        let rot = ObjectRotation::new(0.0, 90.0, 0.0);
        let quat = rot.to_quat();
        // 90° Y rotation: w ≈ 0.707, y ≈ 0.707
        assert!((quat.w - 0.707).abs() < 0.01);
        assert!((quat.y - 0.707).abs() < 0.01);
    }

    // =========================================================================
    // Viewpoint generation tests
    // =========================================================================

    #[test]
    fn test_viewpoint_config_default() {
        let config = ViewpointConfig::default();
        assert_eq!(config.radius, 0.5);
        assert_eq!(config.yaw_count, 8);
        assert_eq!(config.pitch_angles_deg.len(), 3);
    }

    #[test]
    fn test_viewpoint_count() {
        let config = ViewpointConfig::default();
        assert_eq!(config.viewpoint_count(), 24); // 8 × 3
    }

    #[test]
    fn test_generate_viewpoints_count() {
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 24);
    }

    #[test]
    fn test_viewpoints_spherical_radius() {
        // Every generated camera position must lie on the configured sphere.
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let actual_radius = transform.translation.length();
            assert!(
                (actual_radius - config.radius).abs() < 0.001,
                "Viewpoint {} has incorrect radius: {} (expected {})",
                i,
                actual_radius,
                config.radius
            );
        }
    }

    #[test]
    fn test_viewpoints_looking_at_origin() {
        // The camera forward vector should align with the direction toward the
        // origin (dot product near 1.0 for unit vectors).
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let forward = transform.forward();
            let to_origin = (Vec3::ZERO - transform.translation).normalize();
            let dot = forward.dot(to_origin);
            assert!(
                dot > 0.99,
                "Viewpoint {} not looking at origin, dot product: {}",
                i,
                dot
            );
        }
    }

    // =========================================================================
    // SensorConfig tests
    // =========================================================================

    #[test]
    fn test_sensor_config_default() {
        let config = SensorConfig::default();
        assert_eq!(config.object_rotations.len(), 1);
        assert_eq!(config.total_captures(), 24);
    }

    #[test]
    fn test_sensor_config_tbp_benchmark() {
        let config = SensorConfig::tbp_benchmark();
        assert_eq!(config.object_rotations.len(), 3);
        assert_eq!(config.total_captures(), 72); // 3 rotations × 24 viewpoints
    }

    #[test]
    fn test_sensor_config_tbp_full() {
        let config = SensorConfig::tbp_full_training();
        assert_eq!(config.object_rotations.len(), 14);
        assert_eq!(config.total_captures(), 336); // 14 rotations × 24 viewpoints
    }

    // =========================================================================
    // YCB dataset helper tests
    // =========================================================================

    #[test]
    fn test_ycb_representative_objects() {
        // Verify representative objects are defined
        assert_eq!(crate::ycb::REPRESENTATIVE_OBJECTS.len(), 3);
        assert!(crate::ycb::REPRESENTATIVE_OBJECTS.contains(&"003_cracker_box"));
    }

    #[test]
    fn test_ycb_ten_objects() {
        // Verify ten objects subset is defined
        assert_eq!(crate::ycb::TEN_OBJECTS.len(), 10);
    }

    #[test]
    fn test_ycb_object_mesh_path() {
        let path = crate::ycb::object_mesh_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path.to_string_lossy(),
            "/tmp/ycb/003_cracker_box/google_16k/textured.obj"
        );
    }

    #[test]
    fn test_ycb_object_texture_path() {
        let path = crate::ycb::object_texture_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path.to_string_lossy(),
            "/tmp/ycb/003_cracker_box/google_16k/texture_map.png"
        );
    }

    // =========================================================================
    // Headless Rendering API Tests
    // =========================================================================

    #[test]
    fn test_render_config_tbp_default() {
        let config = RenderConfig::tbp_default();
        assert_eq!(config.width, 64);
        assert_eq!(config.height, 64);
        assert_eq!(config.zoom, 1.0);
        assert_eq!(config.near_plane, 0.01);
        assert_eq!(config.far_plane, 10.0);
    }

    #[test]
    fn test_render_config_preview() {
        let config = RenderConfig::preview();
        assert_eq!(config.width, 256);
        assert_eq!(config.height, 256);
    }

    #[test]
    fn test_render_config_default_is_tbp() {
        let default = RenderConfig::default();
        let tbp = RenderConfig::tbp_default();
        assert_eq!(default.width, tbp.width);
        assert_eq!(default.height, tbp.height);
    }

    #[test]
    fn test_render_config_fov() {
        let config = RenderConfig::tbp_default();
        let fov = config.fov_radians();
        // Base FOV is 60 degrees = ~1.047 radians
        assert!((fov - 1.047).abs() < 0.01);

        // Zoom in should reduce FOV
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..config
        };
        assert!(zoomed.fov_radians() < fov);
    }

    #[test]
    fn test_render_config_intrinsics() {
        let config = RenderConfig::tbp_default();
        let intrinsics = config.intrinsics();

        assert_eq!(intrinsics.image_size, [64, 64]);
        assert_eq!(intrinsics.principal_point, [32.0, 32.0]);
        // Focal length should be positive and reasonable
        assert!(intrinsics.focal_length[0] > 0.0);
        assert!(intrinsics.focal_length[1] > 0.0);
        // For 64x64 with 60° FOV, focal length ≈ 55.4 pixels
        assert!((intrinsics.focal_length[0] - 55.4).abs() < 1.0);
    }

    #[test]
    fn test_camera_intrinsics_project() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // A point on the optical axis (x = y = 0, z > 0) projects to the
        // principal point
        let center = intrinsics.project(Vec3::new(0.0, 0.0, 1.0));
        assert!(center.is_some());
        let [x, y] = center.unwrap();
        assert!((x - 32.0).abs() < 0.001);
        assert!((y - 32.0).abs() < 0.001);

        // Point behind camera returns None
        let behind = intrinsics.project(Vec3::new(0.0, 0.0, -1.0));
        assert!(behind.is_none());
    }

    #[test]
    fn test_camera_intrinsics_unproject() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Unproject principal point at depth 1.0
        let point = intrinsics.unproject([32.0, 32.0], 1.0);
        assert!((point[0]).abs() < 0.001); // x
        assert!((point[1]).abs() < 0.001); // y
        assert!((point[2] - 1.0).abs() < 0.001); // z
    }

    #[test]
    fn test_render_output_get_rgba() {
        // 2x2 image, one distinct color per pixel, row-major RGBA layout.
        let output = RenderOutput {
            rgba: vec![
                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
            ],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        // Top-left: red
        assert_eq!(output.get_rgba(0, 0), Some([255, 0, 0, 255]));
        // Top-right: green
        assert_eq!(output.get_rgba(1, 0), Some([0, 255, 0, 255]));
        // Bottom-left: blue
        assert_eq!(output.get_rgba(0, 1), Some([0, 0, 255, 255]));
        // Bottom-right: white
        assert_eq!(output.get_rgba(1, 1), Some([255, 255, 255, 255]));
        // Out of bounds
        assert_eq!(output.get_rgba(2, 0), None);
    }

    #[test]
    fn test_render_output_get_depth() {
        // Depth buffer is row-major: (x, y) maps to index y * width + x.
        let output = RenderOutput {
            rgba: vec![0u8; 16],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        assert_eq!(output.get_depth(0, 0), Some(1.0));
        assert_eq!(output.get_depth(1, 0), Some(2.0));
        assert_eq!(output.get_depth(0, 1), Some(3.0));
        assert_eq!(output.get_depth(1, 1), Some(4.0));
        assert_eq!(output.get_depth(2, 0), None);
    }

    #[test]
    fn test_render_output_to_rgb_image() {
        let output = RenderOutput {
            rgba: vec![
                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
            ],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        let image = output.to_rgb_image();
        assert_eq!(image.len(), 2); // 2 rows
        assert_eq!(image[0].len(), 2); // 2 columns
        assert_eq!(image[0][0], [255, 0, 0]); // Red
        assert_eq!(image[0][1], [0, 255, 0]); // Green
        assert_eq!(image[1][0], [0, 0, 255]); // Blue
        assert_eq!(image[1][1], [255, 255, 255]); // White
    }

    #[test]
    fn test_render_output_to_depth_image() {
        let output = RenderOutput {
            rgba: vec![0u8; 16],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        let depth_image = output.to_depth_image();
        assert_eq!(depth_image.len(), 2);
        assert_eq!(depth_image[0], vec![1.0, 2.0]);
        assert_eq!(depth_image[1], vec![3.0, 4.0]);
    }

    #[test]
    fn test_render_error_display() {
        let err = RenderError::MeshNotFound("/path/to/mesh.obj".to_string());
        assert!(err.to_string().contains("Mesh not found"));
        assert!(err.to_string().contains("/path/to/mesh.obj"));
    }

    // =========================================================================
    // Edge Case Tests
    // =========================================================================

    #[test]
    fn test_object_rotation_extreme_angles() {
        // Test angles beyond 360 degrees
        let rot = ObjectRotation::new(450.0, -720.0, 1080.0);
        let quat = rot.to_quat();
        // Quaternion should still be valid (normalized)
        assert!((quat.length() - 1.0).abs() < 0.001);
    }

    #[test]
    fn test_object_rotation_to_transform() {
        let rot = ObjectRotation::new(45.0, 90.0, 0.0);
        let transform = rot.to_transform();
        // Transform should have no translation
        assert_eq!(transform.translation, Vec3::ZERO);
        // Should have rotation
        assert!(transform.rotation != Quat::IDENTITY);
    }

    #[test]
    fn test_viewpoint_config_single_viewpoint() {
        let config = ViewpointConfig {
            radius: 1.0,
            yaw_count: 1,
            pitch_angles_deg: vec![0.0],
        };
        assert_eq!(config.viewpoint_count(), 1);
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 1);
        // Single viewpoint at yaw=0, pitch=0 should be at (0, 0, radius)
        let pos = viewpoints[0].translation;
        assert!((pos.x).abs() < 0.001);
        assert!((pos.y).abs() < 0.001);
        assert!((pos.z - 1.0).abs() < 0.001);
    }

    #[test]
    fn test_viewpoint_radius_scaling() {
        let config1 = ViewpointConfig {
            radius: 0.5,
            yaw_count: 4,
            pitch_angles_deg: vec![0.0],
        };
        let config2 = ViewpointConfig {
            radius: 2.0,
            yaw_count: 4,
            pitch_angles_deg: vec![0.0],
        };

        let v1 = generate_viewpoints(&config1);
        let v2 = generate_viewpoints(&config2);

        // Viewpoints should scale proportionally
        for (vp1, vp2) in v1.iter().zip(v2.iter()) {
            let ratio = vp2.translation.length() / vp1.translation.length();
            assert!((ratio - 4.0).abs() < 0.01); // 2.0 / 0.5 = 4.0
        }
    }

    #[test]
    fn test_camera_intrinsics_project_at_z_zero() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Point at z=0 should return None (division by zero protection)
        let result = intrinsics.project(Vec3::new(1.0, 1.0, 0.0));
        assert!(result.is_none());
    }

    #[test]
    fn test_camera_intrinsics_roundtrip() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Project a 3D point
        let original = Vec3::new(0.5, -0.3, 2.0);
        let projected = intrinsics.project(original).unwrap();

        // Unproject back with the same depth (convert f32 to f64)
        let unprojected = intrinsics.unproject(projected, original.z as f64);

        // Should get back approximately the same point
        assert!((unprojected[0] - original.x as f64).abs() < 0.001); // x
        assert!((unprojected[1] - original.y as f64).abs() < 0.001); // y
        assert!((unprojected[2] - original.z as f64).abs() < 0.001); // z
    }

    #[test]
    fn test_render_output_empty() {
        // Degenerate 0x0 output: accessors must not panic.
        let output = RenderOutput {
            rgba: vec![],
            depth: vec![],
            width: 0,
            height: 0,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        // Should handle empty gracefully
        assert_eq!(output.get_rgba(0, 0), None);
        assert_eq!(output.get_depth(0, 0), None);
        assert!(output.to_rgb_image().is_empty());
        assert!(output.to_depth_image().is_empty());
    }

    #[test]
    fn test_render_output_1x1() {
        // Smallest non-empty output: single pixel, single depth value.
        let output = RenderOutput {
            rgba: vec![128, 64, 32, 255],
            depth: vec![0.5],
            width: 1,
            height: 1,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        assert_eq!(output.get_rgba(0, 0), Some([128, 64, 32, 255]));
        assert_eq!(output.get_depth(0, 0), Some(0.5));
        assert_eq!(output.get_rgb(0, 0), Some([128, 64, 32]));

        let rgb_img = output.to_rgb_image();
        assert_eq!(rgb_img.len(), 1);
        assert_eq!(rgb_img[0].len(), 1);
        assert_eq!(rgb_img[0][0], [128, 64, 32]);
    }

    #[test]
    fn test_render_config_high_res() {
        let config = RenderConfig::high_res();
        assert_eq!(config.width, 512);
        assert_eq!(config.height, 512);

        let intrinsics = config.intrinsics();
        assert_eq!(intrinsics.image_size, [512, 512]);
        assert_eq!(intrinsics.principal_point, [256.0, 256.0]);
    }

    #[test]
    fn test_render_config_zoom_affects_fov() {
        let base = RenderConfig::tbp_default();
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..base.clone()
        };

        // Higher zoom = lower FOV
        assert!(zoomed.fov_radians() < base.fov_radians());
        // Specifically, 2x zoom = half FOV
        assert!((zoomed.fov_radians() - base.fov_radians() / 2.0).abs() < 0.01);
    }

    #[test]
    fn test_render_config_zoom_affects_intrinsics() {
        let base = RenderConfig::tbp_default();
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..base.clone()
        };

        // Higher zoom = higher focal length
        let base_intrinsics = base.intrinsics();
        let zoomed_intrinsics = zoomed.intrinsics();

        assert!(zoomed_intrinsics.focal_length[0] > base_intrinsics.focal_length[0]);
    }

    #[test]
    fn test_lighting_config_variants() {
        let default = LightingConfig::default();
        let bright = LightingConfig::bright();
        let soft = LightingConfig::soft();
        let unlit = LightingConfig::unlit();

        // Bright should have higher intensity than default
        assert!(bright.key_light_intensity > default.key_light_intensity);

        // Unlit should have no point lights
        assert_eq!(unlit.key_light_intensity, 0.0);
        assert_eq!(unlit.fill_light_intensity, 0.0);
        assert_eq!(unlit.ambient_brightness, 1.0);

        // Soft should have lower intensity
        assert!(soft.key_light_intensity < default.key_light_intensity);
    }

    #[test]
    fn test_all_render_error_variants() {
        let errors = vec![
            RenderError::MeshNotFound("mesh.obj".to_string()),
            RenderError::TextureNotFound("texture.png".to_string()),
            RenderError::RenderFailed("GPU error".to_string()),
            RenderError::InvalidConfig("bad config".to_string()),
        ];

        for err in errors {
            // All variants should have Display impl
            let msg = err.to_string();
            assert!(!msg.is_empty());
        }
    }

    #[test]
    fn test_tbp_known_orientations_unique() {
        let orientations = ObjectRotation::tbp_known_orientations();

        // All 14 orientations should produce unique quaternions
        let quats: Vec<Quat> = orientations.iter().map(|r| r.to_quat()).collect();

        for (i, q1) in quats.iter().enumerate() {
            for (j, q2) in quats.iter().enumerate() {
                if i != j {
                    // Quaternions should be different (accounting for q == -q equivalence)
                    let dot = q1.dot(*q2).abs();
                    assert!(
                        dot < 0.999,
                        "Orientations {} and {} produce same quaternion",
                        i,
                        j
                    );
                }
            }
        }
    }
}