bevy_sensor/
lib.rs

1//! bevy-sensor: Multi-view rendering for YCB object dataset
2//!
3//! This library provides Bevy-based rendering of 3D objects from multiple viewpoints,
4//! designed to match TBP (Thousand Brains Project) habitat sensor conventions for
5//! use in neocortx sensorimotor learning experiments.
6//!
7//! # Headless Rendering (NEW)
8//!
9//! Render directly to memory buffers for use in sensorimotor learning:
10//!
11//! ```ignore
12//! use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
13//! use std::path::Path;
14//!
15//! let config = RenderConfig::tbp_default(); // 64x64, RGBD
16//! let viewpoint = bevy_sensor::generate_viewpoints(&ViewpointConfig::default())[0];
17//! let rotation = ObjectRotation::identity();
18//!
19//! let output = render_to_buffer(
20//!     Path::new("/tmp/ycb/003_cracker_box"),
21//!     &viewpoint,
22//!     &rotation,
23//!     &config,
24//! )?;
25//!
26//! // output.rgba: Vec<u8> - RGBA pixels (64*64*4 bytes)
//! // output.depth: Vec<f64> - Depth values (64*64 f64 values)
28//! ```
29//!
30//! # File-based Capture (Legacy)
31//!
32//! ```ignore
33//! use bevy_sensor::{SensorConfig, ViewpointConfig, ObjectRotation};
34//!
35//! let config = SensorConfig {
36//!     viewpoints: ViewpointConfig::default(),
37//!     object_rotations: ObjectRotation::tbp_benchmark_rotations(),
38//!     ..Default::default()
39//! };
40//! ```
41//!
42//! # YCB Dataset
43//!
44//! Download YCB models programmatically:
45//!
46//! ```ignore
47//! use bevy_sensor::ycb::{download_models, Subset};
48//!
49//! // Download representative subset (3 objects)
50//! download_models("/tmp/ycb", Subset::Representative).await?;
51//! ```
52
53use bevy::prelude::*;
54use std::f32::consts::PI;
55use std::path::Path;
56
57// Headless rendering implementation
58// Full GPU rendering requires a display - see render module for details
59mod render;
60
61// Batch rendering API for efficient multi-viewpoint rendering
62pub mod batch;
63
64// Test fixtures for pre-rendered images (CI/CD support)
65pub mod fixtures;
66
67// Re-export ycbust types for convenience
68pub use ycbust::{self, DownloadOptions, Subset as YcbSubset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
69
70/// YCB dataset utilities
71pub mod ycb {
72    pub use ycbust::{download_ycb, DownloadOptions, Subset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
73
74    use std::path::Path;
75
76    /// Download YCB models to the specified directory.
77    ///
78    /// # Arguments
79    /// * `output_dir` - Directory to download models to
80    /// * `subset` - Which subset of objects to download
81    ///
82    /// # Example
83    /// ```ignore
84    /// use bevy_sensor::ycb::{download_models, Subset};
85    ///
86    /// download_models("/tmp/ycb", Subset::Representative).await?;
87    /// ```
88    pub async fn download_models<P: AsRef<Path>>(
89        output_dir: P,
90        subset: Subset,
91    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
92        let options = DownloadOptions {
93            overwrite: false,
94            full: false,
95            show_progress: true,
96            delete_archives: true,
97        };
98        download_ycb(subset, output_dir.as_ref(), options).await?;
99        Ok(())
100    }
101
102    /// Download YCB models with custom options.
103    pub async fn download_models_with_options<P: AsRef<Path>>(
104        output_dir: P,
105        subset: Subset,
106        options: DownloadOptions,
107    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
108        download_ycb(subset, output_dir.as_ref(), options).await?;
109        Ok(())
110    }
111
112    /// Check if YCB models exist at the given path
113    pub fn models_exist<P: AsRef<Path>>(output_dir: P) -> bool {
114        let path = output_dir.as_ref();
115        // Check for at least one representative object
116        path.join("003_cracker_box/google_16k/textured.obj")
117            .exists()
118    }
119
120    /// Get the path to a specific YCB object's OBJ file
121    pub fn object_mesh_path<P: AsRef<Path>>(output_dir: P, object_id: &str) -> std::path::PathBuf {
122        output_dir
123            .as_ref()
124            .join(object_id)
125            .join("google_16k")
126            .join("textured.obj")
127    }
128
129    /// Get the path to a specific YCB object's texture file
130    pub fn object_texture_path<P: AsRef<Path>>(
131        output_dir: P,
132        object_id: &str,
133    ) -> std::path::PathBuf {
134        output_dir
135            .as_ref()
136            .join(object_id)
137            .join("google_16k")
138            .join("texture_map.png")
139    }
140}
141
/// Object rotation in Euler angles (degrees), matching TBP benchmark format.
/// Format: [pitch, yaw, roll] or [x, y, z] rotation.
///
/// Angles are stored as f64 for TBP numerical precision and are only
/// narrowed to f32 at the Bevy boundary (see [`ObjectRotation::to_quat`]).
#[derive(Clone, Debug, PartialEq)]
pub struct ObjectRotation {
    /// Rotation around X-axis (pitch) in degrees
    pub pitch: f64,
    /// Rotation around Y-axis (yaw) in degrees
    pub yaw: f64,
    /// Rotation around Z-axis (roll) in degrees
    pub roll: f64,
}
153
154impl ObjectRotation {
155    /// Create a new rotation from Euler angles in degrees
156    pub fn new(pitch: f64, yaw: f64, roll: f64) -> Self {
157        Self { pitch, yaw, roll }
158    }
159
160    /// Create from TBP-style array [pitch, yaw, roll] in degrees
161    pub fn from_array(arr: [f64; 3]) -> Self {
162        Self {
163            pitch: arr[0],
164            yaw: arr[1],
165            roll: arr[2],
166        }
167    }
168
169    /// Identity rotation (no rotation)
170    pub fn identity() -> Self {
171        Self::new(0.0, 0.0, 0.0)
172    }
173
174    /// TBP benchmark rotations: [0,0,0], [0,90,0], [0,180,0]
175    /// Used in shorter YCB experiments to reduce computational load.
176    pub fn tbp_benchmark_rotations() -> Vec<Self> {
177        vec![
178            Self::from_array([0.0, 0.0, 0.0]),
179            Self::from_array([0.0, 90.0, 0.0]),
180            Self::from_array([0.0, 180.0, 0.0]),
181        ]
182    }
183
184    /// TBP 14 known orientations (cube faces and corners)
185    /// These are the orientations objects are learned in during training.
186    pub fn tbp_known_orientations() -> Vec<Self> {
187        vec![
188            // 6 cube faces (90° rotations around each axis)
189            Self::from_array([0.0, 0.0, 0.0]),   // Front
190            Self::from_array([0.0, 90.0, 0.0]),  // Right
191            Self::from_array([0.0, 180.0, 0.0]), // Back
192            Self::from_array([0.0, 270.0, 0.0]), // Left
193            Self::from_array([90.0, 0.0, 0.0]),  // Top
194            Self::from_array([-90.0, 0.0, 0.0]), // Bottom
195            // 8 cube corners (45° rotations)
196            Self::from_array([45.0, 45.0, 0.0]),
197            Self::from_array([45.0, 135.0, 0.0]),
198            Self::from_array([45.0, 225.0, 0.0]),
199            Self::from_array([45.0, 315.0, 0.0]),
200            Self::from_array([-45.0, 45.0, 0.0]),
201            Self::from_array([-45.0, 135.0, 0.0]),
202            Self::from_array([-45.0, 225.0, 0.0]),
203            Self::from_array([-45.0, 315.0, 0.0]),
204        ]
205    }
206
207    /// Convert to Bevy Quat (converts f64 to f32 for Bevy compatibility)
208    pub fn to_quat(&self) -> Quat {
209        Quat::from_euler(
210            EulerRot::XYZ,
211            (self.pitch as f32).to_radians(),
212            (self.yaw as f32).to_radians(),
213            (self.roll as f32).to_radians(),
214        )
215    }
216
217    /// Convert to Bevy Transform (rotation only, no translation)
218    pub fn to_transform(&self) -> Transform {
219        Transform::from_rotation(self.to_quat())
220    }
221}
222
223impl Default for ObjectRotation {
224    fn default() -> Self {
225        Self::identity()
226    }
227}
228
/// Configuration for viewpoint generation matching TBP habitat sensor behavior.
/// Uses spherical coordinates to capture objects from multiple elevations.
///
/// Consumed by [`generate_viewpoints`], which produces one camera transform
/// per (pitch angle, yaw position) pair, each looking at the origin.
#[derive(Clone, Debug)]
pub struct ViewpointConfig {
    /// Distance from camera to object center (meters)
    pub radius: f32,
    /// Number of horizontal positions (yaw angles) around the object
    pub yaw_count: usize,
    /// Elevation angles in degrees (pitch). Positive = above, negative = below.
    pub pitch_angles_deg: Vec<f32>,
}
240
241impl Default for ViewpointConfig {
242    fn default() -> Self {
243        Self {
244            radius: 0.5,
245            yaw_count: 8,
246            // Three elevations: below (-30°), level (0°), above (+30°)
247            // This matches TBP's look_up/look_down capability
248            pitch_angles_deg: vec![-30.0, 0.0, 30.0],
249        }
250    }
251}
252
253impl ViewpointConfig {
254    /// Total number of viewpoints this config will generate
255    pub fn viewpoint_count(&self) -> usize {
256        self.yaw_count * self.pitch_angles_deg.len()
257    }
258}
259
/// Full sensor configuration for capture sessions.
///
/// Derives Bevy `Resource` so capture systems can read it from the world.
#[derive(Clone, Debug, Resource)]
pub struct SensorConfig {
    /// Viewpoint configuration (camera positions)
    pub viewpoints: ViewpointConfig,
    /// Object rotations to capture (each rotation generates a full viewpoint set)
    pub object_rotations: Vec<ObjectRotation>,
    /// Output directory for captures
    pub output_dir: String,
    /// Filename pattern (use {view} for view index, {rot} for rotation index)
    pub filename_pattern: String,
}
272
273impl Default for SensorConfig {
274    fn default() -> Self {
275        Self {
276            viewpoints: ViewpointConfig::default(),
277            object_rotations: vec![ObjectRotation::identity()],
278            output_dir: ".".to_string(),
279            filename_pattern: "capture_{rot}_{view}.png".to_string(),
280        }
281    }
282}
283
284impl SensorConfig {
285    /// Create config for TBP benchmark comparison (3 rotations × 24 viewpoints = 72 captures)
286    pub fn tbp_benchmark() -> Self {
287        Self {
288            viewpoints: ViewpointConfig::default(),
289            object_rotations: ObjectRotation::tbp_benchmark_rotations(),
290            output_dir: ".".to_string(),
291            filename_pattern: "capture_{rot}_{view}.png".to_string(),
292        }
293    }
294
295    /// Create config for full TBP training (14 rotations × 24 viewpoints = 336 captures)
296    pub fn tbp_full_training() -> Self {
297        Self {
298            viewpoints: ViewpointConfig::default(),
299            object_rotations: ObjectRotation::tbp_known_orientations(),
300            output_dir: ".".to_string(),
301            filename_pattern: "capture_{rot}_{view}.png".to_string(),
302        }
303    }
304
305    /// Total number of captures this config will generate
306    pub fn total_captures(&self) -> usize {
307        self.viewpoints.viewpoint_count() * self.object_rotations.len()
308    }
309}
310
311/// Generate camera viewpoints using spherical coordinates.
312///
313/// Spherical coordinate system (matching TBP habitat sensor conventions):
314/// - Yaw: horizontal rotation around Y-axis (0° to 360°)
315/// - Pitch: elevation angle from horizontal plane (-90° to +90°)
316/// - Radius: distance from origin (object center)
317pub fn generate_viewpoints(config: &ViewpointConfig) -> Vec<Transform> {
318    let mut views = Vec::with_capacity(config.viewpoint_count());
319
320    for pitch_deg in &config.pitch_angles_deg {
321        let pitch = pitch_deg.to_radians();
322
323        for i in 0..config.yaw_count {
324            let yaw = (i as f32) * 2.0 * PI / (config.yaw_count as f32);
325
326            // Spherical to Cartesian conversion (Y-up coordinate system)
327            // x = r * cos(pitch) * sin(yaw)
328            // y = r * sin(pitch)
329            // z = r * cos(pitch) * cos(yaw)
330            let x = config.radius * pitch.cos() * yaw.sin();
331            let y = config.radius * pitch.sin();
332            let z = config.radius * pitch.cos() * yaw.cos();
333
334            let transform = Transform::from_xyz(x, y, z).looking_at(Vec3::ZERO, Vec3::Y);
335            views.push(transform);
336        }
337    }
338    views
339}
340
/// Marker component for the target object being captured.
/// Unit struct: carries no data, only tags the entity for queries.
#[derive(Component)]
pub struct CaptureTarget;

/// Marker component for the capture camera.
/// Unit struct: carries no data, only tags the entity for queries.
#[derive(Component)]
pub struct CaptureCamera;
348
349// ============================================================================
350// Headless Rendering API (NEW)
351// ============================================================================
352
/// Configuration for headless rendering.
///
/// Matches TBP habitat sensor defaults: 64x64 resolution with RGBD output.
#[derive(Clone, Debug)]
pub struct RenderConfig {
    /// Image width in pixels (default: 64)
    pub width: u32,
    /// Image height in pixels (default: 64)
    pub height: u32,
    /// Zoom factor affecting field of view (default: 1.0)
    /// Use >1 to zoom in (narrower FOV), <1 to zoom out (wider FOV)
    /// NOTE(review): used as a divisor in `fov_radians`, so a zero zoom
    /// would yield a non-finite FOV — confirm callers always pass > 0.
    pub zoom: f32,
    /// Near clipping plane in meters (default: 0.01)
    pub near_plane: f32,
    /// Far clipping plane in meters (default: 10.0)
    pub far_plane: f32,
    /// Lighting configuration
    pub lighting: LightingConfig,
}
372
/// Lighting configuration for rendering.
///
/// Controls ambient light and point lights in the scene.
/// Presets: [`LightingConfig::bright`], [`LightingConfig::soft`],
/// [`LightingConfig::unlit`].
#[derive(Clone, Debug)]
pub struct LightingConfig {
    /// Ambient light brightness (0.0 - 1.0, default: 0.3)
    pub ambient_brightness: f32,
    /// Key light intensity in lumens (default: 1500.0)
    pub key_light_intensity: f32,
    /// Key light position [x, y, z] (default: [4.0, 8.0, 4.0])
    pub key_light_position: [f32; 3],
    /// Fill light intensity in lumens (default: 500.0)
    pub fill_light_intensity: f32,
    /// Fill light position [x, y, z] (default: [-4.0, 2.0, -4.0])
    pub fill_light_position: [f32; 3],
    /// Enable shadows (default: false for performance)
    pub shadows_enabled: bool,
}
391
392impl Default for LightingConfig {
393    fn default() -> Self {
394        Self {
395            ambient_brightness: 0.3,
396            key_light_intensity: 1500.0,
397            key_light_position: [4.0, 8.0, 4.0],
398            fill_light_intensity: 500.0,
399            fill_light_position: [-4.0, 2.0, -4.0],
400            shadows_enabled: false,
401        }
402    }
403}
404
405impl LightingConfig {
406    /// Bright lighting for clear visibility
407    pub fn bright() -> Self {
408        Self {
409            ambient_brightness: 0.5,
410            key_light_intensity: 2000.0,
411            key_light_position: [4.0, 8.0, 4.0],
412            fill_light_intensity: 800.0,
413            fill_light_position: [-4.0, 2.0, -4.0],
414            shadows_enabled: false,
415        }
416    }
417
418    /// Soft lighting with minimal shadows
419    pub fn soft() -> Self {
420        Self {
421            ambient_brightness: 0.4,
422            key_light_intensity: 1000.0,
423            key_light_position: [3.0, 6.0, 3.0],
424            fill_light_intensity: 600.0,
425            fill_light_position: [-3.0, 3.0, -3.0],
426            shadows_enabled: false,
427        }
428    }
429
430    /// Unlit mode - ambient only, no point lights
431    pub fn unlit() -> Self {
432        Self {
433            ambient_brightness: 1.0,
434            key_light_intensity: 0.0,
435            key_light_position: [0.0, 0.0, 0.0],
436            fill_light_intensity: 0.0,
437            fill_light_position: [0.0, 0.0, 0.0],
438            shadows_enabled: false,
439        }
440    }
441}
442
impl Default for RenderConfig {
    /// Defaults to the TBP-compatible 64x64 RGBD configuration.
    fn default() -> Self {
        Self::tbp_default()
    }
}
448
449impl RenderConfig {
450    /// TBP-compatible 64x64 RGBD sensor configuration.
451    ///
452    /// This matches the default resolution used in TBP's habitat sensor.
453    pub fn tbp_default() -> Self {
454        Self {
455            width: 64,
456            height: 64,
457            zoom: 1.0,
458            near_plane: 0.01,
459            far_plane: 10.0,
460            lighting: LightingConfig::default(),
461        }
462    }
463
464    /// Higher resolution configuration for debugging and visualization.
465    pub fn preview() -> Self {
466        Self {
467            width: 256,
468            height: 256,
469            zoom: 1.0,
470            near_plane: 0.01,
471            far_plane: 10.0,
472            lighting: LightingConfig::default(),
473        }
474    }
475
476    /// High resolution configuration for detailed captures.
477    pub fn high_res() -> Self {
478        Self {
479            width: 512,
480            height: 512,
481            zoom: 1.0,
482            near_plane: 0.01,
483            far_plane: 10.0,
484            lighting: LightingConfig::default(),
485        }
486    }
487
488    /// Calculate vertical field of view in radians based on zoom.
489    ///
490    /// Base FOV is 60 degrees, adjusted by zoom factor.
491    pub fn fov_radians(&self) -> f32 {
492        let base_fov_deg = 60.0_f32;
493        (base_fov_deg / self.zoom).to_radians()
494    }
495
496    /// Compute camera intrinsics for use with neocortx.
497    ///
498    /// Returns focal length and principal point based on resolution and FOV.
499    /// Uses f64 for TBP numerical precision compatibility.
500    pub fn intrinsics(&self) -> CameraIntrinsics {
501        let fov = self.fov_radians() as f64;
502        // focal_length = (height/2) / tan(fov/2)
503        let fy = (self.height as f64 / 2.0) / (fov / 2.0).tan();
504        let fx = fy; // Assuming square pixels
505
506        CameraIntrinsics {
507            focal_length: [fx, fy],
508            principal_point: [self.width as f64 / 2.0, self.height as f64 / 2.0],
509            image_size: [self.width, self.height],
510        }
511    }
512}
513
/// Camera intrinsic parameters for 3D reconstruction.
///
/// Compatible with neocortx's VisionIntrinsics format.
/// Uses f64 for TBP numerical precision compatibility.
/// The pinhole convention here treats +Z as "in front of the camera"
/// (see [`CameraIntrinsics::project`], which rejects z <= 0).
#[derive(Clone, Debug, PartialEq)]
pub struct CameraIntrinsics {
    /// Focal length in pixels (fx, fy)
    pub focal_length: [f64; 2],
    /// Principal point (cx, cy) - typically image center
    pub principal_point: [f64; 2],
    /// Image dimensions (width, height)
    pub image_size: [u32; 2],
}
527
528impl CameraIntrinsics {
529    /// Project a 3D point to 2D pixel coordinates.
530    pub fn project(&self, point: Vec3) -> Option<[f64; 2]> {
531        if point.z <= 0.0 {
532            return None;
533        }
534        let x = (point.x as f64 / point.z as f64) * self.focal_length[0] + self.principal_point[0];
535        let y = (point.y as f64 / point.z as f64) * self.focal_length[1] + self.principal_point[1];
536        Some([x, y])
537    }
538
539    /// Unproject a 2D pixel to a 3D point at given depth.
540    pub fn unproject(&self, pixel: [f64; 2], depth: f64) -> [f64; 3] {
541        let x = (pixel[0] - self.principal_point[0]) / self.focal_length[0] * depth;
542        let y = (pixel[1] - self.principal_point[1]) / self.focal_length[1] * depth;
543        [x, y, depth]
544    }
545}
546
/// Output from headless rendering containing RGBA and depth data.
///
/// Both buffers are row-major: pixel (x, y) lives at index `y * width + x`
/// (times 4 for the RGBA buffer), as used by the accessor methods.
#[derive(Clone, Debug)]
pub struct RenderOutput {
    /// RGBA pixel data in row-major order (width * height * 4 bytes)
    pub rgba: Vec<u8>,
    /// Depth values in meters, row-major order (width * height f64s)
    /// Values are linear depth from camera, not normalized.
    /// Uses f64 for TBP numerical precision compatibility.
    pub depth: Vec<f64>,
    /// Image width in pixels
    pub width: u32,
    /// Image height in pixels
    pub height: u32,
    /// Camera intrinsics used for this render
    pub intrinsics: CameraIntrinsics,
    /// Camera transform (world position and orientation)
    pub camera_transform: Transform,
    /// Object rotation applied during render
    pub object_rotation: ObjectRotation,
}
567
568impl RenderOutput {
569    /// Get RGBA pixel at (x, y). Returns None if out of bounds.
570    pub fn get_rgba(&self, x: u32, y: u32) -> Option<[u8; 4]> {
571        if x >= self.width || y >= self.height {
572            return None;
573        }
574        let idx = ((y * self.width + x) * 4) as usize;
575        Some([
576            self.rgba[idx],
577            self.rgba[idx + 1],
578            self.rgba[idx + 2],
579            self.rgba[idx + 3],
580        ])
581    }
582
583    /// Get depth value at (x, y) in meters. Returns None if out of bounds.
584    pub fn get_depth(&self, x: u32, y: u32) -> Option<f64> {
585        if x >= self.width || y >= self.height {
586            return None;
587        }
588        let idx = (y * self.width + x) as usize;
589        Some(self.depth[idx])
590    }
591
592    /// Get RGB pixel (without alpha) at (x, y).
593    pub fn get_rgb(&self, x: u32, y: u32) -> Option<[u8; 3]> {
594        self.get_rgba(x, y).map(|rgba| [rgba[0], rgba[1], rgba[2]])
595    }
596
597    /// Convert to neocortx-compatible image format: Vec<Vec<[u8; 3]>>
598    pub fn to_rgb_image(&self) -> Vec<Vec<[u8; 3]>> {
599        let mut image = Vec::with_capacity(self.height as usize);
600        for y in 0..self.height {
601            let mut row = Vec::with_capacity(self.width as usize);
602            for x in 0..self.width {
603                row.push(self.get_rgb(x, y).unwrap_or([0, 0, 0]));
604            }
605            image.push(row);
606        }
607        image
608    }
609
610    /// Convert depth to neocortx-compatible format: Vec<Vec<f64>>
611    pub fn to_depth_image(&self) -> Vec<Vec<f64>> {
612        let mut image = Vec::with_capacity(self.height as usize);
613        for y in 0..self.height {
614            let mut row = Vec::with_capacity(self.width as usize);
615            for x in 0..self.width {
616                row.push(self.get_depth(x, y).unwrap_or(0.0));
617            }
618            image.push(row);
619        }
620        image
621    }
622}
623
/// Errors that can occur during rendering.
///
/// Each variant carries a String payload (a path or message) that the
/// `Display` impl includes in the formatted error text.
#[derive(Debug, Clone)]
pub enum RenderError {
    /// Object mesh file not found
    MeshNotFound(String),
    /// Object texture file not found
    TextureNotFound(String),
    /// Bevy rendering failed
    RenderFailed(String),
    /// Invalid configuration
    InvalidConfig(String),
}
636
637impl std::fmt::Display for RenderError {
638    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
639        match self {
640            RenderError::MeshNotFound(path) => write!(f, "Mesh not found: {}", path),
641            RenderError::TextureNotFound(path) => write!(f, "Texture not found: {}", path),
642            RenderError::RenderFailed(msg) => write!(f, "Render failed: {}", msg),
643            RenderError::InvalidConfig(msg) => write!(f, "Invalid config: {}", msg),
644        }
645    }
646}
647
// Marker impl: RenderError already has Debug and Display, which is all
// std::error::Error requires; no overrides needed.
impl std::error::Error for RenderError {}
649
/// Render a YCB object to an in-memory buffer.
///
/// This is the primary API for headless rendering. It spawns a minimal Bevy app,
/// renders a single frame, extracts the RGBA and depth data, and shuts down.
/// Because each call pays that app startup/teardown cost, prefer the batch
/// API (`create_batch_renderer` et al.) when rendering many viewpoints.
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory (e.g., "/tmp/ycb/003_cracker_box")
/// * `camera_transform` - Camera position and orientation (use `generate_viewpoints`)
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration (resolution, depth range, etc.)
///
/// # Errors
/// Propagates any [`RenderError`] reported by the underlying renderer.
///
/// # Example
/// ```ignore
/// use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
/// use std::path::Path;
///
/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
/// let output = render_to_buffer(
///     Path::new("/tmp/ycb/003_cracker_box"),
///     &viewpoints[0],
///     &ObjectRotation::identity(),
///     &RenderConfig::tbp_default(),
/// )?;
/// ```
pub fn render_to_buffer(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
) -> Result<RenderOutput, RenderError> {
    // Use the actual Bevy headless renderer
    render::render_headless(object_dir, camera_transform, object_rotation, config)
}
683
684/// Render all viewpoints and rotations for a YCB object.
685///
686/// Convenience function that renders all combinations of viewpoints and rotations.
687///
688/// # Arguments
689/// * `object_dir` - Path to YCB object directory
690/// * `viewpoint_config` - Viewpoint configuration (camera positions)
691/// * `rotations` - Object rotations to render
692/// * `render_config` - Render configuration
693///
694/// # Returns
695/// Vector of RenderOutput, one per viewpoint × rotation combination.
696pub fn render_all_viewpoints(
697    object_dir: &Path,
698    viewpoint_config: &ViewpointConfig,
699    rotations: &[ObjectRotation],
700    render_config: &RenderConfig,
701) -> Result<Vec<RenderOutput>, RenderError> {
702    let viewpoints = generate_viewpoints(viewpoint_config);
703    let mut outputs = Vec::with_capacity(viewpoints.len() * rotations.len());
704
705    for rotation in rotations {
706        for viewpoint in &viewpoints {
707            let output = render_to_buffer(object_dir, viewpoint, rotation, render_config)?;
708            outputs.push(output);
709        }
710    }
711
712    Ok(outputs)
713}
714
/// Render directly to files (for subprocess mode).
///
/// This function is designed for subprocess rendering where the process will exit
/// after rendering. It saves RGBA and depth data directly to the specified files
/// before the process terminates.
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory
/// * `camera_transform` - Camera position and orientation
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration
/// * `rgba_path` - Output path for RGBA PNG
/// * `depth_path` - Output path for depth data (raw f32 bytes)
///
/// # Note
/// This function may call `std::process::exit(0)` and not return.
pub fn render_to_files(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
    rgba_path: &Path,
    depth_path: &Path,
) -> Result<(), RenderError> {
    // NOTE(review): the on-disk formats are defined inside render::render_to_files;
    // the "raw f32 bytes" claim above should be confirmed there, since the
    // in-memory RenderOutput stores depth as f64.
    render::render_to_files(
        object_dir,
        camera_transform,
        object_rotation,
        config,
        rgba_path,
        depth_path,
    )
}
748
749// Re-export batch types for convenient API access
750pub use batch::{
751    BatchRenderConfig, BatchRenderError, BatchRenderOutput, BatchRenderRequest, BatchRenderer,
752    BatchState, RenderStatus,
753};
754
/// Create a new batch renderer for efficient multi-viewpoint rendering.
///
/// This creates a persistent Bevy app that can render multiple viewpoints without
/// subprocess spawning overhead. Achieves 10-100x speedup vs individual render_to_buffer calls.
///
/// # Arguments
/// * `config` - Batch rendering configuration
///
/// # Returns
/// A BatchRenderer instance ready to queue render requests
///
/// # Example
/// ```ignore
/// use bevy_sensor::{create_batch_renderer, queue_render_request, render_next_in_batch, BatchRenderConfig};
///
/// let mut renderer = create_batch_renderer(&BatchRenderConfig::default())?;
/// ```
pub fn create_batch_renderer(config: &BatchRenderConfig) -> Result<BatchRenderer, RenderError> {
    // For now, just create an empty renderer that will need a Bevy app
    // The actual app creation happens when rendering starts.
    // This currently cannot fail; the Result return type is kept for
    // forward compatibility with real app initialization.
    Ok(BatchRenderer::new(config.clone()))
}
777
778/// Queue a render request for batch processing.
779///
780/// Adds a render request to the batch queue. Requests are processed in order
781/// when you call render_next_in_batch().
782///
783/// # Arguments
784/// * `renderer` - The batch renderer instance
785/// * `request` - The render request
786///
787/// # Returns
788/// Ok if queued successfully, Err if queue is full
789///
790/// # Example
791/// ```ignore
792/// use bevy_sensor::{batch::BatchRenderRequest, RenderConfig, ObjectRotation};
793/// use std::path::PathBuf;
794///
795/// queue_render_request(&mut renderer, BatchRenderRequest {
796///     object_dir: PathBuf::from("/tmp/ycb/003_cracker_box"),
797///     viewpoint: camera_transform,
798///     object_rotation: ObjectRotation::identity(),
799///     render_config: RenderConfig::tbp_default(),
800/// })?;
801/// ```
802pub fn queue_render_request(
803    renderer: &mut BatchRenderer,
804    request: BatchRenderRequest,
805) -> Result<(), RenderError> {
806    renderer
807        .queue_request(request)
808        .map_err(|e| RenderError::RenderFailed(e.to_string()))
809}
810
811/// Process and execute the next render in the batch queue.
812///
813/// Executes a single render from the queued requests. Returns None when the queue is empty.
814/// Use this in a loop to process all queued renders.
815///
816/// # Arguments
817/// * `renderer` - The batch renderer instance
818/// * `timeout_ms` - Timeout in milliseconds for this render
819///
820/// # Returns
821/// Some(output) if a render completed, None if queue is empty
822///
823/// # Example
824/// ```ignore
825/// loop {
826///     match render_next_in_batch(&mut renderer, 500)? {
827///         Some(output) => println!("Render complete: {:?}", output.status),
828///         None => break, // All renders done
829///     }
830/// }
831/// ```
832pub fn render_next_in_batch(
833    renderer: &mut BatchRenderer,
834    _timeout_ms: u32,
835) -> Result<Option<BatchRenderOutput>, RenderError> {
836    // This is a stub - the actual implementation will require a running Bevy app
837    // For now, just render single batches immediately using render_to_buffer
838    if let Some(request) = renderer.pending_requests.pop_front() {
839        let output = render_to_buffer(
840            &request.object_dir,
841            &request.viewpoint,
842            &request.object_rotation,
843            &request.render_config,
844        )?;
845        let batch_output = BatchRenderOutput::from_render_output(request, output);
846        renderer.completed_results.push(batch_output.clone());
847        renderer.renders_processed += 1;
848        Ok(Some(batch_output))
849    } else {
850        Ok(None)
851    }
852}
853
854/// Render multiple requests in batch (convenience function).
855///
856/// Queues all requests and executes them in batch, returning all results.
857/// Simpler than manage queue + loop for one-off batches.
858///
859/// # Arguments
860/// * `requests` - Vector of render requests
861/// * `config` - Batch rendering configuration
862///
863/// # Returns
864/// Vector of BatchRenderOutput results in same order as input
865///
866/// # Example
867/// ```ignore
868/// use bevy_sensor::{render_batch, batch::BatchRenderRequest, BatchRenderConfig};
869///
870/// let results = render_batch(requests, &BatchRenderConfig::default())?;
871/// ```
872pub fn render_batch(
873    requests: Vec<BatchRenderRequest>,
874    config: &BatchRenderConfig,
875) -> Result<Vec<BatchRenderOutput>, RenderError> {
876    let mut renderer = create_batch_renderer(config)?;
877
878    // Queue all requests
879    for request in requests {
880        queue_render_request(&mut renderer, request)?;
881    }
882
883    // Execute all and collect results
884    let mut results = Vec::new();
885    while let Some(output) = render_next_in_batch(&mut renderer, config.frame_timeout_ms)? {
886        results.push(output);
887    }
888
889    Ok(results)
890}
891
892// Re-export bevy types that consumers will need
893pub use bevy::prelude::{Quat, Transform, Vec3};
894
#[cfg(test)]
mod tests {
    // Unit tests covering the pure-CPU API surface: Euler-angle rotation math,
    // spherical viewpoint generation, configuration defaults/presets, YCB path
    // helpers, camera intrinsics, and render-output pixel accessors.
    // No GPU rendering is exercised here.
    use super::*;

    // identity() should zero all three Euler angles.
    #[test]
    fn test_object_rotation_identity() {
        let rot = ObjectRotation::identity();
        assert_eq!(rot.pitch, 0.0);
        assert_eq!(rot.yaw, 0.0);
        assert_eq!(rot.roll, 0.0);
    }

    // from_array maps [pitch, yaw, roll] positionally.
    #[test]
    fn test_object_rotation_from_array() {
        let rot = ObjectRotation::from_array([10.0, 20.0, 30.0]);
        assert_eq!(rot.pitch, 10.0);
        assert_eq!(rot.yaw, 20.0);
        assert_eq!(rot.roll, 30.0);
    }

    // TBP benchmark set is exactly three yaw-only rotations: 0°, 90°, 180°.
    #[test]
    fn test_tbp_benchmark_rotations() {
        let rotations = ObjectRotation::tbp_benchmark_rotations();
        assert_eq!(rotations.len(), 3);
        assert_eq!(rotations[0], ObjectRotation::from_array([0.0, 0.0, 0.0]));
        assert_eq!(rotations[1], ObjectRotation::from_array([0.0, 90.0, 0.0]));
        assert_eq!(rotations[2], ObjectRotation::from_array([0.0, 180.0, 0.0]));
    }

    // The full TBP training preset defines 14 known orientations.
    #[test]
    fn test_tbp_known_orientations_count() {
        let orientations = ObjectRotation::tbp_known_orientations();
        assert_eq!(orientations.len(), 14);
    }

    // Zero Euler angles must convert to (approximately) the identity quaternion.
    #[test]
    fn test_rotation_to_quat() {
        let rot = ObjectRotation::identity();
        let quat = rot.to_quat();
        // Identity quaternion should be approximately (1, 0, 0, 0)
        assert!((quat.w - 1.0).abs() < 0.001);
        assert!(quat.x.abs() < 0.001);
        assert!(quat.y.abs() < 0.001);
        assert!(quat.z.abs() < 0.001);
    }

    // A pure yaw should land on the quaternion's w/y components only.
    #[test]
    fn test_rotation_90_yaw() {
        let rot = ObjectRotation::new(0.0, 90.0, 0.0);
        let quat = rot.to_quat();
        // 90° Y rotation: w ≈ 0.707, y ≈ 0.707
        assert!((quat.w - 0.707).abs() < 0.01);
        assert!((quat.y - 0.707).abs() < 0.01);
    }

    // Default viewpoint sphere: 0.5 radius, 8 yaw steps, 3 pitch angles.
    #[test]
    fn test_viewpoint_config_default() {
        let config = ViewpointConfig::default();
        assert_eq!(config.radius, 0.5);
        assert_eq!(config.yaw_count, 8);
        assert_eq!(config.pitch_angles_deg.len(), 3);
    }

    // viewpoint_count() is yaw_count × number of pitch angles.
    #[test]
    fn test_viewpoint_count() {
        let config = ViewpointConfig::default();
        assert_eq!(config.viewpoint_count(), 24); // 8 × 3
    }

    // generate_viewpoints must emit exactly viewpoint_count() transforms.
    #[test]
    fn test_generate_viewpoints_count() {
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 24);
    }

    // Every generated viewpoint sits on the sphere of the configured radius.
    #[test]
    fn test_viewpoints_spherical_radius() {
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let actual_radius = transform.translation.length();
            assert!(
                (actual_radius - config.radius).abs() < 0.001,
                "Viewpoint {} has incorrect radius: {} (expected {})",
                i,
                actual_radius,
                config.radius
            );
        }
    }

    // Every viewpoint's forward vector must point (almost exactly) at the origin.
    #[test]
    fn test_viewpoints_looking_at_origin() {
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let forward = transform.forward();
            let to_origin = (Vec3::ZERO - transform.translation).normalize();
            let dot = forward.dot(to_origin);
            assert!(
                dot > 0.99,
                "Viewpoint {} not looking at origin, dot product: {}",
                i,
                dot
            );
        }
    }

    // Default sensor config: one (identity) rotation over the 24 viewpoints.
    #[test]
    fn test_sensor_config_default() {
        let config = SensorConfig::default();
        assert_eq!(config.object_rotations.len(), 1);
        assert_eq!(config.total_captures(), 24);
    }

    // Benchmark preset: total captures = rotations × viewpoints.
    #[test]
    fn test_sensor_config_tbp_benchmark() {
        let config = SensorConfig::tbp_benchmark();
        assert_eq!(config.object_rotations.len(), 3);
        assert_eq!(config.total_captures(), 72); // 3 rotations × 24 viewpoints
    }

    // Full-training preset: 14 orientations over the 24 viewpoints.
    #[test]
    fn test_sensor_config_tbp_full() {
        let config = SensorConfig::tbp_full_training();
        assert_eq!(config.object_rotations.len(), 14);
        assert_eq!(config.total_captures(), 336); // 14 rotations × 24 viewpoints
    }

    #[test]
    fn test_ycb_representative_objects() {
        // Verify representative objects are defined
        assert_eq!(crate::ycb::REPRESENTATIVE_OBJECTS.len(), 3);
        assert!(crate::ycb::REPRESENTATIVE_OBJECTS.contains(&"003_cracker_box"));
    }

    #[test]
    fn test_ycb_ten_objects() {
        // Verify ten objects subset is defined
        assert_eq!(crate::ycb::TEN_OBJECTS.len(), 10);
    }

    // Mesh paths follow the YCB google_16k layout under the dataset root.
    #[test]
    fn test_ycb_object_mesh_path() {
        let path = crate::ycb::object_mesh_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path.to_string_lossy(),
            "/tmp/ycb/003_cracker_box/google_16k/textured.obj"
        );
    }

    // Texture path shares the same layout, with the texture_map.png leaf.
    #[test]
    fn test_ycb_object_texture_path() {
        let path = crate::ycb::object_texture_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path.to_string_lossy(),
            "/tmp/ycb/003_cracker_box/google_16k/texture_map.png"
        );
    }

    // =========================================================================
    // Headless Rendering API Tests
    // =========================================================================

    // TBP default target: 64×64, unit zoom, 0.01–10.0 clip planes.
    #[test]
    fn test_render_config_tbp_default() {
        let config = RenderConfig::tbp_default();
        assert_eq!(config.width, 64);
        assert_eq!(config.height, 64);
        assert_eq!(config.zoom, 1.0);
        assert_eq!(config.near_plane, 0.01);
        assert_eq!(config.far_plane, 10.0);
    }

    // Preview preset renders at 256×256.
    #[test]
    fn test_render_config_preview() {
        let config = RenderConfig::preview();
        assert_eq!(config.width, 256);
        assert_eq!(config.height, 256);
    }

    // Default::default() must agree with the TBP preset.
    #[test]
    fn test_render_config_default_is_tbp() {
        let default = RenderConfig::default();
        let tbp = RenderConfig::tbp_default();
        assert_eq!(default.width, tbp.width);
        assert_eq!(default.height, tbp.height);
    }

    // fov_radians() returns the base FOV and shrinks as zoom increases.
    #[test]
    fn test_render_config_fov() {
        let config = RenderConfig::tbp_default();
        let fov = config.fov_radians();
        // Base FOV is 60 degrees = ~1.047 radians
        assert!((fov - 1.047).abs() < 0.01);

        // Zoom in should reduce FOV
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..config
        };
        assert!(zoomed.fov_radians() < fov);
    }

    // intrinsics() derives image size, principal point, and focal length.
    #[test]
    fn test_render_config_intrinsics() {
        let config = RenderConfig::tbp_default();
        let intrinsics = config.intrinsics();

        assert_eq!(intrinsics.image_size, [64, 64]);
        assert_eq!(intrinsics.principal_point, [32.0, 32.0]);
        // Focal length should be positive and reasonable
        assert!(intrinsics.focal_length[0] > 0.0);
        assert!(intrinsics.focal_length[1] > 0.0);
        // For 64x64 with 60° FOV, focal length ≈ 55.4 pixels
        assert!((intrinsics.focal_length[0] - 55.4).abs() < 1.0);
    }

    // project(): pinhole projection into pixel coordinates.
    #[test]
    fn test_camera_intrinsics_project() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // A point on the optical axis projects to the principal point
        let center = intrinsics.project(Vec3::new(0.0, 0.0, 1.0));
        assert!(center.is_some());
        let [x, y] = center.unwrap();
        assert!((x - 32.0).abs() < 0.001);
        assert!((y - 32.0).abs() < 0.001);

        // Point behind camera returns None
        let behind = intrinsics.project(Vec3::new(0.0, 0.0, -1.0));
        assert!(behind.is_none());
    }

    // unproject(): pixel + depth back to a camera-frame 3D point.
    #[test]
    fn test_camera_intrinsics_unproject() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Unproject principal point at depth 1.0
        let point = intrinsics.unproject([32.0, 32.0], 1.0);
        assert!((point[0]).abs() < 0.001); // x
        assert!((point[1]).abs() < 0.001); // y
        assert!((point[2] - 1.0).abs() < 0.001); // z
    }

    // get_rgba indexes (x, y) row-major into the flat RGBA buffer,
    // returning None when out of bounds.
    #[test]
    fn test_render_output_get_rgba() {
        let output = RenderOutput {
            rgba: vec![
                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
            ],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        // Top-left: red
        assert_eq!(output.get_rgba(0, 0), Some([255, 0, 0, 255]));
        // Top-right: green
        assert_eq!(output.get_rgba(1, 0), Some([0, 255, 0, 255]));
        // Bottom-left: blue
        assert_eq!(output.get_rgba(0, 1), Some([0, 0, 255, 255]));
        // Bottom-right: white
        assert_eq!(output.get_rgba(1, 1), Some([255, 255, 255, 255]));
        // Out of bounds
        assert_eq!(output.get_rgba(2, 0), None);
    }

    // get_depth mirrors get_rgba's indexing over the f32 depth buffer.
    #[test]
    fn test_render_output_get_depth() {
        let output = RenderOutput {
            rgba: vec![0u8; 16],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        assert_eq!(output.get_depth(0, 0), Some(1.0));
        assert_eq!(output.get_depth(1, 0), Some(2.0));
        assert_eq!(output.get_depth(0, 1), Some(3.0));
        assert_eq!(output.get_depth(1, 1), Some(4.0));
        assert_eq!(output.get_depth(2, 0), None);
    }

    // to_rgb_image reshapes the flat buffer into rows of [r, g, b] triples.
    #[test]
    fn test_render_output_to_rgb_image() {
        let output = RenderOutput {
            rgba: vec![
                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
            ],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        let image = output.to_rgb_image();
        assert_eq!(image.len(), 2); // 2 rows
        assert_eq!(image[0].len(), 2); // 2 columns
        assert_eq!(image[0][0], [255, 0, 0]); // Red
        assert_eq!(image[0][1], [0, 255, 0]); // Green
        assert_eq!(image[1][0], [0, 0, 255]); // Blue
        assert_eq!(image[1][1], [255, 255, 255]); // White
    }

    // to_depth_image reshapes the flat depth buffer into rows.
    #[test]
    fn test_render_output_to_depth_image() {
        let output = RenderOutput {
            rgba: vec![0u8; 16],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        let depth_image = output.to_depth_image();
        assert_eq!(depth_image.len(), 2);
        assert_eq!(depth_image[0], vec![1.0, 2.0]);
        assert_eq!(depth_image[1], vec![3.0, 4.0]);
    }

    // Display impl must include both the variant description and the payload.
    #[test]
    fn test_render_error_display() {
        let err = RenderError::MeshNotFound("/path/to/mesh.obj".to_string());
        assert!(err.to_string().contains("Mesh not found"));
        assert!(err.to_string().contains("/path/to/mesh.obj"));
    }

    // =========================================================================
    // Edge Case Tests
    // =========================================================================

    #[test]
    fn test_object_rotation_extreme_angles() {
        // Test angles beyond 360 degrees
        let rot = ObjectRotation::new(450.0, -720.0, 1080.0);
        let quat = rot.to_quat();
        // Quaternion should still be valid (normalized)
        assert!((quat.length() - 1.0).abs() < 0.001);
    }

    // to_transform yields a pure rotation (no translation component).
    #[test]
    fn test_object_rotation_to_transform() {
        let rot = ObjectRotation::new(45.0, 90.0, 0.0);
        let transform = rot.to_transform();
        // Transform should have no translation
        assert_eq!(transform.translation, Vec3::ZERO);
        // Should have rotation
        assert!(transform.rotation != Quat::IDENTITY);
    }

    // Degenerate config: one yaw × one pitch = a single viewpoint on +Z.
    #[test]
    fn test_viewpoint_config_single_viewpoint() {
        let config = ViewpointConfig {
            radius: 1.0,
            yaw_count: 1,
            pitch_angles_deg: vec![0.0],
        };
        assert_eq!(config.viewpoint_count(), 1);
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 1);
        // Single viewpoint at yaw=0, pitch=0 should be at (0, 0, radius)
        let pos = viewpoints[0].translation;
        assert!((pos.x).abs() < 0.001);
        assert!((pos.y).abs() < 0.001);
        assert!((pos.z - 1.0).abs() < 0.001);
    }

    // Changing the radius scales all viewpoint positions linearly.
    #[test]
    fn test_viewpoint_radius_scaling() {
        let config1 = ViewpointConfig {
            radius: 0.5,
            yaw_count: 4,
            pitch_angles_deg: vec![0.0],
        };
        let config2 = ViewpointConfig {
            radius: 2.0,
            yaw_count: 4,
            pitch_angles_deg: vec![0.0],
        };

        let v1 = generate_viewpoints(&config1);
        let v2 = generate_viewpoints(&config2);

        // Viewpoints should scale proportionally
        for (vp1, vp2) in v1.iter().zip(v2.iter()) {
            let ratio = vp2.translation.length() / vp1.translation.length();
            assert!((ratio - 4.0).abs() < 0.01); // 2.0 / 0.5 = 4.0
        }
    }

    #[test]
    fn test_camera_intrinsics_project_at_z_zero() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Point at z=0 should return None (division by zero protection)
        let result = intrinsics.project(Vec3::new(1.0, 1.0, 0.0));
        assert!(result.is_none());
    }

    // project followed by unproject at the same depth must be the identity.
    #[test]
    fn test_camera_intrinsics_roundtrip() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Project a 3D point
        let original = Vec3::new(0.5, -0.3, 2.0);
        let projected = intrinsics.project(original).unwrap();

        // Unproject back with the same depth (convert f32 to f64)
        let unprojected = intrinsics.unproject(projected, original.z as f64);

        // Should get back approximately the same point
        assert!((unprojected[0] - original.x as f64).abs() < 0.001); // x
        assert!((unprojected[1] - original.y as f64).abs() < 0.001); // y
        assert!((unprojected[2] - original.z as f64).abs() < 0.001); // z
    }

    // A 0×0 output must not panic in any accessor.
    #[test]
    fn test_render_output_empty() {
        let output = RenderOutput {
            rgba: vec![],
            depth: vec![],
            width: 0,
            height: 0,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        // Should handle empty gracefully
        assert_eq!(output.get_rgba(0, 0), None);
        assert_eq!(output.get_depth(0, 0), None);
        assert!(output.to_rgb_image().is_empty());
        assert!(output.to_depth_image().is_empty());
    }

    // Smallest non-empty output: single pixel, all accessors agree.
    #[test]
    fn test_render_output_1x1() {
        let output = RenderOutput {
            rgba: vec![128, 64, 32, 255],
            depth: vec![0.5],
            width: 1,
            height: 1,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        assert_eq!(output.get_rgba(0, 0), Some([128, 64, 32, 255]));
        assert_eq!(output.get_depth(0, 0), Some(0.5));
        assert_eq!(output.get_rgb(0, 0), Some([128, 64, 32]));

        let rgb_img = output.to_rgb_image();
        assert_eq!(rgb_img.len(), 1);
        assert_eq!(rgb_img[0].len(), 1);
        assert_eq!(rgb_img[0][0], [128, 64, 32]);
    }

    // high_res preset: 512×512 with intrinsics centered accordingly.
    #[test]
    fn test_render_config_high_res() {
        let config = RenderConfig::high_res();
        assert_eq!(config.width, 512);
        assert_eq!(config.height, 512);

        let intrinsics = config.intrinsics();
        assert_eq!(intrinsics.image_size, [512, 512]);
        assert_eq!(intrinsics.principal_point, [256.0, 256.0]);
    }

    #[test]
    fn test_render_config_zoom_affects_fov() {
        let base = RenderConfig::tbp_default();
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..base.clone()
        };

        // Higher zoom = lower FOV
        assert!(zoomed.fov_radians() < base.fov_radians());
        // Specifically, 2x zoom = half FOV
        assert!((zoomed.fov_radians() - base.fov_radians() / 2.0).abs() < 0.01);
    }

    #[test]
    fn test_render_config_zoom_affects_intrinsics() {
        let base = RenderConfig::tbp_default();
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..base.clone()
        };

        // Higher zoom = higher focal length
        let base_intrinsics = base.intrinsics();
        let zoomed_intrinsics = zoomed.intrinsics();

        assert!(zoomed_intrinsics.focal_length[0] > base_intrinsics.focal_length[0]);
    }

    // Relative ordering of the lighting presets' intensities.
    #[test]
    fn test_lighting_config_variants() {
        let default = LightingConfig::default();
        let bright = LightingConfig::bright();
        let soft = LightingConfig::soft();
        let unlit = LightingConfig::unlit();

        // Bright should have higher intensity than default
        assert!(bright.key_light_intensity > default.key_light_intensity);

        // Unlit should have no point lights
        assert_eq!(unlit.key_light_intensity, 0.0);
        assert_eq!(unlit.fill_light_intensity, 0.0);
        assert_eq!(unlit.ambient_brightness, 1.0);

        // Soft should have lower intensity
        assert!(soft.key_light_intensity < default.key_light_intensity);
    }

    #[test]
    fn test_all_render_error_variants() {
        let errors = vec![
            RenderError::MeshNotFound("mesh.obj".to_string()),
            RenderError::TextureNotFound("texture.png".to_string()),
            RenderError::RenderFailed("GPU error".to_string()),
            RenderError::InvalidConfig("bad config".to_string()),
        ];

        for err in errors {
            // All variants should have Display impl
            let msg = err.to_string();
            assert!(!msg.is_empty());
        }
    }

    #[test]
    fn test_tbp_known_orientations_unique() {
        let orientations = ObjectRotation::tbp_known_orientations();

        // All 14 orientations should produce unique quaternions
        let quats: Vec<Quat> = orientations.iter().map(|r| r.to_quat()).collect();

        for (i, q1) in quats.iter().enumerate() {
            for (j, q2) in quats.iter().enumerate() {
                if i != j {
                    // Quaternions should be different (accounting for q == -q equivalence)
                    let dot = q1.dot(*q2).abs();
                    assert!(
                        dot < 0.999,
                        "Orientations {} and {} produce same quaternion",
                        i,
                        j
                    );
                }
            }
        }
    }
}