// bevy_sensor/lib.rs

1//! bevy-sensor: Multi-view rendering for YCB object dataset
2//!
3//! This library provides Bevy-based rendering of 3D objects from multiple viewpoints,
4//! designed to match TBP (Thousand Brains Project) habitat sensor conventions for
5//! use in neocortx sensorimotor learning experiments.
6//!
7//! # Headless Rendering (NEW)
8//!
9//! Render directly to memory buffers for use in sensorimotor learning:
10//!
11//! ```ignore
12//! use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
13//! use std::path::Path;
14//!
15//! let config = RenderConfig::tbp_default(); // 64x64, RGBD
16//! let viewpoint = bevy_sensor::generate_viewpoints(&ViewpointConfig::default())[0];
17//! let rotation = ObjectRotation::identity();
18//!
19//! let output = render_to_buffer(
20//!     Path::new("/tmp/ycb/003_cracker_box"),
21//!     &viewpoint,
22//!     &rotation,
23//!     &config,
24//! )?;
25//!
26//! // output.rgba: Vec<u8> - RGBA pixels (64*64*4 bytes)
//! // output.depth: Vec<f64> - Depth values (64*64 floats)
28//! ```
29//!
30//! # File-based Capture (Legacy)
31//!
32//! ```ignore
33//! use bevy_sensor::{SensorConfig, ViewpointConfig, ObjectRotation};
34//!
35//! let config = SensorConfig {
36//!     viewpoints: ViewpointConfig::default(),
37//!     object_rotations: ObjectRotation::tbp_benchmark_rotations(),
38//!     ..Default::default()
39//! };
40//! ```
41//!
42//! # YCB Dataset
43//!
44//! Download YCB models programmatically:
45//!
46//! ```ignore
47//! use bevy_sensor::ycb::{download_models, Subset};
48//!
49//! // Download representative subset (3 objects)
50//! download_models("/tmp/ycb", Subset::Representative).await?;
51//! ```
52
53use bevy::prelude::*;
54use std::f32::consts::PI;
55use std::path::Path;
56
57// Headless rendering implementation
58// Full GPU rendering requires a display - see render module for details
59mod render;
60
61// Batch rendering API for efficient multi-viewpoint rendering
62pub mod batch;
63
64// WebGPU and cross-platform backend support
65pub mod backend;
66
67// Model caching system for efficient multi-viewpoint rendering
68pub mod cache;
69
70// Test fixtures for pre-rendered images (CI/CD support)
71pub mod fixtures;
72
73// Re-export ycbust types for convenience
74pub use ycbust::{self, DownloadOptions, Subset as YcbSubset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
75
76/// YCB dataset utilities
77pub mod ycb {
78    pub use ycbust::{download_ycb, DownloadOptions, Subset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
79
80    use std::path::Path;
81
82    /// Download YCB models to the specified directory.
83    ///
84    /// # Arguments
85    /// * `output_dir` - Directory to download models to
86    /// * `subset` - Which subset of objects to download
87    ///
88    /// # Example
89    /// ```ignore
90    /// use bevy_sensor::ycb::{download_models, Subset};
91    ///
92    /// download_models("/tmp/ycb", Subset::Representative).await?;
93    /// ```
94    pub async fn download_models<P: AsRef<Path>>(
95        output_dir: P,
96        subset: Subset,
97    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
98        let options = DownloadOptions {
99            overwrite: false,
100            full: false,
101            show_progress: true,
102            delete_archives: true,
103        };
104        download_ycb(subset, output_dir.as_ref(), options).await?;
105        Ok(())
106    }
107
108    /// Download YCB models with custom options.
109    pub async fn download_models_with_options<P: AsRef<Path>>(
110        output_dir: P,
111        subset: Subset,
112        options: DownloadOptions,
113    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
114        download_ycb(subset, output_dir.as_ref(), options).await?;
115        Ok(())
116    }
117
118    /// Check if YCB models exist at the given path
119    pub fn models_exist<P: AsRef<Path>>(output_dir: P) -> bool {
120        let path = output_dir.as_ref();
121        // Check for at least one representative object
122        path.join("003_cracker_box/google_16k/textured.obj")
123            .exists()
124    }
125
126    /// Get the path to a specific YCB object's OBJ file
127    pub fn object_mesh_path<P: AsRef<Path>>(output_dir: P, object_id: &str) -> std::path::PathBuf {
128        output_dir
129            .as_ref()
130            .join(object_id)
131            .join("google_16k")
132            .join("textured.obj")
133    }
134
135    /// Get the path to a specific YCB object's texture file
136    pub fn object_texture_path<P: AsRef<Path>>(
137        output_dir: P,
138        object_id: &str,
139    ) -> std::path::PathBuf {
140        output_dir
141            .as_ref()
142            .join(object_id)
143            .join("google_16k")
144            .join("texture_map.png")
145    }
146}
147
148/// Initialize bevy-sensor rendering backend configuration.
149///
150/// **IMPORTANT**: Call this function ONCE at the start of your application,
151/// before any rendering operations, especially when using bevy-sensor as a library.
152///
153/// This ensures proper backend selection (WebGPU for WSL2, Vulkan for Linux, etc.)
154/// and is critical for GPU rendering on WSL2 environments.
155///
156/// # Why This Matters
157///
158/// The WGPU rendering backend caches its backend selection early during initialization.
159/// When bevy-sensor is used as a library, environment variables must be set BEFORE
160/// any GPU rendering code runs. This function does that automatically.
161///
162/// # Example
163///
164/// ```ignore
165/// use bevy_sensor;
166///
167/// fn main() {
168///     // Initialize FIRST, before any rendering
169///     bevy_sensor::initialize();
170///
171///     // Now use the rendering API
172///     let output = bevy_sensor::render_to_buffer(
173///         object_dir, &viewpoint, &rotation, &config
174///     )?;
175/// }
176/// ```
177///
178/// # Calling Multiple Times
179///
180/// Safe to call multiple times - subsequent calls are no-ops after the first call.
181pub fn initialize() {
182    // Use a OnceCell equivalent to ensure this only runs once
183    use std::sync::atomic::{AtomicBool, Ordering};
184    static INITIALIZED: AtomicBool = AtomicBool::new(false);
185
186    if !INITIALIZED.swap(true, Ordering::SeqCst) {
187        // First call - initialize backend
188        let config = backend::BackendConfig::new();
189        config.apply_env();
190    }
191}
192
/// Object rotation in Euler angles (degrees), matching TBP benchmark format.
/// Format: [pitch, yaw, roll] or [x, y, z] rotation.
///
/// Three `f64` angles make this a small plain-data type, so it derives
/// `Copy`: callers can pass it by value without explicit clones.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct ObjectRotation {
    /// Rotation around X-axis (pitch) in degrees
    pub pitch: f64,
    /// Rotation around Y-axis (yaw) in degrees
    pub yaw: f64,
    /// Rotation around Z-axis (roll) in degrees
    pub roll: f64,
}
204
205impl ObjectRotation {
206    /// Create a new rotation from Euler angles in degrees
207    pub fn new(pitch: f64, yaw: f64, roll: f64) -> Self {
208        Self { pitch, yaw, roll }
209    }
210
211    /// Create from TBP-style array [pitch, yaw, roll] in degrees
212    pub fn from_array(arr: [f64; 3]) -> Self {
213        Self {
214            pitch: arr[0],
215            yaw: arr[1],
216            roll: arr[2],
217        }
218    }
219
220    /// Identity rotation (no rotation)
221    pub fn identity() -> Self {
222        Self::new(0.0, 0.0, 0.0)
223    }
224
225    /// TBP benchmark rotations: [0,0,0], [0,90,0], [0,180,0]
226    /// Used in shorter YCB experiments to reduce computational load.
227    pub fn tbp_benchmark_rotations() -> Vec<Self> {
228        vec![
229            Self::from_array([0.0, 0.0, 0.0]),
230            Self::from_array([0.0, 90.0, 0.0]),
231            Self::from_array([0.0, 180.0, 0.0]),
232        ]
233    }
234
235    /// TBP 14 known orientations (cube faces and corners)
236    /// These are the orientations objects are learned in during training.
237    pub fn tbp_known_orientations() -> Vec<Self> {
238        vec![
239            // 6 cube faces (90° rotations around each axis)
240            Self::from_array([0.0, 0.0, 0.0]),   // Front
241            Self::from_array([0.0, 90.0, 0.0]),  // Right
242            Self::from_array([0.0, 180.0, 0.0]), // Back
243            Self::from_array([0.0, 270.0, 0.0]), // Left
244            Self::from_array([90.0, 0.0, 0.0]),  // Top
245            Self::from_array([-90.0, 0.0, 0.0]), // Bottom
246            // 8 cube corners (45° rotations)
247            Self::from_array([45.0, 45.0, 0.0]),
248            Self::from_array([45.0, 135.0, 0.0]),
249            Self::from_array([45.0, 225.0, 0.0]),
250            Self::from_array([45.0, 315.0, 0.0]),
251            Self::from_array([-45.0, 45.0, 0.0]),
252            Self::from_array([-45.0, 135.0, 0.0]),
253            Self::from_array([-45.0, 225.0, 0.0]),
254            Self::from_array([-45.0, 315.0, 0.0]),
255        ]
256    }
257
258    /// Convert to Bevy Quat (converts f64 to f32 for Bevy compatibility)
259    pub fn to_quat(&self) -> Quat {
260        Quat::from_euler(
261            EulerRot::XYZ,
262            (self.pitch as f32).to_radians(),
263            (self.yaw as f32).to_radians(),
264            (self.roll as f32).to_radians(),
265        )
266    }
267
268    /// Convert to Bevy Transform (rotation only, no translation)
269    pub fn to_transform(&self) -> Transform {
270        Transform::from_rotation(self.to_quat())
271    }
272}
273
274impl Default for ObjectRotation {
275    fn default() -> Self {
276        Self::identity()
277    }
278}
279
/// Configuration for viewpoint generation matching TBP habitat sensor behavior.
/// Cameras are placed on a sphere around the object using spherical coordinates,
/// covering several elevation rings.
#[derive(Clone, Debug)]
pub struct ViewpointConfig {
    /// Distance from camera to object center (meters)
    pub radius: f32,
    /// Number of horizontal positions (yaw angles) around the object
    pub yaw_count: usize,
    /// Elevation angles in degrees (pitch). Positive = above, negative = below.
    pub pitch_angles_deg: Vec<f32>,
}

impl Default for ViewpointConfig {
    /// 8 yaw steps at three elevations (-30°, 0°, +30°) — 24 viewpoints total.
    /// The three elevations mirror TBP's look_up/look_down capability.
    fn default() -> Self {
        ViewpointConfig {
            radius: 0.5,
            yaw_count: 8,
            pitch_angles_deg: vec![-30.0, 0.0, 30.0],
        }
    }
}

impl ViewpointConfig {
    /// Total number of viewpoints this config will generate
    /// (yaw positions × elevation rings).
    pub fn viewpoint_count(&self) -> usize {
        self.pitch_angles_deg.len() * self.yaw_count
    }
}
310
311/// Full sensor configuration for capture sessions
312#[derive(Clone, Debug, Resource)]
313pub struct SensorConfig {
314    /// Viewpoint configuration (camera positions)
315    pub viewpoints: ViewpointConfig,
316    /// Object rotations to capture (each rotation generates a full viewpoint set)
317    pub object_rotations: Vec<ObjectRotation>,
318    /// Output directory for captures
319    pub output_dir: String,
320    /// Filename pattern (use {view} for view index, {rot} for rotation index)
321    pub filename_pattern: String,
322}
323
324impl Default for SensorConfig {
325    fn default() -> Self {
326        Self {
327            viewpoints: ViewpointConfig::default(),
328            object_rotations: vec![ObjectRotation::identity()],
329            output_dir: ".".to_string(),
330            filename_pattern: "capture_{rot}_{view}.png".to_string(),
331        }
332    }
333}
334
335impl SensorConfig {
336    /// Create config for TBP benchmark comparison (3 rotations × 24 viewpoints = 72 captures)
337    pub fn tbp_benchmark() -> Self {
338        Self {
339            viewpoints: ViewpointConfig::default(),
340            object_rotations: ObjectRotation::tbp_benchmark_rotations(),
341            output_dir: ".".to_string(),
342            filename_pattern: "capture_{rot}_{view}.png".to_string(),
343        }
344    }
345
346    /// Create config for full TBP training (14 rotations × 24 viewpoints = 336 captures)
347    pub fn tbp_full_training() -> Self {
348        Self {
349            viewpoints: ViewpointConfig::default(),
350            object_rotations: ObjectRotation::tbp_known_orientations(),
351            output_dir: ".".to_string(),
352            filename_pattern: "capture_{rot}_{view}.png".to_string(),
353        }
354    }
355
356    /// Total number of captures this config will generate
357    pub fn total_captures(&self) -> usize {
358        self.viewpoints.viewpoint_count() * self.object_rotations.len()
359    }
360}
361
362/// Generate camera viewpoints using spherical coordinates.
363///
364/// Spherical coordinate system (matching TBP habitat sensor conventions):
365/// - Yaw: horizontal rotation around Y-axis (0° to 360°)
366/// - Pitch: elevation angle from horizontal plane (-90° to +90°)
367/// - Radius: distance from origin (object center)
368pub fn generate_viewpoints(config: &ViewpointConfig) -> Vec<Transform> {
369    let mut views = Vec::with_capacity(config.viewpoint_count());
370
371    for pitch_deg in &config.pitch_angles_deg {
372        let pitch = pitch_deg.to_radians();
373
374        for i in 0..config.yaw_count {
375            let yaw = (i as f32) * 2.0 * PI / (config.yaw_count as f32);
376
377            // Spherical to Cartesian conversion (Y-up coordinate system)
378            // x = r * cos(pitch) * sin(yaw)
379            // y = r * sin(pitch)
380            // z = r * cos(pitch) * cos(yaw)
381            let x = config.radius * pitch.cos() * yaw.sin();
382            let y = config.radius * pitch.sin();
383            let z = config.radius * pitch.cos() * yaw.cos();
384
385            let transform = Transform::from_xyz(x, y, z).looking_at(Vec3::ZERO, Vec3::Y);
386            views.push(transform);
387        }
388    }
389    views
390}
391
/// Marker component for the target object being captured.
///
/// Zero-sized tag type (unit struct) attached to the rendered object entity.
#[derive(Component)]
pub struct CaptureTarget;

/// Marker component for the capture camera.
///
/// Zero-sized tag type (unit struct) attached to the camera entity.
#[derive(Component)]
pub struct CaptureCamera;
399
400// ============================================================================
401// Headless Rendering API (NEW)
402// ============================================================================
403
/// Configuration for headless rendering.
///
/// Matches TBP habitat sensor defaults: 64x64 resolution with RGBD output.
/// Construct via [`RenderConfig::tbp_default`], [`RenderConfig::preview`],
/// or [`RenderConfig::high_res`], or fill the fields directly.
#[derive(Clone, Debug)]
pub struct RenderConfig {
    /// Image width in pixels (default: 64)
    pub width: u32,
    /// Image height in pixels (default: 64)
    pub height: u32,
    /// Zoom factor affecting field of view (default: 1.0)
    /// Use >1 to zoom in (narrower FOV), <1 to zoom out (wider FOV).
    /// See [`RenderConfig::fov_radians`] for the exact mapping.
    pub zoom: f32,
    /// Near clipping plane in meters (default: 0.01)
    pub near_plane: f32,
    /// Far clipping plane in meters (default: 10.0)
    pub far_plane: f32,
    /// Lighting configuration
    pub lighting: LightingConfig,
}
423
/// Lighting configuration for rendering.
///
/// Controls the ambient light level plus a two-point (key + fill) light setup.
#[derive(Clone, Debug)]
pub struct LightingConfig {
    /// Ambient light brightness (0.0 - 1.0, default: 0.3)
    pub ambient_brightness: f32,
    /// Key light intensity in lumens (default: 1500.0)
    pub key_light_intensity: f32,
    /// Key light position [x, y, z] (default: [4.0, 8.0, 4.0])
    pub key_light_position: [f32; 3],
    /// Fill light intensity in lumens (default: 500.0)
    pub fill_light_intensity: f32,
    /// Fill light position [x, y, z] (default: [-4.0, 2.0, -4.0])
    pub fill_light_position: [f32; 3],
    /// Enable shadows (default: false for performance)
    pub shadows_enabled: bool,
}

impl Default for LightingConfig {
    /// Moderate two-point lighting with shadows disabled.
    fn default() -> Self {
        LightingConfig {
            ambient_brightness: 0.3,
            key_light_intensity: 1500.0,
            key_light_position: [4.0, 8.0, 4.0],
            fill_light_intensity: 500.0,
            fill_light_position: [-4.0, 2.0, -4.0],
            shadows_enabled: false,
        }
    }
}

impl LightingConfig {
    /// Bright lighting for clear visibility.
    ///
    /// Same light positions as the default, with raised ambient and intensities.
    pub fn bright() -> Self {
        Self {
            ambient_brightness: 0.5,
            key_light_intensity: 2000.0,
            fill_light_intensity: 800.0,
            ..Self::default()
        }
    }

    /// Soft lighting with minimal shadows: dimmer, closer lights and more fill.
    pub fn soft() -> Self {
        Self {
            ambient_brightness: 0.4,
            key_light_intensity: 1000.0,
            key_light_position: [3.0, 6.0, 3.0],
            fill_light_intensity: 600.0,
            fill_light_position: [-3.0, 3.0, -3.0],
            shadows_enabled: false,
        }
    }

    /// Unlit mode - full ambient only, both point lights switched off.
    pub fn unlit() -> Self {
        Self {
            ambient_brightness: 1.0,
            key_light_intensity: 0.0,
            key_light_position: [0.0; 3],
            fill_light_intensity: 0.0,
            fill_light_position: [0.0; 3],
            shadows_enabled: false,
        }
    }
}
493
impl Default for RenderConfig {
    /// Defaults to the TBP-compatible 64x64 configuration
    /// ([`RenderConfig::tbp_default`]).
    fn default() -> Self {
        Self::tbp_default()
    }
}
499
500impl RenderConfig {
501    /// TBP-compatible 64x64 RGBD sensor configuration.
502    ///
503    /// This matches the default resolution used in TBP's habitat sensor.
504    pub fn tbp_default() -> Self {
505        Self {
506            width: 64,
507            height: 64,
508            zoom: 1.0,
509            near_plane: 0.01,
510            far_plane: 10.0,
511            lighting: LightingConfig::default(),
512        }
513    }
514
515    /// Higher resolution configuration for debugging and visualization.
516    pub fn preview() -> Self {
517        Self {
518            width: 256,
519            height: 256,
520            zoom: 1.0,
521            near_plane: 0.01,
522            far_plane: 10.0,
523            lighting: LightingConfig::default(),
524        }
525    }
526
527    /// High resolution configuration for detailed captures.
528    pub fn high_res() -> Self {
529        Self {
530            width: 512,
531            height: 512,
532            zoom: 1.0,
533            near_plane: 0.01,
534            far_plane: 10.0,
535            lighting: LightingConfig::default(),
536        }
537    }
538
539    /// Calculate vertical field of view in radians based on zoom.
540    ///
541    /// Base FOV is 60 degrees, adjusted by zoom factor.
542    pub fn fov_radians(&self) -> f32 {
543        let base_fov_deg = 60.0_f32;
544        (base_fov_deg / self.zoom).to_radians()
545    }
546
547    /// Compute camera intrinsics for use with neocortx.
548    ///
549    /// Returns focal length and principal point based on resolution and FOV.
550    /// Uses f64 for TBP numerical precision compatibility.
551    pub fn intrinsics(&self) -> CameraIntrinsics {
552        let fov = self.fov_radians() as f64;
553        // focal_length = (height/2) / tan(fov/2)
554        let fy = (self.height as f64 / 2.0) / (fov / 2.0).tan();
555        let fx = fy; // Assuming square pixels
556
557        CameraIntrinsics {
558            focal_length: [fx, fy],
559            principal_point: [self.width as f64 / 2.0, self.height as f64 / 2.0],
560            image_size: [self.width, self.height],
561        }
562    }
563}
564
565/// Camera intrinsic parameters for 3D reconstruction.
566///
567/// Compatible with neocortx's VisionIntrinsics format.
568/// Uses f64 for TBP numerical precision compatibility.
569#[derive(Clone, Debug, PartialEq)]
570pub struct CameraIntrinsics {
571    /// Focal length in pixels (fx, fy)
572    pub focal_length: [f64; 2],
573    /// Principal point (cx, cy) - typically image center
574    pub principal_point: [f64; 2],
575    /// Image dimensions (width, height)
576    pub image_size: [u32; 2],
577}
578
579impl CameraIntrinsics {
580    /// Project a 3D point to 2D pixel coordinates.
581    pub fn project(&self, point: Vec3) -> Option<[f64; 2]> {
582        if point.z <= 0.0 {
583            return None;
584        }
585        let x = (point.x as f64 / point.z as f64) * self.focal_length[0] + self.principal_point[0];
586        let y = (point.y as f64 / point.z as f64) * self.focal_length[1] + self.principal_point[1];
587        Some([x, y])
588    }
589
590    /// Unproject a 2D pixel to a 3D point at given depth.
591    pub fn unproject(&self, pixel: [f64; 2], depth: f64) -> [f64; 3] {
592        let x = (pixel[0] - self.principal_point[0]) / self.focal_length[0] * depth;
593        let y = (pixel[1] - self.principal_point[1]) / self.focal_length[1] * depth;
594        [x, y, depth]
595    }
596}
597
/// Output from headless rendering containing RGBA and depth data.
///
/// Both buffers are row-major: `rgba.len() == width * height * 4` and
/// `depth.len() == width * height`.
#[derive(Clone, Debug)]
pub struct RenderOutput {
    /// RGBA pixel data in row-major order (width * height * 4 bytes)
    pub rgba: Vec<u8>,
    /// Depth values in meters, row-major order (width * height f64s)
    /// Values are linear depth from camera, not normalized.
    /// Uses f64 for TBP numerical precision compatibility.
    pub depth: Vec<f64>,
    /// Image width in pixels
    pub width: u32,
    /// Image height in pixels
    pub height: u32,
    /// Camera intrinsics used for this render
    pub intrinsics: CameraIntrinsics,
    /// Camera transform (world position and orientation)
    pub camera_transform: Transform,
    /// Object rotation applied during render
    pub object_rotation: ObjectRotation,
}
618
619impl RenderOutput {
620    /// Get RGBA pixel at (x, y). Returns None if out of bounds.
621    pub fn get_rgba(&self, x: u32, y: u32) -> Option<[u8; 4]> {
622        if x >= self.width || y >= self.height {
623            return None;
624        }
625        let idx = ((y * self.width + x) * 4) as usize;
626        Some([
627            self.rgba[idx],
628            self.rgba[idx + 1],
629            self.rgba[idx + 2],
630            self.rgba[idx + 3],
631        ])
632    }
633
634    /// Get depth value at (x, y) in meters. Returns None if out of bounds.
635    pub fn get_depth(&self, x: u32, y: u32) -> Option<f64> {
636        if x >= self.width || y >= self.height {
637            return None;
638        }
639        let idx = (y * self.width + x) as usize;
640        Some(self.depth[idx])
641    }
642
643    /// Get RGB pixel (without alpha) at (x, y).
644    pub fn get_rgb(&self, x: u32, y: u32) -> Option<[u8; 3]> {
645        self.get_rgba(x, y).map(|rgba| [rgba[0], rgba[1], rgba[2]])
646    }
647
648    /// Convert to neocortx-compatible image format: Vec<Vec<[u8; 3]>>
649    pub fn to_rgb_image(&self) -> Vec<Vec<[u8; 3]>> {
650        let mut image = Vec::with_capacity(self.height as usize);
651        for y in 0..self.height {
652            let mut row = Vec::with_capacity(self.width as usize);
653            for x in 0..self.width {
654                row.push(self.get_rgb(x, y).unwrap_or([0, 0, 0]));
655            }
656            image.push(row);
657        }
658        image
659    }
660
661    /// Convert depth to neocortx-compatible format: Vec<Vec<f64>>
662    pub fn to_depth_image(&self) -> Vec<Vec<f64>> {
663        let mut image = Vec::with_capacity(self.height as usize);
664        for y in 0..self.height {
665            let mut row = Vec::with_capacity(self.width as usize);
666            for x in 0..self.width {
667                row.push(self.get_depth(x, y).unwrap_or(0.0));
668            }
669            image.push(row);
670        }
671        image
672    }
673}
674
/// Errors that can occur during rendering and file operations.
#[derive(Debug, Clone)]
pub enum RenderError {
    /// Object mesh file not found
    MeshNotFound(String),
    /// Object texture file not found
    TextureNotFound(String),
    /// Generic file not found error
    FileNotFound { path: String, reason: String },
    /// File write failed
    FileWriteFailed { path: String, reason: String },
    /// Directory creation failed
    DirectoryCreationFailed { path: String, reason: String },
    /// Bevy rendering failed
    RenderFailed(String),
    /// Invalid configuration
    InvalidConfig(String),
    /// Invalid input parameters
    InvalidInput(String),
    /// JSON serialization/deserialization error
    SerializationError(String),
    /// Binary data parsing error
    DataParsingError(String),
    /// Render timeout
    RenderTimeout { duration_secs: u64 },
}

impl std::fmt::Display for RenderError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Build the message once, then emit it. The message text is observable
        // behavior — keep it stable.
        let message = match self {
            Self::MeshNotFound(path) => format!("Mesh not found: {}", path),
            Self::TextureNotFound(path) => format!("Texture not found: {}", path),
            Self::FileNotFound { path, reason } => {
                format!("File not found at {}: {}", path, reason)
            }
            Self::FileWriteFailed { path, reason } => {
                format!("Failed to write file {}: {}", path, reason)
            }
            Self::DirectoryCreationFailed { path, reason } => {
                format!("Failed to create directory {}: {}", path, reason)
            }
            Self::RenderFailed(msg) => format!("Render failed: {}", msg),
            Self::InvalidConfig(msg) => format!("Invalid config: {}", msg),
            Self::InvalidInput(msg) => format!("Invalid input: {}", msg),
            Self::SerializationError(msg) => format!("Serialization error: {}", msg),
            Self::DataParsingError(msg) => format!("Data parsing error: {}", msg),
            Self::RenderTimeout { duration_secs } => {
                format!("Render timeout after {} seconds", duration_secs)
            }
        };
        f.write_str(&message)
    }
}

impl std::error::Error for RenderError {}
729
730/// Render a YCB object to an in-memory buffer.
731///
732/// This is the primary API for headless rendering. It spawns a minimal Bevy app,
733/// renders a single frame, extracts the RGBA and depth data, and shuts down.
734///
735/// # Arguments
736/// * `object_dir` - Path to YCB object directory (e.g., "/tmp/ycb/003_cracker_box")
737/// * `camera_transform` - Camera position and orientation (use `generate_viewpoints`)
738/// * `object_rotation` - Rotation to apply to the object
739/// * `config` - Render configuration (resolution, depth range, etc.)
740///
741/// # Example
742/// ```ignore
743/// use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
744/// use std::path::Path;
745///
746/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
747/// let output = render_to_buffer(
748///     Path::new("/tmp/ycb/003_cracker_box"),
749///     &viewpoints[0],
750///     &ObjectRotation::identity(),
751///     &RenderConfig::tbp_default(),
752/// )?;
753/// ```
pub fn render_to_buffer(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
) -> Result<RenderOutput, RenderError> {
    // Thin wrapper: all Bevy app setup, frame rendering, and buffer readback
    // live in the private `render` module.
    render::render_headless(object_dir, camera_transform, object_rotation, config)
}
763
764/// Render all viewpoints and rotations for a YCB object.
765///
766/// Convenience function that renders all combinations of viewpoints and rotations.
767///
768/// # Arguments
769/// * `object_dir` - Path to YCB object directory
770/// * `viewpoint_config` - Viewpoint configuration (camera positions)
771/// * `rotations` - Object rotations to render
772/// * `render_config` - Render configuration
773///
774/// # Returns
775/// Vector of RenderOutput, one per viewpoint × rotation combination.
776pub fn render_all_viewpoints(
777    object_dir: &Path,
778    viewpoint_config: &ViewpointConfig,
779    rotations: &[ObjectRotation],
780    render_config: &RenderConfig,
781) -> Result<Vec<RenderOutput>, RenderError> {
782    let viewpoints = generate_viewpoints(viewpoint_config);
783    let mut outputs = Vec::with_capacity(viewpoints.len() * rotations.len());
784
785    for rotation in rotations {
786        for viewpoint in &viewpoints {
787            let output = render_to_buffer(object_dir, viewpoint, rotation, render_config)?;
788            outputs.push(output);
789        }
790    }
791
792    Ok(outputs)
793}
794
795/// Render with model caching support for efficient multi-viewpoint rendering.
796///
797/// This function tracks which models have been loaded and provides performance
798/// insights. For maximum efficiency when rendering many viewpoints of the same
799/// object, use the batch rendering API (`create_batch_renderer`, `render_batch`).
800///
801/// # Arguments
802/// * `object_dir` - Path to YCB object directory
803/// * `camera_transform` - Camera position and orientation
804/// * `object_rotation` - Rotation to apply to the object
805/// * `config` - Render configuration
806/// * `cache` - Model cache to track loaded assets
807///
808/// # Returns
809/// RenderOutput with rendered RGBA and depth data
810///
811/// # Example
812/// ```ignore
813/// use bevy_sensor::{render_to_buffer_cached, cache::ModelCache, RenderConfig, ObjectRotation};
814/// use std::path::PathBuf;
815///
816/// let mut cache = ModelCache::new();
817/// let object_dir = PathBuf::from("/tmp/ycb/003_cracker_box");
818/// let config = RenderConfig::tbp_default();
819/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
820///
821/// // First render: loads from disk and caches
822/// let output1 = render_to_buffer_cached(
823///     &object_dir,
824///     &viewpoints[0],
825///     &ObjectRotation::identity(),
826///     &config,
827///     &mut cache,
828/// )?;
829///
830/// // Subsequent renders: tracks in cache (actual speedup comes from batch API)
831/// for viewpoint in &viewpoints[1..] {
832///     let output = render_to_buffer_cached(
833///         &object_dir,
834///         viewpoint,
835///         &ObjectRotation::identity(),
836///         &config,
837///         &mut cache,
838///     )?;
839/// }
840/// ```
841///
842/// # Note
843/// This function uses the same rendering engine as `render_to_buffer()`. For true
844/// asset caching performance gains (2-3x speedup), combine with batch rendering:
845///
846/// ```ignore
847/// use bevy_sensor::{render_batch, batch::BatchRenderRequest, BatchRenderConfig, RenderConfig, ObjectRotation};
848///
849/// let requests: Vec<_> = viewpoints.iter().map(|vp| {
850///     BatchRenderRequest {
851///         object_dir: object_dir.clone(),
852///         viewpoint: *vp,
853///         object_rotation: ObjectRotation::identity(),
854///         render_config: RenderConfig::tbp_default(),
855///     }
856/// }).collect();
857///
858/// let outputs = render_batch(requests, &BatchRenderConfig::default())?;
859/// ```
860pub fn render_to_buffer_cached(
861    object_dir: &Path,
862    camera_transform: &Transform,
863    object_rotation: &ObjectRotation,
864    config: &RenderConfig,
865    cache: &mut cache::ModelCache,
866) -> Result<RenderOutput, RenderError> {
867    let mesh_path = object_dir.join("google_16k/textured.obj");
868    let texture_path = object_dir.join("google_16k/texture_map.png");
869
870    // Track in cache
871    cache.cache_scene(mesh_path.clone());
872    cache.cache_texture(texture_path.clone());
873
874    // Render using standard pipeline
875    render::render_headless(object_dir, camera_transform, object_rotation, config)
876}
877
878/// Render directly to files (for subprocess mode).
879///
880/// This function is designed for subprocess rendering where the process will exit
881/// after rendering. It saves RGBA and depth data directly to the specified files
882/// before the process terminates.
883///
884/// # Arguments
885/// * `object_dir` - Path to YCB object directory
886/// * `camera_transform` - Camera position and orientation
887/// * `object_rotation` - Rotation to apply to the object
888/// * `config` - Render configuration
889/// * `rgba_path` - Output path for RGBA PNG
890/// * `depth_path` - Output path for depth data (raw f32 bytes)
891///
892/// # Note
893/// This function may call `std::process::exit(0)` and not return.
894pub fn render_to_files(
895    object_dir: &Path,
896    camera_transform: &Transform,
897    object_rotation: &ObjectRotation,
898    config: &RenderConfig,
899    rgba_path: &Path,
900    depth_path: &Path,
901) -> Result<(), RenderError> {
902    render::render_to_files(
903        object_dir,
904        camera_transform,
905        object_rotation,
906        config,
907        rgba_path,
908        depth_path,
909    )
910}
911
912// Re-export batch types for convenient API access
913pub use batch::{
914    BatchRenderConfig, BatchRenderError, BatchRenderOutput, BatchRenderRequest, BatchRenderer,
915    BatchState, RenderStatus,
916};
917
918/// Create a new batch renderer for efficient multi-viewpoint rendering.
919///
920/// This creates a persistent Bevy app that can render multiple viewpoints without
921/// subprocess spawning overhead. Achieves 10-100x speedup vs individual render_to_buffer calls.
922///
923/// # Arguments
924/// * `config` - Batch rendering configuration
925///
926/// # Returns
927/// A BatchRenderer instance ready to queue render requests
928///
929/// # Example
930/// ```ignore
931/// use bevy_sensor::{create_batch_renderer, queue_render_request, render_next_in_batch, BatchRenderConfig};
932///
933/// let mut renderer = create_batch_renderer(&BatchRenderConfig::default())?;
934/// ```
935pub fn create_batch_renderer(config: &BatchRenderConfig) -> Result<BatchRenderer, RenderError> {
936    // For now, just create an empty renderer that will need a Bevy app
937    // The actual app creation happens when rendering starts
938    Ok(BatchRenderer::new(config.clone()))
939}
940
941/// Queue a render request for batch processing.
942///
943/// Adds a render request to the batch queue. Requests are processed in order
944/// when you call render_next_in_batch().
945///
946/// # Arguments
947/// * `renderer` - The batch renderer instance
948/// * `request` - The render request
949///
950/// # Returns
951/// Ok if queued successfully, Err if queue is full
952///
953/// # Example
954/// ```ignore
955/// use bevy_sensor::{batch::BatchRenderRequest, RenderConfig, ObjectRotation};
956/// use std::path::PathBuf;
957///
958/// queue_render_request(&mut renderer, BatchRenderRequest {
959///     object_dir: PathBuf::from("/tmp/ycb/003_cracker_box"),
960///     viewpoint: camera_transform,
961///     object_rotation: ObjectRotation::identity(),
962///     render_config: RenderConfig::tbp_default(),
963/// })?;
964/// ```
965pub fn queue_render_request(
966    renderer: &mut BatchRenderer,
967    request: BatchRenderRequest,
968) -> Result<(), RenderError> {
969    renderer
970        .queue_request(request)
971        .map_err(|e| RenderError::RenderFailed(e.to_string()))
972}
973
974/// Process and execute the next render in the batch queue.
975///
976/// Executes a single render from the queued requests. Returns None when the queue is empty.
977/// Use this in a loop to process all queued renders.
978///
979/// # Arguments
980/// * `renderer` - The batch renderer instance
981/// * `timeout_ms` - Timeout in milliseconds for this render
982///
983/// # Returns
984/// Some(output) if a render completed, None if queue is empty
985///
986/// # Example
987/// ```ignore
988/// loop {
989///     match render_next_in_batch(&mut renderer, 500)? {
990///         Some(output) => println!("Render complete: {:?}", output.status),
991///         None => break, // All renders done
992///     }
993/// }
994/// ```
995pub fn render_next_in_batch(
996    renderer: &mut BatchRenderer,
997    _timeout_ms: u32,
998) -> Result<Option<BatchRenderOutput>, RenderError> {
999    // This is a stub - the actual implementation will require a running Bevy app
1000    // For now, just render single batches immediately using render_to_buffer
1001    if let Some(request) = renderer.pending_requests.pop_front() {
1002        let output = render_to_buffer(
1003            &request.object_dir,
1004            &request.viewpoint,
1005            &request.object_rotation,
1006            &request.render_config,
1007        )?;
1008        let batch_output = BatchRenderOutput::from_render_output(request, output);
1009        renderer.completed_results.push(batch_output.clone());
1010        renderer.renders_processed += 1;
1011        Ok(Some(batch_output))
1012    } else {
1013        Ok(None)
1014    }
1015}
1016
1017/// Render multiple requests in batch (convenience function).
1018///
1019/// Queues all requests and executes them in batch, returning all results.
1020/// Simpler than manage queue + loop for one-off batches.
1021///
1022/// # Arguments
1023/// * `requests` - Vector of render requests
1024/// * `config` - Batch rendering configuration
1025///
1026/// # Returns
1027/// Vector of BatchRenderOutput results in same order as input
1028///
1029/// # Example
1030/// ```ignore
1031/// use bevy_sensor::{render_batch, batch::BatchRenderRequest, BatchRenderConfig};
1032///
1033/// let results = render_batch(requests, &BatchRenderConfig::default())?;
1034/// ```
1035pub fn render_batch(
1036    requests: Vec<BatchRenderRequest>,
1037    config: &BatchRenderConfig,
1038) -> Result<Vec<BatchRenderOutput>, RenderError> {
1039    let mut renderer = create_batch_renderer(config)?;
1040
1041    // Queue all requests
1042    for request in requests {
1043        queue_render_request(&mut renderer, request)?;
1044    }
1045
1046    // Execute all and collect results
1047    let mut results = Vec::new();
1048    while let Some(output) = render_next_in_batch(&mut renderer, config.frame_timeout_ms)? {
1049        results.push(output);
1050    }
1051
1052    Ok(results)
1053}
1054
1055// Re-export bevy types that consumers will need
1056pub use bevy::prelude::{Quat, Transform, Vec3};
1057
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE: these tests cover only the CPU-side API — rotation math,
    // viewpoint generation, config defaults, camera intrinsics, and
    // RenderOutput accessors. No GPU rendering is exercised here.

    #[test]
    fn test_object_rotation_identity() {
        let rot = ObjectRotation::identity();
        assert_eq!(rot.pitch, 0.0);
        assert_eq!(rot.yaw, 0.0);
        assert_eq!(rot.roll, 0.0);
    }

    #[test]
    fn test_object_rotation_from_array() {
        // Array order is [pitch, yaw, roll], in degrees.
        let rot = ObjectRotation::from_array([10.0, 20.0, 30.0]);
        assert_eq!(rot.pitch, 10.0);
        assert_eq!(rot.yaw, 20.0);
        assert_eq!(rot.roll, 30.0);
    }

    #[test]
    fn test_tbp_benchmark_rotations() {
        // The benchmark set is three yaw-only rotations: 0°, 90°, 180°.
        let rotations = ObjectRotation::tbp_benchmark_rotations();
        assert_eq!(rotations.len(), 3);
        assert_eq!(rotations[0], ObjectRotation::from_array([0.0, 0.0, 0.0]));
        assert_eq!(rotations[1], ObjectRotation::from_array([0.0, 90.0, 0.0]));
        assert_eq!(rotations[2], ObjectRotation::from_array([0.0, 180.0, 0.0]));
    }

    #[test]
    fn test_tbp_known_orientations_count() {
        let orientations = ObjectRotation::tbp_known_orientations();
        assert_eq!(orientations.len(), 14);
    }

    #[test]
    fn test_rotation_to_quat() {
        let rot = ObjectRotation::identity();
        let quat = rot.to_quat();
        // Identity quaternion should be approximately (1, 0, 0, 0)
        assert!((quat.w - 1.0).abs() < 0.001);
        assert!(quat.x.abs() < 0.001);
        assert!(quat.y.abs() < 0.001);
        assert!(quat.z.abs() < 0.001);
    }

    #[test]
    fn test_rotation_90_yaw() {
        let rot = ObjectRotation::new(0.0, 90.0, 0.0);
        let quat = rot.to_quat();
        // 90° Y rotation: w ≈ 0.707, y ≈ 0.707
        assert!((quat.w - 0.707).abs() < 0.01);
        assert!((quat.y - 0.707).abs() < 0.01);
    }

    #[test]
    fn test_viewpoint_config_default() {
        let config = ViewpointConfig::default();
        assert_eq!(config.radius, 0.5);
        assert_eq!(config.yaw_count, 8);
        assert_eq!(config.pitch_angles_deg.len(), 3);
    }

    #[test]
    fn test_viewpoint_count() {
        let config = ViewpointConfig::default();
        assert_eq!(config.viewpoint_count(), 24); // 8 × 3
    }

    #[test]
    fn test_generate_viewpoints_count() {
        // generate_viewpoints() must produce exactly viewpoint_count() transforms.
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 24);
    }

    #[test]
    fn test_viewpoints_spherical_radius() {
        // Every generated viewpoint must lie on the configured sphere.
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let actual_radius = transform.translation.length();
            assert!(
                (actual_radius - config.radius).abs() < 0.001,
                "Viewpoint {} has incorrect radius: {} (expected {})",
                i,
                actual_radius,
                config.radius
            );
        }
    }

    #[test]
    fn test_viewpoints_looking_at_origin() {
        // Every camera's forward vector should point at the origin
        // (dot product with the to-origin direction ≈ 1).
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let forward = transform.forward();
            let to_origin = (Vec3::ZERO - transform.translation).normalize();
            let dot = forward.dot(to_origin);
            assert!(
                dot > 0.99,
                "Viewpoint {} not looking at origin, dot product: {}",
                i,
                dot
            );
        }
    }

    #[test]
    fn test_sensor_config_default() {
        let config = SensorConfig::default();
        assert_eq!(config.object_rotations.len(), 1);
        assert_eq!(config.total_captures(), 24);
    }

    #[test]
    fn test_sensor_config_tbp_benchmark() {
        let config = SensorConfig::tbp_benchmark();
        assert_eq!(config.object_rotations.len(), 3);
        assert_eq!(config.total_captures(), 72); // 3 rotations × 24 viewpoints
    }

    #[test]
    fn test_sensor_config_tbp_full() {
        let config = SensorConfig::tbp_full_training();
        assert_eq!(config.object_rotations.len(), 14);
        assert_eq!(config.total_captures(), 336); // 14 rotations × 24 viewpoints
    }

    #[test]
    fn test_ycb_representative_objects() {
        // Verify representative objects are defined
        assert_eq!(crate::ycb::REPRESENTATIVE_OBJECTS.len(), 3);
        assert!(crate::ycb::REPRESENTATIVE_OBJECTS.contains(&"003_cracker_box"));
    }

    #[test]
    fn test_ycb_ten_objects() {
        // Verify ten objects subset is defined
        assert_eq!(crate::ycb::TEN_OBJECTS.len(), 10);
    }

    #[test]
    fn test_ycb_object_mesh_path() {
        let path = crate::ycb::object_mesh_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path.to_string_lossy(),
            "/tmp/ycb/003_cracker_box/google_16k/textured.obj"
        );
    }

    #[test]
    fn test_ycb_object_texture_path() {
        let path = crate::ycb::object_texture_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path.to_string_lossy(),
            "/tmp/ycb/003_cracker_box/google_16k/texture_map.png"
        );
    }

    // =========================================================================
    // Headless Rendering API Tests
    // =========================================================================

    #[test]
    fn test_render_config_tbp_default() {
        let config = RenderConfig::tbp_default();
        assert_eq!(config.width, 64);
        assert_eq!(config.height, 64);
        assert_eq!(config.zoom, 1.0);
        assert_eq!(config.near_plane, 0.01);
        assert_eq!(config.far_plane, 10.0);
    }

    #[test]
    fn test_render_config_preview() {
        let config = RenderConfig::preview();
        assert_eq!(config.width, 256);
        assert_eq!(config.height, 256);
    }

    #[test]
    fn test_render_config_default_is_tbp() {
        // Default::default() should agree with the TBP preset dimensions.
        let default = RenderConfig::default();
        let tbp = RenderConfig::tbp_default();
        assert_eq!(default.width, tbp.width);
        assert_eq!(default.height, tbp.height);
    }

    #[test]
    fn test_render_config_fov() {
        let config = RenderConfig::tbp_default();
        let fov = config.fov_radians();
        // Base FOV is 60 degrees = ~1.047 radians
        assert!((fov - 1.047).abs() < 0.01);

        // Zoom in should reduce FOV
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..config
        };
        assert!(zoomed.fov_radians() < fov);
    }

    #[test]
    fn test_render_config_intrinsics() {
        let config = RenderConfig::tbp_default();
        let intrinsics = config.intrinsics();

        assert_eq!(intrinsics.image_size, [64, 64]);
        assert_eq!(intrinsics.principal_point, [32.0, 32.0]);
        // Focal length should be positive and reasonable
        assert!(intrinsics.focal_length[0] > 0.0);
        assert!(intrinsics.focal_length[1] > 0.0);
        // For 64x64 with 60° FOV, focal length ≈ 55.4 pixels
        assert!((intrinsics.focal_length[0] - 55.4).abs() < 1.0);
    }

    #[test]
    fn test_camera_intrinsics_project() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Point at origin of camera frame projects to principal point
        let center = intrinsics.project(Vec3::new(0.0, 0.0, 1.0));
        assert!(center.is_some());
        let [x, y] = center.unwrap();
        assert!((x - 32.0).abs() < 0.001);
        assert!((y - 32.0).abs() < 0.001);

        // Point behind camera returns None
        let behind = intrinsics.project(Vec3::new(0.0, 0.0, -1.0));
        assert!(behind.is_none());
    }

    #[test]
    fn test_camera_intrinsics_unproject() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Unproject principal point at depth 1.0
        let point = intrinsics.unproject([32.0, 32.0], 1.0);
        assert!((point[0]).abs() < 0.001); // x
        assert!((point[1]).abs() < 0.001); // y
        assert!((point[2] - 1.0).abs() < 0.001); // z
    }

    #[test]
    fn test_render_output_get_rgba() {
        // 2x2 image, row-major, 4 bytes (RGBA) per pixel.
        let output = RenderOutput {
            rgba: vec![
                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
            ],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        // Top-left: red
        assert_eq!(output.get_rgba(0, 0), Some([255, 0, 0, 255]));
        // Top-right: green
        assert_eq!(output.get_rgba(1, 0), Some([0, 255, 0, 255]));
        // Bottom-left: blue
        assert_eq!(output.get_rgba(0, 1), Some([0, 0, 255, 255]));
        // Bottom-right: white
        assert_eq!(output.get_rgba(1, 1), Some([255, 255, 255, 255]));
        // Out of bounds
        assert_eq!(output.get_rgba(2, 0), None);
    }

    #[test]
    fn test_render_output_get_depth() {
        let output = RenderOutput {
            rgba: vec![0u8; 16],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        assert_eq!(output.get_depth(0, 0), Some(1.0));
        assert_eq!(output.get_depth(1, 0), Some(2.0));
        assert_eq!(output.get_depth(0, 1), Some(3.0));
        assert_eq!(output.get_depth(1, 1), Some(4.0));
        assert_eq!(output.get_depth(2, 0), None);
    }

    #[test]
    fn test_render_output_to_rgb_image() {
        let output = RenderOutput {
            rgba: vec![
                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
            ],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        let image = output.to_rgb_image();
        assert_eq!(image.len(), 2); // 2 rows
        assert_eq!(image[0].len(), 2); // 2 columns
        assert_eq!(image[0][0], [255, 0, 0]); // Red
        assert_eq!(image[0][1], [0, 255, 0]); // Green
        assert_eq!(image[1][0], [0, 0, 255]); // Blue
        assert_eq!(image[1][1], [255, 255, 255]); // White
    }

    #[test]
    fn test_render_output_to_depth_image() {
        let output = RenderOutput {
            rgba: vec![0u8; 16],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        let depth_image = output.to_depth_image();
        assert_eq!(depth_image.len(), 2);
        assert_eq!(depth_image[0], vec![1.0, 2.0]);
        assert_eq!(depth_image[1], vec![3.0, 4.0]);
    }

    #[test]
    fn test_render_error_display() {
        let err = RenderError::MeshNotFound("/path/to/mesh.obj".to_string());
        assert!(err.to_string().contains("Mesh not found"));
        assert!(err.to_string().contains("/path/to/mesh.obj"));
    }

    // =========================================================================
    // Edge Case Tests
    // =========================================================================

    #[test]
    fn test_object_rotation_extreme_angles() {
        // Test angles beyond 360 degrees
        let rot = ObjectRotation::new(450.0, -720.0, 1080.0);
        let quat = rot.to_quat();
        // Quaternion should still be valid (normalized)
        assert!((quat.length() - 1.0).abs() < 0.001);
    }

    #[test]
    fn test_object_rotation_to_transform() {
        let rot = ObjectRotation::new(45.0, 90.0, 0.0);
        let transform = rot.to_transform();
        // Transform should have no translation
        assert_eq!(transform.translation, Vec3::ZERO);
        // Should have rotation
        assert!(transform.rotation != Quat::IDENTITY);
    }

    #[test]
    fn test_viewpoint_config_single_viewpoint() {
        let config = ViewpointConfig {
            radius: 1.0,
            yaw_count: 1,
            pitch_angles_deg: vec![0.0],
        };
        assert_eq!(config.viewpoint_count(), 1);
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 1);
        // Single viewpoint at yaw=0, pitch=0 should be at (0, 0, radius)
        let pos = viewpoints[0].translation;
        assert!((pos.x).abs() < 0.001);
        assert!((pos.y).abs() < 0.001);
        assert!((pos.z - 1.0).abs() < 0.001);
    }

    #[test]
    fn test_viewpoint_radius_scaling() {
        let config1 = ViewpointConfig {
            radius: 0.5,
            yaw_count: 4,
            pitch_angles_deg: vec![0.0],
        };
        let config2 = ViewpointConfig {
            radius: 2.0,
            yaw_count: 4,
            pitch_angles_deg: vec![0.0],
        };

        let v1 = generate_viewpoints(&config1);
        let v2 = generate_viewpoints(&config2);

        // Viewpoints should scale proportionally
        for (vp1, vp2) in v1.iter().zip(v2.iter()) {
            let ratio = vp2.translation.length() / vp1.translation.length();
            assert!((ratio - 4.0).abs() < 0.01); // 2.0 / 0.5 = 4.0
        }
    }

    #[test]
    fn test_camera_intrinsics_project_at_z_zero() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Point at z=0 should return None (division by zero protection)
        let result = intrinsics.project(Vec3::new(1.0, 1.0, 0.0));
        assert!(result.is_none());
    }

    #[test]
    fn test_camera_intrinsics_roundtrip() {
        // project() followed by unproject() at the same depth must recover
        // the original camera-frame point.
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Project a 3D point
        let original = Vec3::new(0.5, -0.3, 2.0);
        let projected = intrinsics.project(original).unwrap();

        // Unproject back with the same depth (convert f32 to f64)
        let unprojected = intrinsics.unproject(projected, original.z as f64);

        // Should get back approximately the same point
        assert!((unprojected[0] - original.x as f64).abs() < 0.001); // x
        assert!((unprojected[1] - original.y as f64).abs() < 0.001); // y
        assert!((unprojected[2] - original.z as f64).abs() < 0.001); // z
    }

    #[test]
    fn test_render_output_empty() {
        let output = RenderOutput {
            rgba: vec![],
            depth: vec![],
            width: 0,
            height: 0,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        // Should handle empty gracefully
        assert_eq!(output.get_rgba(0, 0), None);
        assert_eq!(output.get_depth(0, 0), None);
        assert!(output.to_rgb_image().is_empty());
        assert!(output.to_depth_image().is_empty());
    }

    #[test]
    fn test_render_output_1x1() {
        let output = RenderOutput {
            rgba: vec![128, 64, 32, 255],
            depth: vec![0.5],
            width: 1,
            height: 1,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        assert_eq!(output.get_rgba(0, 0), Some([128, 64, 32, 255]));
        assert_eq!(output.get_depth(0, 0), Some(0.5));
        assert_eq!(output.get_rgb(0, 0), Some([128, 64, 32]));

        let rgb_img = output.to_rgb_image();
        assert_eq!(rgb_img.len(), 1);
        assert_eq!(rgb_img[0].len(), 1);
        assert_eq!(rgb_img[0][0], [128, 64, 32]);
    }

    #[test]
    fn test_render_config_high_res() {
        let config = RenderConfig::high_res();
        assert_eq!(config.width, 512);
        assert_eq!(config.height, 512);

        let intrinsics = config.intrinsics();
        assert_eq!(intrinsics.image_size, [512, 512]);
        assert_eq!(intrinsics.principal_point, [256.0, 256.0]);
    }

    #[test]
    fn test_render_config_zoom_affects_fov() {
        let base = RenderConfig::tbp_default();
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..base.clone()
        };

        // Higher zoom = lower FOV
        assert!(zoomed.fov_radians() < base.fov_radians());
        // Specifically, 2x zoom = half FOV
        assert!((zoomed.fov_radians() - base.fov_radians() / 2.0).abs() < 0.01);
    }

    #[test]
    fn test_render_config_zoom_affects_intrinsics() {
        let base = RenderConfig::tbp_default();
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..base.clone()
        };

        // Higher zoom = higher focal length
        let base_intrinsics = base.intrinsics();
        let zoomed_intrinsics = zoomed.intrinsics();

        assert!(zoomed_intrinsics.focal_length[0] > base_intrinsics.focal_length[0]);
    }

    #[test]
    fn test_lighting_config_variants() {
        let default = LightingConfig::default();
        let bright = LightingConfig::bright();
        let soft = LightingConfig::soft();
        let unlit = LightingConfig::unlit();

        // Bright should have higher intensity than default
        assert!(bright.key_light_intensity > default.key_light_intensity);

        // Unlit should have no point lights
        assert_eq!(unlit.key_light_intensity, 0.0);
        assert_eq!(unlit.fill_light_intensity, 0.0);
        assert_eq!(unlit.ambient_brightness, 1.0);

        // Soft should have lower intensity
        assert!(soft.key_light_intensity < default.key_light_intensity);
    }

    #[test]
    fn test_all_render_error_variants() {
        let errors = vec![
            RenderError::MeshNotFound("mesh.obj".to_string()),
            RenderError::TextureNotFound("texture.png".to_string()),
            RenderError::RenderFailed("GPU error".to_string()),
            RenderError::InvalidConfig("bad config".to_string()),
        ];

        for err in errors {
            // All variants should have Display impl
            let msg = err.to_string();
            assert!(!msg.is_empty());
        }
    }

    #[test]
    fn test_tbp_known_orientations_unique() {
        let orientations = ObjectRotation::tbp_known_orientations();

        // All 14 orientations should produce unique quaternions
        let quats: Vec<Quat> = orientations.iter().map(|r| r.to_quat()).collect();

        for (i, q1) in quats.iter().enumerate() {
            for (j, q2) in quats.iter().enumerate() {
                if i != j {
                    // Quaternions should be different (accounting for q == -q equivalence)
                    let dot = q1.dot(*q2).abs();
                    assert!(
                        dot < 0.999,
                        "Orientations {} and {} produce same quaternion",
                        i,
                        j
                    );
                }
            }
        }
    }
}