Skip to main content

bevy_sensor/
lib.rs

1//! bevy-sensor: Multi-view rendering for YCB object dataset
2//!
3//! This library provides Bevy-based rendering of 3D objects from multiple viewpoints,
4//! designed to match TBP (Thousand Brains Project) habitat sensor conventions for
5//! use in neocortx sensorimotor learning experiments.
6//!
7//! # Headless Rendering (NEW)
8//!
9//! Render directly to memory buffers for use in sensorimotor learning:
10//!
11//! ```ignore
12//! use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
13//! use std::path::Path;
14//!
15//! let config = RenderConfig::tbp_default(); // 64x64, RGBD
16//! let viewpoint = bevy_sensor::generate_viewpoints(&ViewpointConfig::default())[0];
17//! let rotation = ObjectRotation::identity();
18//!
19//! let output = render_to_buffer(
20//!     Path::new("/tmp/ycb/003_cracker_box"),
21//!     &viewpoint,
22//!     &rotation,
23//!     &config,
24//! )?;
25//!
26//! // output.rgba: Vec<u8> - RGBA pixels (64*64*4 bytes)
//! // output.depth: Vec<f64> - Depth values (64*64 floats)
28//! ```
29//!
30//! # File-based Capture (Legacy)
31//!
32//! ```ignore
33//! use bevy_sensor::{SensorConfig, ViewpointConfig, ObjectRotation};
34//!
35//! let config = SensorConfig {
36//!     viewpoints: ViewpointConfig::default(),
37//!     object_rotations: ObjectRotation::tbp_benchmark_rotations(),
38//!     ..Default::default()
39//! };
40//! ```
41//!
42//! # YCB Dataset
43//!
44//! Download YCB models programmatically:
45//!
46//! ```ignore
47//! use bevy_sensor::ycb::{download_models, Subset};
48//!
49//! // Download representative subset (3 objects)
50//! download_models("/tmp/ycb", Subset::Representative).await?;
51//! ```
52
53use bevy::prelude::*;
54use std::f32::consts::PI;
55use std::path::Path;
56
// Headless rendering implementation.
// Full GPU rendering requires a display - see render module for details.
mod render;

// Batch rendering API for efficient multi-viewpoint rendering.
pub mod batch;

// WebGPU and cross-platform backend support (used by `initialize`).
pub mod backend;

// Model caching system for efficient multi-viewpoint rendering.
pub mod cache;

// Test fixtures for pre-rendered images (CI/CD support).
pub mod fixtures;

// Re-export ycbust types for convenience so callers don't need their own
// ycbust dependency for common operations.
// NOTE(review): allow(deprecated) implies some of these upstream items are
// deprecated; presumably kept exported for backward compatibility — confirm.
#[allow(deprecated)]
pub use ycbust::{
    self, DownloadOptions, Subset as YcbSubset, REPRESENTATIVE_OBJECTS, TBP_STANDARD_OBJECTS,
    TEN_OBJECTS,
};
79
80/// YCB dataset utilities
81pub mod ycb {
82    #[allow(deprecated)]
83    pub use ycbust::{
84        download_ycb, DownloadOptions, Subset, REPRESENTATIVE_OBJECTS, TBP_STANDARD_OBJECTS,
85        TEN_OBJECTS,
86    };
87
88    use reqwest::Client;
89    use std::path::Path;
90
91    /// Download YCB models to the specified directory.
92    ///
93    /// # Arguments
94    /// * `output_dir` - Directory to download models to
95    /// * `subset` - Which subset of objects to download
96    ///
97    /// # Example
98    /// ```ignore
99    /// use bevy_sensor::ycb::{download_models, Subset};
100    ///
101    /// download_models("/tmp/ycb", Subset::Representative).await?;
102    /// ```
103    pub async fn download_models<P: AsRef<Path>>(
104        output_dir: P,
105        subset: Subset,
106    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
107        let options = DownloadOptions {
108            overwrite: false,
109            full: false,
110            show_progress: true,
111            delete_archives: true,
112        };
113        download_ycb(subset, output_dir.as_ref(), options).await?;
114        Ok(())
115    }
116
117    /// Download YCB models with custom options.
118    pub async fn download_models_with_options<P: AsRef<Path>>(
119        output_dir: P,
120        subset: Subset,
121        options: DownloadOptions,
122    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
123        download_ycb(subset, output_dir.as_ref(), options).await?;
124        Ok(())
125    }
126
127    /// Download specific YCB objects by object ID using the standard `google_16k` meshes.
128    pub async fn download_objects<P: AsRef<Path>>(
129        output_dir: P,
130        object_ids: &[&str],
131    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
132        let output_dir = output_dir.as_ref();
133        let client = Client::new();
134        let options = DownloadOptions {
135            overwrite: false,
136            full: false,
137            show_progress: true,
138            delete_archives: true,
139        };
140
141        std::fs::create_dir_all(output_dir)?;
142
143        for object_id in object_ids {
144            let url = ycbust::get_tgz_url(object_id, "google_16k");
145            let archive_path = output_dir.join(format!("{object_id}_google_16k.tgz"));
146
147            if archive_path.exists() && !options.overwrite {
148                continue;
149            }
150
151            ycbust::download_file(&client, &url, &archive_path, options.show_progress).await?;
152            ycbust::extract_tgz(&archive_path, output_dir, options.delete_archives)?;
153        }
154
155        Ok(())
156    }
157
158    /// Check if YCB models exist at the given path
159    pub fn models_exist<P: AsRef<Path>>(output_dir: P) -> bool {
160        let path = output_dir.as_ref();
161        // Check for at least one representative object
162        path.join("003_cracker_box/google_16k/textured.obj")
163            .exists()
164    }
165
166    /// Get the path to a specific YCB object's OBJ file
167    pub fn object_mesh_path<P: AsRef<Path>>(output_dir: P, object_id: &str) -> std::path::PathBuf {
168        output_dir
169            .as_ref()
170            .join(object_id)
171            .join("google_16k")
172            .join("textured.obj")
173    }
174
175    /// Get the path to a specific YCB object's texture file
176    pub fn object_texture_path<P: AsRef<Path>>(
177        output_dir: P,
178        object_id: &str,
179    ) -> std::path::PathBuf {
180        output_dir
181            .as_ref()
182            .join(object_id)
183            .join("google_16k")
184            .join("texture_map.png")
185    }
186}
187
188/// Initialize bevy-sensor rendering backend configuration.
189///
190/// **IMPORTANT**: Call this function ONCE at the start of your application,
191/// before any rendering operations, especially when using bevy-sensor as a library.
192///
193/// This ensures proper backend selection (WebGPU for WSL2, Vulkan for Linux, etc.)
194/// and is critical for GPU rendering on WSL2 environments.
195///
196/// # Why This Matters
197///
198/// The WGPU rendering backend caches its backend selection early during initialization.
199/// When bevy-sensor is used as a library, environment variables must be set BEFORE
200/// any GPU rendering code runs. This function does that automatically.
201///
202/// # Example
203///
204/// ```ignore
205/// use bevy_sensor;
206///
207/// fn main() {
208///     // Initialize FIRST, before any rendering
209///     bevy_sensor::initialize();
210///
211///     // Now use the rendering API
212///     let output = bevy_sensor::render_to_buffer(
213///         object_dir, &viewpoint, &rotation, &config
214///     )?;
215/// }
216/// ```
217///
218/// # Calling Multiple Times
219///
220/// Safe to call multiple times - subsequent calls are no-ops after the first call.
221pub fn initialize() {
222    // Use a OnceCell equivalent to ensure this only runs once
223    use std::sync::atomic::{AtomicBool, Ordering};
224    static INITIALIZED: AtomicBool = AtomicBool::new(false);
225
226    if !INITIALIZED.swap(true, Ordering::SeqCst) {
227        // First call - initialize backend
228        let config = backend::BackendConfig::new();
229        config.apply_env();
230    }
231}
232
/// Object rotation in Euler angles (degrees), matching TBP benchmark format.
/// Format: [pitch, yaw, roll] or [x, y, z] rotation.
/// Angles are applied in X→Y→Z order when converted to a quaternion
/// (see `to_quat`, which uses `EulerRot::XYZ`).
#[derive(Clone, Debug, PartialEq)]
pub struct ObjectRotation {
    /// Rotation around X-axis (pitch) in degrees
    pub pitch: f64,
    /// Rotation around Y-axis (yaw) in degrees
    pub yaw: f64,
    /// Rotation around Z-axis (roll) in degrees
    pub roll: f64,
}
244
245impl ObjectRotation {
246    /// Create a new rotation from Euler angles in degrees
247    pub fn new(pitch: f64, yaw: f64, roll: f64) -> Self {
248        Self { pitch, yaw, roll }
249    }
250
251    /// Create from TBP-style array [pitch, yaw, roll] in degrees
252    pub fn from_array(arr: [f64; 3]) -> Self {
253        Self {
254            pitch: arr[0],
255            yaw: arr[1],
256            roll: arr[2],
257        }
258    }
259
260    /// Identity rotation (no rotation)
261    pub fn identity() -> Self {
262        Self::new(0.0, 0.0, 0.0)
263    }
264
265    /// TBP benchmark rotations: [0,0,0], [0,90,0], [0,180,0]
266    /// Used in shorter YCB experiments to reduce computational load.
267    pub fn tbp_benchmark_rotations() -> Vec<Self> {
268        vec![
269            Self::from_array([0.0, 0.0, 0.0]),
270            Self::from_array([0.0, 90.0, 0.0]),
271            Self::from_array([0.0, 180.0, 0.0]),
272        ]
273    }
274
275    /// TBP 14 known orientations (cube faces and corners)
276    /// These are the orientations objects are learned in during training.
277    pub fn tbp_known_orientations() -> Vec<Self> {
278        vec![
279            // 6 cube faces (90° rotations around each axis)
280            Self::from_array([0.0, 0.0, 0.0]),   // Front
281            Self::from_array([0.0, 90.0, 0.0]),  // Right
282            Self::from_array([0.0, 180.0, 0.0]), // Back
283            Self::from_array([0.0, 270.0, 0.0]), // Left
284            Self::from_array([90.0, 0.0, 0.0]),  // Top
285            Self::from_array([-90.0, 0.0, 0.0]), // Bottom
286            // 8 cube corners (45° rotations)
287            Self::from_array([45.0, 45.0, 0.0]),
288            Self::from_array([45.0, 135.0, 0.0]),
289            Self::from_array([45.0, 225.0, 0.0]),
290            Self::from_array([45.0, 315.0, 0.0]),
291            Self::from_array([-45.0, 45.0, 0.0]),
292            Self::from_array([-45.0, 135.0, 0.0]),
293            Self::from_array([-45.0, 225.0, 0.0]),
294            Self::from_array([-45.0, 315.0, 0.0]),
295        ]
296    }
297
298    /// Convert to Bevy Quat (converts f64 to f32 for Bevy compatibility)
299    pub fn to_quat(&self) -> Quat {
300        Quat::from_euler(
301            EulerRot::XYZ,
302            (self.pitch as f32).to_radians(),
303            (self.yaw as f32).to_radians(),
304            (self.roll as f32).to_radians(),
305        )
306    }
307
308    /// Convert to Bevy Transform (rotation only, no translation)
309    pub fn to_transform(&self) -> Transform {
310        Transform::from_rotation(self.to_quat())
311    }
312}
313
314impl Default for ObjectRotation {
315    fn default() -> Self {
316        Self::identity()
317    }
318}
319
/// Configuration for viewpoint generation matching TBP habitat sensor behavior.
/// Uses spherical coordinates to capture objects from multiple elevations.
/// Generates `yaw_count * pitch_angles_deg.len()` viewpoints in total
/// (see `viewpoint_count`).
#[derive(Clone, Debug)]
pub struct ViewpointConfig {
    /// Distance from camera to object center (meters)
    pub radius: f32,
    /// Number of horizontal positions (yaw angles) around the object
    pub yaw_count: usize,
    /// Elevation angles in degrees (pitch). Positive = above, negative = below.
    pub pitch_angles_deg: Vec<f32>,
}
331
332impl Default for ViewpointConfig {
333    fn default() -> Self {
334        Self {
335            radius: 0.5,
336            yaw_count: 8,
337            // Three elevations: below (-30°), level (0°), above (+30°)
338            // This matches TBP's look_up/look_down capability
339            pitch_angles_deg: vec![-30.0, 0.0, 30.0],
340        }
341    }
342}
343
344impl ViewpointConfig {
345    /// Total number of viewpoints this config will generate
346    pub fn viewpoint_count(&self) -> usize {
347        self.yaw_count * self.pitch_angles_deg.len()
348    }
349}
350
/// Full sensor configuration for capture sessions.
/// Derives Bevy `Resource` so it can be inserted into an app's world.
#[derive(Clone, Debug, Resource)]
pub struct SensorConfig {
    /// Viewpoint configuration (camera positions)
    pub viewpoints: ViewpointConfig,
    /// Object rotations to capture (each rotation generates a full viewpoint set)
    pub object_rotations: Vec<ObjectRotation>,
    /// Output directory for captures
    pub output_dir: String,
    /// Filename pattern (use {view} for view index, {rot} for rotation index)
    pub filename_pattern: String,
}
363
364impl Default for SensorConfig {
365    fn default() -> Self {
366        Self {
367            viewpoints: ViewpointConfig::default(),
368            object_rotations: vec![ObjectRotation::identity()],
369            output_dir: ".".to_string(),
370            filename_pattern: "capture_{rot}_{view}.png".to_string(),
371        }
372    }
373}
374
375impl SensorConfig {
376    /// Create config for TBP benchmark comparison (3 rotations × 24 viewpoints = 72 captures)
377    pub fn tbp_benchmark() -> Self {
378        Self {
379            viewpoints: ViewpointConfig::default(),
380            object_rotations: ObjectRotation::tbp_benchmark_rotations(),
381            output_dir: ".".to_string(),
382            filename_pattern: "capture_{rot}_{view}.png".to_string(),
383        }
384    }
385
386    /// Create config for full TBP training (14 rotations × 24 viewpoints = 336 captures)
387    pub fn tbp_full_training() -> Self {
388        Self {
389            viewpoints: ViewpointConfig::default(),
390            object_rotations: ObjectRotation::tbp_known_orientations(),
391            output_dir: ".".to_string(),
392            filename_pattern: "capture_{rot}_{view}.png".to_string(),
393        }
394    }
395
396    /// Total number of captures this config will generate
397    pub fn total_captures(&self) -> usize {
398        self.viewpoints.viewpoint_count() * self.object_rotations.len()
399    }
400}
401
402/// Generate camera viewpoints using spherical coordinates.
403///
404/// Spherical coordinate system (matching TBP habitat sensor conventions):
405/// - Yaw: horizontal rotation around Y-axis (0° to 360°)
406/// - Pitch: elevation angle from horizontal plane (-90° to +90°)
407/// - Radius: distance from origin (object center)
408pub fn generate_viewpoints(config: &ViewpointConfig) -> Vec<Transform> {
409    let mut views = Vec::with_capacity(config.viewpoint_count());
410
411    for pitch_deg in &config.pitch_angles_deg {
412        let pitch = pitch_deg.to_radians();
413
414        for i in 0..config.yaw_count {
415            let yaw = (i as f32) * 2.0 * PI / (config.yaw_count as f32);
416
417            // Spherical to Cartesian conversion (Y-up coordinate system)
418            // x = r * cos(pitch) * sin(yaw)
419            // y = r * sin(pitch)
420            // z = r * cos(pitch) * cos(yaw)
421            let x = config.radius * pitch.cos() * yaw.sin();
422            let y = config.radius * pitch.sin();
423            let z = config.radius * pitch.cos() * yaw.cos();
424
425            let transform = Transform::from_xyz(x, y, z).looking_at(Vec3::ZERO, Vec3::Y);
426            views.push(transform);
427        }
428    }
429    views
430}
431
/// Marker component for the target object being captured.
/// NOTE(review): presumably attached to the loaded YCB mesh entity by the
/// capture/render systems — confirm against the render and batch modules.
#[derive(Component)]
pub struct CaptureTarget;
435
/// Marker component for the capture camera.
/// NOTE(review): presumably tags the camera entity positioned by
/// `generate_viewpoints` transforms — confirm against the render module.
#[derive(Component)]
pub struct CaptureCamera;
439
440// ============================================================================
441// Headless Rendering API (NEW)
442// ============================================================================
443
/// Configuration for headless rendering.
///
/// Matches TBP habitat sensor defaults: 64x64 resolution with RGBD output.
/// The effective vertical FOV is 60°/zoom (see `fov_radians`).
#[derive(Clone, Debug)]
pub struct RenderConfig {
    /// Image width in pixels (default: 64)
    pub width: u32,
    /// Image height in pixels (default: 64)
    pub height: u32,
    /// Zoom factor affecting field of view (default: 1.0)
    /// Use >1 to zoom in (narrower FOV), <1 to zoom out (wider FOV)
    pub zoom: f32,
    /// Near clipping plane in meters (default: 0.01)
    pub near_plane: f32,
    /// Far clipping plane in meters (default: 10.0)
    pub far_plane: f32,
    /// Lighting configuration
    pub lighting: LightingConfig,
}
463
/// Lighting configuration for rendering.
///
/// Controls ambient light and a key/fill pair of point lights in the scene.
#[derive(Clone, Debug)]
pub struct LightingConfig {
    /// Ambient light brightness (0.0 - 1.0, default: 0.3)
    pub ambient_brightness: f32,
    /// Key light intensity in lumens (default: 1500.0)
    pub key_light_intensity: f32,
    /// Key light position [x, y, z] (default: [4.0, 8.0, 4.0])
    pub key_light_position: [f32; 3],
    /// Fill light intensity in lumens (default: 500.0)
    pub fill_light_intensity: f32,
    /// Fill light position [x, y, z] (default: [-4.0, 2.0, -4.0])
    pub fill_light_position: [f32; 3],
    /// Enable shadows (default: false for performance)
    pub shadows_enabled: bool,
}
482
483impl Default for LightingConfig {
484    fn default() -> Self {
485        Self {
486            ambient_brightness: 0.3,
487            key_light_intensity: 1500.0,
488            key_light_position: [4.0, 8.0, 4.0],
489            fill_light_intensity: 500.0,
490            fill_light_position: [-4.0, 2.0, -4.0],
491            shadows_enabled: false,
492        }
493    }
494}
495
496impl LightingConfig {
497    /// Bright lighting for clear visibility
498    pub fn bright() -> Self {
499        Self {
500            ambient_brightness: 0.5,
501            key_light_intensity: 2000.0,
502            key_light_position: [4.0, 8.0, 4.0],
503            fill_light_intensity: 800.0,
504            fill_light_position: [-4.0, 2.0, -4.0],
505            shadows_enabled: false,
506        }
507    }
508
509    /// Soft lighting with minimal shadows
510    pub fn soft() -> Self {
511        Self {
512            ambient_brightness: 0.4,
513            key_light_intensity: 1000.0,
514            key_light_position: [3.0, 6.0, 3.0],
515            fill_light_intensity: 600.0,
516            fill_light_position: [-3.0, 3.0, -3.0],
517            shadows_enabled: false,
518        }
519    }
520
521    /// Unlit mode - ambient only, no point lights
522    pub fn unlit() -> Self {
523        Self {
524            ambient_brightness: 1.0,
525            key_light_intensity: 0.0,
526            key_light_position: [0.0, 0.0, 0.0],
527            fill_light_intensity: 0.0,
528            fill_light_position: [0.0, 0.0, 0.0],
529            shadows_enabled: false,
530        }
531    }
532}
533
534impl Default for RenderConfig {
535    fn default() -> Self {
536        Self::tbp_default()
537    }
538}
539
540impl RenderConfig {
541    /// TBP-compatible 64x64 RGBD sensor configuration.
542    ///
543    /// This matches the default resolution used in TBP's habitat sensor.
544    pub fn tbp_default() -> Self {
545        Self {
546            width: 64,
547            height: 64,
548            zoom: 1.0,
549            near_plane: 0.01,
550            far_plane: 10.0,
551            lighting: LightingConfig::default(),
552        }
553    }
554
555    /// Higher resolution configuration for debugging and visualization.
556    pub fn preview() -> Self {
557        Self {
558            width: 256,
559            height: 256,
560            zoom: 1.0,
561            near_plane: 0.01,
562            far_plane: 10.0,
563            lighting: LightingConfig::default(),
564        }
565    }
566
567    /// High resolution configuration for detailed captures.
568    pub fn high_res() -> Self {
569        Self {
570            width: 512,
571            height: 512,
572            zoom: 1.0,
573            near_plane: 0.01,
574            far_plane: 10.0,
575            lighting: LightingConfig::default(),
576        }
577    }
578
579    /// Calculate vertical field of view in radians based on zoom.
580    ///
581    /// Base FOV is 60 degrees, adjusted by zoom factor.
582    pub fn fov_radians(&self) -> f32 {
583        let base_fov_deg = 60.0_f32;
584        (base_fov_deg / self.zoom).to_radians()
585    }
586
587    /// Compute camera intrinsics for use with neocortx.
588    ///
589    /// Returns focal length and principal point based on resolution and FOV.
590    /// Uses f64 for TBP numerical precision compatibility.
591    pub fn intrinsics(&self) -> CameraIntrinsics {
592        let fov = self.fov_radians() as f64;
593        // focal_length = (height/2) / tan(fov/2)
594        let fy = (self.height as f64 / 2.0) / (fov / 2.0).tan();
595        let fx = fy; // Assuming square pixels
596
597        CameraIntrinsics {
598            focal_length: [fx, fy],
599            principal_point: [self.width as f64 / 2.0, self.height as f64 / 2.0],
600            image_size: [self.width, self.height],
601        }
602    }
603}
604
/// Camera intrinsic parameters for 3D reconstruction.
///
/// Compatible with neocortx's VisionIntrinsics format.
/// Uses f64 for TBP numerical precision compatibility.
/// Produced by `RenderConfig::intrinsics`; consumed by `project`/`unproject`.
#[derive(Clone, Debug, PartialEq)]
pub struct CameraIntrinsics {
    /// Focal length in pixels (fx, fy)
    pub focal_length: [f64; 2],
    /// Principal point (cx, cy) - typically image center
    pub principal_point: [f64; 2],
    /// Image dimensions (width, height)
    pub image_size: [u32; 2],
}
618
619impl CameraIntrinsics {
620    /// Project a 3D point to 2D pixel coordinates.
621    pub fn project(&self, point: Vec3) -> Option<[f64; 2]> {
622        if point.z <= 0.0 {
623            return None;
624        }
625        let x = (point.x as f64 / point.z as f64) * self.focal_length[0] + self.principal_point[0];
626        let y = (point.y as f64 / point.z as f64) * self.focal_length[1] + self.principal_point[1];
627        Some([x, y])
628    }
629
630    /// Unproject a 2D pixel to a 3D point at given depth.
631    pub fn unproject(&self, pixel: [f64; 2], depth: f64) -> [f64; 3] {
632        let x = (pixel[0] - self.principal_point[0]) / self.focal_length[0] * depth;
633        let y = (pixel[1] - self.principal_point[1]) / self.focal_length[1] * depth;
634        [x, y, depth]
635    }
636}
637
/// Output from headless rendering containing RGBA and depth data.
/// Pixel accessors and neocortx conversions live on the `impl` block below.
#[derive(Clone, Debug)]
pub struct RenderOutput {
    /// RGBA pixel data in row-major order (width * height * 4 bytes)
    pub rgba: Vec<u8>,
    /// Depth values in meters, row-major order (width * height f64s)
    /// Values are linear depth from camera, not normalized.
    /// Uses f64 for TBP numerical precision compatibility.
    pub depth: Vec<f64>,
    /// Image width in pixels
    pub width: u32,
    /// Image height in pixels
    pub height: u32,
    /// Camera intrinsics used for this render
    pub intrinsics: CameraIntrinsics,
    /// Camera transform (world position and orientation)
    pub camera_transform: Transform,
    /// Object rotation applied during render
    pub object_rotation: ObjectRotation,
}
658
659impl RenderOutput {
660    /// Get RGBA pixel at (x, y). Returns None if out of bounds.
661    pub fn get_rgba(&self, x: u32, y: u32) -> Option<[u8; 4]> {
662        if x >= self.width || y >= self.height {
663            return None;
664        }
665        let idx = ((y * self.width + x) * 4) as usize;
666        Some([
667            self.rgba[idx],
668            self.rgba[idx + 1],
669            self.rgba[idx + 2],
670            self.rgba[idx + 3],
671        ])
672    }
673
674    /// Get depth value at (x, y) in meters. Returns None if out of bounds.
675    pub fn get_depth(&self, x: u32, y: u32) -> Option<f64> {
676        if x >= self.width || y >= self.height {
677            return None;
678        }
679        let idx = (y * self.width + x) as usize;
680        Some(self.depth[idx])
681    }
682
683    /// Get RGB pixel (without alpha) at (x, y).
684    pub fn get_rgb(&self, x: u32, y: u32) -> Option<[u8; 3]> {
685        self.get_rgba(x, y).map(|rgba| [rgba[0], rgba[1], rgba[2]])
686    }
687
688    /// Convert to neocortx-compatible image format: Vec<Vec<[u8; 3]>>
689    pub fn to_rgb_image(&self) -> Vec<Vec<[u8; 3]>> {
690        let mut image = Vec::with_capacity(self.height as usize);
691        for y in 0..self.height {
692            let mut row = Vec::with_capacity(self.width as usize);
693            for x in 0..self.width {
694                row.push(self.get_rgb(x, y).unwrap_or([0, 0, 0]));
695            }
696            image.push(row);
697        }
698        image
699    }
700
701    /// Convert depth to neocortx-compatible format: Vec<Vec<f64>>
702    pub fn to_depth_image(&self) -> Vec<Vec<f64>> {
703        let mut image = Vec::with_capacity(self.height as usize);
704        for y in 0..self.height {
705            let mut row = Vec::with_capacity(self.width as usize);
706            for x in 0..self.width {
707                row.push(self.get_depth(x, y).unwrap_or(0.0));
708            }
709            image.push(row);
710        }
711        image
712    }
713}
714
/// Errors that can occur during rendering and file operations.
///
/// Implements `Display` and `std::error::Error`, so it can be boxed or
/// propagated with `?` like any standard error type.
#[derive(Debug, Clone)]
pub enum RenderError {
    /// Object mesh file not found
    MeshNotFound(String),
    /// Object texture file not found
    TextureNotFound(String),
    /// Generic file not found error
    FileNotFound { path: String, reason: String },
    /// File write failed
    FileWriteFailed { path: String, reason: String },
    /// Directory creation failed
    DirectoryCreationFailed { path: String, reason: String },
    /// Bevy rendering failed
    RenderFailed(String),
    /// Invalid configuration
    InvalidConfig(String),
    /// Invalid input parameters
    InvalidInput(String),
    /// JSON serialization/deserialization error
    SerializationError(String),
    /// Binary data parsing error
    DataParsingError(String),
    /// Render timeout
    RenderTimeout { duration_secs: u64 },
}
741
742impl std::fmt::Display for RenderError {
743    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
744        match self {
745            RenderError::MeshNotFound(path) => write!(f, "Mesh not found: {}", path),
746            RenderError::TextureNotFound(path) => write!(f, "Texture not found: {}", path),
747            RenderError::FileNotFound { path, reason } => {
748                write!(f, "File not found at {}: {}", path, reason)
749            }
750            RenderError::FileWriteFailed { path, reason } => {
751                write!(f, "Failed to write file {}: {}", path, reason)
752            }
753            RenderError::DirectoryCreationFailed { path, reason } => {
754                write!(f, "Failed to create directory {}: {}", path, reason)
755            }
756            RenderError::RenderFailed(msg) => write!(f, "Render failed: {}", msg),
757            RenderError::InvalidConfig(msg) => write!(f, "Invalid config: {}", msg),
758            RenderError::InvalidInput(msg) => write!(f, "Invalid input: {}", msg),
759            RenderError::SerializationError(msg) => write!(f, "Serialization error: {}", msg),
760            RenderError::DataParsingError(msg) => write!(f, "Data parsing error: {}", msg),
761            RenderError::RenderTimeout { duration_secs } => {
762                write!(f, "Render timeout after {} seconds", duration_secs)
763            }
764        }
765    }
766}
767
// Marker impl: `Display` above supplies the message; no source() chaining needed.
impl std::error::Error for RenderError {}
769
/// Render a YCB object to an in-memory buffer.
///
/// This is the primary API for headless rendering. It spawns a minimal Bevy app,
/// renders a single frame, extracts the RGBA and depth data, and shuts down.
///
/// Call [`initialize`] once at startup before the first render so the GPU
/// backend environment is configured (see its docs for why this matters).
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory (e.g. "/tmp/ycb/003_cracker_box")
/// * `camera_transform` - Camera position and orientation (use `generate_viewpoints`)
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration (resolution, depth range, etc.)
///
/// # Example
/// ```ignore
/// use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
/// use std::path::Path;
///
/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
/// let output = render_to_buffer(
///     Path::new("/tmp/ycb/003_cracker_box"),
///     &viewpoints[0],
///     &ObjectRotation::identity(),
///     &RenderConfig::tbp_default(),
/// )?;
/// ```
pub fn render_to_buffer(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
) -> Result<RenderOutput, RenderError> {
    // Thin delegation to the Bevy headless renderer in the `render` module.
    render::render_headless(object_dir, camera_transform, object_rotation, config)
}
803
804/// Render all viewpoints and rotations for a YCB object.
805///
806/// Convenience function that renders all combinations of viewpoints and rotations.
807///
808/// # Arguments
809/// * `object_dir` - Path to YCB object directory
810/// * `viewpoint_config` - Viewpoint configuration (camera positions)
811/// * `rotations` - Object rotations to render
812/// * `render_config` - Render configuration
813///
814/// # Returns
815/// Vector of RenderOutput, one per viewpoint × rotation combination.
816pub fn render_all_viewpoints(
817    object_dir: &Path,
818    viewpoint_config: &ViewpointConfig,
819    rotations: &[ObjectRotation],
820    render_config: &RenderConfig,
821) -> Result<Vec<RenderOutput>, RenderError> {
822    let viewpoints = generate_viewpoints(viewpoint_config);
823    let mut outputs = Vec::with_capacity(viewpoints.len() * rotations.len());
824
825    for rotation in rotations {
826        for viewpoint in &viewpoints {
827            let output = render_to_buffer(object_dir, viewpoint, rotation, render_config)?;
828            outputs.push(output);
829        }
830    }
831
832    Ok(outputs)
833}
834
835/// Render with model caching support for efficient multi-viewpoint rendering.
836///
837/// This function tracks which models have been loaded and provides performance
838/// insights. For maximum efficiency when rendering many viewpoints of the same
839/// object, use the batch rendering API (`create_batch_renderer`, `render_batch`).
840///
841/// # Arguments
842/// * `object_dir` - Path to YCB object directory
843/// * `camera_transform` - Camera position and orientation
844/// * `object_rotation` - Rotation to apply to the object
845/// * `config` - Render configuration
846/// * `cache` - Model cache to track loaded assets
847///
848/// # Returns
849/// RenderOutput with rendered RGBA and depth data
850///
851/// # Example
852/// ```ignore
853/// use bevy_sensor::{render_to_buffer_cached, cache::ModelCache, RenderConfig, ObjectRotation};
854/// use std::path::PathBuf;
855///
856/// let mut cache = ModelCache::new();
857/// let object_dir = PathBuf::from("/tmp/ycb/003_cracker_box");
858/// let config = RenderConfig::tbp_default();
859/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
860///
861/// // First render: loads from disk and caches
862/// let output1 = render_to_buffer_cached(
863///     &object_dir,
864///     &viewpoints[0],
865///     &ObjectRotation::identity(),
866///     &config,
867///     &mut cache,
868/// )?;
869///
870/// // Subsequent renders: tracks in cache (actual speedup comes from batch API)
871/// for viewpoint in &viewpoints[1..] {
872///     let output = render_to_buffer_cached(
873///         &object_dir,
874///         viewpoint,
875///         &ObjectRotation::identity(),
876///         &config,
877///         &mut cache,
878///     )?;
879/// }
880/// ```
881///
882/// # Note
883/// This function uses the same rendering engine as `render_to_buffer()`. For true
884/// asset caching performance gains (2-3x speedup), combine with batch rendering:
885///
886/// ```ignore
887/// use bevy_sensor::{render_batch, batch::BatchRenderRequest, BatchRenderConfig, RenderConfig, ObjectRotation};
888///
889/// let requests: Vec<_> = viewpoints.iter().map(|vp| {
890///     BatchRenderRequest {
891///         object_dir: object_dir.clone(),
892///         viewpoint: *vp,
893///         object_rotation: ObjectRotation::identity(),
894///         render_config: RenderConfig::tbp_default(),
895///     }
896/// }).collect();
897///
898/// let outputs = render_batch(requests, &BatchRenderConfig::default())?;
899/// ```
900pub fn render_to_buffer_cached(
901    object_dir: &Path,
902    camera_transform: &Transform,
903    object_rotation: &ObjectRotation,
904    config: &RenderConfig,
905    cache: &mut cache::ModelCache,
906) -> Result<RenderOutput, RenderError> {
907    let mesh_path = object_dir.join("google_16k/textured.obj");
908    let texture_path = object_dir.join("google_16k/texture_map.png");
909
910    // Track in cache
911    cache.cache_scene(mesh_path.clone());
912    cache.cache_texture(texture_path.clone());
913
914    // Render using standard pipeline
915    render::render_headless(object_dir, camera_transform, object_rotation, config)
916}
917
918/// Render directly to files (for subprocess mode).
919///
920/// This function is designed for subprocess rendering where the process will exit
921/// after rendering. It saves RGBA and depth data directly to the specified files
922/// before the process terminates.
923///
924/// # Arguments
925/// * `object_dir` - Path to YCB object directory
926/// * `camera_transform` - Camera position and orientation
927/// * `object_rotation` - Rotation to apply to the object
928/// * `config` - Render configuration
929/// * `rgba_path` - Output path for RGBA PNG
930/// * `depth_path` - Output path for depth data (raw f32 bytes)
931///
932/// # Note
933/// This function may call `std::process::exit(0)` and not return.
934pub fn render_to_files(
935    object_dir: &Path,
936    camera_transform: &Transform,
937    object_rotation: &ObjectRotation,
938    config: &RenderConfig,
939    rgba_path: &Path,
940    depth_path: &Path,
941) -> Result<(), RenderError> {
942    render::render_to_files(
943        object_dir,
944        camera_transform,
945        object_rotation,
946        config,
947        rgba_path,
948        depth_path,
949    )
950}
951
952// Re-export batch types for convenient API access
953pub use batch::{
954    BatchRenderConfig, BatchRenderError, BatchRenderOutput, BatchRenderRequest, BatchRenderer,
955    BatchState, RenderStatus,
956};
957
958/// Create a new batch renderer for efficient multi-viewpoint rendering.
959///
960/// This creates a persistent Bevy app that can render multiple viewpoints without
961/// subprocess spawning overhead. Achieves 10-100x speedup vs individual render_to_buffer calls.
962///
963/// # Arguments
964/// * `config` - Batch rendering configuration
965///
966/// # Returns
967/// A BatchRenderer instance ready to queue render requests
968///
969/// # Example
970/// ```ignore
971/// use bevy_sensor::{create_batch_renderer, queue_render_request, render_next_in_batch, BatchRenderConfig};
972///
973/// let mut renderer = create_batch_renderer(&BatchRenderConfig::default())?;
974/// ```
975pub fn create_batch_renderer(config: &BatchRenderConfig) -> Result<BatchRenderer, RenderError> {
976    // For now, just create an empty renderer that will need a Bevy app
977    // The actual app creation happens when rendering starts
978    Ok(BatchRenderer::new(config.clone()))
979}
980
981/// Queue a render request for batch processing.
982///
983/// Adds a render request to the batch queue. Requests are processed in order
984/// when you call render_next_in_batch().
985///
986/// # Arguments
987/// * `renderer` - The batch renderer instance
988/// * `request` - The render request
989///
990/// # Returns
991/// Ok if queued successfully, Err if queue is full
992///
993/// # Example
994/// ```ignore
995/// use bevy_sensor::{batch::BatchRenderRequest, RenderConfig, ObjectRotation};
996/// use std::path::PathBuf;
997///
998/// queue_render_request(&mut renderer, BatchRenderRequest {
999///     object_dir: PathBuf::from("/tmp/ycb/003_cracker_box"),
1000///     viewpoint: camera_transform,
1001///     object_rotation: ObjectRotation::identity(),
1002///     render_config: RenderConfig::tbp_default(),
1003/// })?;
1004/// ```
1005pub fn queue_render_request(
1006    renderer: &mut BatchRenderer,
1007    request: BatchRenderRequest,
1008) -> Result<(), RenderError> {
1009    renderer
1010        .queue_request(request)
1011        .map_err(|e| RenderError::RenderFailed(e.to_string()))
1012}
1013
1014/// Process and execute the next render in the batch queue.
1015///
1016/// Executes a single render from the queued requests. Returns None when the queue is empty.
1017/// Use this in a loop to process all queued renders.
1018///
1019/// # Arguments
1020/// * `renderer` - The batch renderer instance
1021/// * `timeout_ms` - Timeout in milliseconds for this render
1022///
1023/// # Returns
1024/// Some(output) if a render completed, None if queue is empty
1025///
1026/// # Example
1027/// ```ignore
1028/// loop {
1029///     match render_next_in_batch(&mut renderer, 500)? {
1030///         Some(output) => println!("Render complete: {:?}", output.status),
1031///         None => break, // All renders done
1032///     }
1033/// }
1034/// ```
1035pub fn render_next_in_batch(
1036    renderer: &mut BatchRenderer,
1037    _timeout_ms: u32,
1038) -> Result<Option<BatchRenderOutput>, RenderError> {
1039    // This is a stub - the actual implementation will require a running Bevy app
1040    // For now, just render single batches immediately using render_to_buffer
1041    if let Some(request) = renderer.pending_requests.pop_front() {
1042        let output = render_to_buffer(
1043            &request.object_dir,
1044            &request.viewpoint,
1045            &request.object_rotation,
1046            &request.render_config,
1047        )?;
1048        let batch_output = BatchRenderOutput::from_render_output(request, output);
1049        renderer.completed_results.push(batch_output.clone());
1050        renderer.renders_processed += 1;
1051        Ok(Some(batch_output))
1052    } else {
1053        Ok(None)
1054    }
1055}
1056
1057/// Render multiple requests in batch (convenience function).
1058///
1059/// Queues all requests and executes them in batch, returning all results.
1060/// Simpler than manage queue + loop for one-off batches.
1061///
1062/// # Arguments
1063/// * `requests` - Vector of render requests
1064/// * `config` - Batch rendering configuration
1065///
1066/// # Returns
1067/// Vector of BatchRenderOutput results in same order as input
1068///
1069/// # Example
1070/// ```ignore
1071/// use bevy_sensor::{render_batch, batch::BatchRenderRequest, BatchRenderConfig};
1072///
1073/// let results = render_batch(requests, &BatchRenderConfig::default())?;
1074/// ```
1075pub fn render_batch(
1076    requests: Vec<BatchRenderRequest>,
1077    config: &BatchRenderConfig,
1078) -> Result<Vec<BatchRenderOutput>, RenderError> {
1079    let mut renderer = create_batch_renderer(config)?;
1080
1081    // Queue all requests
1082    for request in requests {
1083        queue_render_request(&mut renderer, request)?;
1084    }
1085
1086    // Execute all and collect results
1087    let mut results = Vec::new();
1088    while let Some(output) = render_next_in_batch(&mut renderer, config.frame_timeout_ms)? {
1089        results.push(output);
1090    }
1091
1092    Ok(results)
1093}
1094
1095// Re-export bevy types that consumers will need
1096pub use bevy::prelude::{Quat, Transform, Vec3};
1097
#[cfg(test)]
mod tests {
    //! Unit tests for the public API: object rotations, viewpoint generation,
    //! sensor configs, YCB path helpers, render configuration / camera
    //! intrinsics, and the `RenderOutput` accessors. Nothing here touches the
    //! GPU, the filesystem, or a running Bevy app.
    use super::*;

    #[test]
    fn test_object_rotation_identity() {
        let rot = ObjectRotation::identity();
        assert_eq!(rot.pitch, 0.0);
        assert_eq!(rot.yaw, 0.0);
        assert_eq!(rot.roll, 0.0);
    }

    #[test]
    fn test_object_rotation_from_array() {
        // Array order is [pitch, yaw, roll].
        let rot = ObjectRotation::from_array([10.0, 20.0, 30.0]);
        assert_eq!(rot.pitch, 10.0);
        assert_eq!(rot.yaw, 20.0);
        assert_eq!(rot.roll, 30.0);
    }

    #[test]
    fn test_tbp_benchmark_rotations() {
        let rotations = ObjectRotation::tbp_benchmark_rotations();
        assert_eq!(rotations.len(), 3);
        assert_eq!(rotations[0], ObjectRotation::from_array([0.0, 0.0, 0.0]));
        assert_eq!(rotations[1], ObjectRotation::from_array([0.0, 90.0, 0.0]));
        assert_eq!(rotations[2], ObjectRotation::from_array([0.0, 180.0, 0.0]));
    }

    #[test]
    fn test_tbp_known_orientations_count() {
        let orientations = ObjectRotation::tbp_known_orientations();
        assert_eq!(orientations.len(), 14);
    }

    #[test]
    fn test_rotation_to_quat() {
        let rot = ObjectRotation::identity();
        let quat = rot.to_quat();
        // Identity quaternion should be approximately (1, 0, 0, 0)
        assert!((quat.w - 1.0).abs() < 0.001);
        assert!(quat.x.abs() < 0.001);
        assert!(quat.y.abs() < 0.001);
        assert!(quat.z.abs() < 0.001);
    }

    #[test]
    fn test_rotation_90_yaw() {
        let rot = ObjectRotation::new(0.0, 90.0, 0.0);
        let quat = rot.to_quat();
        // 90° Y rotation: w ≈ 0.707, y ≈ 0.707
        assert!((quat.w - 0.707).abs() < 0.01);
        assert!((quat.y - 0.707).abs() < 0.01);
    }

    #[test]
    fn test_viewpoint_config_default() {
        let config = ViewpointConfig::default();
        assert_eq!(config.radius, 0.5);
        assert_eq!(config.yaw_count, 8);
        assert_eq!(config.pitch_angles_deg.len(), 3);
    }

    #[test]
    fn test_viewpoint_count() {
        let config = ViewpointConfig::default();
        assert_eq!(config.viewpoint_count(), 24); // 8 × 3
    }

    #[test]
    fn test_generate_viewpoints_count() {
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 24);
    }

    #[test]
    fn test_viewpoints_spherical_radius() {
        // Every generated camera must sit on the configured sphere.
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let actual_radius = transform.translation.length();
            assert!(
                (actual_radius - config.radius).abs() < 0.001,
                "Viewpoint {} has incorrect radius: {} (expected {})",
                i,
                actual_radius,
                config.radius
            );
        }
    }

    #[test]
    fn test_viewpoints_looking_at_origin() {
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let forward = transform.forward();
            let to_origin = (Vec3::ZERO - transform.translation).normalize();
            let dot = forward.dot(to_origin);
            assert!(
                dot > 0.99,
                "Viewpoint {} not looking at origin, dot product: {}",
                i,
                dot
            );
        }
    }

    #[test]
    fn test_sensor_config_default() {
        let config = SensorConfig::default();
        assert_eq!(config.object_rotations.len(), 1);
        assert_eq!(config.total_captures(), 24);
    }

    #[test]
    fn test_sensor_config_tbp_benchmark() {
        let config = SensorConfig::tbp_benchmark();
        assert_eq!(config.object_rotations.len(), 3);
        assert_eq!(config.total_captures(), 72); // 3 rotations × 24 viewpoints
    }

    #[test]
    fn test_sensor_config_tbp_full() {
        let config = SensorConfig::tbp_full_training();
        assert_eq!(config.object_rotations.len(), 14);
        assert_eq!(config.total_captures(), 336); // 14 rotations × 24 viewpoints
    }

    #[test]
    fn test_ycb_representative_objects() {
        // Verify representative objects are defined
        assert_eq!(crate::ycb::REPRESENTATIVE_OBJECTS.len(), 3);
        assert!(crate::ycb::REPRESENTATIVE_OBJECTS.contains(&"003_cracker_box"));
    }

    #[test]
    #[allow(deprecated)]
    fn test_ycb_ten_objects() {
        // Verify ten objects subset is defined
        assert_eq!(crate::ycb::TEN_OBJECTS.len(), 10);
    }

    #[test]
    fn test_ycb_object_mesh_path() {
        let path = crate::ycb::object_mesh_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path,
            std::path::Path::new("/tmp/ycb")
                .join("003_cracker_box")
                .join("google_16k")
                .join("textured.obj")
        );
    }

    #[test]
    fn test_ycb_object_texture_path() {
        let path = crate::ycb::object_texture_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path,
            std::path::Path::new("/tmp/ycb")
                .join("003_cracker_box")
                .join("google_16k")
                .join("texture_map.png")
        );
    }

    // =========================================================================
    // Headless Rendering API Tests
    // =========================================================================

    #[test]
    fn test_render_config_tbp_default() {
        let config = RenderConfig::tbp_default();
        assert_eq!(config.width, 64);
        assert_eq!(config.height, 64);
        assert_eq!(config.zoom, 1.0);
        assert_eq!(config.near_plane, 0.01);
        assert_eq!(config.far_plane, 10.0);
    }

    #[test]
    fn test_render_config_preview() {
        let config = RenderConfig::preview();
        assert_eq!(config.width, 256);
        assert_eq!(config.height, 256);
    }

    #[test]
    fn test_render_config_default_is_tbp() {
        let default = RenderConfig::default();
        let tbp = RenderConfig::tbp_default();
        assert_eq!(default.width, tbp.width);
        assert_eq!(default.height, tbp.height);
    }

    #[test]
    fn test_render_config_fov() {
        let config = RenderConfig::tbp_default();
        let fov = config.fov_radians();
        // Base FOV is 60 degrees = ~1.047 radians
        assert!((fov - 1.047).abs() < 0.01);

        // Zoom in should reduce FOV
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..config
        };
        assert!(zoomed.fov_radians() < fov);
    }

    #[test]
    fn test_render_config_intrinsics() {
        let config = RenderConfig::tbp_default();
        let intrinsics = config.intrinsics();

        assert_eq!(intrinsics.image_size, [64, 64]);
        assert_eq!(intrinsics.principal_point, [32.0, 32.0]);
        // Focal length should be positive and reasonable
        assert!(intrinsics.focal_length[0] > 0.0);
        assert!(intrinsics.focal_length[1] > 0.0);
        // For 64x64 with 60° FOV, focal length ≈ 55.4 pixels
        assert!((intrinsics.focal_length[0] - 55.4).abs() < 1.0);
    }

    #[test]
    fn test_camera_intrinsics_project() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Point at origin of camera frame projects to principal point
        let center = intrinsics.project(Vec3::new(0.0, 0.0, 1.0));
        assert!(center.is_some());
        let [x, y] = center.unwrap();
        assert!((x - 32.0).abs() < 0.001);
        assert!((y - 32.0).abs() < 0.001);

        // Point behind camera returns None
        let behind = intrinsics.project(Vec3::new(0.0, 0.0, -1.0));
        assert!(behind.is_none());
    }

    #[test]
    fn test_camera_intrinsics_unproject() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Unproject principal point at depth 1.0
        let point = intrinsics.unproject([32.0, 32.0], 1.0);
        assert!((point[0]).abs() < 0.001); // x
        assert!((point[1]).abs() < 0.001); // y
        assert!((point[2] - 1.0).abs() < 0.001); // z
    }

    #[test]
    fn test_render_output_get_rgba() {
        // 2x2 image, row-major RGBA: red, green / blue, white.
        let output = RenderOutput {
            rgba: vec![
                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
            ],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        // Top-left: red
        assert_eq!(output.get_rgba(0, 0), Some([255, 0, 0, 255]));
        // Top-right: green
        assert_eq!(output.get_rgba(1, 0), Some([0, 255, 0, 255]));
        // Bottom-left: blue
        assert_eq!(output.get_rgba(0, 1), Some([0, 0, 255, 255]));
        // Bottom-right: white
        assert_eq!(output.get_rgba(1, 1), Some([255, 255, 255, 255]));
        // Out of bounds
        assert_eq!(output.get_rgba(2, 0), None);
    }

    #[test]
    fn test_render_output_get_depth() {
        let output = RenderOutput {
            rgba: vec![0u8; 16],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        assert_eq!(output.get_depth(0, 0), Some(1.0));
        assert_eq!(output.get_depth(1, 0), Some(2.0));
        assert_eq!(output.get_depth(0, 1), Some(3.0));
        assert_eq!(output.get_depth(1, 1), Some(4.0));
        assert_eq!(output.get_depth(2, 0), None);
    }

    #[test]
    fn test_render_output_to_rgb_image() {
        let output = RenderOutput {
            rgba: vec![
                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
            ],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        let image = output.to_rgb_image();
        assert_eq!(image.len(), 2); // 2 rows
        assert_eq!(image[0].len(), 2); // 2 columns
        assert_eq!(image[0][0], [255, 0, 0]); // Red
        assert_eq!(image[0][1], [0, 255, 0]); // Green
        assert_eq!(image[1][0], [0, 0, 255]); // Blue
        assert_eq!(image[1][1], [255, 255, 255]); // White
    }

    #[test]
    fn test_render_output_to_depth_image() {
        let output = RenderOutput {
            rgba: vec![0u8; 16],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        let depth_image = output.to_depth_image();
        assert_eq!(depth_image.len(), 2);
        assert_eq!(depth_image[0], vec![1.0, 2.0]);
        assert_eq!(depth_image[1], vec![3.0, 4.0]);
    }

    #[test]
    fn test_render_error_display() {
        let err = RenderError::MeshNotFound("/path/to/mesh.obj".to_string());
        assert!(err.to_string().contains("Mesh not found"));
        assert!(err.to_string().contains("/path/to/mesh.obj"));
    }

    // =========================================================================
    // Edge Case Tests
    // =========================================================================

    #[test]
    fn test_object_rotation_extreme_angles() {
        // Test angles beyond 360 degrees
        let rot = ObjectRotation::new(450.0, -720.0, 1080.0);
        let quat = rot.to_quat();
        // Quaternion should still be valid (normalized)
        assert!((quat.length() - 1.0).abs() < 0.001);
    }

    #[test]
    fn test_object_rotation_to_transform() {
        let rot = ObjectRotation::new(45.0, 90.0, 0.0);
        let transform = rot.to_transform();
        // Transform should have no translation
        assert_eq!(transform.translation, Vec3::ZERO);
        // Should have rotation
        assert!(transform.rotation != Quat::IDENTITY);
    }

    #[test]
    fn test_viewpoint_config_single_viewpoint() {
        let config = ViewpointConfig {
            radius: 1.0,
            yaw_count: 1,
            pitch_angles_deg: vec![0.0],
        };
        assert_eq!(config.viewpoint_count(), 1);
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 1);
        // Single viewpoint at yaw=0, pitch=0 should be at (0, 0, radius)
        let pos = viewpoints[0].translation;
        assert!((pos.x).abs() < 0.001);
        assert!((pos.y).abs() < 0.001);
        assert!((pos.z - 1.0).abs() < 0.001);
    }

    #[test]
    fn test_viewpoint_radius_scaling() {
        let config1 = ViewpointConfig {
            radius: 0.5,
            yaw_count: 4,
            pitch_angles_deg: vec![0.0],
        };
        let config2 = ViewpointConfig {
            radius: 2.0,
            yaw_count: 4,
            pitch_angles_deg: vec![0.0],
        };

        let v1 = generate_viewpoints(&config1);
        let v2 = generate_viewpoints(&config2);

        // Viewpoints should scale proportionally
        for (vp1, vp2) in v1.iter().zip(v2.iter()) {
            let ratio = vp2.translation.length() / vp1.translation.length();
            assert!((ratio - 4.0).abs() < 0.01); // 2.0 / 0.5 = 4.0
        }
    }

    #[test]
    fn test_camera_intrinsics_project_at_z_zero() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Point at z=0 should return None (division by zero protection)
        let result = intrinsics.project(Vec3::new(1.0, 1.0, 0.0));
        assert!(result.is_none());
    }

    #[test]
    fn test_camera_intrinsics_roundtrip() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Project a 3D point
        let original = Vec3::new(0.5, -0.3, 2.0);
        let projected = intrinsics.project(original).unwrap();

        // Unproject back with the same depth (convert f32 to f64)
        let unprojected = intrinsics.unproject(projected, original.z as f64);

        // Should get back approximately the same point
        assert!((unprojected[0] - original.x as f64).abs() < 0.001); // x
        assert!((unprojected[1] - original.y as f64).abs() < 0.001); // y
        assert!((unprojected[2] - original.z as f64).abs() < 0.001); // z
    }

    #[test]
    fn test_render_output_empty() {
        let output = RenderOutput {
            rgba: vec![],
            depth: vec![],
            width: 0,
            height: 0,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        // Should handle empty gracefully
        assert_eq!(output.get_rgba(0, 0), None);
        assert_eq!(output.get_depth(0, 0), None);
        assert!(output.to_rgb_image().is_empty());
        assert!(output.to_depth_image().is_empty());
    }

    #[test]
    fn test_render_output_1x1() {
        let output = RenderOutput {
            rgba: vec![128, 64, 32, 255],
            depth: vec![0.5],
            width: 1,
            height: 1,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        assert_eq!(output.get_rgba(0, 0), Some([128, 64, 32, 255]));
        assert_eq!(output.get_depth(0, 0), Some(0.5));
        assert_eq!(output.get_rgb(0, 0), Some([128, 64, 32]));

        let rgb_img = output.to_rgb_image();
        assert_eq!(rgb_img.len(), 1);
        assert_eq!(rgb_img[0].len(), 1);
        assert_eq!(rgb_img[0][0], [128, 64, 32]);
    }

    #[test]
    fn test_render_config_high_res() {
        let config = RenderConfig::high_res();
        assert_eq!(config.width, 512);
        assert_eq!(config.height, 512);

        let intrinsics = config.intrinsics();
        assert_eq!(intrinsics.image_size, [512, 512]);
        assert_eq!(intrinsics.principal_point, [256.0, 256.0]);
    }

    #[test]
    fn test_render_config_zoom_affects_fov() {
        let base = RenderConfig::tbp_default();
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..base.clone()
        };

        // Higher zoom = lower FOV
        assert!(zoomed.fov_radians() < base.fov_radians());
        // Specifically, 2x zoom = half FOV
        assert!((zoomed.fov_radians() - base.fov_radians() / 2.0).abs() < 0.01);
    }

    #[test]
    fn test_render_config_zoom_affects_intrinsics() {
        let base = RenderConfig::tbp_default();
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..base.clone()
        };

        // Higher zoom = higher focal length
        let base_intrinsics = base.intrinsics();
        let zoomed_intrinsics = zoomed.intrinsics();

        assert!(zoomed_intrinsics.focal_length[0] > base_intrinsics.focal_length[0]);
    }

    #[test]
    fn test_lighting_config_variants() {
        let default = LightingConfig::default();
        let bright = LightingConfig::bright();
        let soft = LightingConfig::soft();
        let unlit = LightingConfig::unlit();

        // Bright should have higher intensity than default
        assert!(bright.key_light_intensity > default.key_light_intensity);

        // Unlit should have no point lights
        assert_eq!(unlit.key_light_intensity, 0.0);
        assert_eq!(unlit.fill_light_intensity, 0.0);
        assert_eq!(unlit.ambient_brightness, 1.0);

        // Soft should have lower intensity
        assert!(soft.key_light_intensity < default.key_light_intensity);
    }

    #[test]
    fn test_all_render_error_variants() {
        let errors = vec![
            RenderError::MeshNotFound("mesh.obj".to_string()),
            RenderError::TextureNotFound("texture.png".to_string()),
            RenderError::RenderFailed("GPU error".to_string()),
            RenderError::InvalidConfig("bad config".to_string()),
        ];

        for err in errors {
            // All variants should have Display impl
            let msg = err.to_string();
            assert!(!msg.is_empty());
        }
    }

    #[test]
    fn test_tbp_known_orientations_unique() {
        let orientations = ObjectRotation::tbp_known_orientations();

        // All 14 orientations should produce unique quaternions
        let quats: Vec<Quat> = orientations.iter().map(|r| r.to_quat()).collect();

        for (i, q1) in quats.iter().enumerate() {
            for (j, q2) in quats.iter().enumerate() {
                if i != j {
                    // Quaternions should be different (accounting for q == -q equivalence)
                    let dot = q1.dot(*q2).abs();
                    assert!(
                        dot < 0.999,
                        "Orientations {} and {} produce same quaternion",
                        i,
                        j
                    );
                }
            }
        }
    }
}