Skip to main content

bevy_sensor/
lib.rs

1//! bevy-sensor: Multi-view rendering for YCB object dataset
2//!
3//! This library provides Bevy-based rendering of 3D objects from multiple viewpoints,
4//! designed to match TBP (Thousand Brains Project) habitat sensor conventions for
5//! use in neocortx sensorimotor learning experiments.
6//!
7//! # Headless Rendering (NEW)
8//!
9//! Render directly to memory buffers for use in sensorimotor learning:
10//!
11//! ```ignore
12//! use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
13//! use std::path::Path;
14//!
15//! let config = RenderConfig::tbp_default(); // 64x64, RGBD
16//! let viewpoint = bevy_sensor::generate_viewpoints(&ViewpointConfig::default())[0];
17//! let rotation = ObjectRotation::identity();
18//!
19//! let output = render_to_buffer(
20//!     Path::new("/tmp/ycb/003_cracker_box"),
21//!     &viewpoint,
22//!     &rotation,
23//!     &config,
24//! )?;
25//!
26//! // output.rgba: Vec<u8> - RGBA pixels (64*64*4 bytes)
//! // output.depth: Vec<f64> - Depth values (64*64 f64 values)
28//! ```
29//!
30//! # File-based Capture (Legacy)
31//!
32//! ```ignore
33//! use bevy_sensor::{SensorConfig, ViewpointConfig, ObjectRotation};
34//!
35//! let config = SensorConfig {
36//!     viewpoints: ViewpointConfig::default(),
37//!     object_rotations: ObjectRotation::tbp_benchmark_rotations(),
38//!     ..Default::default()
39//! };
40//! ```
41//!
42//! # YCB Dataset
43//!
44//! Download YCB models programmatically:
45//!
46//! ```ignore
47//! use bevy_sensor::ycb::{download_models, Subset};
48//!
49//! // Download representative subset (3 objects)
50//! download_models("/tmp/ycb", Subset::Representative).await?;
51//! ```
52
53use bevy::prelude::*;
54use std::f32::consts::PI;
55use std::path::Path;
56
57// Headless rendering implementation
58// Full GPU rendering requires a display - see render module for details
59mod render;
60
61// Batch rendering API for efficient multi-viewpoint rendering
62pub mod batch;
63
64// WebGPU and cross-platform backend support
65pub mod backend;
66
67// Model caching system for efficient multi-viewpoint rendering
68pub mod cache;
69
70// Test fixtures for pre-rendered images (CI/CD support)
71pub mod fixtures;
72
73// Re-export ycbust types for convenience
74#[allow(deprecated)]
75pub use ycbust::{
76    self, DownloadOptions, Subset as YcbSubset, REPRESENTATIVE_OBJECTS, TBP_STANDARD_OBJECTS,
77    TEN_OBJECTS,
78};
79
80/// YCB dataset utilities
81pub mod ycb {
82    #[allow(deprecated)]
83    pub use ycbust::{
84        download_ycb, DownloadOptions, Subset, REPRESENTATIVE_OBJECTS, TBP_STANDARD_OBJECTS,
85        TEN_OBJECTS,
86    };
87
88    use reqwest::Client;
89    use std::path::Path;
90
91    /// Download YCB models to the specified directory.
92    ///
93    /// # Arguments
94    /// * `output_dir` - Directory to download models to
95    /// * `subset` - Which subset of objects to download
96    ///
97    /// # Example
98    /// ```ignore
99    /// use bevy_sensor::ycb::{download_models, Subset};
100    ///
101    /// download_models("/tmp/ycb", Subset::Representative).await?;
102    /// ```
103    pub async fn download_models<P: AsRef<Path>>(
104        output_dir: P,
105        subset: Subset,
106    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
107        let options = DownloadOptions {
108            overwrite: false,
109            full: false,
110            show_progress: true,
111            delete_archives: true,
112        };
113        download_ycb(subset, output_dir.as_ref(), options).await?;
114        Ok(())
115    }
116
117    /// Download YCB models with custom options.
118    pub async fn download_models_with_options<P: AsRef<Path>>(
119        output_dir: P,
120        subset: Subset,
121        options: DownloadOptions,
122    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
123        download_ycb(subset, output_dir.as_ref(), options).await?;
124        Ok(())
125    }
126
127    /// Download specific YCB objects by object ID using the standard `google_16k` meshes.
128    pub async fn download_objects<P: AsRef<Path>>(
129        output_dir: P,
130        object_ids: &[&str],
131    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
132        let output_dir = output_dir.as_ref();
133        let client = Client::new();
134        let options = DownloadOptions {
135            overwrite: false,
136            full: false,
137            show_progress: true,
138            delete_archives: true,
139        };
140
141        std::fs::create_dir_all(output_dir)?;
142
143        for object_id in object_ids {
144            let url = ycbust::get_tgz_url(object_id, "google_16k");
145            let archive_path = output_dir.join(format!("{object_id}_google_16k.tgz"));
146
147            if archive_path.exists() && !options.overwrite {
148                continue;
149            }
150
151            ycbust::download_file(&client, &url, &archive_path, options.show_progress).await?;
152            ycbust::extract_tgz(&archive_path, output_dir, options.delete_archives)?;
153        }
154
155        Ok(())
156    }
157
158    /// Check if YCB models exist at the given path
159    pub fn models_exist<P: AsRef<Path>>(output_dir: P) -> bool {
160        let path = output_dir.as_ref();
161        // Check for at least one representative object
162        path.join("003_cracker_box/google_16k/textured.obj")
163            .exists()
164    }
165
166    /// Get the path to a specific YCB object's OBJ file
167    pub fn object_mesh_path<P: AsRef<Path>>(output_dir: P, object_id: &str) -> std::path::PathBuf {
168        output_dir
169            .as_ref()
170            .join(object_id)
171            .join("google_16k")
172            .join("textured.obj")
173    }
174
175    /// Get the path to a specific YCB object's texture file
176    pub fn object_texture_path<P: AsRef<Path>>(
177        output_dir: P,
178        object_id: &str,
179    ) -> std::path::PathBuf {
180        output_dir
181            .as_ref()
182            .join(object_id)
183            .join("google_16k")
184            .join("texture_map.png")
185    }
186}
187
188/// Initialize bevy-sensor rendering backend configuration.
189///
190/// **IMPORTANT**: Call this function ONCE at the start of your application,
191/// before any rendering operations, especially when using bevy-sensor as a library.
192///
193/// This ensures proper backend selection (WebGPU for WSL2, Vulkan for Linux, etc.)
194/// and is critical for GPU rendering on WSL2 environments.
195///
196/// # Why This Matters
197///
198/// The WGPU rendering backend caches its backend selection early during initialization.
199/// When bevy-sensor is used as a library, environment variables must be set BEFORE
200/// any GPU rendering code runs. This function does that automatically.
201///
202/// # Example
203///
204/// ```ignore
205/// use bevy_sensor;
206///
207/// fn main() {
208///     // Initialize FIRST, before any rendering
209///     bevy_sensor::initialize();
210///
211///     // Now use the rendering API
212///     let output = bevy_sensor::render_to_buffer(
213///         object_dir, &viewpoint, &rotation, &config
214///     )?;
215/// }
216/// ```
217///
218/// # Calling Multiple Times
219///
220/// Safe to call multiple times - subsequent calls are no-ops after the first call.
221pub fn initialize() {
222    // Use a OnceCell equivalent to ensure this only runs once
223    use std::sync::atomic::{AtomicBool, Ordering};
224    static INITIALIZED: AtomicBool = AtomicBool::new(false);
225
226    if !INITIALIZED.swap(true, Ordering::SeqCst) {
227        // First call - initialize backend
228        let config = backend::BackendConfig::new();
229        config.apply_env();
230    }
231}
232
/// Object rotation in Euler angles (degrees), matching TBP benchmark format.
/// Format: [pitch, yaw, roll] or [x, y, z] rotation.
///
/// `Copy` is derived since the type is just three `f64`s; existing
/// `.clone()` call sites remain valid, so this is backward compatible.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct ObjectRotation {
    /// Rotation around X-axis (pitch) in degrees
    pub pitch: f64,
    /// Rotation around Y-axis (yaw) in degrees
    pub yaw: f64,
    /// Rotation around Z-axis (roll) in degrees
    pub roll: f64,
}
244
245impl ObjectRotation {
246    /// Create a new rotation from Euler angles in degrees
247    pub fn new(pitch: f64, yaw: f64, roll: f64) -> Self {
248        Self { pitch, yaw, roll }
249    }
250
251    /// Create from TBP-style array [pitch, yaw, roll] in degrees
252    pub fn from_array(arr: [f64; 3]) -> Self {
253        Self {
254            pitch: arr[0],
255            yaw: arr[1],
256            roll: arr[2],
257        }
258    }
259
260    /// Identity rotation (no rotation)
261    pub fn identity() -> Self {
262        Self::new(0.0, 0.0, 0.0)
263    }
264
265    /// TBP benchmark rotations: [0,0,0], [0,90,0], [0,180,0]
266    /// Used in shorter YCB experiments to reduce computational load.
267    pub fn tbp_benchmark_rotations() -> Vec<Self> {
268        vec![
269            Self::from_array([0.0, 0.0, 0.0]),
270            Self::from_array([0.0, 90.0, 0.0]),
271            Self::from_array([0.0, 180.0, 0.0]),
272        ]
273    }
274
275    /// TBP 14 known orientations (cube faces and corners)
276    /// These are the orientations objects are learned in during training.
277    pub fn tbp_known_orientations() -> Vec<Self> {
278        vec![
279            // 6 cube faces (90° rotations around each axis)
280            Self::from_array([0.0, 0.0, 0.0]),   // Front
281            Self::from_array([0.0, 90.0, 0.0]),  // Right
282            Self::from_array([0.0, 180.0, 0.0]), // Back
283            Self::from_array([0.0, 270.0, 0.0]), // Left
284            Self::from_array([90.0, 0.0, 0.0]),  // Top
285            Self::from_array([-90.0, 0.0, 0.0]), // Bottom
286            // 8 cube corners (45° rotations)
287            Self::from_array([45.0, 45.0, 0.0]),
288            Self::from_array([45.0, 135.0, 0.0]),
289            Self::from_array([45.0, 225.0, 0.0]),
290            Self::from_array([45.0, 315.0, 0.0]),
291            Self::from_array([-45.0, 45.0, 0.0]),
292            Self::from_array([-45.0, 135.0, 0.0]),
293            Self::from_array([-45.0, 225.0, 0.0]),
294            Self::from_array([-45.0, 315.0, 0.0]),
295        ]
296    }
297
298    /// Convert to Bevy Quat (converts f64 to f32 for Bevy compatibility)
299    pub fn to_quat(&self) -> Quat {
300        Quat::from_euler(
301            EulerRot::XYZ,
302            (self.pitch as f32).to_radians(),
303            (self.yaw as f32).to_radians(),
304            (self.roll as f32).to_radians(),
305        )
306    }
307
308    /// Convert to Bevy Transform (rotation only, no translation)
309    pub fn to_transform(&self) -> Transform {
310        Transform::from_rotation(self.to_quat())
311    }
312}
313
314impl Default for ObjectRotation {
315    fn default() -> Self {
316        Self::identity()
317    }
318}
319
/// Configuration for viewpoint generation matching TBP habitat sensor behavior.
/// Uses spherical coordinates to capture objects from multiple elevations.
///
/// The generated set is the cross product of `yaw_count` azimuth steps and
/// `pitch_angles_deg` elevations; see `generate_viewpoints`.
#[derive(Clone, Debug)]
pub struct ViewpointConfig {
    /// Distance from camera to object center (meters)
    pub radius: f32,
    /// Number of horizontal positions (yaw angles) around the object,
    /// evenly spaced over the full 360°
    pub yaw_count: usize,
    /// Elevation angles in degrees (pitch). Positive = above, negative = below.
    pub pitch_angles_deg: Vec<f32>,
}
331
332impl Default for ViewpointConfig {
333    fn default() -> Self {
334        Self {
335            radius: 0.5,
336            yaw_count: 8,
337            // Three elevations: below (-30°), level (0°), above (+30°)
338            // This matches TBP's look_up/look_down capability
339            pitch_angles_deg: vec![-30.0, 0.0, 30.0],
340        }
341    }
342}
343
344impl ViewpointConfig {
345    /// Total number of viewpoints this config will generate
346    pub fn viewpoint_count(&self) -> usize {
347        self.yaw_count * self.pitch_angles_deg.len()
348    }
349}
350
/// Full sensor configuration for capture sessions.
///
/// Registered as a Bevy `Resource` so capture systems can read it from the world.
#[derive(Clone, Debug, Resource)]
pub struct SensorConfig {
    /// Viewpoint configuration (camera positions)
    pub viewpoints: ViewpointConfig,
    /// Object rotations to capture (each rotation generates a full viewpoint set)
    pub object_rotations: Vec<ObjectRotation>,
    /// Output directory for captures
    pub output_dir: String,
    /// Filename pattern (use {view} for view index, {rot} for rotation index)
    pub filename_pattern: String,
}
363
364impl Default for SensorConfig {
365    fn default() -> Self {
366        Self {
367            viewpoints: ViewpointConfig::default(),
368            object_rotations: vec![ObjectRotation::identity()],
369            output_dir: ".".to_string(),
370            filename_pattern: "capture_{rot}_{view}.png".to_string(),
371        }
372    }
373}
374
375impl SensorConfig {
376    /// Create config for TBP benchmark comparison (3 rotations × 24 viewpoints = 72 captures)
377    pub fn tbp_benchmark() -> Self {
378        Self {
379            viewpoints: ViewpointConfig::default(),
380            object_rotations: ObjectRotation::tbp_benchmark_rotations(),
381            output_dir: ".".to_string(),
382            filename_pattern: "capture_{rot}_{view}.png".to_string(),
383        }
384    }
385
386    /// Create config for full TBP training (14 rotations × 24 viewpoints = 336 captures)
387    pub fn tbp_full_training() -> Self {
388        Self {
389            viewpoints: ViewpointConfig::default(),
390            object_rotations: ObjectRotation::tbp_known_orientations(),
391            output_dir: ".".to_string(),
392            filename_pattern: "capture_{rot}_{view}.png".to_string(),
393        }
394    }
395
396    /// Total number of captures this config will generate
397    pub fn total_captures(&self) -> usize {
398        self.viewpoints.viewpoint_count() * self.object_rotations.len()
399    }
400}
401
402/// Generate camera viewpoints using spherical coordinates.
403///
404/// Spherical coordinate system (matching TBP habitat sensor conventions):
405/// - Yaw: horizontal rotation around Y-axis (0° to 360°)
406/// - Pitch: elevation angle from horizontal plane (-90° to +90°)
407/// - Radius: distance from origin (object center)
408pub fn generate_viewpoints(config: &ViewpointConfig) -> Vec<Transform> {
409    let mut views = Vec::with_capacity(config.viewpoint_count());
410
411    for pitch_deg in &config.pitch_angles_deg {
412        let pitch = pitch_deg.to_radians();
413
414        for i in 0..config.yaw_count {
415            let yaw = (i as f32) * 2.0 * PI / (config.yaw_count as f32);
416
417            // Spherical to Cartesian conversion (Y-up coordinate system)
418            // x = r * cos(pitch) * sin(yaw)
419            // y = r * sin(pitch)
420            // z = r * cos(pitch) * cos(yaw)
421            let x = config.radius * pitch.cos() * yaw.sin();
422            let y = config.radius * pitch.sin();
423            let z = config.radius * pitch.cos() * yaw.cos();
424
425            let transform = Transform::from_xyz(x, y, z).looking_at(Vec3::ZERO, Vec3::Y);
426            views.push(transform);
427        }
428    }
429    views
430}
431
/// Marker component for the target object being captured.
/// Attach to the entity whose mesh should appear in renders.
#[derive(Component)]
pub struct CaptureTarget;
435
/// Marker component for the capture camera.
/// Attach to the camera entity that renders capture frames.
#[derive(Component)]
pub struct CaptureCamera;
439
440// ============================================================================
441// Headless Rendering API (NEW)
442// ============================================================================
443
/// Configuration for headless rendering.
///
/// Matches TBP habitat sensor defaults: 64x64 resolution with RGBD output.
/// See `RenderConfig::tbp_default`, `preview`, and `high_res` for presets.
#[derive(Clone, Debug, PartialEq)]
pub struct RenderConfig {
    /// Image width in pixels (default: 64)
    pub width: u32,
    /// Image height in pixels (default: 64)
    pub height: u32,
    /// Zoom factor affecting field of view (default: 1.0)
    /// Use >1 to zoom in (narrower FOV), <1 to zoom out (wider FOV)
    pub zoom: f32,
    /// Near clipping plane in meters (default: 0.01)
    pub near_plane: f32,
    /// Far clipping plane in meters (default: 10.0)
    pub far_plane: f32,
    /// Lighting configuration
    pub lighting: LightingConfig,
}
463
/// Lighting configuration for rendering.
///
/// Controls ambient light and point lights in the scene.
/// Presets: `default()` (balanced), `bright()`, `soft()`, `unlit()`.
#[derive(Clone, Debug, PartialEq)]
pub struct LightingConfig {
    /// Ambient light brightness (0.0 - 1.0, default: 0.3)
    pub ambient_brightness: f32,
    /// Key light intensity in lumens (default: 1500.0)
    pub key_light_intensity: f32,
    /// Key light position [x, y, z] (default: [4.0, 8.0, 4.0])
    pub key_light_position: [f32; 3],
    /// Fill light intensity in lumens (default: 500.0)
    pub fill_light_intensity: f32,
    /// Fill light position [x, y, z] (default: [-4.0, 2.0, -4.0])
    pub fill_light_position: [f32; 3],
    /// Enable shadows (default: false for performance)
    pub shadows_enabled: bool,
}
482
483impl Default for LightingConfig {
484    fn default() -> Self {
485        Self {
486            ambient_brightness: 0.3,
487            key_light_intensity: 1500.0,
488            key_light_position: [4.0, 8.0, 4.0],
489            fill_light_intensity: 500.0,
490            fill_light_position: [-4.0, 2.0, -4.0],
491            shadows_enabled: false,
492        }
493    }
494}
495
496impl LightingConfig {
497    /// Bright lighting for clear visibility
498    pub fn bright() -> Self {
499        Self {
500            ambient_brightness: 0.5,
501            key_light_intensity: 2000.0,
502            key_light_position: [4.0, 8.0, 4.0],
503            fill_light_intensity: 800.0,
504            fill_light_position: [-4.0, 2.0, -4.0],
505            shadows_enabled: false,
506        }
507    }
508
509    /// Soft lighting with minimal shadows
510    pub fn soft() -> Self {
511        Self {
512            ambient_brightness: 0.4,
513            key_light_intensity: 1000.0,
514            key_light_position: [3.0, 6.0, 3.0],
515            fill_light_intensity: 600.0,
516            fill_light_position: [-3.0, 3.0, -3.0],
517            shadows_enabled: false,
518        }
519    }
520
521    /// Unlit mode - ambient only, no point lights
522    pub fn unlit() -> Self {
523        Self {
524            ambient_brightness: 1.0,
525            key_light_intensity: 0.0,
526            key_light_position: [0.0, 0.0, 0.0],
527            fill_light_intensity: 0.0,
528            fill_light_position: [0.0, 0.0, 0.0],
529            shadows_enabled: false,
530        }
531    }
532}
533
impl Default for RenderConfig {
    // The default mirrors TBP's habitat sensor (64x64 RGBD).
    fn default() -> Self {
        Self::tbp_default()
    }
}
539
540impl RenderConfig {
541    /// TBP-compatible 64x64 RGBD sensor configuration.
542    ///
543    /// This matches the default resolution used in TBP's habitat sensor.
544    pub fn tbp_default() -> Self {
545        Self {
546            width: 64,
547            height: 64,
548            zoom: 1.0,
549            near_plane: 0.01,
550            far_plane: 10.0,
551            lighting: LightingConfig::default(),
552        }
553    }
554
555    /// Higher resolution configuration for debugging and visualization.
556    pub fn preview() -> Self {
557        Self {
558            width: 256,
559            height: 256,
560            zoom: 1.0,
561            near_plane: 0.01,
562            far_plane: 10.0,
563            lighting: LightingConfig::default(),
564        }
565    }
566
567    /// High resolution configuration for detailed captures.
568    pub fn high_res() -> Self {
569        Self {
570            width: 512,
571            height: 512,
572            zoom: 1.0,
573            near_plane: 0.01,
574            far_plane: 10.0,
575            lighting: LightingConfig::default(),
576        }
577    }
578
579    /// Calculate vertical field of view in radians based on zoom.
580    ///
581    /// Base FOV is 60 degrees, adjusted by zoom factor.
582    pub fn fov_radians(&self) -> f32 {
583        let base_fov_deg = 60.0_f32;
584        (base_fov_deg / self.zoom).to_radians()
585    }
586
587    /// Compute camera intrinsics for use with neocortx.
588    ///
589    /// Returns focal length and principal point based on resolution and FOV.
590    /// Uses f64 for TBP numerical precision compatibility.
591    pub fn intrinsics(&self) -> CameraIntrinsics {
592        let fov = self.fov_radians() as f64;
593        // focal_length = (height/2) / tan(fov/2)
594        let fy = (self.height as f64 / 2.0) / (fov / 2.0).tan();
595        let fx = fy; // Assuming square pixels
596
597        CameraIntrinsics {
598            focal_length: [fx, fy],
599            principal_point: [self.width as f64 / 2.0, self.height as f64 / 2.0],
600            image_size: [self.width, self.height],
601        }
602    }
603}
604
/// Camera intrinsic parameters for 3D reconstruction.
///
/// Compatible with neocortx's VisionIntrinsics format.
/// Uses f64 for TBP numerical precision compatibility.
/// Produced by `RenderConfig::intrinsics`.
#[derive(Clone, Debug, PartialEq)]
pub struct CameraIntrinsics {
    /// Focal length in pixels (fx, fy)
    pub focal_length: [f64; 2],
    /// Principal point (cx, cy) - typically image center
    pub principal_point: [f64; 2],
    /// Image dimensions (width, height)
    pub image_size: [u32; 2],
}
618
619impl CameraIntrinsics {
620    /// Project a 3D point to 2D pixel coordinates.
621    pub fn project(&self, point: Vec3) -> Option<[f64; 2]> {
622        if point.z <= 0.0 {
623            return None;
624        }
625        let x = (point.x as f64 / point.z as f64) * self.focal_length[0] + self.principal_point[0];
626        let y = (point.y as f64 / point.z as f64) * self.focal_length[1] + self.principal_point[1];
627        Some([x, y])
628    }
629
630    /// Unproject a 2D pixel to a 3D point at given depth.
631    pub fn unproject(&self, pixel: [f64; 2], depth: f64) -> [f64; 3] {
632        let x = (pixel[0] - self.principal_point[0]) / self.focal_length[0] * depth;
633        let y = (pixel[1] - self.principal_point[1]) / self.focal_length[1] * depth;
634        [x, y, depth]
635    }
636}
637
/// Output from headless rendering containing RGBA and depth data.
///
/// Buffers are row-major with `(0, 0)` at the first element; use the
/// accessor methods for bounds-checked reads.
#[derive(Clone, Debug)]
pub struct RenderOutput {
    /// RGBA pixel data in row-major order (width * height * 4 bytes)
    pub rgba: Vec<u8>,
    /// Depth values in meters, row-major order (width * height f64s)
    /// Values are linear depth from camera, not normalized.
    /// Uses f64 for TBP numerical precision compatibility.
    pub depth: Vec<f64>,
    /// Image width in pixels
    pub width: u32,
    /// Image height in pixels
    pub height: u32,
    /// Camera intrinsics used for this render
    pub intrinsics: CameraIntrinsics,
    /// Camera transform (world position and orientation)
    pub camera_transform: Transform,
    /// Object rotation applied during render
    pub object_rotation: ObjectRotation,
}
658
659impl RenderOutput {
660    /// Get RGBA pixel at (x, y). Returns None if out of bounds.
661    pub fn get_rgba(&self, x: u32, y: u32) -> Option<[u8; 4]> {
662        if x >= self.width || y >= self.height {
663            return None;
664        }
665        let idx = ((y * self.width + x) * 4) as usize;
666        Some([
667            self.rgba[idx],
668            self.rgba[idx + 1],
669            self.rgba[idx + 2],
670            self.rgba[idx + 3],
671        ])
672    }
673
674    /// Get depth value at (x, y) in meters. Returns None if out of bounds.
675    pub fn get_depth(&self, x: u32, y: u32) -> Option<f64> {
676        if x >= self.width || y >= self.height {
677            return None;
678        }
679        let idx = (y * self.width + x) as usize;
680        Some(self.depth[idx])
681    }
682
683    /// Get RGB pixel (without alpha) at (x, y).
684    pub fn get_rgb(&self, x: u32, y: u32) -> Option<[u8; 3]> {
685        self.get_rgba(x, y).map(|rgba| [rgba[0], rgba[1], rgba[2]])
686    }
687
688    /// Convert to neocortx-compatible image format: Vec<Vec<[u8; 3]>>
689    pub fn to_rgb_image(&self) -> Vec<Vec<[u8; 3]>> {
690        let mut image = Vec::with_capacity(self.height as usize);
691        for y in 0..self.height {
692            let mut row = Vec::with_capacity(self.width as usize);
693            for x in 0..self.width {
694                row.push(self.get_rgb(x, y).unwrap_or([0, 0, 0]));
695            }
696            image.push(row);
697        }
698        image
699    }
700
701    /// Convert depth to neocortx-compatible format: Vec<Vec<f64>>
702    pub fn to_depth_image(&self) -> Vec<Vec<f64>> {
703        let mut image = Vec::with_capacity(self.height as usize);
704        for y in 0..self.height {
705            let mut row = Vec::with_capacity(self.width as usize);
706            for x in 0..self.width {
707                row.push(self.get_depth(x, y).unwrap_or(0.0));
708            }
709            image.push(row);
710        }
711        image
712    }
713}
714
/// Errors that can occur during rendering and file operations.
///
/// Implements `std::error::Error` and `Display`; variants carry the path
/// and/or a human-readable reason where applicable.
#[derive(Debug, Clone)]
pub enum RenderError {
    /// Object mesh file not found
    MeshNotFound(String),
    /// Object texture file not found
    TextureNotFound(String),
    /// Generic file not found error
    FileNotFound { path: String, reason: String },
    /// File write failed
    FileWriteFailed { path: String, reason: String },
    /// Directory creation failed
    DirectoryCreationFailed { path: String, reason: String },
    /// Bevy rendering failed
    RenderFailed(String),
    /// Invalid configuration
    InvalidConfig(String),
    /// Invalid input parameters
    InvalidInput(String),
    /// JSON serialization/deserialization error
    SerializationError(String),
    /// Binary data parsing error
    DataParsingError(String),
    /// Render timeout
    RenderTimeout { duration_secs: u64 },
}
741
742impl std::fmt::Display for RenderError {
743    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
744        match self {
745            RenderError::MeshNotFound(path) => write!(f, "Mesh not found: {}", path),
746            RenderError::TextureNotFound(path) => write!(f, "Texture not found: {}", path),
747            RenderError::FileNotFound { path, reason } => {
748                write!(f, "File not found at {}: {}", path, reason)
749            }
750            RenderError::FileWriteFailed { path, reason } => {
751                write!(f, "Failed to write file {}: {}", path, reason)
752            }
753            RenderError::DirectoryCreationFailed { path, reason } => {
754                write!(f, "Failed to create directory {}: {}", path, reason)
755            }
756            RenderError::RenderFailed(msg) => write!(f, "Render failed: {}", msg),
757            RenderError::InvalidConfig(msg) => write!(f, "Invalid config: {}", msg),
758            RenderError::InvalidInput(msg) => write!(f, "Invalid input: {}", msg),
759            RenderError::SerializationError(msg) => write!(f, "Serialization error: {}", msg),
760            RenderError::DataParsingError(msg) => write!(f, "Data parsing error: {}", msg),
761            RenderError::RenderTimeout { duration_secs } => {
762                write!(f, "Render timeout after {} seconds", duration_secs)
763            }
764        }
765    }
766}
767
// RenderError carries no wrapped source error, so the trait's default
// method implementations suffice.
impl std::error::Error for RenderError {}
769
/// Render a YCB object to an in-memory buffer.
///
/// This is the primary API for headless rendering. It spawns a minimal Bevy app,
/// renders a single frame, extracts the RGBA and depth data, and shuts down.
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory (e.g., "/tmp/ycb/003_cracker_box")
/// * `camera_transform` - Camera position and orientation (use `generate_viewpoints`)
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration (resolution, depth range, etc.)
///
/// # Errors
/// Returns a `RenderError` from the underlying headless renderer, e.g. when
/// the object's mesh or texture cannot be found or the render fails.
///
/// # Example
/// ```ignore
/// use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
/// use std::path::Path;
///
/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
/// let output = render_to_buffer(
///     Path::new("/tmp/ycb/003_cracker_box"),
///     &viewpoints[0],
///     &ObjectRotation::identity(),
///     &RenderConfig::tbp_default(),
/// )?;
/// ```
pub fn render_to_buffer(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
) -> Result<RenderOutput, RenderError> {
    // Use the actual Bevy headless renderer
    render::render_headless(object_dir, camera_transform, object_rotation, config)
}
803
804/// Render all viewpoints and rotations for a YCB object.
805///
806/// Convenience function that renders all combinations of viewpoints and rotations.
807///
808/// # Arguments
809/// * `object_dir` - Path to YCB object directory
810/// * `viewpoint_config` - Viewpoint configuration (camera positions)
811/// * `rotations` - Object rotations to render
812/// * `render_config` - Render configuration
813///
814/// # Returns
815/// Vector of RenderOutput, one per viewpoint × rotation combination.
816pub fn render_all_viewpoints(
817    object_dir: &Path,
818    viewpoint_config: &ViewpointConfig,
819    rotations: &[ObjectRotation],
820    render_config: &RenderConfig,
821) -> Result<Vec<RenderOutput>, RenderError> {
822    let viewpoints = generate_viewpoints(viewpoint_config);
823    let mut outputs = Vec::with_capacity(viewpoints.len() * rotations.len());
824
825    for rotation in rotations {
826        for viewpoint in &viewpoints {
827            let output = render_to_buffer(object_dir, viewpoint, rotation, render_config)?;
828            outputs.push(output);
829        }
830    }
831
832    Ok(outputs)
833}
834
835/// Render with model caching support for efficient multi-viewpoint rendering.
836///
837/// This function tracks which models have been loaded and provides performance
838/// insights. The current batch API is a queue-oriented wrapper, not a persistent
839/// renderer, so this function and `render_to_buffer()` use the same underlying
840/// headless app-per-render path today.
841///
842/// # Arguments
843/// * `object_dir` - Path to YCB object directory
844/// * `camera_transform` - Camera position and orientation
845/// * `object_rotation` - Rotation to apply to the object
846/// * `config` - Render configuration
847/// * `cache` - Model cache to track loaded assets
848///
849/// # Returns
850/// RenderOutput with rendered RGBA and depth data
851///
852/// # Example
853/// ```ignore
854/// use bevy_sensor::{render_to_buffer_cached, cache::ModelCache, RenderConfig, ObjectRotation};
855/// use std::path::PathBuf;
856///
857/// let mut cache = ModelCache::new();
858/// let object_dir = PathBuf::from("/tmp/ycb/003_cracker_box");
859/// let config = RenderConfig::tbp_default();
860/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
861///
862/// // First render: loads from disk and caches
863/// let output1 = render_to_buffer_cached(
864///     &object_dir,
865///     &viewpoints[0],
866///     &ObjectRotation::identity(),
867///     &config,
868///     &mut cache,
869/// )?;
870///
871/// // Subsequent renders: tracks in cache
872/// for viewpoint in &viewpoints[1..] {
873///     let output = render_to_buffer_cached(
874///         &object_dir,
875///         viewpoint,
876///         &ObjectRotation::identity(),
877///         &config,
878///         &mut cache,
879///     )?;
880/// }
881/// ```
882///
883/// # Note
884/// This function uses the same rendering engine as `render_to_buffer()`. The current
885/// batch API preserves ordering and output structure but does not yet reuse a live
886/// Bevy renderer across calls.
887///
888/// ```ignore
889/// use bevy_sensor::{render_batch, batch::BatchRenderRequest, BatchRenderConfig, RenderConfig, ObjectRotation};
890///
891/// let requests: Vec<_> = viewpoints.iter().map(|vp| {
892///     BatchRenderRequest {
893///         object_dir: object_dir.clone(),
894///         viewpoint: *vp,
895///         object_rotation: ObjectRotation::identity(),
896///         render_config: RenderConfig::tbp_default(),
897///     }
898/// }).collect();
899///
900/// let outputs = render_batch(requests, &BatchRenderConfig::default())?;
901/// ```
902pub fn render_to_buffer_cached(
903    object_dir: &Path,
904    camera_transform: &Transform,
905    object_rotation: &ObjectRotation,
906    config: &RenderConfig,
907    cache: &mut cache::ModelCache,
908) -> Result<RenderOutput, RenderError> {
909    let mesh_path = object_dir.join("google_16k/textured.obj");
910    let texture_path = object_dir.join("google_16k/texture_map.png");
911
912    // Track in cache
913    cache.cache_scene(mesh_path.clone());
914    cache.cache_texture(texture_path.clone());
915
916    // Render using standard pipeline
917    render::render_headless(object_dir, camera_transform, object_rotation, config)
918}
919
920/// Render directly to files (for subprocess mode).
921///
922/// This function is designed for subprocess rendering where the process will exit
923/// after rendering. It saves RGBA and depth data directly to the specified files
924/// before the process terminates.
925///
926/// # Arguments
927/// * `object_dir` - Path to YCB object directory
928/// * `camera_transform` - Camera position and orientation
929/// * `object_rotation` - Rotation to apply to the object
930/// * `config` - Render configuration
931/// * `rgba_path` - Output path for RGBA PNG
932/// * `depth_path` - Output path for depth data (raw f32 bytes)
933///
934/// # Note
935/// This function may call `std::process::exit(0)` and not return.
936pub fn render_to_files(
937    object_dir: &Path,
938    camera_transform: &Transform,
939    object_rotation: &ObjectRotation,
940    config: &RenderConfig,
941    rgba_path: &Path,
942    depth_path: &Path,
943) -> Result<(), RenderError> {
944    render::render_to_files(
945        object_dir,
946        camera_transform,
947        object_rotation,
948        config,
949        rgba_path,
950        depth_path,
951    )
952}
953
954// Re-export batch types for convenient API access
955pub use batch::{
956    BatchRenderConfig, BatchRenderError, BatchRenderOutput, BatchRenderRequest, BatchRenderer,
957    BatchState, RenderStatus,
958};
959
960/// Create a new batch renderer helper for multi-viewpoint workflows.
961///
962/// The current implementation stores queued requests and executes them sequentially via
963/// `render_to_buffer()`. It does not yet keep a persistent Bevy app alive across renders.
964///
965/// # Arguments
966/// * `config` - Batch rendering configuration
967///
968/// # Returns
969/// A BatchRenderer instance ready to queue render requests
970///
971/// # Example
972/// ```ignore
973/// use bevy_sensor::{create_batch_renderer, queue_render_request, render_next_in_batch, BatchRenderConfig};
974///
975/// let mut renderer = create_batch_renderer(&BatchRenderConfig::default())?;
976/// ```
977pub fn create_batch_renderer(config: &BatchRenderConfig) -> Result<BatchRenderer, RenderError> {
978    Ok(BatchRenderer::new(config.clone()))
979}
980
981/// Queue a render request for batch processing.
982///
983/// Adds a render request to the batch queue. Requests are processed in order
984/// when you call render_next_in_batch().
985///
986/// # Arguments
987/// * `renderer` - The batch renderer instance
988/// * `request` - The render request
989///
990/// # Returns
991/// Ok if queued successfully, Err if queue is full
992///
993/// # Example
994/// ```ignore
995/// use bevy_sensor::{batch::BatchRenderRequest, RenderConfig, ObjectRotation};
996/// use std::path::PathBuf;
997///
998/// queue_render_request(&mut renderer, BatchRenderRequest {
999///     object_dir: PathBuf::from("/tmp/ycb/003_cracker_box"),
1000///     viewpoint: camera_transform,
1001///     object_rotation: ObjectRotation::identity(),
1002///     render_config: RenderConfig::tbp_default(),
1003/// })?;
1004/// ```
1005pub fn queue_render_request(
1006    renderer: &mut BatchRenderer,
1007    request: BatchRenderRequest,
1008) -> Result<(), RenderError> {
1009    renderer
1010        .queue_request(request)
1011        .map_err(|e| RenderError::RenderFailed(e.to_string()))
1012}
1013
1014/// Process and execute the next render in the batch queue.
1015///
1016/// Executes a single queued request via `render_to_buffer()`. Returns None when the queue
1017/// is empty. Use this in a loop to process all queued renders in a stable order.
1018///
1019/// # Arguments
1020/// * `renderer` - The batch renderer instance
1021/// * `timeout_ms` - Timeout in milliseconds for this render
1022///
1023/// # Returns
1024/// Some(output) if a render completed, None if queue is empty
1025///
1026/// # Example
1027/// ```ignore
1028/// loop {
1029///     match render_next_in_batch(&mut renderer, 500)? {
1030///         Some(output) => println!("Render complete: {:?}", output.status),
1031///         None => break, // All renders done
1032///     }
1033/// }
1034/// ```
1035pub fn render_next_in_batch(
1036    renderer: &mut BatchRenderer,
1037    _timeout_ms: u32,
1038) -> Result<Option<BatchRenderOutput>, RenderError> {
1039    if let Some(request) = renderer.pending_requests.pop_front() {
1040        let output = render_to_buffer(
1041            &request.object_dir,
1042            &request.viewpoint,
1043            &request.object_rotation,
1044            &request.render_config,
1045        )?;
1046        let batch_output = BatchRenderOutput::from_render_output(request, output);
1047        renderer.completed_results.push(batch_output.clone());
1048        renderer.renders_processed += 1;
1049        Ok(Some(batch_output))
1050    } else {
1051        Ok(None)
1052    }
1053}
1054
1055/// Render multiple requests in batch (convenience function).
1056///
1057/// Queues all requests and executes them in batch, returning all results.
1058/// Simpler than manage queue + loop for one-off batches.
1059///
1060/// # Arguments
1061/// * `requests` - Vector of render requests
1062/// * `config` - Batch rendering configuration
1063///
1064/// # Returns
1065/// Vector of BatchRenderOutput results in same order as input
1066///
1067/// # Example
1068/// ```ignore
1069/// use bevy_sensor::{render_batch, batch::BatchRenderRequest, BatchRenderConfig};
1070///
1071/// let results = render_batch(requests, &BatchRenderConfig::default())?;
1072/// ```
1073pub fn render_batch(
1074    requests: Vec<BatchRenderRequest>,
1075    config: &BatchRenderConfig,
1076) -> Result<Vec<BatchRenderOutput>, RenderError> {
1077    if requests.is_empty() {
1078        return Ok(Vec::new());
1079    }
1080
1081    if requests.len() > 1 && requests_share_batch_context(&requests) {
1082        let first_request = requests[0].clone();
1083        let viewpoints: Vec<Transform> = requests.iter().map(|request| request.viewpoint).collect();
1084        let outputs = render::render_headless_sequence(
1085            &first_request.object_dir,
1086            &viewpoints,
1087            &first_request.object_rotation,
1088            &first_request.render_config,
1089        )?;
1090
1091        return Ok(requests
1092            .into_iter()
1093            .zip(outputs)
1094            .map(|(request, output)| BatchRenderOutput::from_render_output(request, output))
1095            .collect());
1096    }
1097
1098    let mut renderer = create_batch_renderer(config)?;
1099
1100    // Queue all requests
1101    for request in requests {
1102        queue_render_request(&mut renderer, request)?;
1103    }
1104
1105    // Execute all and collect results
1106    let mut results = Vec::new();
1107    while let Some(output) = render_next_in_batch(&mut renderer, config.frame_timeout_ms)? {
1108        results.push(output);
1109    }
1110
1111    Ok(results)
1112}
1113
1114fn requests_share_batch_context(requests: &[BatchRenderRequest]) -> bool {
1115    let Some(first) = requests.first() else {
1116        return true;
1117    };
1118
1119    requests.iter().all(|request| {
1120        request.object_dir == first.object_dir
1121            && request.object_rotation == first.object_rotation
1122            && request.render_config == first.render_config
1123    })
1124}
1125
1126// Re-export bevy types that consumers will need
1127pub use bevy::prelude::{Quat, Transform, Vec3};
1128
#[cfg(test)]
mod tests {
    // Unit tests for the pure-computation parts of the crate: rotations,
    // viewpoint generation, sensor/render configs, camera intrinsics, render
    // output accessors, and batch-context detection. No GPU or rendering is
    // exercised here.
    use super::*;

    // =========================================================================
    // Object Rotation & Viewpoint Tests
    // =========================================================================

    #[test]
    fn test_object_rotation_identity() {
        let rot = ObjectRotation::identity();
        assert_eq!(rot.pitch, 0.0);
        assert_eq!(rot.yaw, 0.0);
        assert_eq!(rot.roll, 0.0);
    }

    #[test]
    fn test_object_rotation_from_array() {
        // Array order is [pitch, yaw, roll].
        let rot = ObjectRotation::from_array([10.0, 20.0, 30.0]);
        assert_eq!(rot.pitch, 10.0);
        assert_eq!(rot.yaw, 20.0);
        assert_eq!(rot.roll, 30.0);
    }

    #[test]
    fn test_requests_share_batch_context_for_homogeneous_batch() {
        // Same object/rotation/config with different viewpoints still counts
        // as a shared batch context.
        let config = RenderConfig::tbp_default();
        let request = BatchRenderRequest {
            object_dir: "/tmp/ycb/003_cracker_box".into(),
            viewpoint: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
            render_config: config.clone(),
        };

        assert!(requests_share_batch_context(&[
            request.clone(),
            BatchRenderRequest {
                viewpoint: Transform::from_xyz(1.0, 0.0, 0.0),
                ..request
            },
        ]));
    }

    #[test]
    fn test_requests_share_batch_context_rejects_mixed_objects() {
        // A differing object_dir must disqualify the fast shared-context path.
        let config = RenderConfig::tbp_default();
        let request = BatchRenderRequest {
            object_dir: "/tmp/ycb/003_cracker_box".into(),
            viewpoint: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
            render_config: config.clone(),
        };

        assert!(!requests_share_batch_context(&[
            request.clone(),
            BatchRenderRequest {
                object_dir: "/tmp/ycb/005_tomato_soup_can".into(),
                ..request
            },
        ]));
    }

    #[test]
    fn test_tbp_benchmark_rotations() {
        let rotations = ObjectRotation::tbp_benchmark_rotations();
        assert_eq!(rotations.len(), 3);
        assert_eq!(rotations[0], ObjectRotation::from_array([0.0, 0.0, 0.0]));
        assert_eq!(rotations[1], ObjectRotation::from_array([0.0, 90.0, 0.0]));
        assert_eq!(rotations[2], ObjectRotation::from_array([0.0, 180.0, 0.0]));
    }

    #[test]
    fn test_tbp_known_orientations_count() {
        let orientations = ObjectRotation::tbp_known_orientations();
        assert_eq!(orientations.len(), 14);
    }

    #[test]
    fn test_rotation_to_quat() {
        let rot = ObjectRotation::identity();
        let quat = rot.to_quat();
        // Identity quaternion should be approximately (1, 0, 0, 0)
        assert!((quat.w - 1.0).abs() < 0.001);
        assert!(quat.x.abs() < 0.001);
        assert!(quat.y.abs() < 0.001);
        assert!(quat.z.abs() < 0.001);
    }

    #[test]
    fn test_rotation_90_yaw() {
        let rot = ObjectRotation::new(0.0, 90.0, 0.0);
        let quat = rot.to_quat();
        // 90° Y rotation: w ≈ 0.707, y ≈ 0.707
        assert!((quat.w - 0.707).abs() < 0.01);
        assert!((quat.y - 0.707).abs() < 0.01);
    }

    #[test]
    fn test_viewpoint_config_default() {
        let config = ViewpointConfig::default();
        assert_eq!(config.radius, 0.5);
        assert_eq!(config.yaw_count, 8);
        assert_eq!(config.pitch_angles_deg.len(), 3);
    }

    #[test]
    fn test_viewpoint_count() {
        let config = ViewpointConfig::default();
        assert_eq!(config.viewpoint_count(), 24); // 8 × 3
    }

    #[test]
    fn test_generate_viewpoints_count() {
        // generate_viewpoints() must produce exactly viewpoint_count() entries.
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 24);
    }

    #[test]
    fn test_viewpoints_spherical_radius() {
        // Every generated camera position must lie on the configured sphere.
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let actual_radius = transform.translation.length();
            assert!(
                (actual_radius - config.radius).abs() < 0.001,
                "Viewpoint {} has incorrect radius: {} (expected {})",
                i,
                actual_radius,
                config.radius
            );
        }
    }

    #[test]
    fn test_viewpoints_looking_at_origin() {
        // Every camera's forward vector must point (nearly) at the origin,
        // where the object is placed.
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let forward = transform.forward();
            let to_origin = (Vec3::ZERO - transform.translation).normalize();
            let dot = forward.dot(to_origin);
            assert!(
                dot > 0.99,
                "Viewpoint {} not looking at origin, dot product: {}",
                i,
                dot
            );
        }
    }

    #[test]
    fn test_sensor_config_default() {
        let config = SensorConfig::default();
        assert_eq!(config.object_rotations.len(), 1);
        assert_eq!(config.total_captures(), 24);
    }

    #[test]
    fn test_sensor_config_tbp_benchmark() {
        let config = SensorConfig::tbp_benchmark();
        assert_eq!(config.object_rotations.len(), 3);
        assert_eq!(config.total_captures(), 72); // 3 rotations × 24 viewpoints
    }

    #[test]
    fn test_sensor_config_tbp_full() {
        let config = SensorConfig::tbp_full_training();
        assert_eq!(config.object_rotations.len(), 14);
        assert_eq!(config.total_captures(), 336); // 14 rotations × 24 viewpoints
    }

    #[test]
    fn test_ycb_representative_objects() {
        // Verify representative objects are defined
        assert_eq!(crate::ycb::REPRESENTATIVE_OBJECTS.len(), 3);
        assert!(crate::ycb::REPRESENTATIVE_OBJECTS.contains(&"003_cracker_box"));
    }

    #[test]
    #[allow(deprecated)]
    fn test_ycb_ten_objects() {
        // Verify ten objects subset is defined
        assert_eq!(crate::ycb::TEN_OBJECTS.len(), 10);
    }

    #[test]
    fn test_ycb_object_mesh_path() {
        let path = crate::ycb::object_mesh_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path,
            std::path::Path::new("/tmp/ycb")
                .join("003_cracker_box")
                .join("google_16k")
                .join("textured.obj")
        );
    }

    #[test]
    fn test_ycb_object_texture_path() {
        let path = crate::ycb::object_texture_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path,
            std::path::Path::new("/tmp/ycb")
                .join("003_cracker_box")
                .join("google_16k")
                .join("texture_map.png")
        );
    }

    // =========================================================================
    // Headless Rendering API Tests
    // =========================================================================

    #[test]
    fn test_render_config_tbp_default() {
        let config = RenderConfig::tbp_default();
        assert_eq!(config.width, 64);
        assert_eq!(config.height, 64);
        assert_eq!(config.zoom, 1.0);
        assert_eq!(config.near_plane, 0.01);
        assert_eq!(config.far_plane, 10.0);
    }

    #[test]
    fn test_render_config_preview() {
        let config = RenderConfig::preview();
        assert_eq!(config.width, 256);
        assert_eq!(config.height, 256);
    }

    #[test]
    fn test_render_config_default_is_tbp() {
        // Default::default() is expected to match the TBP preset dimensions.
        let default = RenderConfig::default();
        let tbp = RenderConfig::tbp_default();
        assert_eq!(default.width, tbp.width);
        assert_eq!(default.height, tbp.height);
    }

    #[test]
    fn test_render_config_fov() {
        let config = RenderConfig::tbp_default();
        let fov = config.fov_radians();
        // Base FOV is 60 degrees = ~1.047 radians
        assert!((fov - 1.047).abs() < 0.01);

        // Zoom in should reduce FOV
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..config
        };
        assert!(zoomed.fov_radians() < fov);
    }

    #[test]
    fn test_render_config_intrinsics() {
        let config = RenderConfig::tbp_default();
        let intrinsics = config.intrinsics();

        assert_eq!(intrinsics.image_size, [64, 64]);
        assert_eq!(intrinsics.principal_point, [32.0, 32.0]);
        // Focal length should be positive and reasonable
        assert!(intrinsics.focal_length[0] > 0.0);
        assert!(intrinsics.focal_length[1] > 0.0);
        // For 64x64 with 60° FOV, focal length ≈ 55.4 pixels
        assert!((intrinsics.focal_length[0] - 55.4).abs() < 1.0);
    }

    #[test]
    fn test_camera_intrinsics_project() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Point at origin of camera frame projects to principal point
        let center = intrinsics.project(Vec3::new(0.0, 0.0, 1.0));
        assert!(center.is_some());
        let [x, y] = center.unwrap();
        assert!((x - 32.0).abs() < 0.001);
        assert!((y - 32.0).abs() < 0.001);

        // Point behind camera returns None
        let behind = intrinsics.project(Vec3::new(0.0, 0.0, -1.0));
        assert!(behind.is_none());
    }

    #[test]
    fn test_camera_intrinsics_unproject() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Unproject principal point at depth 1.0
        let point = intrinsics.unproject([32.0, 32.0], 1.0);
        assert!((point[0]).abs() < 0.001); // x
        assert!((point[1]).abs() < 0.001); // y
        assert!((point[2] - 1.0).abs() < 0.001); // z
    }

    #[test]
    fn test_render_output_get_rgba() {
        // 2x2 image, row-major, 4 bytes per pixel (RGBA).
        let output = RenderOutput {
            rgba: vec![
                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
            ],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        // Top-left: red
        assert_eq!(output.get_rgba(0, 0), Some([255, 0, 0, 255]));
        // Top-right: green
        assert_eq!(output.get_rgba(1, 0), Some([0, 255, 0, 255]));
        // Bottom-left: blue
        assert_eq!(output.get_rgba(0, 1), Some([0, 0, 255, 255]));
        // Bottom-right: white
        assert_eq!(output.get_rgba(1, 1), Some([255, 255, 255, 255]));
        // Out of bounds
        assert_eq!(output.get_rgba(2, 0), None);
    }

    #[test]
    fn test_render_output_get_depth() {
        let output = RenderOutput {
            rgba: vec![0u8; 16],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        assert_eq!(output.get_depth(0, 0), Some(1.0));
        assert_eq!(output.get_depth(1, 0), Some(2.0));
        assert_eq!(output.get_depth(0, 1), Some(3.0));
        assert_eq!(output.get_depth(1, 1), Some(4.0));
        assert_eq!(output.get_depth(2, 0), None);
    }

    #[test]
    fn test_render_output_to_rgb_image() {
        let output = RenderOutput {
            rgba: vec![
                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
            ],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        let image = output.to_rgb_image();
        assert_eq!(image.len(), 2); // 2 rows
        assert_eq!(image[0].len(), 2); // 2 columns
        assert_eq!(image[0][0], [255, 0, 0]); // Red
        assert_eq!(image[0][1], [0, 255, 0]); // Green
        assert_eq!(image[1][0], [0, 0, 255]); // Blue
        assert_eq!(image[1][1], [255, 255, 255]); // White
    }

    #[test]
    fn test_render_output_to_depth_image() {
        let output = RenderOutput {
            rgba: vec![0u8; 16],
            depth: vec![1.0, 2.0, 3.0, 4.0],
            width: 2,
            height: 2,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        let depth_image = output.to_depth_image();
        assert_eq!(depth_image.len(), 2);
        assert_eq!(depth_image[0], vec![1.0, 2.0]);
        assert_eq!(depth_image[1], vec![3.0, 4.0]);
    }

    #[test]
    fn test_render_error_display() {
        let err = RenderError::MeshNotFound("/path/to/mesh.obj".to_string());
        assert!(err.to_string().contains("Mesh not found"));
        assert!(err.to_string().contains("/path/to/mesh.obj"));
    }

    // =========================================================================
    // Edge Case Tests
    // =========================================================================

    #[test]
    fn test_object_rotation_extreme_angles() {
        // Test angles beyond 360 degrees
        let rot = ObjectRotation::new(450.0, -720.0, 1080.0);
        let quat = rot.to_quat();
        // Quaternion should still be valid (normalized)
        assert!((quat.length() - 1.0).abs() < 0.001);
    }

    #[test]
    fn test_object_rotation_to_transform() {
        let rot = ObjectRotation::new(45.0, 90.0, 0.0);
        let transform = rot.to_transform();
        // Transform should have no translation
        assert_eq!(transform.translation, Vec3::ZERO);
        // Should have rotation
        assert!(transform.rotation != Quat::IDENTITY);
    }

    #[test]
    fn test_viewpoint_config_single_viewpoint() {
        let config = ViewpointConfig {
            radius: 1.0,
            yaw_count: 1,
            pitch_angles_deg: vec![0.0],
        };
        assert_eq!(config.viewpoint_count(), 1);
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 1);
        // Single viewpoint at yaw=0, pitch=0 should be at (0, 0, radius)
        let pos = viewpoints[0].translation;
        assert!((pos.x).abs() < 0.001);
        assert!((pos.y).abs() < 0.001);
        assert!((pos.z - 1.0).abs() < 0.001);
    }

    #[test]
    fn test_viewpoint_radius_scaling() {
        let config1 = ViewpointConfig {
            radius: 0.5,
            yaw_count: 4,
            pitch_angles_deg: vec![0.0],
        };
        let config2 = ViewpointConfig {
            radius: 2.0,
            yaw_count: 4,
            pitch_angles_deg: vec![0.0],
        };

        let v1 = generate_viewpoints(&config1);
        let v2 = generate_viewpoints(&config2);

        // Viewpoints should scale proportionally
        for (vp1, vp2) in v1.iter().zip(v2.iter()) {
            let ratio = vp2.translation.length() / vp1.translation.length();
            assert!((ratio - 4.0).abs() < 0.01); // 2.0 / 0.5 = 4.0
        }
    }

    #[test]
    fn test_camera_intrinsics_project_at_z_zero() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Point at z=0 should return None (division by zero protection)
        let result = intrinsics.project(Vec3::new(1.0, 1.0, 0.0));
        assert!(result.is_none());
    }

    #[test]
    fn test_camera_intrinsics_roundtrip() {
        let intrinsics = CameraIntrinsics {
            focal_length: [100.0, 100.0],
            principal_point: [32.0, 32.0],
            image_size: [64, 64],
        };

        // Project a 3D point
        let original = Vec3::new(0.5, -0.3, 2.0);
        let projected = intrinsics.project(original).unwrap();

        // Unproject back with the same depth (convert f32 to f64)
        let unprojected = intrinsics.unproject(projected, original.z as f64);

        // Should get back approximately the same point
        assert!((unprojected[0] - original.x as f64).abs() < 0.001); // x
        assert!((unprojected[1] - original.y as f64).abs() < 0.001); // y
        assert!((unprojected[2] - original.z as f64).abs() < 0.001); // z
    }

    #[test]
    fn test_render_output_empty() {
        // Zero-sized output: accessors must not panic, just return None/empty.
        let output = RenderOutput {
            rgba: vec![],
            depth: vec![],
            width: 0,
            height: 0,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        // Should handle empty gracefully
        assert_eq!(output.get_rgba(0, 0), None);
        assert_eq!(output.get_depth(0, 0), None);
        assert!(output.to_rgb_image().is_empty());
        assert!(output.to_depth_image().is_empty());
    }

    #[test]
    fn test_render_output_1x1() {
        // Minimum non-empty output: single pixel, single depth value.
        let output = RenderOutput {
            rgba: vec![128, 64, 32, 255],
            depth: vec![0.5],
            width: 1,
            height: 1,
            intrinsics: RenderConfig::tbp_default().intrinsics(),
            camera_transform: Transform::IDENTITY,
            object_rotation: ObjectRotation::identity(),
        };

        assert_eq!(output.get_rgba(0, 0), Some([128, 64, 32, 255]));
        assert_eq!(output.get_depth(0, 0), Some(0.5));
        assert_eq!(output.get_rgb(0, 0), Some([128, 64, 32]));

        let rgb_img = output.to_rgb_image();
        assert_eq!(rgb_img.len(), 1);
        assert_eq!(rgb_img[0].len(), 1);
        assert_eq!(rgb_img[0][0], [128, 64, 32]);
    }

    #[test]
    fn test_render_config_high_res() {
        let config = RenderConfig::high_res();
        assert_eq!(config.width, 512);
        assert_eq!(config.height, 512);

        let intrinsics = config.intrinsics();
        assert_eq!(intrinsics.image_size, [512, 512]);
        assert_eq!(intrinsics.principal_point, [256.0, 256.0]);
    }

    #[test]
    fn test_render_config_zoom_affects_fov() {
        let base = RenderConfig::tbp_default();
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..base.clone()
        };

        // Higher zoom = lower FOV
        assert!(zoomed.fov_radians() < base.fov_radians());
        // Specifically, 2x zoom = half FOV
        assert!((zoomed.fov_radians() - base.fov_radians() / 2.0).abs() < 0.01);
    }

    #[test]
    fn test_render_config_zoom_affects_intrinsics() {
        let base = RenderConfig::tbp_default();
        let zoomed = RenderConfig {
            zoom: 2.0,
            ..base.clone()
        };

        // Higher zoom = higher focal length
        let base_intrinsics = base.intrinsics();
        let zoomed_intrinsics = zoomed.intrinsics();

        assert!(zoomed_intrinsics.focal_length[0] > base_intrinsics.focal_length[0]);
    }

    #[test]
    fn test_lighting_config_variants() {
        let default = LightingConfig::default();
        let bright = LightingConfig::bright();
        let soft = LightingConfig::soft();
        let unlit = LightingConfig::unlit();

        // Bright should have higher intensity than default
        assert!(bright.key_light_intensity > default.key_light_intensity);

        // Unlit should have no point lights
        assert_eq!(unlit.key_light_intensity, 0.0);
        assert_eq!(unlit.fill_light_intensity, 0.0);
        assert_eq!(unlit.ambient_brightness, 1.0);

        // Soft should have lower intensity
        assert!(soft.key_light_intensity < default.key_light_intensity);
    }

    #[test]
    fn test_all_render_error_variants() {
        let errors = vec![
            RenderError::MeshNotFound("mesh.obj".to_string()),
            RenderError::TextureNotFound("texture.png".to_string()),
            RenderError::RenderFailed("GPU error".to_string()),
            RenderError::InvalidConfig("bad config".to_string()),
        ];

        for err in errors {
            // All variants should have Display impl
            let msg = err.to_string();
            assert!(!msg.is_empty());
        }
    }

    #[test]
    fn test_tbp_known_orientations_unique() {
        let orientations = ObjectRotation::tbp_known_orientations();

        // All 14 orientations should produce unique quaternions
        let quats: Vec<Quat> = orientations.iter().map(|r| r.to_quat()).collect();

        for (i, q1) in quats.iter().enumerate() {
            for (j, q2) in quats.iter().enumerate() {
                if i != j {
                    // Quaternions should be different (accounting for q == -q equivalence)
                    let dot = q1.dot(*q2).abs();
                    assert!(
                        dot < 0.999,
                        "Orientations {} and {} produce same quaternion",
                        i,
                        j
                    );
                }
            }
        }
    }
}