// bevy_sensor/lib.rs

1//! bevy-sensor: Multi-view rendering for YCB object dataset
2//!
3//! This library provides Bevy-based rendering of 3D objects from multiple viewpoints,
4//! designed to match TBP (Thousand Brains Project) habitat sensor conventions for
5//! use in neocortx sensorimotor learning experiments.
6//!
7//! # Headless Rendering (NEW)
8//!
9//! Render directly to memory buffers for use in sensorimotor learning:
10//!
11//! ```ignore
12//! use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
13//! use std::path::Path;
14//!
15//! let config = RenderConfig::tbp_default(); // 64x64, RGBD
16//! let viewpoint = bevy_sensor::generate_viewpoints(&ViewpointConfig::default())[0];
17//! let rotation = ObjectRotation::identity();
18//!
19//! let output = render_to_buffer(
20//!     Path::new("/tmp/ycb/003_cracker_box"),
21//!     &viewpoint,
22//!     &rotation,
23//!     &config,
24//! )?;
25//!
26//! // output.rgba: Vec<u8> - RGBA pixels (64*64*4 bytes)
//! // output.depth: Vec<f64> - Depth values (64*64 floats)
28//! ```
29//!
30//! # File-based Capture (Legacy)
31//!
32//! ```ignore
33//! use bevy_sensor::{SensorConfig, ViewpointConfig, ObjectRotation};
34//!
35//! let config = SensorConfig {
36//!     viewpoints: ViewpointConfig::default(),
37//!     object_rotations: ObjectRotation::tbp_benchmark_rotations(),
38//!     ..Default::default()
39//! };
40//! ```
41//!
42//! # YCB Dataset
43//!
44//! Download YCB models programmatically:
45//!
46//! ```ignore
47//! use bevy_sensor::ycb::{download_models, Subset};
48//!
49//! // Download representative subset (3 objects)
50//! download_models("/tmp/ycb", Subset::Representative).await?;
51//! ```
52
53use bevy::prelude::*;
54use std::f32::consts::PI;
55use std::path::Path;
56
57// Headless rendering implementation
58// Full GPU rendering requires a display - see render module for details
59mod render;
60
61// Test fixtures for pre-rendered images (CI/CD support)
62pub mod fixtures;
63
64// Re-export ycbust types for convenience
65pub use ycbust::{self, DownloadOptions, Subset as YcbSubset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
66
67/// YCB dataset utilities
68pub mod ycb {
69    pub use ycbust::{download_ycb, DownloadOptions, Subset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
70
71    use std::path::Path;
72
73    /// Download YCB models to the specified directory.
74    ///
75    /// # Arguments
76    /// * `output_dir` - Directory to download models to
77    /// * `subset` - Which subset of objects to download
78    ///
79    /// # Example
80    /// ```ignore
81    /// use bevy_sensor::ycb::{download_models, Subset};
82    ///
83    /// download_models("/tmp/ycb", Subset::Representative).await?;
84    /// ```
85    pub async fn download_models<P: AsRef<Path>>(
86        output_dir: P,
87        subset: Subset,
88    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
89        let options = DownloadOptions {
90            overwrite: false,
91            full: false,
92            show_progress: true,
93            delete_archives: true,
94        };
95        download_ycb(subset, output_dir.as_ref(), options).await?;
96        Ok(())
97    }
98
99    /// Download YCB models with custom options.
100    pub async fn download_models_with_options<P: AsRef<Path>>(
101        output_dir: P,
102        subset: Subset,
103        options: DownloadOptions,
104    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
105        download_ycb(subset, output_dir.as_ref(), options).await?;
106        Ok(())
107    }
108
109    /// Check if YCB models exist at the given path
110    pub fn models_exist<P: AsRef<Path>>(output_dir: P) -> bool {
111        let path = output_dir.as_ref();
112        // Check for at least one representative object
113        path.join("003_cracker_box/google_16k/textured.obj")
114            .exists()
115    }
116
117    /// Get the path to a specific YCB object's OBJ file
118    pub fn object_mesh_path<P: AsRef<Path>>(output_dir: P, object_id: &str) -> std::path::PathBuf {
119        output_dir
120            .as_ref()
121            .join(object_id)
122            .join("google_16k")
123            .join("textured.obj")
124    }
125
126    /// Get the path to a specific YCB object's texture file
127    pub fn object_texture_path<P: AsRef<Path>>(
128        output_dir: P,
129        object_id: &str,
130    ) -> std::path::PathBuf {
131        output_dir
132            .as_ref()
133            .join(object_id)
134            .join("google_16k")
135            .join("texture_map.png")
136    }
137}
138
/// Object rotation in Euler angles (degrees), matching TBP benchmark format.
/// Format: [pitch, yaw, roll] or [x, y, z] rotation.
///
/// Angles are stored as `f64`; conversion to a Bevy quaternion applies them
/// in `EulerRot::XYZ` order (see `to_quat`).
#[derive(Clone, Debug, PartialEq)]
pub struct ObjectRotation {
    /// Rotation around X-axis (pitch) in degrees
    pub pitch: f64,
    /// Rotation around Y-axis (yaw) in degrees
    pub yaw: f64,
    /// Rotation around Z-axis (roll) in degrees
    pub roll: f64,
}
150
151impl ObjectRotation {
152    /// Create a new rotation from Euler angles in degrees
153    pub fn new(pitch: f64, yaw: f64, roll: f64) -> Self {
154        Self { pitch, yaw, roll }
155    }
156
157    /// Create from TBP-style array [pitch, yaw, roll] in degrees
158    pub fn from_array(arr: [f64; 3]) -> Self {
159        Self {
160            pitch: arr[0],
161            yaw: arr[1],
162            roll: arr[2],
163        }
164    }
165
166    /// Identity rotation (no rotation)
167    pub fn identity() -> Self {
168        Self::new(0.0, 0.0, 0.0)
169    }
170
171    /// TBP benchmark rotations: [0,0,0], [0,90,0], [0,180,0]
172    /// Used in shorter YCB experiments to reduce computational load.
173    pub fn tbp_benchmark_rotations() -> Vec<Self> {
174        vec![
175            Self::from_array([0.0, 0.0, 0.0]),
176            Self::from_array([0.0, 90.0, 0.0]),
177            Self::from_array([0.0, 180.0, 0.0]),
178        ]
179    }
180
181    /// TBP 14 known orientations (cube faces and corners)
182    /// These are the orientations objects are learned in during training.
183    pub fn tbp_known_orientations() -> Vec<Self> {
184        vec![
185            // 6 cube faces (90° rotations around each axis)
186            Self::from_array([0.0, 0.0, 0.0]),   // Front
187            Self::from_array([0.0, 90.0, 0.0]),  // Right
188            Self::from_array([0.0, 180.0, 0.0]), // Back
189            Self::from_array([0.0, 270.0, 0.0]), // Left
190            Self::from_array([90.0, 0.0, 0.0]),  // Top
191            Self::from_array([-90.0, 0.0, 0.0]), // Bottom
192            // 8 cube corners (45° rotations)
193            Self::from_array([45.0, 45.0, 0.0]),
194            Self::from_array([45.0, 135.0, 0.0]),
195            Self::from_array([45.0, 225.0, 0.0]),
196            Self::from_array([45.0, 315.0, 0.0]),
197            Self::from_array([-45.0, 45.0, 0.0]),
198            Self::from_array([-45.0, 135.0, 0.0]),
199            Self::from_array([-45.0, 225.0, 0.0]),
200            Self::from_array([-45.0, 315.0, 0.0]),
201        ]
202    }
203
204    /// Convert to Bevy Quat (converts f64 to f32 for Bevy compatibility)
205    pub fn to_quat(&self) -> Quat {
206        Quat::from_euler(
207            EulerRot::XYZ,
208            (self.pitch as f32).to_radians(),
209            (self.yaw as f32).to_radians(),
210            (self.roll as f32).to_radians(),
211        )
212    }
213
214    /// Convert to Bevy Transform (rotation only, no translation)
215    pub fn to_transform(&self) -> Transform {
216        Transform::from_rotation(self.to_quat())
217    }
218}
219
220impl Default for ObjectRotation {
221    fn default() -> Self {
222        Self::identity()
223    }
224}
225
/// Configuration for viewpoint generation matching TBP habitat sensor behavior.
/// Uses spherical coordinates to capture objects from multiple elevations.
///
/// The generated set is `yaw_count * pitch_angles_deg.len()` camera transforms
/// on a sphere of the given radius, all looking at the origin
/// (see `generate_viewpoints`).
#[derive(Clone, Debug)]
pub struct ViewpointConfig {
    /// Distance from camera to object center (meters)
    pub radius: f32,
    /// Number of horizontal positions (yaw angles) around the object,
    /// evenly spaced over 360°
    pub yaw_count: usize,
    /// Elevation angles in degrees (pitch). Positive = above, negative = below.
    pub pitch_angles_deg: Vec<f32>,
}
237
238impl Default for ViewpointConfig {
239    fn default() -> Self {
240        Self {
241            radius: 0.5,
242            yaw_count: 8,
243            // Three elevations: below (-30°), level (0°), above (+30°)
244            // This matches TBP's look_up/look_down capability
245            pitch_angles_deg: vec![-30.0, 0.0, 30.0],
246        }
247    }
248}
249
250impl ViewpointConfig {
251    /// Total number of viewpoints this config will generate
252    pub fn viewpoint_count(&self) -> usize {
253        self.yaw_count * self.pitch_angles_deg.len()
254    }
255}
256
/// Full sensor configuration for capture sessions.
///
/// Registered as a Bevy `Resource` so capture systems can read it.
/// Total work is `viewpoints.viewpoint_count() * object_rotations.len()`
/// captures (see `total_captures`).
#[derive(Clone, Debug, Resource)]
pub struct SensorConfig {
    /// Viewpoint configuration (camera positions)
    pub viewpoints: ViewpointConfig,
    /// Object rotations to capture (each rotation generates a full viewpoint set)
    pub object_rotations: Vec<ObjectRotation>,
    /// Output directory for captures
    pub output_dir: String,
    /// Filename pattern (use {view} for view index, {rot} for rotation index)
    pub filename_pattern: String,
}
269
270impl Default for SensorConfig {
271    fn default() -> Self {
272        Self {
273            viewpoints: ViewpointConfig::default(),
274            object_rotations: vec![ObjectRotation::identity()],
275            output_dir: ".".to_string(),
276            filename_pattern: "capture_{rot}_{view}.png".to_string(),
277        }
278    }
279}
280
281impl SensorConfig {
282    /// Create config for TBP benchmark comparison (3 rotations × 24 viewpoints = 72 captures)
283    pub fn tbp_benchmark() -> Self {
284        Self {
285            viewpoints: ViewpointConfig::default(),
286            object_rotations: ObjectRotation::tbp_benchmark_rotations(),
287            output_dir: ".".to_string(),
288            filename_pattern: "capture_{rot}_{view}.png".to_string(),
289        }
290    }
291
292    /// Create config for full TBP training (14 rotations × 24 viewpoints = 336 captures)
293    pub fn tbp_full_training() -> Self {
294        Self {
295            viewpoints: ViewpointConfig::default(),
296            object_rotations: ObjectRotation::tbp_known_orientations(),
297            output_dir: ".".to_string(),
298            filename_pattern: "capture_{rot}_{view}.png".to_string(),
299        }
300    }
301
302    /// Total number of captures this config will generate
303    pub fn total_captures(&self) -> usize {
304        self.viewpoints.viewpoint_count() * self.object_rotations.len()
305    }
306}
307
308/// Generate camera viewpoints using spherical coordinates.
309///
310/// Spherical coordinate system (matching TBP habitat sensor conventions):
311/// - Yaw: horizontal rotation around Y-axis (0° to 360°)
312/// - Pitch: elevation angle from horizontal plane (-90° to +90°)
313/// - Radius: distance from origin (object center)
314pub fn generate_viewpoints(config: &ViewpointConfig) -> Vec<Transform> {
315    let mut views = Vec::with_capacity(config.viewpoint_count());
316
317    for pitch_deg in &config.pitch_angles_deg {
318        let pitch = pitch_deg.to_radians();
319
320        for i in 0..config.yaw_count {
321            let yaw = (i as f32) * 2.0 * PI / (config.yaw_count as f32);
322
323            // Spherical to Cartesian conversion (Y-up coordinate system)
324            // x = r * cos(pitch) * sin(yaw)
325            // y = r * sin(pitch)
326            // z = r * cos(pitch) * cos(yaw)
327            let x = config.radius * pitch.cos() * yaw.sin();
328            let y = config.radius * pitch.sin();
329            let z = config.radius * pitch.cos() * yaw.cos();
330
331            let transform = Transform::from_xyz(x, y, z).looking_at(Vec3::ZERO, Vec3::Y);
332            views.push(transform);
333        }
334    }
335    views
336}
337
/// Marker component (carries no data) for the target object being captured.
#[derive(Component)]
pub struct CaptureTarget;
341
/// Marker component (carries no data) for the capture camera.
#[derive(Component)]
pub struct CaptureCamera;
345
346// ============================================================================
347// Headless Rendering API (NEW)
348// ============================================================================
349
/// Configuration for headless rendering.
///
/// Matches TBP habitat sensor defaults: 64x64 resolution with RGBD output.
/// Construct via `tbp_default()`, `preview()`, or `high_res()`; `Default`
/// is the TBP configuration.
#[derive(Clone, Debug)]
pub struct RenderConfig {
    /// Image width in pixels (default: 64)
    pub width: u32,
    /// Image height in pixels (default: 64)
    pub height: u32,
    /// Zoom factor affecting field of view (default: 1.0).
    /// Use >1 to zoom in (narrower FOV), <1 to zoom out (wider FOV).
    pub zoom: f32,
    /// Near clipping plane in meters (default: 0.01)
    pub near_plane: f32,
    /// Far clipping plane in meters (default: 10.0)
    pub far_plane: f32,
    /// Lighting configuration
    pub lighting: LightingConfig,
}
369
/// Lighting configuration for rendering.
///
/// Controls ambient light and two point lights (key + fill) in the scene.
/// Presets: `bright()`, `soft()`, `unlit()`.
#[derive(Clone, Debug)]
pub struct LightingConfig {
    /// Ambient light brightness (0.0 - 1.0, default: 0.3)
    pub ambient_brightness: f32,
    /// Key light intensity in lumens (default: 1500.0)
    pub key_light_intensity: f32,
    /// Key light position [x, y, z] (default: [4.0, 8.0, 4.0])
    pub key_light_position: [f32; 3],
    /// Fill light intensity in lumens (default: 500.0)
    pub fill_light_intensity: f32,
    /// Fill light position [x, y, z] (default: [-4.0, 2.0, -4.0])
    pub fill_light_position: [f32; 3],
    /// Enable shadows (default: false for performance)
    pub shadows_enabled: bool,
}
388
389impl Default for LightingConfig {
390    fn default() -> Self {
391        Self {
392            ambient_brightness: 0.3,
393            key_light_intensity: 1500.0,
394            key_light_position: [4.0, 8.0, 4.0],
395            fill_light_intensity: 500.0,
396            fill_light_position: [-4.0, 2.0, -4.0],
397            shadows_enabled: false,
398        }
399    }
400}
401
402impl LightingConfig {
403    /// Bright lighting for clear visibility
404    pub fn bright() -> Self {
405        Self {
406            ambient_brightness: 0.5,
407            key_light_intensity: 2000.0,
408            key_light_position: [4.0, 8.0, 4.0],
409            fill_light_intensity: 800.0,
410            fill_light_position: [-4.0, 2.0, -4.0],
411            shadows_enabled: false,
412        }
413    }
414
415    /// Soft lighting with minimal shadows
416    pub fn soft() -> Self {
417        Self {
418            ambient_brightness: 0.4,
419            key_light_intensity: 1000.0,
420            key_light_position: [3.0, 6.0, 3.0],
421            fill_light_intensity: 600.0,
422            fill_light_position: [-3.0, 3.0, -3.0],
423            shadows_enabled: false,
424        }
425    }
426
427    /// Unlit mode - ambient only, no point lights
428    pub fn unlit() -> Self {
429        Self {
430            ambient_brightness: 1.0,
431            key_light_intensity: 0.0,
432            key_light_position: [0.0, 0.0, 0.0],
433            fill_light_intensity: 0.0,
434            fill_light_position: [0.0, 0.0, 0.0],
435            shadows_enabled: false,
436        }
437    }
438}
439
440impl Default for RenderConfig {
441    fn default() -> Self {
442        Self::tbp_default()
443    }
444}
445
446impl RenderConfig {
447    /// TBP-compatible 64x64 RGBD sensor configuration.
448    ///
449    /// This matches the default resolution used in TBP's habitat sensor.
450    pub fn tbp_default() -> Self {
451        Self {
452            width: 64,
453            height: 64,
454            zoom: 1.0,
455            near_plane: 0.01,
456            far_plane: 10.0,
457            lighting: LightingConfig::default(),
458        }
459    }
460
461    /// Higher resolution configuration for debugging and visualization.
462    pub fn preview() -> Self {
463        Self {
464            width: 256,
465            height: 256,
466            zoom: 1.0,
467            near_plane: 0.01,
468            far_plane: 10.0,
469            lighting: LightingConfig::default(),
470        }
471    }
472
473    /// High resolution configuration for detailed captures.
474    pub fn high_res() -> Self {
475        Self {
476            width: 512,
477            height: 512,
478            zoom: 1.0,
479            near_plane: 0.01,
480            far_plane: 10.0,
481            lighting: LightingConfig::default(),
482        }
483    }
484
485    /// Calculate vertical field of view in radians based on zoom.
486    ///
487    /// Base FOV is 60 degrees, adjusted by zoom factor.
488    pub fn fov_radians(&self) -> f32 {
489        let base_fov_deg = 60.0_f32;
490        (base_fov_deg / self.zoom).to_radians()
491    }
492
493    /// Compute camera intrinsics for use with neocortx.
494    ///
495    /// Returns focal length and principal point based on resolution and FOV.
496    /// Uses f64 for TBP numerical precision compatibility.
497    pub fn intrinsics(&self) -> CameraIntrinsics {
498        let fov = self.fov_radians() as f64;
499        // focal_length = (height/2) / tan(fov/2)
500        let fy = (self.height as f64 / 2.0) / (fov / 2.0).tan();
501        let fx = fy; // Assuming square pixels
502
503        CameraIntrinsics {
504            focal_length: [fx, fy],
505            principal_point: [self.width as f64 / 2.0, self.height as f64 / 2.0],
506            image_size: [self.width, self.height],
507        }
508    }
509}
510
/// Camera intrinsic parameters for 3D reconstruction.
///
/// Compatible with neocortx's VisionIntrinsics format.
/// Uses f64 for TBP numerical precision compatibility.
/// Produced by `RenderConfig::intrinsics()`.
#[derive(Clone, Debug, PartialEq)]
pub struct CameraIntrinsics {
    /// Focal length in pixels (fx, fy)
    pub focal_length: [f64; 2],
    /// Principal point (cx, cy) - typically image center
    pub principal_point: [f64; 2],
    /// Image dimensions (width, height)
    pub image_size: [u32; 2],
}
524
525impl CameraIntrinsics {
526    /// Project a 3D point to 2D pixel coordinates.
527    pub fn project(&self, point: Vec3) -> Option<[f64; 2]> {
528        if point.z <= 0.0 {
529            return None;
530        }
531        let x = (point.x as f64 / point.z as f64) * self.focal_length[0] + self.principal_point[0];
532        let y = (point.y as f64 / point.z as f64) * self.focal_length[1] + self.principal_point[1];
533        Some([x, y])
534    }
535
536    /// Unproject a 2D pixel to a 3D point at given depth.
537    pub fn unproject(&self, pixel: [f64; 2], depth: f64) -> [f64; 3] {
538        let x = (pixel[0] - self.principal_point[0]) / self.focal_length[0] * depth;
539        let y = (pixel[1] - self.principal_point[1]) / self.focal_length[1] * depth;
540        [x, y, depth]
541    }
542}
543
/// Output from headless rendering containing RGBA and depth data.
///
/// Buffers are row-major; pixel (x, y) lives at index `y * width + x`
/// (times 4 for `rgba`). Use the `get_*` accessors for bounds-checked reads.
#[derive(Clone, Debug)]
pub struct RenderOutput {
    /// RGBA pixel data in row-major order (width * height * 4 bytes)
    pub rgba: Vec<u8>,
    /// Depth values in meters, row-major order (width * height f64s).
    /// Values are linear depth from camera, not normalized.
    /// Uses f64 for TBP numerical precision compatibility.
    pub depth: Vec<f64>,
    /// Image width in pixels
    pub width: u32,
    /// Image height in pixels
    pub height: u32,
    /// Camera intrinsics used for this render
    pub intrinsics: CameraIntrinsics,
    /// Camera transform (world position and orientation)
    pub camera_transform: Transform,
    /// Object rotation applied during render
    pub object_rotation: ObjectRotation,
}
564
565impl RenderOutput {
566    /// Get RGBA pixel at (x, y). Returns None if out of bounds.
567    pub fn get_rgba(&self, x: u32, y: u32) -> Option<[u8; 4]> {
568        if x >= self.width || y >= self.height {
569            return None;
570        }
571        let idx = ((y * self.width + x) * 4) as usize;
572        Some([
573            self.rgba[idx],
574            self.rgba[idx + 1],
575            self.rgba[idx + 2],
576            self.rgba[idx + 3],
577        ])
578    }
579
580    /// Get depth value at (x, y) in meters. Returns None if out of bounds.
581    pub fn get_depth(&self, x: u32, y: u32) -> Option<f64> {
582        if x >= self.width || y >= self.height {
583            return None;
584        }
585        let idx = (y * self.width + x) as usize;
586        Some(self.depth[idx])
587    }
588
589    /// Get RGB pixel (without alpha) at (x, y).
590    pub fn get_rgb(&self, x: u32, y: u32) -> Option<[u8; 3]> {
591        self.get_rgba(x, y).map(|rgba| [rgba[0], rgba[1], rgba[2]])
592    }
593
594    /// Convert to neocortx-compatible image format: Vec<Vec<[u8; 3]>>
595    pub fn to_rgb_image(&self) -> Vec<Vec<[u8; 3]>> {
596        let mut image = Vec::with_capacity(self.height as usize);
597        for y in 0..self.height {
598            let mut row = Vec::with_capacity(self.width as usize);
599            for x in 0..self.width {
600                row.push(self.get_rgb(x, y).unwrap_or([0, 0, 0]));
601            }
602            image.push(row);
603        }
604        image
605    }
606
607    /// Convert depth to neocortx-compatible format: Vec<Vec<f64>>
608    pub fn to_depth_image(&self) -> Vec<Vec<f64>> {
609        let mut image = Vec::with_capacity(self.height as usize);
610        for y in 0..self.height {
611            let mut row = Vec::with_capacity(self.width as usize);
612            for x in 0..self.width {
613                row.push(self.get_depth(x, y).unwrap_or(0.0));
614            }
615            image.push(row);
616        }
617        image
618    }
619}
620
/// Errors that can occur during rendering.
///
/// Each variant carries a human-readable detail string (a path or message)
/// that is included in the `Display` output.
#[derive(Debug, Clone)]
pub enum RenderError {
    /// Object mesh file not found
    MeshNotFound(String),
    /// Object texture file not found
    TextureNotFound(String),
    /// Bevy rendering failed
    RenderFailed(String),
    /// Invalid configuration
    InvalidConfig(String),
}
633
634impl std::fmt::Display for RenderError {
635    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
636        match self {
637            RenderError::MeshNotFound(path) => write!(f, "Mesh not found: {}", path),
638            RenderError::TextureNotFound(path) => write!(f, "Texture not found: {}", path),
639            RenderError::RenderFailed(msg) => write!(f, "Render failed: {}", msg),
640            RenderError::InvalidConfig(msg) => write!(f, "Invalid config: {}", msg),
641        }
642    }
643}
644
645impl std::error::Error for RenderError {}
646
/// Render a YCB object to an in-memory buffer.
///
/// This is the primary API for headless rendering. It spawns a minimal Bevy app,
/// renders a single frame, extracts the RGBA and depth data, and shuts down.
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory (e.g., "/tmp/ycb/003_cracker_box")
/// * `camera_transform` - Camera position and orientation (use `generate_viewpoints`)
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration (resolution, depth range, etc.)
///
/// # Errors
/// Returns whatever `RenderError` the underlying `render::render_headless`
/// call produces (missing mesh/texture, render failure, bad config).
///
/// # Example
/// ```ignore
/// use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
/// use std::path::Path;
///
/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
/// let output = render_to_buffer(
///     Path::new("/tmp/ycb/003_cracker_box"),
///     &viewpoints[0],
///     &ObjectRotation::identity(),
///     &RenderConfig::tbp_default(),
/// )?;
/// ```
pub fn render_to_buffer(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
) -> Result<RenderOutput, RenderError> {
    // Thin delegation: all rendering logic lives in the private render module.
    render::render_headless(object_dir, camera_transform, object_rotation, config)
}
680
681/// Render all viewpoints and rotations for a YCB object.
682///
683/// Convenience function that renders all combinations of viewpoints and rotations.
684///
685/// # Arguments
686/// * `object_dir` - Path to YCB object directory
687/// * `viewpoint_config` - Viewpoint configuration (camera positions)
688/// * `rotations` - Object rotations to render
689/// * `render_config` - Render configuration
690///
691/// # Returns
692/// Vector of RenderOutput, one per viewpoint × rotation combination.
693pub fn render_all_viewpoints(
694    object_dir: &Path,
695    viewpoint_config: &ViewpointConfig,
696    rotations: &[ObjectRotation],
697    render_config: &RenderConfig,
698) -> Result<Vec<RenderOutput>, RenderError> {
699    let viewpoints = generate_viewpoints(viewpoint_config);
700    let mut outputs = Vec::with_capacity(viewpoints.len() * rotations.len());
701
702    for rotation in rotations {
703        for viewpoint in &viewpoints {
704            let output = render_to_buffer(object_dir, viewpoint, rotation, render_config)?;
705            outputs.push(output);
706        }
707    }
708
709    Ok(outputs)
710}
711
/// Render directly to files (for subprocess mode).
///
/// This function is designed for subprocess rendering where the process will exit
/// after rendering. It saves RGBA and depth data directly to the specified files
/// before the process terminates.
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory
/// * `camera_transform` - Camera position and orientation
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration
/// * `rgba_path` - Output path for RGBA PNG
/// * `depth_path` - Output path for depth data (raw f32 bytes — NOTE(review):
///   the in-memory `RenderOutput` uses f64; confirm the on-disk format in the
///   render module)
///
/// # Errors
/// Propagates any `RenderError` from the underlying render module.
///
/// # Note
/// This function may call `std::process::exit(0)` and not return.
pub fn render_to_files(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
    rgba_path: &Path,
    depth_path: &Path,
) -> Result<(), RenderError> {
    // Thin delegation to the private render module.
    render::render_to_files(
        object_dir,
        camera_transform,
        object_rotation,
        config,
        rgba_path,
        depth_path,
    )
}
745
746// Re-export bevy types that consumers will need
747pub use bevy::prelude::{Quat, Transform, Vec3};
748
749#[cfg(test)]
750mod tests {
751    use super::*;
752
753    #[test]
754    fn test_object_rotation_identity() {
755        let rot = ObjectRotation::identity();
756        assert_eq!(rot.pitch, 0.0);
757        assert_eq!(rot.yaw, 0.0);
758        assert_eq!(rot.roll, 0.0);
759    }
760
761    #[test]
762    fn test_object_rotation_from_array() {
763        let rot = ObjectRotation::from_array([10.0, 20.0, 30.0]);
764        assert_eq!(rot.pitch, 10.0);
765        assert_eq!(rot.yaw, 20.0);
766        assert_eq!(rot.roll, 30.0);
767    }
768
769    #[test]
770    fn test_tbp_benchmark_rotations() {
771        let rotations = ObjectRotation::tbp_benchmark_rotations();
772        assert_eq!(rotations.len(), 3);
773        assert_eq!(rotations[0], ObjectRotation::from_array([0.0, 0.0, 0.0]));
774        assert_eq!(rotations[1], ObjectRotation::from_array([0.0, 90.0, 0.0]));
775        assert_eq!(rotations[2], ObjectRotation::from_array([0.0, 180.0, 0.0]));
776    }
777
778    #[test]
779    fn test_tbp_known_orientations_count() {
780        let orientations = ObjectRotation::tbp_known_orientations();
781        assert_eq!(orientations.len(), 14);
782    }
783
784    #[test]
785    fn test_rotation_to_quat() {
786        let rot = ObjectRotation::identity();
787        let quat = rot.to_quat();
788        // Identity quaternion should be approximately (1, 0, 0, 0)
789        assert!((quat.w - 1.0).abs() < 0.001);
790        assert!(quat.x.abs() < 0.001);
791        assert!(quat.y.abs() < 0.001);
792        assert!(quat.z.abs() < 0.001);
793    }
794
795    #[test]
796    fn test_rotation_90_yaw() {
797        let rot = ObjectRotation::new(0.0, 90.0, 0.0);
798        let quat = rot.to_quat();
799        // 90° Y rotation: w ≈ 0.707, y ≈ 0.707
800        assert!((quat.w - 0.707).abs() < 0.01);
801        assert!((quat.y - 0.707).abs() < 0.01);
802    }
803
804    #[test]
805    fn test_viewpoint_config_default() {
806        let config = ViewpointConfig::default();
807        assert_eq!(config.radius, 0.5);
808        assert_eq!(config.yaw_count, 8);
809        assert_eq!(config.pitch_angles_deg.len(), 3);
810    }
811
812    #[test]
813    fn test_viewpoint_count() {
814        let config = ViewpointConfig::default();
815        assert_eq!(config.viewpoint_count(), 24); // 8 × 3
816    }
817
818    #[test]
819    fn test_generate_viewpoints_count() {
820        let config = ViewpointConfig::default();
821        let viewpoints = generate_viewpoints(&config);
822        assert_eq!(viewpoints.len(), 24);
823    }
824
825    #[test]
826    fn test_viewpoints_spherical_radius() {
827        let config = ViewpointConfig::default();
828        let viewpoints = generate_viewpoints(&config);
829
830        for (i, transform) in viewpoints.iter().enumerate() {
831            let actual_radius = transform.translation.length();
832            assert!(
833                (actual_radius - config.radius).abs() < 0.001,
834                "Viewpoint {} has incorrect radius: {} (expected {})",
835                i,
836                actual_radius,
837                config.radius
838            );
839        }
840    }
841
842    #[test]
843    fn test_viewpoints_looking_at_origin() {
844        let config = ViewpointConfig::default();
845        let viewpoints = generate_viewpoints(&config);
846
847        for (i, transform) in viewpoints.iter().enumerate() {
848            let forward = transform.forward();
849            let to_origin = (Vec3::ZERO - transform.translation).normalize();
850            let dot = forward.dot(to_origin);
851            assert!(
852                dot > 0.99,
853                "Viewpoint {} not looking at origin, dot product: {}",
854                i,
855                dot
856            );
857        }
858    }
859
860    #[test]
861    fn test_sensor_config_default() {
862        let config = SensorConfig::default();
863        assert_eq!(config.object_rotations.len(), 1);
864        assert_eq!(config.total_captures(), 24);
865    }
866
867    #[test]
868    fn test_sensor_config_tbp_benchmark() {
869        let config = SensorConfig::tbp_benchmark();
870        assert_eq!(config.object_rotations.len(), 3);
871        assert_eq!(config.total_captures(), 72); // 3 rotations × 24 viewpoints
872    }
873
874    #[test]
875    fn test_sensor_config_tbp_full() {
876        let config = SensorConfig::tbp_full_training();
877        assert_eq!(config.object_rotations.len(), 14);
878        assert_eq!(config.total_captures(), 336); // 14 rotations × 24 viewpoints
879    }
880
881    #[test]
882    fn test_ycb_representative_objects() {
883        // Verify representative objects are defined
884        assert_eq!(crate::ycb::REPRESENTATIVE_OBJECTS.len(), 3);
885        assert!(crate::ycb::REPRESENTATIVE_OBJECTS.contains(&"003_cracker_box"));
886    }
887
888    #[test]
889    fn test_ycb_ten_objects() {
890        // Verify ten objects subset is defined
891        assert_eq!(crate::ycb::TEN_OBJECTS.len(), 10);
892    }
893
894    #[test]
895    fn test_ycb_object_mesh_path() {
896        let path = crate::ycb::object_mesh_path("/tmp/ycb", "003_cracker_box");
897        assert_eq!(
898            path.to_string_lossy(),
899            "/tmp/ycb/003_cracker_box/google_16k/textured.obj"
900        );
901    }
902
903    #[test]
904    fn test_ycb_object_texture_path() {
905        let path = crate::ycb::object_texture_path("/tmp/ycb", "003_cracker_box");
906        assert_eq!(
907            path.to_string_lossy(),
908            "/tmp/ycb/003_cracker_box/google_16k/texture_map.png"
909        );
910    }
911
912    // =========================================================================
913    // Headless Rendering API Tests
914    // =========================================================================
915
916    #[test]
917    fn test_render_config_tbp_default() {
918        let config = RenderConfig::tbp_default();
919        assert_eq!(config.width, 64);
920        assert_eq!(config.height, 64);
921        assert_eq!(config.zoom, 1.0);
922        assert_eq!(config.near_plane, 0.01);
923        assert_eq!(config.far_plane, 10.0);
924    }
925
926    #[test]
927    fn test_render_config_preview() {
928        let config = RenderConfig::preview();
929        assert_eq!(config.width, 256);
930        assert_eq!(config.height, 256);
931    }
932
933    #[test]
934    fn test_render_config_default_is_tbp() {
935        let default = RenderConfig::default();
936        let tbp = RenderConfig::tbp_default();
937        assert_eq!(default.width, tbp.width);
938        assert_eq!(default.height, tbp.height);
939    }
940
941    #[test]
942    fn test_render_config_fov() {
943        let config = RenderConfig::tbp_default();
944        let fov = config.fov_radians();
945        // Base FOV is 60 degrees = ~1.047 radians
946        assert!((fov - 1.047).abs() < 0.01);
947
948        // Zoom in should reduce FOV
949        let zoomed = RenderConfig {
950            zoom: 2.0,
951            ..config
952        };
953        assert!(zoomed.fov_radians() < fov);
954    }
955
956    #[test]
957    fn test_render_config_intrinsics() {
958        let config = RenderConfig::tbp_default();
959        let intrinsics = config.intrinsics();
960
961        assert_eq!(intrinsics.image_size, [64, 64]);
962        assert_eq!(intrinsics.principal_point, [32.0, 32.0]);
963        // Focal length should be positive and reasonable
964        assert!(intrinsics.focal_length[0] > 0.0);
965        assert!(intrinsics.focal_length[1] > 0.0);
966        // For 64x64 with 60° FOV, focal length ≈ 55.4 pixels
967        assert!((intrinsics.focal_length[0] - 55.4).abs() < 1.0);
968    }
969
970    #[test]
971    fn test_camera_intrinsics_project() {
972        let intrinsics = CameraIntrinsics {
973            focal_length: [100.0, 100.0],
974            principal_point: [32.0, 32.0],
975            image_size: [64, 64],
976        };
977
978        // Point at origin of camera frame projects to principal point
979        let center = intrinsics.project(Vec3::new(0.0, 0.0, 1.0));
980        assert!(center.is_some());
981        let [x, y] = center.unwrap();
982        assert!((x - 32.0).abs() < 0.001);
983        assert!((y - 32.0).abs() < 0.001);
984
985        // Point behind camera returns None
986        let behind = intrinsics.project(Vec3::new(0.0, 0.0, -1.0));
987        assert!(behind.is_none());
988    }
989
990    #[test]
991    fn test_camera_intrinsics_unproject() {
992        let intrinsics = CameraIntrinsics {
993            focal_length: [100.0, 100.0],
994            principal_point: [32.0, 32.0],
995            image_size: [64, 64],
996        };
997
998        // Unproject principal point at depth 1.0
999        let point = intrinsics.unproject([32.0, 32.0], 1.0);
1000        assert!((point[0]).abs() < 0.001); // x
1001        assert!((point[1]).abs() < 0.001); // y
1002        assert!((point[2] - 1.0).abs() < 0.001); // z
1003    }
1004
1005    #[test]
1006    fn test_render_output_get_rgba() {
1007        let output = RenderOutput {
1008            rgba: vec![
1009                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
1010            ],
1011            depth: vec![1.0, 2.0, 3.0, 4.0],
1012            width: 2,
1013            height: 2,
1014            intrinsics: RenderConfig::tbp_default().intrinsics(),
1015            camera_transform: Transform::IDENTITY,
1016            object_rotation: ObjectRotation::identity(),
1017        };
1018
1019        // Top-left: red
1020        assert_eq!(output.get_rgba(0, 0), Some([255, 0, 0, 255]));
1021        // Top-right: green
1022        assert_eq!(output.get_rgba(1, 0), Some([0, 255, 0, 255]));
1023        // Bottom-left: blue
1024        assert_eq!(output.get_rgba(0, 1), Some([0, 0, 255, 255]));
1025        // Bottom-right: white
1026        assert_eq!(output.get_rgba(1, 1), Some([255, 255, 255, 255]));
1027        // Out of bounds
1028        assert_eq!(output.get_rgba(2, 0), None);
1029    }
1030
1031    #[test]
1032    fn test_render_output_get_depth() {
1033        let output = RenderOutput {
1034            rgba: vec![0u8; 16],
1035            depth: vec![1.0, 2.0, 3.0, 4.0],
1036            width: 2,
1037            height: 2,
1038            intrinsics: RenderConfig::tbp_default().intrinsics(),
1039            camera_transform: Transform::IDENTITY,
1040            object_rotation: ObjectRotation::identity(),
1041        };
1042
1043        assert_eq!(output.get_depth(0, 0), Some(1.0));
1044        assert_eq!(output.get_depth(1, 0), Some(2.0));
1045        assert_eq!(output.get_depth(0, 1), Some(3.0));
1046        assert_eq!(output.get_depth(1, 1), Some(4.0));
1047        assert_eq!(output.get_depth(2, 0), None);
1048    }
1049
1050    #[test]
1051    fn test_render_output_to_rgb_image() {
1052        let output = RenderOutput {
1053            rgba: vec![
1054                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
1055            ],
1056            depth: vec![1.0, 2.0, 3.0, 4.0],
1057            width: 2,
1058            height: 2,
1059            intrinsics: RenderConfig::tbp_default().intrinsics(),
1060            camera_transform: Transform::IDENTITY,
1061            object_rotation: ObjectRotation::identity(),
1062        };
1063
1064        let image = output.to_rgb_image();
1065        assert_eq!(image.len(), 2); // 2 rows
1066        assert_eq!(image[0].len(), 2); // 2 columns
1067        assert_eq!(image[0][0], [255, 0, 0]); // Red
1068        assert_eq!(image[0][1], [0, 255, 0]); // Green
1069        assert_eq!(image[1][0], [0, 0, 255]); // Blue
1070        assert_eq!(image[1][1], [255, 255, 255]); // White
1071    }
1072
1073    #[test]
1074    fn test_render_output_to_depth_image() {
1075        let output = RenderOutput {
1076            rgba: vec![0u8; 16],
1077            depth: vec![1.0, 2.0, 3.0, 4.0],
1078            width: 2,
1079            height: 2,
1080            intrinsics: RenderConfig::tbp_default().intrinsics(),
1081            camera_transform: Transform::IDENTITY,
1082            object_rotation: ObjectRotation::identity(),
1083        };
1084
1085        let depth_image = output.to_depth_image();
1086        assert_eq!(depth_image.len(), 2);
1087        assert_eq!(depth_image[0], vec![1.0, 2.0]);
1088        assert_eq!(depth_image[1], vec![3.0, 4.0]);
1089    }
1090
1091    #[test]
1092    fn test_render_error_display() {
1093        let err = RenderError::MeshNotFound("/path/to/mesh.obj".to_string());
1094        assert!(err.to_string().contains("Mesh not found"));
1095        assert!(err.to_string().contains("/path/to/mesh.obj"));
1096    }
1097
1098    // =========================================================================
1099    // Edge Case Tests
1100    // =========================================================================
1101
1102    #[test]
1103    fn test_object_rotation_extreme_angles() {
1104        // Test angles beyond 360 degrees
1105        let rot = ObjectRotation::new(450.0, -720.0, 1080.0);
1106        let quat = rot.to_quat();
1107        // Quaternion should still be valid (normalized)
1108        assert!((quat.length() - 1.0).abs() < 0.001);
1109    }
1110
1111    #[test]
1112    fn test_object_rotation_to_transform() {
1113        let rot = ObjectRotation::new(45.0, 90.0, 0.0);
1114        let transform = rot.to_transform();
1115        // Transform should have no translation
1116        assert_eq!(transform.translation, Vec3::ZERO);
1117        // Should have rotation
1118        assert!(transform.rotation != Quat::IDENTITY);
1119    }
1120
1121    #[test]
1122    fn test_viewpoint_config_single_viewpoint() {
1123        let config = ViewpointConfig {
1124            radius: 1.0,
1125            yaw_count: 1,
1126            pitch_angles_deg: vec![0.0],
1127        };
1128        assert_eq!(config.viewpoint_count(), 1);
1129        let viewpoints = generate_viewpoints(&config);
1130        assert_eq!(viewpoints.len(), 1);
1131        // Single viewpoint at yaw=0, pitch=0 should be at (0, 0, radius)
1132        let pos = viewpoints[0].translation;
1133        assert!((pos.x).abs() < 0.001);
1134        assert!((pos.y).abs() < 0.001);
1135        assert!((pos.z - 1.0).abs() < 0.001);
1136    }
1137
1138    #[test]
1139    fn test_viewpoint_radius_scaling() {
1140        let config1 = ViewpointConfig {
1141            radius: 0.5,
1142            yaw_count: 4,
1143            pitch_angles_deg: vec![0.0],
1144        };
1145        let config2 = ViewpointConfig {
1146            radius: 2.0,
1147            yaw_count: 4,
1148            pitch_angles_deg: vec![0.0],
1149        };
1150
1151        let v1 = generate_viewpoints(&config1);
1152        let v2 = generate_viewpoints(&config2);
1153
1154        // Viewpoints should scale proportionally
1155        for (vp1, vp2) in v1.iter().zip(v2.iter()) {
1156            let ratio = vp2.translation.length() / vp1.translation.length();
1157            assert!((ratio - 4.0).abs() < 0.01); // 2.0 / 0.5 = 4.0
1158        }
1159    }
1160
1161    #[test]
1162    fn test_camera_intrinsics_project_at_z_zero() {
1163        let intrinsics = CameraIntrinsics {
1164            focal_length: [100.0, 100.0],
1165            principal_point: [32.0, 32.0],
1166            image_size: [64, 64],
1167        };
1168
1169        // Point at z=0 should return None (division by zero protection)
1170        let result = intrinsics.project(Vec3::new(1.0, 1.0, 0.0));
1171        assert!(result.is_none());
1172    }
1173
1174    #[test]
1175    fn test_camera_intrinsics_roundtrip() {
1176        let intrinsics = CameraIntrinsics {
1177            focal_length: [100.0, 100.0],
1178            principal_point: [32.0, 32.0],
1179            image_size: [64, 64],
1180        };
1181
1182        // Project a 3D point
1183        let original = Vec3::new(0.5, -0.3, 2.0);
1184        let projected = intrinsics.project(original).unwrap();
1185
1186        // Unproject back with the same depth (convert f32 to f64)
1187        let unprojected = intrinsics.unproject(projected, original.z as f64);
1188
1189        // Should get back approximately the same point
1190        assert!((unprojected[0] - original.x as f64).abs() < 0.001); // x
1191        assert!((unprojected[1] - original.y as f64).abs() < 0.001); // y
1192        assert!((unprojected[2] - original.z as f64).abs() < 0.001); // z
1193    }
1194
1195    #[test]
1196    fn test_render_output_empty() {
1197        let output = RenderOutput {
1198            rgba: vec![],
1199            depth: vec![],
1200            width: 0,
1201            height: 0,
1202            intrinsics: RenderConfig::tbp_default().intrinsics(),
1203            camera_transform: Transform::IDENTITY,
1204            object_rotation: ObjectRotation::identity(),
1205        };
1206
1207        // Should handle empty gracefully
1208        assert_eq!(output.get_rgba(0, 0), None);
1209        assert_eq!(output.get_depth(0, 0), None);
1210        assert!(output.to_rgb_image().is_empty());
1211        assert!(output.to_depth_image().is_empty());
1212    }
1213
1214    #[test]
1215    fn test_render_output_1x1() {
1216        let output = RenderOutput {
1217            rgba: vec![128, 64, 32, 255],
1218            depth: vec![0.5],
1219            width: 1,
1220            height: 1,
1221            intrinsics: RenderConfig::tbp_default().intrinsics(),
1222            camera_transform: Transform::IDENTITY,
1223            object_rotation: ObjectRotation::identity(),
1224        };
1225
1226        assert_eq!(output.get_rgba(0, 0), Some([128, 64, 32, 255]));
1227        assert_eq!(output.get_depth(0, 0), Some(0.5));
1228        assert_eq!(output.get_rgb(0, 0), Some([128, 64, 32]));
1229
1230        let rgb_img = output.to_rgb_image();
1231        assert_eq!(rgb_img.len(), 1);
1232        assert_eq!(rgb_img[0].len(), 1);
1233        assert_eq!(rgb_img[0][0], [128, 64, 32]);
1234    }
1235
1236    #[test]
1237    fn test_render_config_high_res() {
1238        let config = RenderConfig::high_res();
1239        assert_eq!(config.width, 512);
1240        assert_eq!(config.height, 512);
1241
1242        let intrinsics = config.intrinsics();
1243        assert_eq!(intrinsics.image_size, [512, 512]);
1244        assert_eq!(intrinsics.principal_point, [256.0, 256.0]);
1245    }
1246
1247    #[test]
1248    fn test_render_config_zoom_affects_fov() {
1249        let base = RenderConfig::tbp_default();
1250        let zoomed = RenderConfig {
1251            zoom: 2.0,
1252            ..base.clone()
1253        };
1254
1255        // Higher zoom = lower FOV
1256        assert!(zoomed.fov_radians() < base.fov_radians());
1257        // Specifically, 2x zoom = half FOV
1258        assert!((zoomed.fov_radians() - base.fov_radians() / 2.0).abs() < 0.01);
1259    }
1260
1261    #[test]
1262    fn test_render_config_zoom_affects_intrinsics() {
1263        let base = RenderConfig::tbp_default();
1264        let zoomed = RenderConfig {
1265            zoom: 2.0,
1266            ..base.clone()
1267        };
1268
1269        // Higher zoom = higher focal length
1270        let base_intrinsics = base.intrinsics();
1271        let zoomed_intrinsics = zoomed.intrinsics();
1272
1273        assert!(zoomed_intrinsics.focal_length[0] > base_intrinsics.focal_length[0]);
1274    }
1275
1276    #[test]
1277    fn test_lighting_config_variants() {
1278        let default = LightingConfig::default();
1279        let bright = LightingConfig::bright();
1280        let soft = LightingConfig::soft();
1281        let unlit = LightingConfig::unlit();
1282
1283        // Bright should have higher intensity than default
1284        assert!(bright.key_light_intensity > default.key_light_intensity);
1285
1286        // Unlit should have no point lights
1287        assert_eq!(unlit.key_light_intensity, 0.0);
1288        assert_eq!(unlit.fill_light_intensity, 0.0);
1289        assert_eq!(unlit.ambient_brightness, 1.0);
1290
1291        // Soft should have lower intensity
1292        assert!(soft.key_light_intensity < default.key_light_intensity);
1293    }
1294
1295    #[test]
1296    fn test_all_render_error_variants() {
1297        let errors = vec![
1298            RenderError::MeshNotFound("mesh.obj".to_string()),
1299            RenderError::TextureNotFound("texture.png".to_string()),
1300            RenderError::RenderFailed("GPU error".to_string()),
1301            RenderError::InvalidConfig("bad config".to_string()),
1302        ];
1303
1304        for err in errors {
1305            // All variants should have Display impl
1306            let msg = err.to_string();
1307            assert!(!msg.is_empty());
1308        }
1309    }
1310
1311    #[test]
1312    fn test_tbp_known_orientations_unique() {
1313        let orientations = ObjectRotation::tbp_known_orientations();
1314
1315        // All 14 orientations should produce unique quaternions
1316        let quats: Vec<Quat> = orientations.iter().map(|r| r.to_quat()).collect();
1317
1318        for (i, q1) in quats.iter().enumerate() {
1319            for (j, q2) in quats.iter().enumerate() {
1320                if i != j {
1321                    // Quaternions should be different (accounting for q == -q equivalence)
1322                    let dot = q1.dot(*q2).abs();
1323                    assert!(
1324                        dot < 0.999,
1325                        "Orientations {} and {} produce same quaternion",
1326                        i,
1327                        j
1328                    );
1329                }
1330            }
1331        }
1332    }
1333}