// bevy_sensor/lib.rs

1//! bevy-sensor: Multi-view rendering for YCB object dataset
2//!
3//! This library provides Bevy-based rendering of 3D objects from multiple viewpoints,
4//! designed to match TBP (Thousand Brains Project) habitat sensor conventions for
5//! use in neocortx sensorimotor learning experiments.
6//!
7//! # Headless Rendering (NEW)
8//!
9//! Render directly to memory buffers for use in sensorimotor learning:
10//!
11//! ```ignore
12//! use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
13//! use std::path::Path;
14//!
15//! let config = RenderConfig::tbp_default(); // 64x64, RGBD
16//! let viewpoint = bevy_sensor::generate_viewpoints(&ViewpointConfig::default())[0];
17//! let rotation = ObjectRotation::identity();
18//!
19//! let output = render_to_buffer(
20//!     Path::new("/tmp/ycb/003_cracker_box"),
21//!     &viewpoint,
22//!     &rotation,
23//!     &config,
24//! )?;
25//!
26//! // output.rgba: Vec<u8> - RGBA pixels (64*64*4 bytes)
27//! // output.depth: Vec<f32> - Depth values (64*64 floats)
28//! ```
29//!
30//! # File-based Capture (Legacy)
31//!
32//! ```ignore
33//! use bevy_sensor::{SensorConfig, ViewpointConfig, ObjectRotation};
34//!
35//! let config = SensorConfig {
36//!     viewpoints: ViewpointConfig::default(),
37//!     object_rotations: ObjectRotation::tbp_benchmark_rotations(),
38//!     ..Default::default()
39//! };
40//! ```
41//!
42//! # YCB Dataset
43//!
44//! Download YCB models programmatically:
45//!
46//! ```ignore
47//! use bevy_sensor::ycb::{download_models, Subset};
48//!
49//! // Download representative subset (3 objects)
50//! download_models("/tmp/ycb", Subset::Representative).await?;
51//! ```
52
53use bevy::prelude::*;
54use std::f32::consts::PI;
55use std::path::Path;
56
57// Headless rendering implementation
58// Full GPU rendering requires a display - see render module for details
59mod render;
60
61// Test fixtures for pre-rendered images (CI/CD support)
62pub mod fixtures;
63
64// Re-export ycbust types for convenience
65pub use ycbust::{self, DownloadOptions, Subset as YcbSubset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
66
67/// YCB dataset utilities
68pub mod ycb {
69    pub use ycbust::{download_ycb, DownloadOptions, Subset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
70
71    use std::path::Path;
72
73    /// Download YCB models to the specified directory.
74    ///
75    /// # Arguments
76    /// * `output_dir` - Directory to download models to
77    /// * `subset` - Which subset of objects to download
78    ///
79    /// # Example
80    /// ```ignore
81    /// use bevy_sensor::ycb::{download_models, Subset};
82    ///
83    /// download_models("/tmp/ycb", Subset::Representative).await?;
84    /// ```
85    pub async fn download_models<P: AsRef<Path>>(
86        output_dir: P,
87        subset: Subset,
88    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
89        let options = DownloadOptions {
90            overwrite: false,
91            full: false,
92            show_progress: true,
93            delete_archives: true,
94        };
95        download_ycb(subset, output_dir.as_ref(), options).await?;
96        Ok(())
97    }
98
99    /// Download YCB models with custom options.
100    pub async fn download_models_with_options<P: AsRef<Path>>(
101        output_dir: P,
102        subset: Subset,
103        options: DownloadOptions,
104    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
105        download_ycb(subset, output_dir.as_ref(), options).await?;
106        Ok(())
107    }
108
109    /// Check if YCB models exist at the given path
110    pub fn models_exist<P: AsRef<Path>>(output_dir: P) -> bool {
111        let path = output_dir.as_ref();
112        // Check for at least one representative object
113        path.join("003_cracker_box/google_16k/textured.obj")
114            .exists()
115    }
116
117    /// Get the path to a specific YCB object's OBJ file
118    pub fn object_mesh_path<P: AsRef<Path>>(output_dir: P, object_id: &str) -> std::path::PathBuf {
119        output_dir
120            .as_ref()
121            .join(object_id)
122            .join("google_16k")
123            .join("textured.obj")
124    }
125
126    /// Get the path to a specific YCB object's texture file
127    pub fn object_texture_path<P: AsRef<Path>>(
128        output_dir: P,
129        object_id: &str,
130    ) -> std::path::PathBuf {
131        output_dir
132            .as_ref()
133            .join(object_id)
134            .join("google_16k")
135            .join("texture_map.png")
136    }
137}
138
/// Object rotation in Euler angles (degrees), matching TBP benchmark format.
/// Format: [pitch, yaw, roll] or [x, y, z] rotation.
///
/// Angles are stored in degrees and only converted to radians when building
/// a quaternion (see `to_quat`, which applies them in X-Y-Z order).
#[derive(Clone, Debug, PartialEq)]
pub struct ObjectRotation {
    /// Rotation around X-axis (pitch) in degrees
    pub pitch: f32,
    /// Rotation around Y-axis (yaw) in degrees
    pub yaw: f32,
    /// Rotation around Z-axis (roll) in degrees
    pub roll: f32,
}
150
151impl ObjectRotation {
152    /// Create a new rotation from Euler angles in degrees
153    pub fn new(pitch: f32, yaw: f32, roll: f32) -> Self {
154        Self { pitch, yaw, roll }
155    }
156
157    /// Create from TBP-style array [pitch, yaw, roll] in degrees
158    pub fn from_array(arr: [f32; 3]) -> Self {
159        Self {
160            pitch: arr[0],
161            yaw: arr[1],
162            roll: arr[2],
163        }
164    }
165
166    /// Identity rotation (no rotation)
167    pub fn identity() -> Self {
168        Self::new(0.0, 0.0, 0.0)
169    }
170
171    /// TBP benchmark rotations: [0,0,0], [0,90,0], [0,180,0]
172    /// Used in shorter YCB experiments to reduce computational load.
173    pub fn tbp_benchmark_rotations() -> Vec<Self> {
174        vec![
175            Self::from_array([0.0, 0.0, 0.0]),
176            Self::from_array([0.0, 90.0, 0.0]),
177            Self::from_array([0.0, 180.0, 0.0]),
178        ]
179    }
180
181    /// TBP 14 known orientations (cube faces and corners)
182    /// These are the orientations objects are learned in during training.
183    pub fn tbp_known_orientations() -> Vec<Self> {
184        vec![
185            // 6 cube faces (90° rotations around each axis)
186            Self::from_array([0.0, 0.0, 0.0]),   // Front
187            Self::from_array([0.0, 90.0, 0.0]),  // Right
188            Self::from_array([0.0, 180.0, 0.0]), // Back
189            Self::from_array([0.0, 270.0, 0.0]), // Left
190            Self::from_array([90.0, 0.0, 0.0]),  // Top
191            Self::from_array([-90.0, 0.0, 0.0]), // Bottom
192            // 8 cube corners (45° rotations)
193            Self::from_array([45.0, 45.0, 0.0]),
194            Self::from_array([45.0, 135.0, 0.0]),
195            Self::from_array([45.0, 225.0, 0.0]),
196            Self::from_array([45.0, 315.0, 0.0]),
197            Self::from_array([-45.0, 45.0, 0.0]),
198            Self::from_array([-45.0, 135.0, 0.0]),
199            Self::from_array([-45.0, 225.0, 0.0]),
200            Self::from_array([-45.0, 315.0, 0.0]),
201        ]
202    }
203
204    /// Convert to Bevy Quat
205    pub fn to_quat(&self) -> Quat {
206        Quat::from_euler(
207            EulerRot::XYZ,
208            self.pitch.to_radians(),
209            self.yaw.to_radians(),
210            self.roll.to_radians(),
211        )
212    }
213
214    /// Convert to Bevy Transform (rotation only, no translation)
215    pub fn to_transform(&self) -> Transform {
216        Transform::from_rotation(self.to_quat())
217    }
218}
219
220impl Default for ObjectRotation {
221    fn default() -> Self {
222        Self::identity()
223    }
224}
225
/// Configuration for viewpoint generation matching TBP habitat sensor behavior.
/// Uses spherical coordinates to capture objects from multiple elevations.
///
/// `generate_viewpoints` produces `yaw_count * pitch_angles_deg.len()` camera
/// transforms, with yaw positions evenly spaced over a full 360° circle.
#[derive(Clone, Debug)]
pub struct ViewpointConfig {
    /// Distance from camera to object center (meters)
    pub radius: f32,
    /// Number of horizontal positions (yaw angles) around the object,
    /// evenly spaced over 360°
    pub yaw_count: usize,
    /// Elevation angles in degrees (pitch). Positive = above, negative = below.
    pub pitch_angles_deg: Vec<f32>,
}
237
238impl Default for ViewpointConfig {
239    fn default() -> Self {
240        Self {
241            radius: 0.5,
242            yaw_count: 8,
243            // Three elevations: below (-30°), level (0°), above (+30°)
244            // This matches TBP's look_up/look_down capability
245            pitch_angles_deg: vec![-30.0, 0.0, 30.0],
246        }
247    }
248}
249
250impl ViewpointConfig {
251    /// Total number of viewpoints this config will generate
252    pub fn viewpoint_count(&self) -> usize {
253        self.yaw_count * self.pitch_angles_deg.len()
254    }
255}
256
/// Full sensor configuration for capture sessions.
///
/// Total output size is `viewpoints.viewpoint_count() * object_rotations.len()`
/// captures (see `total_captures`).
#[derive(Clone, Debug, Resource)]
pub struct SensorConfig {
    /// Viewpoint configuration (camera positions)
    pub viewpoints: ViewpointConfig,
    /// Object rotations to capture (each rotation generates a full viewpoint set)
    pub object_rotations: Vec<ObjectRotation>,
    /// Output directory for captures
    pub output_dir: String,
    /// Filename pattern (use {view} for view index, {rot} for rotation index)
    pub filename_pattern: String,
}
269
270impl Default for SensorConfig {
271    fn default() -> Self {
272        Self {
273            viewpoints: ViewpointConfig::default(),
274            object_rotations: vec![ObjectRotation::identity()],
275            output_dir: ".".to_string(),
276            filename_pattern: "capture_{rot}_{view}.png".to_string(),
277        }
278    }
279}
280
281impl SensorConfig {
282    /// Create config for TBP benchmark comparison (3 rotations × 24 viewpoints = 72 captures)
283    pub fn tbp_benchmark() -> Self {
284        Self {
285            viewpoints: ViewpointConfig::default(),
286            object_rotations: ObjectRotation::tbp_benchmark_rotations(),
287            output_dir: ".".to_string(),
288            filename_pattern: "capture_{rot}_{view}.png".to_string(),
289        }
290    }
291
292    /// Create config for full TBP training (14 rotations × 24 viewpoints = 336 captures)
293    pub fn tbp_full_training() -> Self {
294        Self {
295            viewpoints: ViewpointConfig::default(),
296            object_rotations: ObjectRotation::tbp_known_orientations(),
297            output_dir: ".".to_string(),
298            filename_pattern: "capture_{rot}_{view}.png".to_string(),
299        }
300    }
301
302    /// Total number of captures this config will generate
303    pub fn total_captures(&self) -> usize {
304        self.viewpoints.viewpoint_count() * self.object_rotations.len()
305    }
306}
307
308/// Generate camera viewpoints using spherical coordinates.
309///
310/// Spherical coordinate system (matching TBP habitat sensor conventions):
311/// - Yaw: horizontal rotation around Y-axis (0° to 360°)
312/// - Pitch: elevation angle from horizontal plane (-90° to +90°)
313/// - Radius: distance from origin (object center)
314pub fn generate_viewpoints(config: &ViewpointConfig) -> Vec<Transform> {
315    let mut views = Vec::with_capacity(config.viewpoint_count());
316
317    for pitch_deg in &config.pitch_angles_deg {
318        let pitch = pitch_deg.to_radians();
319
320        for i in 0..config.yaw_count {
321            let yaw = (i as f32) * 2.0 * PI / (config.yaw_count as f32);
322
323            // Spherical to Cartesian conversion (Y-up coordinate system)
324            // x = r * cos(pitch) * sin(yaw)
325            // y = r * sin(pitch)
326            // z = r * cos(pitch) * cos(yaw)
327            let x = config.radius * pitch.cos() * yaw.sin();
328            let y = config.radius * pitch.sin();
329            let z = config.radius * pitch.cos() * yaw.cos();
330
331            let transform = Transform::from_xyz(x, y, z).looking_at(Vec3::ZERO, Vec3::Y);
332            views.push(transform);
333        }
334    }
335    views
336}
337
/// Marker component for the target object being captured.
/// Attach to the object entity so capture systems can query for it.
#[derive(Component)]
pub struct CaptureTarget;

/// Marker component for the capture camera.
/// Attach to the camera entity so capture systems can query for it.
#[derive(Component)]
pub struct CaptureCamera;
345
346// ============================================================================
347// Headless Rendering API (NEW)
348// ============================================================================
349
/// Configuration for headless rendering.
///
/// Matches TBP habitat sensor defaults: 64x64 resolution with RGBD output.
#[derive(Clone, Debug)]
pub struct RenderConfig {
    /// Image width in pixels (default: 64)
    pub width: u32,
    /// Image height in pixels (default: 64)
    pub height: u32,
    /// Zoom factor affecting field of view (default: 1.0)
    /// Use >1 to zoom in (narrower FOV), <1 to zoom out (wider FOV).
    /// Effective vertical FOV is 60° / zoom (see `fov_radians`).
    pub zoom: f32,
    /// Near clipping plane in meters (default: 0.01)
    pub near_plane: f32,
    /// Far clipping plane in meters (default: 10.0)
    pub far_plane: f32,
    /// Lighting configuration
    pub lighting: LightingConfig,
}
369
/// Lighting configuration for rendering.
///
/// Controls ambient light and the two point lights (key + fill) in the scene.
/// Consumed through `RenderConfig::lighting`.
#[derive(Clone, Debug)]
pub struct LightingConfig {
    /// Ambient light brightness (0.0 - 1.0, default: 0.3)
    pub ambient_brightness: f32,
    /// Key light intensity in lumens (default: 1500.0)
    pub key_light_intensity: f32,
    /// Key light position [x, y, z] (default: [4.0, 8.0, 4.0])
    pub key_light_position: [f32; 3],
    /// Fill light intensity in lumens (default: 500.0)
    pub fill_light_intensity: f32,
    /// Fill light position [x, y, z] (default: [-4.0, 2.0, -4.0])
    pub fill_light_position: [f32; 3],
    /// Enable shadows (default: false for performance)
    pub shadows_enabled: bool,
}
388
389impl Default for LightingConfig {
390    fn default() -> Self {
391        Self {
392            ambient_brightness: 0.3,
393            key_light_intensity: 1500.0,
394            key_light_position: [4.0, 8.0, 4.0],
395            fill_light_intensity: 500.0,
396            fill_light_position: [-4.0, 2.0, -4.0],
397            shadows_enabled: false,
398        }
399    }
400}
401
402impl LightingConfig {
403    /// Bright lighting for clear visibility
404    pub fn bright() -> Self {
405        Self {
406            ambient_brightness: 0.5,
407            key_light_intensity: 2000.0,
408            key_light_position: [4.0, 8.0, 4.0],
409            fill_light_intensity: 800.0,
410            fill_light_position: [-4.0, 2.0, -4.0],
411            shadows_enabled: false,
412        }
413    }
414
415    /// Soft lighting with minimal shadows
416    pub fn soft() -> Self {
417        Self {
418            ambient_brightness: 0.4,
419            key_light_intensity: 1000.0,
420            key_light_position: [3.0, 6.0, 3.0],
421            fill_light_intensity: 600.0,
422            fill_light_position: [-3.0, 3.0, -3.0],
423            shadows_enabled: false,
424        }
425    }
426
427    /// Unlit mode - ambient only, no point lights
428    pub fn unlit() -> Self {
429        Self {
430            ambient_brightness: 1.0,
431            key_light_intensity: 0.0,
432            key_light_position: [0.0, 0.0, 0.0],
433            fill_light_intensity: 0.0,
434            fill_light_position: [0.0, 0.0, 0.0],
435            shadows_enabled: false,
436        }
437    }
438}
439
440impl Default for RenderConfig {
441    fn default() -> Self {
442        Self::tbp_default()
443    }
444}
445
446impl RenderConfig {
447    /// TBP-compatible 64x64 RGBD sensor configuration.
448    ///
449    /// This matches the default resolution used in TBP's habitat sensor.
450    pub fn tbp_default() -> Self {
451        Self {
452            width: 64,
453            height: 64,
454            zoom: 1.0,
455            near_plane: 0.01,
456            far_plane: 10.0,
457            lighting: LightingConfig::default(),
458        }
459    }
460
461    /// Higher resolution configuration for debugging and visualization.
462    pub fn preview() -> Self {
463        Self {
464            width: 256,
465            height: 256,
466            zoom: 1.0,
467            near_plane: 0.01,
468            far_plane: 10.0,
469            lighting: LightingConfig::default(),
470        }
471    }
472
473    /// High resolution configuration for detailed captures.
474    pub fn high_res() -> Self {
475        Self {
476            width: 512,
477            height: 512,
478            zoom: 1.0,
479            near_plane: 0.01,
480            far_plane: 10.0,
481            lighting: LightingConfig::default(),
482        }
483    }
484
485    /// Calculate vertical field of view in radians based on zoom.
486    ///
487    /// Base FOV is 60 degrees, adjusted by zoom factor.
488    pub fn fov_radians(&self) -> f32 {
489        let base_fov_deg = 60.0_f32;
490        (base_fov_deg / self.zoom).to_radians()
491    }
492
493    /// Compute camera intrinsics for use with neocortx.
494    ///
495    /// Returns focal length and principal point based on resolution and FOV.
496    pub fn intrinsics(&self) -> CameraIntrinsics {
497        let fov = self.fov_radians();
498        // focal_length = (height/2) / tan(fov/2)
499        let fy = (self.height as f32 / 2.0) / (fov / 2.0).tan();
500        let fx = fy; // Assuming square pixels
501
502        CameraIntrinsics {
503            focal_length: [fx, fy],
504            principal_point: [self.width as f32 / 2.0, self.height as f32 / 2.0],
505            image_size: [self.width, self.height],
506        }
507    }
508}
509
/// Camera intrinsic parameters for 3D reconstruction.
///
/// Compatible with neocortx's VisionIntrinsics format. `project`/`unproject`
/// use the standard pinhole relations with these parameters.
#[derive(Clone, Debug, PartialEq)]
pub struct CameraIntrinsics {
    /// Focal length in pixels (fx, fy)
    pub focal_length: [f32; 2],
    /// Principal point (cx, cy) - typically image center
    pub principal_point: [f32; 2],
    /// Image dimensions (width, height)
    pub image_size: [u32; 2],
}
522
523impl CameraIntrinsics {
524    /// Project a 3D point to 2D pixel coordinates.
525    pub fn project(&self, point: Vec3) -> Option<[f32; 2]> {
526        if point.z <= 0.0 {
527            return None;
528        }
529        let x = (point.x / point.z) * self.focal_length[0] + self.principal_point[0];
530        let y = (point.y / point.z) * self.focal_length[1] + self.principal_point[1];
531        Some([x, y])
532    }
533
534    /// Unproject a 2D pixel to a 3D ray direction.
535    pub fn unproject(&self, pixel: [f32; 2], depth: f32) -> Vec3 {
536        let x = (pixel[0] - self.principal_point[0]) / self.focal_length[0] * depth;
537        let y = (pixel[1] - self.principal_point[1]) / self.focal_length[1] * depth;
538        Vec3::new(x, y, depth)
539    }
540}
541
/// Output from headless rendering containing RGBA and depth data.
///
/// Both buffers are row-major: the pixel at (x, y) lives at index
/// `y * width + x` (times 4 bytes for `rgba`).
#[derive(Clone, Debug)]
pub struct RenderOutput {
    /// RGBA pixel data in row-major order (width * height * 4 bytes)
    pub rgba: Vec<u8>,
    /// Depth values in meters, row-major order (width * height floats)
    /// Values are linear depth from camera, not normalized.
    pub depth: Vec<f32>,
    /// Image width in pixels
    pub width: u32,
    /// Image height in pixels
    pub height: u32,
    /// Camera intrinsics used for this render
    pub intrinsics: CameraIntrinsics,
    /// Camera transform (world position and orientation)
    pub camera_transform: Transform,
    /// Object rotation applied during render
    pub object_rotation: ObjectRotation,
}
561
562impl RenderOutput {
563    /// Get RGBA pixel at (x, y). Returns None if out of bounds.
564    pub fn get_rgba(&self, x: u32, y: u32) -> Option<[u8; 4]> {
565        if x >= self.width || y >= self.height {
566            return None;
567        }
568        let idx = ((y * self.width + x) * 4) as usize;
569        Some([
570            self.rgba[idx],
571            self.rgba[idx + 1],
572            self.rgba[idx + 2],
573            self.rgba[idx + 3],
574        ])
575    }
576
577    /// Get depth value at (x, y) in meters. Returns None if out of bounds.
578    pub fn get_depth(&self, x: u32, y: u32) -> Option<f32> {
579        if x >= self.width || y >= self.height {
580            return None;
581        }
582        let idx = (y * self.width + x) as usize;
583        Some(self.depth[idx])
584    }
585
586    /// Get RGB pixel (without alpha) at (x, y).
587    pub fn get_rgb(&self, x: u32, y: u32) -> Option<[u8; 3]> {
588        self.get_rgba(x, y).map(|rgba| [rgba[0], rgba[1], rgba[2]])
589    }
590
591    /// Convert to neocortx-compatible image format: Vec<Vec<[u8; 3]>>
592    pub fn to_rgb_image(&self) -> Vec<Vec<[u8; 3]>> {
593        let mut image = Vec::with_capacity(self.height as usize);
594        for y in 0..self.height {
595            let mut row = Vec::with_capacity(self.width as usize);
596            for x in 0..self.width {
597                row.push(self.get_rgb(x, y).unwrap_or([0, 0, 0]));
598            }
599            image.push(row);
600        }
601        image
602    }
603
604    /// Convert depth to neocortx-compatible format: Vec<Vec<f32>>
605    pub fn to_depth_image(&self) -> Vec<Vec<f32>> {
606        let mut image = Vec::with_capacity(self.height as usize);
607        for y in 0..self.height {
608            let mut row = Vec::with_capacity(self.width as usize);
609            for x in 0..self.width {
610                row.push(self.get_depth(x, y).unwrap_or(0.0));
611            }
612            image.push(row);
613        }
614        image
615    }
616}
617
/// Errors that can occur during rendering.
#[derive(Debug, Clone)]
pub enum RenderError {
    /// Object mesh file not found
    MeshNotFound(String),
    /// Object texture file not found
    TextureNotFound(String),
    /// Bevy rendering failed
    RenderFailed(String),
    /// Invalid configuration
    InvalidConfig(String),
}

impl std::fmt::Display for RenderError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Every variant formats as "<label>: <detail>".
        let (label, detail) = match self {
            RenderError::MeshNotFound(path) => ("Mesh not found", path),
            RenderError::TextureNotFound(path) => ("Texture not found", path),
            RenderError::RenderFailed(msg) => ("Render failed", msg),
            RenderError::InvalidConfig(msg) => ("Invalid config", msg),
        };
        write!(f, "{}: {}", label, detail)
    }
}

impl std::error::Error for RenderError {}
643
/// Render a YCB object to an in-memory buffer.
///
/// This is the primary API for headless rendering. It spawns a minimal Bevy app,
/// renders a single frame, extracts the RGBA and depth data, and shuts down.
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory (e.g., "/tmp/ycb/003_cracker_box")
/// * `camera_transform` - Camera position and orientation (use `generate_viewpoints`)
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration (resolution, depth range, etc.)
///
/// # Errors
/// Returns a [`RenderError`] propagated from the private `render` module.
///
/// # Example
/// ```ignore
/// use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
/// use std::path::Path;
///
/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
/// let output = render_to_buffer(
///     Path::new("/tmp/ycb/003_cracker_box"),
///     &viewpoints[0],
///     &ObjectRotation::identity(),
///     &RenderConfig::tbp_default(),
/// )?;
/// ```
pub fn render_to_buffer(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
) -> Result<RenderOutput, RenderError> {
    // Thin wrapper: delegate to the actual Bevy headless renderer.
    render::render_headless(object_dir, camera_transform, object_rotation, config)
}
677
678/// Render all viewpoints and rotations for a YCB object.
679///
680/// Convenience function that renders all combinations of viewpoints and rotations.
681///
682/// # Arguments
683/// * `object_dir` - Path to YCB object directory
684/// * `viewpoint_config` - Viewpoint configuration (camera positions)
685/// * `rotations` - Object rotations to render
686/// * `render_config` - Render configuration
687///
688/// # Returns
689/// Vector of RenderOutput, one per viewpoint × rotation combination.
690pub fn render_all_viewpoints(
691    object_dir: &Path,
692    viewpoint_config: &ViewpointConfig,
693    rotations: &[ObjectRotation],
694    render_config: &RenderConfig,
695) -> Result<Vec<RenderOutput>, RenderError> {
696    let viewpoints = generate_viewpoints(viewpoint_config);
697    let mut outputs = Vec::with_capacity(viewpoints.len() * rotations.len());
698
699    for rotation in rotations {
700        for viewpoint in &viewpoints {
701            let output = render_to_buffer(object_dir, viewpoint, rotation, render_config)?;
702            outputs.push(output);
703        }
704    }
705
706    Ok(outputs)
707}
708
/// Render directly to files (for subprocess mode).
///
/// This function is designed for subprocess rendering where the process will exit
/// after rendering. It saves RGBA and depth data directly to the specified files
/// before the process terminates.
///
/// # Arguments
/// * `object_dir` - Path to YCB object directory
/// * `camera_transform` - Camera position and orientation
/// * `object_rotation` - Rotation to apply to the object
/// * `config` - Render configuration
/// * `rgba_path` - Output path for RGBA PNG
/// * `depth_path` - Output path for depth data (raw f32 bytes)
///
/// # Errors
/// Returns a [`RenderError`] propagated from the private `render` module.
///
/// # Note
/// This function may call `std::process::exit(0)` and not return.
pub fn render_to_files(
    object_dir: &Path,
    camera_transform: &Transform,
    object_rotation: &ObjectRotation,
    config: &RenderConfig,
    rgba_path: &Path,
    depth_path: &Path,
) -> Result<(), RenderError> {
    // Thin wrapper: delegate to the file-writing renderer.
    render::render_to_files(
        object_dir,
        camera_transform,
        object_rotation,
        config,
        rgba_path,
        depth_path,
    )
}
742
743// Re-export bevy types that consumers will need
744pub use bevy::prelude::{Quat, Transform, Vec3};
745
746#[cfg(test)]
747mod tests {
748    use super::*;
749
    // --- ObjectRotation: construction and quaternion conversion ---

    #[test]
    fn test_object_rotation_identity() {
        let rot = ObjectRotation::identity();
        assert_eq!(rot.pitch, 0.0);
        assert_eq!(rot.yaw, 0.0);
        assert_eq!(rot.roll, 0.0);
    }

    #[test]
    fn test_object_rotation_from_array() {
        // Array order is [pitch, yaw, roll].
        let rot = ObjectRotation::from_array([10.0, 20.0, 30.0]);
        assert_eq!(rot.pitch, 10.0);
        assert_eq!(rot.yaw, 20.0);
        assert_eq!(rot.roll, 30.0);
    }

    #[test]
    fn test_tbp_benchmark_rotations() {
        let rotations = ObjectRotation::tbp_benchmark_rotations();
        assert_eq!(rotations.len(), 3);
        assert_eq!(rotations[0], ObjectRotation::from_array([0.0, 0.0, 0.0]));
        assert_eq!(rotations[1], ObjectRotation::from_array([0.0, 90.0, 0.0]));
        assert_eq!(rotations[2], ObjectRotation::from_array([0.0, 180.0, 0.0]));
    }

    #[test]
    fn test_tbp_known_orientations_count() {
        // 6 cube faces + 8 cube corners = 14 known orientations.
        let orientations = ObjectRotation::tbp_known_orientations();
        assert_eq!(orientations.len(), 14);
    }

    #[test]
    fn test_rotation_to_quat() {
        let rot = ObjectRotation::identity();
        let quat = rot.to_quat();
        // Identity quaternion should be approximately (1, 0, 0, 0)
        assert!((quat.w - 1.0).abs() < 0.001);
        assert!(quat.x.abs() < 0.001);
        assert!(quat.y.abs() < 0.001);
        assert!(quat.z.abs() < 0.001);
    }

    #[test]
    fn test_rotation_90_yaw() {
        let rot = ObjectRotation::new(0.0, 90.0, 0.0);
        let quat = rot.to_quat();
        // 90° Y rotation: w ≈ 0.707, y ≈ 0.707
        assert!((quat.w - 0.707).abs() < 0.01);
        assert!((quat.y - 0.707).abs() < 0.01);
    }
800
    // --- Viewpoint generation: counts, radius, and camera orientation ---

    #[test]
    fn test_viewpoint_config_default() {
        let config = ViewpointConfig::default();
        assert_eq!(config.radius, 0.5);
        assert_eq!(config.yaw_count, 8);
        assert_eq!(config.pitch_angles_deg.len(), 3);
    }

    #[test]
    fn test_viewpoint_count() {
        let config = ViewpointConfig::default();
        assert_eq!(config.viewpoint_count(), 24); // 8 × 3
    }

    #[test]
    fn test_generate_viewpoints_count() {
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);
        assert_eq!(viewpoints.len(), 24);
    }

    #[test]
    fn test_viewpoints_spherical_radius() {
        // Every generated camera should sit exactly `radius` from the origin.
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let actual_radius = transform.translation.length();
            assert!(
                (actual_radius - config.radius).abs() < 0.001,
                "Viewpoint {} has incorrect radius: {} (expected {})",
                i,
                actual_radius,
                config.radius
            );
        }
    }

    #[test]
    fn test_viewpoints_looking_at_origin() {
        // The camera forward vector should (nearly) coincide with the
        // direction from the camera to the origin.
        let config = ViewpointConfig::default();
        let viewpoints = generate_viewpoints(&config);

        for (i, transform) in viewpoints.iter().enumerate() {
            let forward = transform.forward();
            let to_origin = (Vec3::ZERO - transform.translation).normalize();
            let dot = forward.dot(to_origin);
            assert!(
                dot > 0.99,
                "Viewpoint {} not looking at origin, dot product: {}",
                i,
                dot
            );
        }
    }
856
    // --- SensorConfig presets and YCB path helpers ---

    #[test]
    fn test_sensor_config_default() {
        let config = SensorConfig::default();
        assert_eq!(config.object_rotations.len(), 1);
        assert_eq!(config.total_captures(), 24);
    }

    #[test]
    fn test_sensor_config_tbp_benchmark() {
        let config = SensorConfig::tbp_benchmark();
        assert_eq!(config.object_rotations.len(), 3);
        assert_eq!(config.total_captures(), 72); // 3 rotations × 24 viewpoints
    }

    #[test]
    fn test_sensor_config_tbp_full() {
        let config = SensorConfig::tbp_full_training();
        assert_eq!(config.object_rotations.len(), 14);
        assert_eq!(config.total_captures(), 336); // 14 rotations × 24 viewpoints
    }

    #[test]
    fn test_ycb_representative_objects() {
        // Verify representative objects are defined
        assert_eq!(crate::ycb::REPRESENTATIVE_OBJECTS.len(), 3);
        assert!(crate::ycb::REPRESENTATIVE_OBJECTS.contains(&"003_cracker_box"));
    }

    #[test]
    fn test_ycb_ten_objects() {
        // Verify ten objects subset is defined
        assert_eq!(crate::ycb::TEN_OBJECTS.len(), 10);
    }

    #[test]
    fn test_ycb_object_mesh_path() {
        let path = crate::ycb::object_mesh_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path.to_string_lossy(),
            "/tmp/ycb/003_cracker_box/google_16k/textured.obj"
        );
    }

    #[test]
    fn test_ycb_object_texture_path() {
        let path = crate::ycb::object_texture_path("/tmp/ycb", "003_cracker_box");
        assert_eq!(
            path.to_string_lossy(),
            "/tmp/ycb/003_cracker_box/google_16k/texture_map.png"
        );
    }
908
909    // =========================================================================
910    // Headless Rendering API Tests
911    // =========================================================================
912
913    #[test]
914    fn test_render_config_tbp_default() {
915        let config = RenderConfig::tbp_default();
916        assert_eq!(config.width, 64);
917        assert_eq!(config.height, 64);
918        assert_eq!(config.zoom, 1.0);
919        assert_eq!(config.near_plane, 0.01);
920        assert_eq!(config.far_plane, 10.0);
921    }
922
923    #[test]
924    fn test_render_config_preview() {
925        let config = RenderConfig::preview();
926        assert_eq!(config.width, 256);
927        assert_eq!(config.height, 256);
928    }
929
930    #[test]
931    fn test_render_config_default_is_tbp() {
932        let default = RenderConfig::default();
933        let tbp = RenderConfig::tbp_default();
934        assert_eq!(default.width, tbp.width);
935        assert_eq!(default.height, tbp.height);
936    }
937
938    #[test]
939    fn test_render_config_fov() {
940        let config = RenderConfig::tbp_default();
941        let fov = config.fov_radians();
942        // Base FOV is 60 degrees = ~1.047 radians
943        assert!((fov - 1.047).abs() < 0.01);
944
945        // Zoom in should reduce FOV
946        let zoomed = RenderConfig {
947            zoom: 2.0,
948            ..config
949        };
950        assert!(zoomed.fov_radians() < fov);
951    }
952
953    #[test]
954    fn test_render_config_intrinsics() {
955        let config = RenderConfig::tbp_default();
956        let intrinsics = config.intrinsics();
957
958        assert_eq!(intrinsics.image_size, [64, 64]);
959        assert_eq!(intrinsics.principal_point, [32.0, 32.0]);
960        // Focal length should be positive and reasonable
961        assert!(intrinsics.focal_length[0] > 0.0);
962        assert!(intrinsics.focal_length[1] > 0.0);
963        // For 64x64 with 60° FOV, focal length ≈ 55.4 pixels
964        assert!((intrinsics.focal_length[0] - 55.4).abs() < 1.0);
965    }
966
967    #[test]
968    fn test_camera_intrinsics_project() {
969        let intrinsics = CameraIntrinsics {
970            focal_length: [100.0, 100.0],
971            principal_point: [32.0, 32.0],
972            image_size: [64, 64],
973        };
974
975        // Point at origin of camera frame projects to principal point
976        let center = intrinsics.project(Vec3::new(0.0, 0.0, 1.0));
977        assert!(center.is_some());
978        let [x, y] = center.unwrap();
979        assert!((x - 32.0).abs() < 0.001);
980        assert!((y - 32.0).abs() < 0.001);
981
982        // Point behind camera returns None
983        let behind = intrinsics.project(Vec3::new(0.0, 0.0, -1.0));
984        assert!(behind.is_none());
985    }
986
987    #[test]
988    fn test_camera_intrinsics_unproject() {
989        let intrinsics = CameraIntrinsics {
990            focal_length: [100.0, 100.0],
991            principal_point: [32.0, 32.0],
992            image_size: [64, 64],
993        };
994
995        // Unproject principal point at depth 1.0
996        let point = intrinsics.unproject([32.0, 32.0], 1.0);
997        assert!((point.x).abs() < 0.001);
998        assert!((point.y).abs() < 0.001);
999        assert!((point.z - 1.0).abs() < 0.001);
1000    }
1001
1002    #[test]
1003    fn test_render_output_get_rgba() {
1004        let output = RenderOutput {
1005            rgba: vec![
1006                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
1007            ],
1008            depth: vec![1.0, 2.0, 3.0, 4.0],
1009            width: 2,
1010            height: 2,
1011            intrinsics: RenderConfig::tbp_default().intrinsics(),
1012            camera_transform: Transform::IDENTITY,
1013            object_rotation: ObjectRotation::identity(),
1014        };
1015
1016        // Top-left: red
1017        assert_eq!(output.get_rgba(0, 0), Some([255, 0, 0, 255]));
1018        // Top-right: green
1019        assert_eq!(output.get_rgba(1, 0), Some([0, 255, 0, 255]));
1020        // Bottom-left: blue
1021        assert_eq!(output.get_rgba(0, 1), Some([0, 0, 255, 255]));
1022        // Bottom-right: white
1023        assert_eq!(output.get_rgba(1, 1), Some([255, 255, 255, 255]));
1024        // Out of bounds
1025        assert_eq!(output.get_rgba(2, 0), None);
1026    }
1027
1028    #[test]
1029    fn test_render_output_get_depth() {
1030        let output = RenderOutput {
1031            rgba: vec![0u8; 16],
1032            depth: vec![1.0, 2.0, 3.0, 4.0],
1033            width: 2,
1034            height: 2,
1035            intrinsics: RenderConfig::tbp_default().intrinsics(),
1036            camera_transform: Transform::IDENTITY,
1037            object_rotation: ObjectRotation::identity(),
1038        };
1039
1040        assert_eq!(output.get_depth(0, 0), Some(1.0));
1041        assert_eq!(output.get_depth(1, 0), Some(2.0));
1042        assert_eq!(output.get_depth(0, 1), Some(3.0));
1043        assert_eq!(output.get_depth(1, 1), Some(4.0));
1044        assert_eq!(output.get_depth(2, 0), None);
1045    }
1046
1047    #[test]
1048    fn test_render_output_to_rgb_image() {
1049        let output = RenderOutput {
1050            rgba: vec![
1051                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
1052            ],
1053            depth: vec![1.0, 2.0, 3.0, 4.0],
1054            width: 2,
1055            height: 2,
1056            intrinsics: RenderConfig::tbp_default().intrinsics(),
1057            camera_transform: Transform::IDENTITY,
1058            object_rotation: ObjectRotation::identity(),
1059        };
1060
1061        let image = output.to_rgb_image();
1062        assert_eq!(image.len(), 2); // 2 rows
1063        assert_eq!(image[0].len(), 2); // 2 columns
1064        assert_eq!(image[0][0], [255, 0, 0]); // Red
1065        assert_eq!(image[0][1], [0, 255, 0]); // Green
1066        assert_eq!(image[1][0], [0, 0, 255]); // Blue
1067        assert_eq!(image[1][1], [255, 255, 255]); // White
1068    }
1069
1070    #[test]
1071    fn test_render_output_to_depth_image() {
1072        let output = RenderOutput {
1073            rgba: vec![0u8; 16],
1074            depth: vec![1.0, 2.0, 3.0, 4.0],
1075            width: 2,
1076            height: 2,
1077            intrinsics: RenderConfig::tbp_default().intrinsics(),
1078            camera_transform: Transform::IDENTITY,
1079            object_rotation: ObjectRotation::identity(),
1080        };
1081
1082        let depth_image = output.to_depth_image();
1083        assert_eq!(depth_image.len(), 2);
1084        assert_eq!(depth_image[0], vec![1.0, 2.0]);
1085        assert_eq!(depth_image[1], vec![3.0, 4.0]);
1086    }
1087
1088    #[test]
1089    fn test_render_error_display() {
1090        let err = RenderError::MeshNotFound("/path/to/mesh.obj".to_string());
1091        assert!(err.to_string().contains("Mesh not found"));
1092        assert!(err.to_string().contains("/path/to/mesh.obj"));
1093    }
1094
1095    // =========================================================================
1096    // Edge Case Tests
1097    // =========================================================================
1098
1099    #[test]
1100    fn test_object_rotation_extreme_angles() {
1101        // Test angles beyond 360 degrees
1102        let rot = ObjectRotation::new(450.0, -720.0, 1080.0);
1103        let quat = rot.to_quat();
1104        // Quaternion should still be valid (normalized)
1105        assert!((quat.length() - 1.0).abs() < 0.001);
1106    }
1107
1108    #[test]
1109    fn test_object_rotation_to_transform() {
1110        let rot = ObjectRotation::new(45.0, 90.0, 0.0);
1111        let transform = rot.to_transform();
1112        // Transform should have no translation
1113        assert_eq!(transform.translation, Vec3::ZERO);
1114        // Should have rotation
1115        assert!(transform.rotation != Quat::IDENTITY);
1116    }
1117
1118    #[test]
1119    fn test_viewpoint_config_single_viewpoint() {
1120        let config = ViewpointConfig {
1121            radius: 1.0,
1122            yaw_count: 1,
1123            pitch_angles_deg: vec![0.0],
1124        };
1125        assert_eq!(config.viewpoint_count(), 1);
1126        let viewpoints = generate_viewpoints(&config);
1127        assert_eq!(viewpoints.len(), 1);
1128        // Single viewpoint at yaw=0, pitch=0 should be at (0, 0, radius)
1129        let pos = viewpoints[0].translation;
1130        assert!((pos.x).abs() < 0.001);
1131        assert!((pos.y).abs() < 0.001);
1132        assert!((pos.z - 1.0).abs() < 0.001);
1133    }
1134
1135    #[test]
1136    fn test_viewpoint_radius_scaling() {
1137        let config1 = ViewpointConfig {
1138            radius: 0.5,
1139            yaw_count: 4,
1140            pitch_angles_deg: vec![0.0],
1141        };
1142        let config2 = ViewpointConfig {
1143            radius: 2.0,
1144            yaw_count: 4,
1145            pitch_angles_deg: vec![0.0],
1146        };
1147
1148        let v1 = generate_viewpoints(&config1);
1149        let v2 = generate_viewpoints(&config2);
1150
1151        // Viewpoints should scale proportionally
1152        for (vp1, vp2) in v1.iter().zip(v2.iter()) {
1153            let ratio = vp2.translation.length() / vp1.translation.length();
1154            assert!((ratio - 4.0).abs() < 0.01); // 2.0 / 0.5 = 4.0
1155        }
1156    }
1157
1158    #[test]
1159    fn test_camera_intrinsics_project_at_z_zero() {
1160        let intrinsics = CameraIntrinsics {
1161            focal_length: [100.0, 100.0],
1162            principal_point: [32.0, 32.0],
1163            image_size: [64, 64],
1164        };
1165
1166        // Point at z=0 should return None (division by zero protection)
1167        let result = intrinsics.project(Vec3::new(1.0, 1.0, 0.0));
1168        assert!(result.is_none());
1169    }
1170
1171    #[test]
1172    fn test_camera_intrinsics_roundtrip() {
1173        let intrinsics = CameraIntrinsics {
1174            focal_length: [100.0, 100.0],
1175            principal_point: [32.0, 32.0],
1176            image_size: [64, 64],
1177        };
1178
1179        // Project a 3D point
1180        let original = Vec3::new(0.5, -0.3, 2.0);
1181        let projected = intrinsics.project(original).unwrap();
1182
1183        // Unproject back with the same depth
1184        let unprojected = intrinsics.unproject(projected, original.z);
1185
1186        // Should get back approximately the same point
1187        assert!((unprojected.x - original.x).abs() < 0.001);
1188        assert!((unprojected.y - original.y).abs() < 0.001);
1189        assert!((unprojected.z - original.z).abs() < 0.001);
1190    }
1191
1192    #[test]
1193    fn test_render_output_empty() {
1194        let output = RenderOutput {
1195            rgba: vec![],
1196            depth: vec![],
1197            width: 0,
1198            height: 0,
1199            intrinsics: RenderConfig::tbp_default().intrinsics(),
1200            camera_transform: Transform::IDENTITY,
1201            object_rotation: ObjectRotation::identity(),
1202        };
1203
1204        // Should handle empty gracefully
1205        assert_eq!(output.get_rgba(0, 0), None);
1206        assert_eq!(output.get_depth(0, 0), None);
1207        assert!(output.to_rgb_image().is_empty());
1208        assert!(output.to_depth_image().is_empty());
1209    }
1210
1211    #[test]
1212    fn test_render_output_1x1() {
1213        let output = RenderOutput {
1214            rgba: vec![128, 64, 32, 255],
1215            depth: vec![0.5],
1216            width: 1,
1217            height: 1,
1218            intrinsics: RenderConfig::tbp_default().intrinsics(),
1219            camera_transform: Transform::IDENTITY,
1220            object_rotation: ObjectRotation::identity(),
1221        };
1222
1223        assert_eq!(output.get_rgba(0, 0), Some([128, 64, 32, 255]));
1224        assert_eq!(output.get_depth(0, 0), Some(0.5));
1225        assert_eq!(output.get_rgb(0, 0), Some([128, 64, 32]));
1226
1227        let rgb_img = output.to_rgb_image();
1228        assert_eq!(rgb_img.len(), 1);
1229        assert_eq!(rgb_img[0].len(), 1);
1230        assert_eq!(rgb_img[0][0], [128, 64, 32]);
1231    }
1232
1233    #[test]
1234    fn test_render_config_high_res() {
1235        let config = RenderConfig::high_res();
1236        assert_eq!(config.width, 512);
1237        assert_eq!(config.height, 512);
1238
1239        let intrinsics = config.intrinsics();
1240        assert_eq!(intrinsics.image_size, [512, 512]);
1241        assert_eq!(intrinsics.principal_point, [256.0, 256.0]);
1242    }
1243
1244    #[test]
1245    fn test_render_config_zoom_affects_fov() {
1246        let base = RenderConfig::tbp_default();
1247        let zoomed = RenderConfig {
1248            zoom: 2.0,
1249            ..base.clone()
1250        };
1251
1252        // Higher zoom = lower FOV
1253        assert!(zoomed.fov_radians() < base.fov_radians());
1254        // Specifically, 2x zoom = half FOV
1255        assert!((zoomed.fov_radians() - base.fov_radians() / 2.0).abs() < 0.01);
1256    }
1257
1258    #[test]
1259    fn test_render_config_zoom_affects_intrinsics() {
1260        let base = RenderConfig::tbp_default();
1261        let zoomed = RenderConfig {
1262            zoom: 2.0,
1263            ..base.clone()
1264        };
1265
1266        // Higher zoom = higher focal length
1267        let base_intrinsics = base.intrinsics();
1268        let zoomed_intrinsics = zoomed.intrinsics();
1269
1270        assert!(zoomed_intrinsics.focal_length[0] > base_intrinsics.focal_length[0]);
1271    }
1272
1273    #[test]
1274    fn test_lighting_config_variants() {
1275        let default = LightingConfig::default();
1276        let bright = LightingConfig::bright();
1277        let soft = LightingConfig::soft();
1278        let unlit = LightingConfig::unlit();
1279
1280        // Bright should have higher intensity than default
1281        assert!(bright.key_light_intensity > default.key_light_intensity);
1282
1283        // Unlit should have no point lights
1284        assert_eq!(unlit.key_light_intensity, 0.0);
1285        assert_eq!(unlit.fill_light_intensity, 0.0);
1286        assert_eq!(unlit.ambient_brightness, 1.0);
1287
1288        // Soft should have lower intensity
1289        assert!(soft.key_light_intensity < default.key_light_intensity);
1290    }
1291
1292    #[test]
1293    fn test_all_render_error_variants() {
1294        let errors = vec![
1295            RenderError::MeshNotFound("mesh.obj".to_string()),
1296            RenderError::TextureNotFound("texture.png".to_string()),
1297            RenderError::RenderFailed("GPU error".to_string()),
1298            RenderError::InvalidConfig("bad config".to_string()),
1299        ];
1300
1301        for err in errors {
1302            // All variants should have Display impl
1303            let msg = err.to_string();
1304            assert!(!msg.is_empty());
1305        }
1306    }
1307
1308    #[test]
1309    fn test_tbp_known_orientations_unique() {
1310        let orientations = ObjectRotation::tbp_known_orientations();
1311
1312        // All 14 orientations should produce unique quaternions
1313        let quats: Vec<Quat> = orientations.iter().map(|r| r.to_quat()).collect();
1314
1315        for (i, q1) in quats.iter().enumerate() {
1316            for (j, q2) in quats.iter().enumerate() {
1317                if i != j {
1318                    // Quaternions should be different (accounting for q == -q equivalence)
1319                    let dot = q1.dot(*q2).abs();
1320                    assert!(
1321                        dot < 0.999,
1322                        "Orientations {} and {} produce same quaternion",
1323                        i,
1324                        j
1325                    );
1326                }
1327            }
1328        }
1329    }
1330}