// bevy_sensor/lib.rs

1//! bevy-sensor: Multi-view rendering for YCB object dataset
2//!
3//! This library provides Bevy-based rendering of 3D objects from multiple viewpoints,
4//! designed to match TBP (Thousand Brains Project) habitat sensor conventions for
5//! use in neocortx sensorimotor learning experiments.
6//!
7//! # Headless Rendering (NEW)
8//!
9//! Render directly to memory buffers for use in sensorimotor learning:
10//!
11//! ```ignore
12//! use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
13//! use std::path::Path;
14//!
15//! let config = RenderConfig::tbp_default(); // 64x64, RGBD
16//! let viewpoint = bevy_sensor::generate_viewpoints(&ViewpointConfig::default())[0];
17//! let rotation = ObjectRotation::identity();
18//!
19//! let output = render_to_buffer(
20//!     Path::new("/tmp/ycb/003_cracker_box"),
21//!     &viewpoint,
22//!     &rotation,
23//!     &config,
24//! )?;
25//!
26//! // output.rgba: Vec<u8> - RGBA pixels (64*64*4 bytes)
27//! // output.depth: Vec<f32> - Depth values (64*64 floats)
28//! ```
29//!
30//! # File-based Capture (Legacy)
31//!
32//! ```ignore
33//! use bevy_sensor::{SensorConfig, ViewpointConfig, ObjectRotation};
34//!
35//! let config = SensorConfig {
36//!     viewpoints: ViewpointConfig::default(),
37//!     object_rotations: ObjectRotation::tbp_benchmark_rotations(),
38//!     ..Default::default()
39//! };
40//! ```
41//!
42//! # YCB Dataset
43//!
44//! Download YCB models programmatically:
45//!
46//! ```ignore
47//! use bevy_sensor::ycb::{download_models, Subset};
48//!
49//! // Download representative subset (3 objects)
50//! download_models("/tmp/ycb", Subset::Representative).await?;
51//! ```
52
53use bevy::prelude::*;
54use std::f32::consts::PI;
55use std::path::Path;
56
57// Headless rendering implementation (currently returns placeholder data)
58// Full GPU rendering requires a display - see render module for details
59mod render;
60
61// Re-export ycbust types for convenience
62pub use ycbust::{self, DownloadOptions, Subset as YcbSubset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
63
64/// YCB dataset utilities
65pub mod ycb {
66    pub use ycbust::{download_ycb, DownloadOptions, Subset, REPRESENTATIVE_OBJECTS, TEN_OBJECTS};
67
68    use std::path::Path;
69
70    /// Download YCB models to the specified directory.
71    ///
72    /// # Arguments
73    /// * `output_dir` - Directory to download models to
74    /// * `subset` - Which subset of objects to download
75    ///
76    /// # Example
77    /// ```ignore
78    /// use bevy_sensor::ycb::{download_models, Subset};
79    ///
80    /// download_models("/tmp/ycb", Subset::Representative).await?;
81    /// ```
82    pub async fn download_models<P: AsRef<Path>>(
83        output_dir: P,
84        subset: Subset,
85    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
86        let options = DownloadOptions {
87            overwrite: false,
88            full: false,
89            show_progress: true,
90            delete_archives: true,
91        };
92        download_ycb(subset, output_dir.as_ref(), options).await?;
93        Ok(())
94    }
95
96    /// Download YCB models with custom options.
97    pub async fn download_models_with_options<P: AsRef<Path>>(
98        output_dir: P,
99        subset: Subset,
100        options: DownloadOptions,
101    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
102        download_ycb(subset, output_dir.as_ref(), options).await?;
103        Ok(())
104    }
105
106    /// Check if YCB models exist at the given path
107    pub fn models_exist<P: AsRef<Path>>(output_dir: P) -> bool {
108        let path = output_dir.as_ref();
109        // Check for at least one representative object
110        path.join("003_cracker_box/google_16k/textured.obj")
111            .exists()
112    }
113
114    /// Get the path to a specific YCB object's OBJ file
115    pub fn object_mesh_path<P: AsRef<Path>>(output_dir: P, object_id: &str) -> std::path::PathBuf {
116        output_dir
117            .as_ref()
118            .join(object_id)
119            .join("google_16k")
120            .join("textured.obj")
121    }
122
123    /// Get the path to a specific YCB object's texture file
124    pub fn object_texture_path<P: AsRef<Path>>(
125        output_dir: P,
126        object_id: &str,
127    ) -> std::path::PathBuf {
128        output_dir
129            .as_ref()
130            .join(object_id)
131            .join("google_16k")
132            .join("texture_map.png")
133    }
134}
135
/// Object rotation in Euler angles (degrees), matching TBP benchmark format.
/// Format: [pitch, yaw, roll] or [x, y, z] rotation.
///
/// Angles are stored in degrees; they are converted to radians only when
/// building a quaternion in `ObjectRotation::to_quat`.
#[derive(Clone, Debug, PartialEq)]
pub struct ObjectRotation {
    /// Rotation around X-axis (pitch) in degrees
    pub pitch: f32,
    /// Rotation around Y-axis (yaw) in degrees
    pub yaw: f32,
    /// Rotation around Z-axis (roll) in degrees
    pub roll: f32,
}
147
148impl ObjectRotation {
149    /// Create a new rotation from Euler angles in degrees
150    pub fn new(pitch: f32, yaw: f32, roll: f32) -> Self {
151        Self { pitch, yaw, roll }
152    }
153
154    /// Create from TBP-style array [pitch, yaw, roll] in degrees
155    pub fn from_array(arr: [f32; 3]) -> Self {
156        Self {
157            pitch: arr[0],
158            yaw: arr[1],
159            roll: arr[2],
160        }
161    }
162
163    /// Identity rotation (no rotation)
164    pub fn identity() -> Self {
165        Self::new(0.0, 0.0, 0.0)
166    }
167
168    /// TBP benchmark rotations: [0,0,0], [0,90,0], [0,180,0]
169    /// Used in shorter YCB experiments to reduce computational load.
170    pub fn tbp_benchmark_rotations() -> Vec<Self> {
171        vec![
172            Self::from_array([0.0, 0.0, 0.0]),
173            Self::from_array([0.0, 90.0, 0.0]),
174            Self::from_array([0.0, 180.0, 0.0]),
175        ]
176    }
177
178    /// TBP 14 known orientations (cube faces and corners)
179    /// These are the orientations objects are learned in during training.
180    pub fn tbp_known_orientations() -> Vec<Self> {
181        vec![
182            // 6 cube faces (90° rotations around each axis)
183            Self::from_array([0.0, 0.0, 0.0]),   // Front
184            Self::from_array([0.0, 90.0, 0.0]),  // Right
185            Self::from_array([0.0, 180.0, 0.0]), // Back
186            Self::from_array([0.0, 270.0, 0.0]), // Left
187            Self::from_array([90.0, 0.0, 0.0]),  // Top
188            Self::from_array([-90.0, 0.0, 0.0]), // Bottom
189            // 8 cube corners (45° rotations)
190            Self::from_array([45.0, 45.0, 0.0]),
191            Self::from_array([45.0, 135.0, 0.0]),
192            Self::from_array([45.0, 225.0, 0.0]),
193            Self::from_array([45.0, 315.0, 0.0]),
194            Self::from_array([-45.0, 45.0, 0.0]),
195            Self::from_array([-45.0, 135.0, 0.0]),
196            Self::from_array([-45.0, 225.0, 0.0]),
197            Self::from_array([-45.0, 315.0, 0.0]),
198        ]
199    }
200
201    /// Convert to Bevy Quat
202    pub fn to_quat(&self) -> Quat {
203        Quat::from_euler(
204            EulerRot::XYZ,
205            self.pitch.to_radians(),
206            self.yaw.to_radians(),
207            self.roll.to_radians(),
208        )
209    }
210
211    /// Convert to Bevy Transform (rotation only, no translation)
212    pub fn to_transform(&self) -> Transform {
213        Transform::from_rotation(self.to_quat())
214    }
215}
216
217impl Default for ObjectRotation {
218    fn default() -> Self {
219        Self::identity()
220    }
221}
222
/// Configuration for viewpoint generation matching TBP habitat sensor behavior.
/// Uses spherical coordinates to capture objects from multiple elevations.
///
/// The number of generated viewpoints is `yaw_count * pitch_angles_deg.len()`;
/// see [`generate_viewpoints`] for the camera placement math.
#[derive(Clone, Debug)]
pub struct ViewpointConfig {
    /// Distance from camera to object center (meters)
    pub radius: f32,
    /// Number of horizontal positions (yaw angles) around the object
    pub yaw_count: usize,
    /// Elevation angles in degrees (pitch). Positive = above, negative = below.
    pub pitch_angles_deg: Vec<f32>,
}
234
235impl Default for ViewpointConfig {
236    fn default() -> Self {
237        Self {
238            radius: 0.5,
239            yaw_count: 8,
240            // Three elevations: below (-30°), level (0°), above (+30°)
241            // This matches TBP's look_up/look_down capability
242            pitch_angles_deg: vec![-30.0, 0.0, 30.0],
243        }
244    }
245}
246
247impl ViewpointConfig {
248    /// Total number of viewpoints this config will generate
249    pub fn viewpoint_count(&self) -> usize {
250        self.yaw_count * self.pitch_angles_deg.len()
251    }
252}
253
/// Full sensor configuration for capture sessions.
///
/// Derives Bevy's `Resource` so it can be inserted into and read from the ECS.
#[derive(Clone, Debug, Resource)]
pub struct SensorConfig {
    /// Viewpoint configuration (camera positions)
    pub viewpoints: ViewpointConfig,
    /// Object rotations to capture (each rotation generates a full viewpoint set)
    pub object_rotations: Vec<ObjectRotation>,
    /// Output directory for captures
    pub output_dir: String,
    /// Filename pattern (use {view} for view index, {rot} for rotation index)
    pub filename_pattern: String,
}
266
267impl Default for SensorConfig {
268    fn default() -> Self {
269        Self {
270            viewpoints: ViewpointConfig::default(),
271            object_rotations: vec![ObjectRotation::identity()],
272            output_dir: ".".to_string(),
273            filename_pattern: "capture_{rot}_{view}.png".to_string(),
274        }
275    }
276}
277
278impl SensorConfig {
279    /// Create config for TBP benchmark comparison (3 rotations × 24 viewpoints = 72 captures)
280    pub fn tbp_benchmark() -> Self {
281        Self {
282            viewpoints: ViewpointConfig::default(),
283            object_rotations: ObjectRotation::tbp_benchmark_rotations(),
284            output_dir: ".".to_string(),
285            filename_pattern: "capture_{rot}_{view}.png".to_string(),
286        }
287    }
288
289    /// Create config for full TBP training (14 rotations × 24 viewpoints = 336 captures)
290    pub fn tbp_full_training() -> Self {
291        Self {
292            viewpoints: ViewpointConfig::default(),
293            object_rotations: ObjectRotation::tbp_known_orientations(),
294            output_dir: ".".to_string(),
295            filename_pattern: "capture_{rot}_{view}.png".to_string(),
296        }
297    }
298
299    /// Total number of captures this config will generate
300    pub fn total_captures(&self) -> usize {
301        self.viewpoints.viewpoint_count() * self.object_rotations.len()
302    }
303}
304
305/// Generate camera viewpoints using spherical coordinates.
306///
307/// Spherical coordinate system (matching TBP habitat sensor conventions):
308/// - Yaw: horizontal rotation around Y-axis (0° to 360°)
309/// - Pitch: elevation angle from horizontal plane (-90° to +90°)
310/// - Radius: distance from origin (object center)
311pub fn generate_viewpoints(config: &ViewpointConfig) -> Vec<Transform> {
312    let mut views = Vec::with_capacity(config.viewpoint_count());
313
314    for pitch_deg in &config.pitch_angles_deg {
315        let pitch = pitch_deg.to_radians();
316
317        for i in 0..config.yaw_count {
318            let yaw = (i as f32) * 2.0 * PI / (config.yaw_count as f32);
319
320            // Spherical to Cartesian conversion (Y-up coordinate system)
321            // x = r * cos(pitch) * sin(yaw)
322            // y = r * sin(pitch)
323            // z = r * cos(pitch) * cos(yaw)
324            let x = config.radius * pitch.cos() * yaw.sin();
325            let y = config.radius * pitch.sin();
326            let z = config.radius * pitch.cos() * yaw.cos();
327
328            let transform = Transform::from_xyz(x, y, z).looking_at(Vec3::ZERO, Vec3::Y);
329            views.push(transform);
330        }
331    }
332    views
333}
334
/// Marker component for the target object being captured.
/// Unit struct with no data; exists only so ECS queries can find the object.
#[derive(Component)]
pub struct CaptureTarget;

/// Marker component for the capture camera.
/// Unit struct with no data; exists only so ECS queries can find the camera.
#[derive(Component)]
pub struct CaptureCamera;
342
343// ============================================================================
344// Headless Rendering API (NEW)
345// ============================================================================
346
/// Configuration for headless rendering.
///
/// Matches TBP habitat sensor defaults: 64x64 resolution with RGBD output.
/// The effective vertical FOV is 60° divided by `zoom`
/// (see [`RenderConfig::fov_radians`]).
#[derive(Clone, Debug)]
pub struct RenderConfig {
    /// Image width in pixels (default: 64)
    pub width: u32,
    /// Image height in pixels (default: 64)
    pub height: u32,
    /// Zoom factor affecting field of view (default: 1.0)
    /// Use >1 to zoom in (narrower FOV), <1 to zoom out (wider FOV)
    pub zoom: f32,
    /// Near clipping plane in meters (default: 0.01)
    pub near_plane: f32,
    /// Far clipping plane in meters (default: 10.0)
    pub far_plane: f32,
    /// Lighting configuration
    pub lighting: LightingConfig,
}
366
/// Lighting configuration for rendering.
///
/// Controls ambient light and point lights in the scene.
/// Presets are available via `bright()`, `soft()`, and `unlit()`.
#[derive(Clone, Debug)]
pub struct LightingConfig {
    /// Ambient light brightness (0.0 - 1.0, default: 0.3)
    pub ambient_brightness: f32,
    /// Key light intensity in lumens (default: 1500.0)
    pub key_light_intensity: f32,
    /// Key light position [x, y, z] (default: [4.0, 8.0, 4.0])
    pub key_light_position: [f32; 3],
    /// Fill light intensity in lumens (default: 500.0)
    pub fill_light_intensity: f32,
    /// Fill light position [x, y, z] (default: [-4.0, 2.0, -4.0])
    pub fill_light_position: [f32; 3],
    /// Enable shadows (default: false for performance)
    pub shadows_enabled: bool,
}
385
386impl Default for LightingConfig {
387    fn default() -> Self {
388        Self {
389            ambient_brightness: 0.3,
390            key_light_intensity: 1500.0,
391            key_light_position: [4.0, 8.0, 4.0],
392            fill_light_intensity: 500.0,
393            fill_light_position: [-4.0, 2.0, -4.0],
394            shadows_enabled: false,
395        }
396    }
397}
398
399impl LightingConfig {
400    /// Bright lighting for clear visibility
401    pub fn bright() -> Self {
402        Self {
403            ambient_brightness: 0.5,
404            key_light_intensity: 2000.0,
405            key_light_position: [4.0, 8.0, 4.0],
406            fill_light_intensity: 800.0,
407            fill_light_position: [-4.0, 2.0, -4.0],
408            shadows_enabled: false,
409        }
410    }
411
412    /// Soft lighting with minimal shadows
413    pub fn soft() -> Self {
414        Self {
415            ambient_brightness: 0.4,
416            key_light_intensity: 1000.0,
417            key_light_position: [3.0, 6.0, 3.0],
418            fill_light_intensity: 600.0,
419            fill_light_position: [-3.0, 3.0, -3.0],
420            shadows_enabled: false,
421        }
422    }
423
424    /// Unlit mode - ambient only, no point lights
425    pub fn unlit() -> Self {
426        Self {
427            ambient_brightness: 1.0,
428            key_light_intensity: 0.0,
429            key_light_position: [0.0, 0.0, 0.0],
430            fill_light_intensity: 0.0,
431            fill_light_position: [0.0, 0.0, 0.0],
432            shadows_enabled: false,
433        }
434    }
435}
436
impl Default for RenderConfig {
    /// Defaults to the TBP-compatible 64x64 configuration
    /// ([`RenderConfig::tbp_default`]).
    fn default() -> Self {
        Self::tbp_default()
    }
}
442
443impl RenderConfig {
444    /// TBP-compatible 64x64 RGBD sensor configuration.
445    ///
446    /// This matches the default resolution used in TBP's habitat sensor.
447    pub fn tbp_default() -> Self {
448        Self {
449            width: 64,
450            height: 64,
451            zoom: 1.0,
452            near_plane: 0.01,
453            far_plane: 10.0,
454            lighting: LightingConfig::default(),
455        }
456    }
457
458    /// Higher resolution configuration for debugging and visualization.
459    pub fn preview() -> Self {
460        Self {
461            width: 256,
462            height: 256,
463            zoom: 1.0,
464            near_plane: 0.01,
465            far_plane: 10.0,
466            lighting: LightingConfig::default(),
467        }
468    }
469
470    /// High resolution configuration for detailed captures.
471    pub fn high_res() -> Self {
472        Self {
473            width: 512,
474            height: 512,
475            zoom: 1.0,
476            near_plane: 0.01,
477            far_plane: 10.0,
478            lighting: LightingConfig::default(),
479        }
480    }
481
482    /// Calculate vertical field of view in radians based on zoom.
483    ///
484    /// Base FOV is 60 degrees, adjusted by zoom factor.
485    pub fn fov_radians(&self) -> f32 {
486        let base_fov_deg = 60.0_f32;
487        (base_fov_deg / self.zoom).to_radians()
488    }
489
490    /// Compute camera intrinsics for use with neocortx.
491    ///
492    /// Returns focal length and principal point based on resolution and FOV.
493    pub fn intrinsics(&self) -> CameraIntrinsics {
494        let fov = self.fov_radians();
495        // focal_length = (height/2) / tan(fov/2)
496        let fy = (self.height as f32 / 2.0) / (fov / 2.0).tan();
497        let fx = fy; // Assuming square pixels
498
499        CameraIntrinsics {
500            focal_length: [fx, fy],
501            principal_point: [self.width as f32 / 2.0, self.height as f32 / 2.0],
502            image_size: [self.width, self.height],
503        }
504    }
505}
506
/// Camera intrinsic parameters for 3D reconstruction.
///
/// Compatible with neocortx's VisionIntrinsics format.
/// Pinhole model: see `project`/`unproject` for the pixel ↔ camera-space math.
#[derive(Clone, Debug, PartialEq)]
pub struct CameraIntrinsics {
    /// Focal length in pixels (fx, fy)
    pub focal_length: [f32; 2],
    /// Principal point (cx, cy) - typically image center
    pub principal_point: [f32; 2],
    /// Image dimensions (width, height)
    pub image_size: [u32; 2],
}
519
520impl CameraIntrinsics {
521    /// Project a 3D point to 2D pixel coordinates.
522    pub fn project(&self, point: Vec3) -> Option<[f32; 2]> {
523        if point.z <= 0.0 {
524            return None;
525        }
526        let x = (point.x / point.z) * self.focal_length[0] + self.principal_point[0];
527        let y = (point.y / point.z) * self.focal_length[1] + self.principal_point[1];
528        Some([x, y])
529    }
530
531    /// Unproject a 2D pixel to a 3D ray direction.
532    pub fn unproject(&self, pixel: [f32; 2], depth: f32) -> Vec3 {
533        let x = (pixel[0] - self.principal_point[0]) / self.focal_length[0] * depth;
534        let y = (pixel[1] - self.principal_point[1]) / self.focal_length[1] * depth;
535        Vec3::new(x, y, depth)
536    }
537}
538
/// Output from headless rendering containing RGBA and depth data.
///
/// Use `get_rgba`/`get_depth` for bounds-checked per-pixel access.
#[derive(Clone, Debug)]
pub struct RenderOutput {
    /// RGBA pixel data in row-major order (width * height * 4 bytes)
    pub rgba: Vec<u8>,
    /// Depth values in meters, row-major order (width * height floats)
    /// Values are linear depth from camera, not normalized.
    pub depth: Vec<f32>,
    /// Image width in pixels
    pub width: u32,
    /// Image height in pixels
    pub height: u32,
    /// Camera intrinsics used for this render
    pub intrinsics: CameraIntrinsics,
    /// Camera transform (world position and orientation)
    pub camera_transform: Transform,
    /// Object rotation applied during render
    pub object_rotation: ObjectRotation,
}
558
559impl RenderOutput {
560    /// Get RGBA pixel at (x, y). Returns None if out of bounds.
561    pub fn get_rgba(&self, x: u32, y: u32) -> Option<[u8; 4]> {
562        if x >= self.width || y >= self.height {
563            return None;
564        }
565        let idx = ((y * self.width + x) * 4) as usize;
566        Some([
567            self.rgba[idx],
568            self.rgba[idx + 1],
569            self.rgba[idx + 2],
570            self.rgba[idx + 3],
571        ])
572    }
573
574    /// Get depth value at (x, y) in meters. Returns None if out of bounds.
575    pub fn get_depth(&self, x: u32, y: u32) -> Option<f32> {
576        if x >= self.width || y >= self.height {
577            return None;
578        }
579        let idx = (y * self.width + x) as usize;
580        Some(self.depth[idx])
581    }
582
583    /// Get RGB pixel (without alpha) at (x, y).
584    pub fn get_rgb(&self, x: u32, y: u32) -> Option<[u8; 3]> {
585        self.get_rgba(x, y).map(|rgba| [rgba[0], rgba[1], rgba[2]])
586    }
587
588    /// Convert to neocortx-compatible image format: Vec<Vec<[u8; 3]>>
589    pub fn to_rgb_image(&self) -> Vec<Vec<[u8; 3]>> {
590        let mut image = Vec::with_capacity(self.height as usize);
591        for y in 0..self.height {
592            let mut row = Vec::with_capacity(self.width as usize);
593            for x in 0..self.width {
594                row.push(self.get_rgb(x, y).unwrap_or([0, 0, 0]));
595            }
596            image.push(row);
597        }
598        image
599    }
600
601    /// Convert depth to neocortx-compatible format: Vec<Vec<f32>>
602    pub fn to_depth_image(&self) -> Vec<Vec<f32>> {
603        let mut image = Vec::with_capacity(self.height as usize);
604        for y in 0..self.height {
605            let mut row = Vec::with_capacity(self.width as usize);
606            for x in 0..self.width {
607                row.push(self.get_depth(x, y).unwrap_or(0.0));
608            }
609            image.push(row);
610        }
611        image
612    }
613}
614
/// Errors that can occur during rendering.
///
/// Each variant carries a human-readable detail string; the `Display`
/// impl renders them as `"<label>: <detail>"`.
#[derive(Debug, Clone)]
pub enum RenderError {
    /// Object mesh file not found
    MeshNotFound(String),
    /// Object texture file not found
    TextureNotFound(String),
    /// Bevy rendering failed
    RenderFailed(String),
    /// Invalid configuration
    InvalidConfig(String),
}
627
628impl std::fmt::Display for RenderError {
629    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
630        match self {
631            RenderError::MeshNotFound(path) => write!(f, "Mesh not found: {}", path),
632            RenderError::TextureNotFound(path) => write!(f, "Texture not found: {}", path),
633            RenderError::RenderFailed(msg) => write!(f, "Render failed: {}", msg),
634            RenderError::InvalidConfig(msg) => write!(f, "Invalid config: {}", msg),
635        }
636    }
637}
638
639impl std::error::Error for RenderError {}
640
641/// Render a YCB object to an in-memory buffer.
642///
643/// This is the primary API for headless rendering. It spawns a minimal Bevy app,
644/// renders a single frame, extracts the RGBA and depth data, and shuts down.
645///
646/// # Arguments
647/// * `object_dir` - Path to YCB object directory (e.g., "/tmp/ycb/003_cracker_box")
648/// * `camera_transform` - Camera position and orientation (use `generate_viewpoints`)
649/// * `object_rotation` - Rotation to apply to the object
650/// * `config` - Render configuration (resolution, depth range, etc.)
651///
652/// # Example
653/// ```ignore
654/// use bevy_sensor::{render_to_buffer, RenderConfig, ViewpointConfig, ObjectRotation};
655/// use std::path::Path;
656///
657/// let viewpoints = bevy_sensor::generate_viewpoints(&ViewpointConfig::default());
658/// let output = render_to_buffer(
659///     Path::new("/tmp/ycb/003_cracker_box"),
660///     &viewpoints[0],
661///     &ObjectRotation::identity(),
662///     &RenderConfig::tbp_default(),
663/// )?;
664/// ```
665pub fn render_to_buffer(
666    object_dir: &Path,
667    camera_transform: &Transform,
668    object_rotation: &ObjectRotation,
669    config: &RenderConfig,
670) -> Result<RenderOutput, RenderError> {
671    // Validate paths
672    let mesh_path = object_dir.join("google_16k/textured.obj");
673    let texture_path = object_dir.join("google_16k/texture_map.png");
674
675    if !mesh_path.exists() {
676        return Err(RenderError::MeshNotFound(mesh_path.display().to_string()));
677    }
678    if !texture_path.exists() {
679        return Err(RenderError::TextureNotFound(
680            texture_path.display().to_string(),
681        ));
682    }
683
684    // TODO: Implement actual Bevy headless rendering
685    // For now, return placeholder data to establish the API
686    let pixel_count = (config.width * config.height) as usize;
687    let intrinsics = config.intrinsics();
688
689    Ok(RenderOutput {
690        rgba: vec![128u8; pixel_count * 4], // Gray placeholder
691        depth: vec![0.5f32; pixel_count],   // 0.5m placeholder depth
692        width: config.width,
693        height: config.height,
694        intrinsics,
695        camera_transform: *camera_transform,
696        object_rotation: object_rotation.clone(),
697    })
698}
699
700/// Render all viewpoints and rotations for a YCB object.
701///
702/// Convenience function that renders all combinations of viewpoints and rotations.
703///
704/// # Arguments
705/// * `object_dir` - Path to YCB object directory
706/// * `viewpoint_config` - Viewpoint configuration (camera positions)
707/// * `rotations` - Object rotations to render
708/// * `render_config` - Render configuration
709///
710/// # Returns
711/// Vector of RenderOutput, one per viewpoint × rotation combination.
712pub fn render_all_viewpoints(
713    object_dir: &Path,
714    viewpoint_config: &ViewpointConfig,
715    rotations: &[ObjectRotation],
716    render_config: &RenderConfig,
717) -> Result<Vec<RenderOutput>, RenderError> {
718    let viewpoints = generate_viewpoints(viewpoint_config);
719    let mut outputs = Vec::with_capacity(viewpoints.len() * rotations.len());
720
721    for rotation in rotations {
722        for viewpoint in &viewpoints {
723            let output = render_to_buffer(object_dir, viewpoint, rotation, render_config)?;
724            outputs.push(output);
725        }
726    }
727
728    Ok(outputs)
729}
730
731// Re-export bevy types that consumers will need
732pub use bevy::prelude::{Quat, Transform, Vec3};
733
734#[cfg(test)]
735mod tests {
736    use super::*;
737
738    #[test]
739    fn test_object_rotation_identity() {
740        let rot = ObjectRotation::identity();
741        assert_eq!(rot.pitch, 0.0);
742        assert_eq!(rot.yaw, 0.0);
743        assert_eq!(rot.roll, 0.0);
744    }
745
746    #[test]
747    fn test_object_rotation_from_array() {
748        let rot = ObjectRotation::from_array([10.0, 20.0, 30.0]);
749        assert_eq!(rot.pitch, 10.0);
750        assert_eq!(rot.yaw, 20.0);
751        assert_eq!(rot.roll, 30.0);
752    }
753
754    #[test]
755    fn test_tbp_benchmark_rotations() {
756        let rotations = ObjectRotation::tbp_benchmark_rotations();
757        assert_eq!(rotations.len(), 3);
758        assert_eq!(rotations[0], ObjectRotation::from_array([0.0, 0.0, 0.0]));
759        assert_eq!(rotations[1], ObjectRotation::from_array([0.0, 90.0, 0.0]));
760        assert_eq!(rotations[2], ObjectRotation::from_array([0.0, 180.0, 0.0]));
761    }
762
763    #[test]
764    fn test_tbp_known_orientations_count() {
765        let orientations = ObjectRotation::tbp_known_orientations();
766        assert_eq!(orientations.len(), 14);
767    }
768
769    #[test]
770    fn test_rotation_to_quat() {
771        let rot = ObjectRotation::identity();
772        let quat = rot.to_quat();
773        // Identity quaternion should be approximately (1, 0, 0, 0)
774        assert!((quat.w - 1.0).abs() < 0.001);
775        assert!(quat.x.abs() < 0.001);
776        assert!(quat.y.abs() < 0.001);
777        assert!(quat.z.abs() < 0.001);
778    }
779
780    #[test]
781    fn test_rotation_90_yaw() {
782        let rot = ObjectRotation::new(0.0, 90.0, 0.0);
783        let quat = rot.to_quat();
784        // 90° Y rotation: w ≈ 0.707, y ≈ 0.707
785        assert!((quat.w - 0.707).abs() < 0.01);
786        assert!((quat.y - 0.707).abs() < 0.01);
787    }
788
789    #[test]
790    fn test_viewpoint_config_default() {
791        let config = ViewpointConfig::default();
792        assert_eq!(config.radius, 0.5);
793        assert_eq!(config.yaw_count, 8);
794        assert_eq!(config.pitch_angles_deg.len(), 3);
795    }
796
797    #[test]
798    fn test_viewpoint_count() {
799        let config = ViewpointConfig::default();
800        assert_eq!(config.viewpoint_count(), 24); // 8 × 3
801    }
802
803    #[test]
804    fn test_generate_viewpoints_count() {
805        let config = ViewpointConfig::default();
806        let viewpoints = generate_viewpoints(&config);
807        assert_eq!(viewpoints.len(), 24);
808    }
809
810    #[test]
811    fn test_viewpoints_spherical_radius() {
812        let config = ViewpointConfig::default();
813        let viewpoints = generate_viewpoints(&config);
814
815        for (i, transform) in viewpoints.iter().enumerate() {
816            let actual_radius = transform.translation.length();
817            assert!(
818                (actual_radius - config.radius).abs() < 0.001,
819                "Viewpoint {} has incorrect radius: {} (expected {})",
820                i,
821                actual_radius,
822                config.radius
823            );
824        }
825    }
826
827    #[test]
828    fn test_viewpoints_looking_at_origin() {
829        let config = ViewpointConfig::default();
830        let viewpoints = generate_viewpoints(&config);
831
832        for (i, transform) in viewpoints.iter().enumerate() {
833            let forward = transform.forward();
834            let to_origin = (Vec3::ZERO - transform.translation).normalize();
835            let dot = forward.dot(to_origin);
836            assert!(
837                dot > 0.99,
838                "Viewpoint {} not looking at origin, dot product: {}",
839                i,
840                dot
841            );
842        }
843    }
844
845    #[test]
846    fn test_sensor_config_default() {
847        let config = SensorConfig::default();
848        assert_eq!(config.object_rotations.len(), 1);
849        assert_eq!(config.total_captures(), 24);
850    }
851
852    #[test]
853    fn test_sensor_config_tbp_benchmark() {
854        let config = SensorConfig::tbp_benchmark();
855        assert_eq!(config.object_rotations.len(), 3);
856        assert_eq!(config.total_captures(), 72); // 3 rotations × 24 viewpoints
857    }
858
859    #[test]
860    fn test_sensor_config_tbp_full() {
861        let config = SensorConfig::tbp_full_training();
862        assert_eq!(config.object_rotations.len(), 14);
863        assert_eq!(config.total_captures(), 336); // 14 rotations × 24 viewpoints
864    }
865
866    #[test]
867    fn test_ycb_representative_objects() {
868        // Verify representative objects are defined
869        assert_eq!(crate::ycb::REPRESENTATIVE_OBJECTS.len(), 3);
870        assert!(crate::ycb::REPRESENTATIVE_OBJECTS.contains(&"003_cracker_box"));
871    }
872
873    #[test]
874    fn test_ycb_ten_objects() {
875        // Verify ten objects subset is defined
876        assert_eq!(crate::ycb::TEN_OBJECTS.len(), 10);
877    }
878
879    #[test]
880    fn test_ycb_object_mesh_path() {
881        let path = crate::ycb::object_mesh_path("/tmp/ycb", "003_cracker_box");
882        assert_eq!(
883            path.to_string_lossy(),
884            "/tmp/ycb/003_cracker_box/google_16k/textured.obj"
885        );
886    }
887
888    #[test]
889    fn test_ycb_object_texture_path() {
890        let path = crate::ycb::object_texture_path("/tmp/ycb", "003_cracker_box");
891        assert_eq!(
892            path.to_string_lossy(),
893            "/tmp/ycb/003_cracker_box/google_16k/texture_map.png"
894        );
895    }
896
897    // =========================================================================
898    // Headless Rendering API Tests
899    // =========================================================================
900
901    #[test]
902    fn test_render_config_tbp_default() {
903        let config = RenderConfig::tbp_default();
904        assert_eq!(config.width, 64);
905        assert_eq!(config.height, 64);
906        assert_eq!(config.zoom, 1.0);
907        assert_eq!(config.near_plane, 0.01);
908        assert_eq!(config.far_plane, 10.0);
909    }
910
911    #[test]
912    fn test_render_config_preview() {
913        let config = RenderConfig::preview();
914        assert_eq!(config.width, 256);
915        assert_eq!(config.height, 256);
916    }
917
918    #[test]
919    fn test_render_config_default_is_tbp() {
920        let default = RenderConfig::default();
921        let tbp = RenderConfig::tbp_default();
922        assert_eq!(default.width, tbp.width);
923        assert_eq!(default.height, tbp.height);
924    }
925
926    #[test]
927    fn test_render_config_fov() {
928        let config = RenderConfig::tbp_default();
929        let fov = config.fov_radians();
930        // Base FOV is 60 degrees = ~1.047 radians
931        assert!((fov - 1.047).abs() < 0.01);
932
933        // Zoom in should reduce FOV
934        let zoomed = RenderConfig {
935            zoom: 2.0,
936            ..config
937        };
938        assert!(zoomed.fov_radians() < fov);
939    }
940
941    #[test]
942    fn test_render_config_intrinsics() {
943        let config = RenderConfig::tbp_default();
944        let intrinsics = config.intrinsics();
945
946        assert_eq!(intrinsics.image_size, [64, 64]);
947        assert_eq!(intrinsics.principal_point, [32.0, 32.0]);
948        // Focal length should be positive and reasonable
949        assert!(intrinsics.focal_length[0] > 0.0);
950        assert!(intrinsics.focal_length[1] > 0.0);
951        // For 64x64 with 60° FOV, focal length ≈ 55.4 pixels
952        assert!((intrinsics.focal_length[0] - 55.4).abs() < 1.0);
953    }
954
955    #[test]
956    fn test_camera_intrinsics_project() {
957        let intrinsics = CameraIntrinsics {
958            focal_length: [100.0, 100.0],
959            principal_point: [32.0, 32.0],
960            image_size: [64, 64],
961        };
962
963        // Point at origin of camera frame projects to principal point
964        let center = intrinsics.project(Vec3::new(0.0, 0.0, 1.0));
965        assert!(center.is_some());
966        let [x, y] = center.unwrap();
967        assert!((x - 32.0).abs() < 0.001);
968        assert!((y - 32.0).abs() < 0.001);
969
970        // Point behind camera returns None
971        let behind = intrinsics.project(Vec3::new(0.0, 0.0, -1.0));
972        assert!(behind.is_none());
973    }
974
975    #[test]
976    fn test_camera_intrinsics_unproject() {
977        let intrinsics = CameraIntrinsics {
978            focal_length: [100.0, 100.0],
979            principal_point: [32.0, 32.0],
980            image_size: [64, 64],
981        };
982
983        // Unproject principal point at depth 1.0
984        let point = intrinsics.unproject([32.0, 32.0], 1.0);
985        assert!((point.x).abs() < 0.001);
986        assert!((point.y).abs() < 0.001);
987        assert!((point.z - 1.0).abs() < 0.001);
988    }
989
990    #[test]
991    fn test_render_output_get_rgba() {
992        let output = RenderOutput {
993            rgba: vec![
994                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
995            ],
996            depth: vec![1.0, 2.0, 3.0, 4.0],
997            width: 2,
998            height: 2,
999            intrinsics: RenderConfig::tbp_default().intrinsics(),
1000            camera_transform: Transform::IDENTITY,
1001            object_rotation: ObjectRotation::identity(),
1002        };
1003
1004        // Top-left: red
1005        assert_eq!(output.get_rgba(0, 0), Some([255, 0, 0, 255]));
1006        // Top-right: green
1007        assert_eq!(output.get_rgba(1, 0), Some([0, 255, 0, 255]));
1008        // Bottom-left: blue
1009        assert_eq!(output.get_rgba(0, 1), Some([0, 0, 255, 255]));
1010        // Bottom-right: white
1011        assert_eq!(output.get_rgba(1, 1), Some([255, 255, 255, 255]));
1012        // Out of bounds
1013        assert_eq!(output.get_rgba(2, 0), None);
1014    }
1015
1016    #[test]
1017    fn test_render_output_get_depth() {
1018        let output = RenderOutput {
1019            rgba: vec![0u8; 16],
1020            depth: vec![1.0, 2.0, 3.0, 4.0],
1021            width: 2,
1022            height: 2,
1023            intrinsics: RenderConfig::tbp_default().intrinsics(),
1024            camera_transform: Transform::IDENTITY,
1025            object_rotation: ObjectRotation::identity(),
1026        };
1027
1028        assert_eq!(output.get_depth(0, 0), Some(1.0));
1029        assert_eq!(output.get_depth(1, 0), Some(2.0));
1030        assert_eq!(output.get_depth(0, 1), Some(3.0));
1031        assert_eq!(output.get_depth(1, 1), Some(4.0));
1032        assert_eq!(output.get_depth(2, 0), None);
1033    }
1034
1035    #[test]
1036    fn test_render_output_to_rgb_image() {
1037        let output = RenderOutput {
1038            rgba: vec![
1039                255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255,
1040            ],
1041            depth: vec![1.0, 2.0, 3.0, 4.0],
1042            width: 2,
1043            height: 2,
1044            intrinsics: RenderConfig::tbp_default().intrinsics(),
1045            camera_transform: Transform::IDENTITY,
1046            object_rotation: ObjectRotation::identity(),
1047        };
1048
1049        let image = output.to_rgb_image();
1050        assert_eq!(image.len(), 2); // 2 rows
1051        assert_eq!(image[0].len(), 2); // 2 columns
1052        assert_eq!(image[0][0], [255, 0, 0]); // Red
1053        assert_eq!(image[0][1], [0, 255, 0]); // Green
1054        assert_eq!(image[1][0], [0, 0, 255]); // Blue
1055        assert_eq!(image[1][1], [255, 255, 255]); // White
1056    }
1057
1058    #[test]
1059    fn test_render_output_to_depth_image() {
1060        let output = RenderOutput {
1061            rgba: vec![0u8; 16],
1062            depth: vec![1.0, 2.0, 3.0, 4.0],
1063            width: 2,
1064            height: 2,
1065            intrinsics: RenderConfig::tbp_default().intrinsics(),
1066            camera_transform: Transform::IDENTITY,
1067            object_rotation: ObjectRotation::identity(),
1068        };
1069
1070        let depth_image = output.to_depth_image();
1071        assert_eq!(depth_image.len(), 2);
1072        assert_eq!(depth_image[0], vec![1.0, 2.0]);
1073        assert_eq!(depth_image[1], vec![3.0, 4.0]);
1074    }
1075
1076    #[test]
1077    fn test_render_error_display() {
1078        let err = RenderError::MeshNotFound("/path/to/mesh.obj".to_string());
1079        assert!(err.to_string().contains("Mesh not found"));
1080        assert!(err.to_string().contains("/path/to/mesh.obj"));
1081    }
1082}