//! OpenVR backend implementation of `VRDisplay`.
//!
//! File: rust_webvr/api/openvr/display.rs
use {VRDisplay, VRDisplayData, VRDisplayCapabilities, VREyeParameters, VRFrameData};
use {VRFramebuffer, VRPose, VRStageParameters, VRFieldOfView, VRGamepadPtr, VRLayer};
use super::binding as openvr;
use super::binding::ETrackedPropertyError::*;
use super::binding::ETrackedDeviceProperty::*;
use super::binding::EVREye::*;
use super::binding::EVRInitError::*;
use super::binding::ETrackingUniverseOrigin::*;
use super::binding::EGraphicsAPIConvention::*;
use super::library::OpenVRLibrary;
use super::constants;
use super::gamepad::OpenVRGamepadPtr;
use rust_webvr_api::utils;
use std::ffi::CString;
use std::sync::Arc;
use std::cell::RefCell;
use std::slice;
use std::str;
use std::ptr;
use std::mem;
21
/// Shared handle to an `OpenVRDisplay`: shared ownership via `Arc`,
/// single-threaded interior mutability via `RefCell`.
pub type OpenVRDisplayPtr = Arc<RefCell<OpenVRDisplay>>;
23
/// A VR display (HMD) backed by the OpenVR runtime.
pub struct OpenVRDisplay {
    // Process-unique id from `utils::new_id()`, reported via `VRDisplay::id`.
    display_id: u32,
    // Loaded OpenVR shared library; used to fetch interface fn tables lazily.
    lib: *const OpenVRLibrary,
    // Tracked-device index of this HMD within the OpenVR runtime.
    index: openvr::TrackedDeviceIndex_t,
    // Raw OpenVR C function tables. `compositor` stays null until
    // `ensure_compositor_ready` succeeds.
    system: *mut openvr::VR_IVRSystem_FnTable,
    chaperone: *mut openvr::VR_IVRChaperone_FnTable,
    compositor: *mut openvr::VR_IVRCompositor_FnTable,
    // Texture submitted to the compositor each frame (OpenGL convention).
    frame_texture: openvr::Texture_t,
    // Per-eye UV bounds used when submitting `frame_texture`.
    left_bounds: openvr::VRTextureBounds_t,
    right_bounds: openvr::VRTextureBounds_t,
    // Gamepads associated with this display (set via `set_gamepads`).
    gamepads: Vec<OpenVRGamepadPtr>,
}
36
// SAFETY(review): these impls assert that the raw OpenVR fn-table pointers
// held by `OpenVRDisplay` may be used from any thread. That is presumably
// guaranteed by the OpenVR runtime, but is not proven by anything in this
// file — TODO confirm against OpenVR's threading documentation.
unsafe impl Send for OpenVRDisplay {}
unsafe impl Sync for OpenVRDisplay {}
39
40impl OpenVRDisplay {
41    pub fn new(lib: *const OpenVRLibrary,
42               index: openvr::TrackedDeviceIndex_t,
43               system: *mut openvr::VR_IVRSystem_FnTable,
44               chaperone: *mut openvr::VR_IVRChaperone_FnTable) 
45               -> Arc<RefCell<OpenVRDisplay>> {
46        Arc::new(RefCell::new(OpenVRDisplay {
47            display_id: utils::new_id(),
48            lib: lib,
49            index: index,
50            system: system,
51            chaperone: chaperone,
52            compositor: ptr::null_mut(),
53            frame_texture: openvr::Texture_t {
54                handle: ptr::null_mut(),
55                eType: EGraphicsAPIConvention_API_OpenGL,
56                eColorSpace: openvr::EColorSpace::EColorSpace_ColorSpace_Auto,
57            },
58            left_bounds: unsafe { mem::zeroed() },
59            right_bounds: unsafe { mem::zeroed() },
60            gamepads: Vec::new(),
61        }))
62    }
63}
64
65impl Drop for OpenVRDisplay {
66     fn drop(&mut self) {
67         self.stop_present();
68     }
69}
70
71impl VRDisplay for OpenVRDisplay {
72
73    fn id(&self) -> u32 {
74        self.display_id
75    }
76
77    // Returns the current display data.
78    fn data(&self) -> VRDisplayData {
79        let mut data = VRDisplayData::default();
80        
81        OpenVRDisplay::fetch_capabilities(&mut data.capabilities);
82        self.fetch_eye_parameters(&mut data.left_eye_parameters, &mut data.right_eye_parameters);
83        self.fetch_stage_parameters(&mut data);
84        data.display_id = self.display_id;
85        data.display_name = format!("{} {}",
86                            self.get_string_property(ETrackedDeviceProperty_Prop_ManufacturerName_String),
87                            self.get_string_property(ETrackedDeviceProperty_Prop_ModelNumber_String));
88        data.connected = self.is_connected();
89
90        data
91    }
92
93    fn immediate_frame_data(&self, near_z: f64, far_z: f64) -> VRFrameData {
94        let mut data = VRFrameData::default();
95
96        let mut tracked_poses: [openvr::TrackedDevicePose_t; openvr::k_unMaxTrackedDeviceCount as usize]
97                              = unsafe { mem::uninitialized() };
98        unsafe {
99            // Calculates updated poses for all displays
100            (*self.system).GetDeviceToAbsoluteTrackingPose.unwrap()(ETrackingUniverseOrigin_TrackingUniverseSeated,
101                                                                    self.get_seconds_to_photons(),
102                                                                    &mut tracked_poses[0],
103                                                                    openvr::k_unMaxTrackedDeviceCount);
104        };
105
106        let display_pose = &tracked_poses[self.index as usize];
107        self.fetch_frame_data(near_z as f32, far_z as f32, &display_pose, &mut data);
108
109        data
110    }
111
112     fn synced_frame_data(&self, near_z: f64, far_z: f64) -> VRFrameData {
113         if self.compositor == ptr::null_mut() {
114             // Fallback to immediate mode if compositor not available
115             self.immediate_frame_data(near_z, far_z);
116         }
117
118         let mut display_pose: openvr::TrackedDevicePose_t = unsafe { mem::uninitialized() };
119         unsafe {
120             (*self.compositor).GetLastPoseForTrackedDeviceIndex.unwrap()(self.index,
121                                                                          &mut display_pose,
122                                                                          ptr::null_mut());
123         }
124         let mut data = VRFrameData::default();
125         self.fetch_frame_data(near_z as f32, far_z as f32, &display_pose, &mut data);
126
127         data
128      }
129
130    // Resets the pose for this display
131    fn reset_pose(&mut self) {
132        unsafe {
133            (*self.system).ResetSeatedZeroPose.unwrap()();
134        }
135    }
136
137    fn sync_poses(&mut self) {
138        if !self.ensure_compositor_ready() {
139            return;
140        }
141        unsafe {
142            (*self.compositor).WaitGetPoses.unwrap()(ptr::null_mut(), 0, ptr::null_mut(), 0);
143        }
144    }
145
146    fn get_framebuffers(&self) -> Vec<VRFramebuffer> {
147        Vec::new()
148    }
149
150    fn bind_framebuffer(&mut self, _eye_index: u32) {
151
152    }
153
154    fn fetch_gamepads(&mut self) -> Result<Vec<VRGamepadPtr>,String> {
155        Ok(self.gamepads.iter().map(|d| d.clone() as VRGamepadPtr).collect())
156    }
157
158    fn render_layer(&mut self, layer: &VRLayer) {
159        self.frame_texture.handle = unsafe { mem::transmute(layer.texture_id as usize) };
160        self.left_bounds = texture_bounds_to_openvr(&layer.left_bounds);
161        self.right_bounds = texture_bounds_to_openvr(&layer.right_bounds);
162    }
163
164    fn submit_frame(&mut self) {
165        if !self.ensure_compositor_ready() {
166            return;
167        }
168
169        let flags = openvr::EVRSubmitFlags::EVRSubmitFlags_Submit_Default;
170
171        unsafe {
172            (*self.compositor).Submit.unwrap()(EVREye_Eye_Left, &mut self.frame_texture, &mut self.left_bounds, flags);
173            (*self.compositor).Submit.unwrap()(EVREye_Eye_Right, &mut self.frame_texture, &mut self.right_bounds, flags);
174            (*self.compositor).PostPresentHandoff.unwrap()();
175        }
176    }
177
178    fn stop_present(&mut self) {
179         if self.compositor != ptr::null_mut() {
180             println!("ClearLastSubmittedFrame");
181             unsafe {
182                (*self.compositor).ClearLastSubmittedFrame.unwrap()();
183             }
184         }
185    }
186}
187
188impl OpenVRDisplay {
189    pub fn set_gamepads(&mut self, gp: Vec<OpenVRGamepadPtr>) {
190        self.gamepads = gp;
191    }
192
193    fn get_string_property(&self, name: openvr::ETrackedDeviceProperty) -> String {
194        let max_size = 256;
195        let result = String::with_capacity(max_size);
196        let mut error = ETrackedPropertyError_TrackedProp_Success;
197        let size;
198        unsafe {
199            size = (*self.system).GetStringTrackedDeviceProperty.unwrap()(self.index, name, 
200                                                                          result.as_ptr() as *mut i8, 
201                                                                          max_size as u32, 
202                                                                          &mut error)
203        };
204
205        if size > 0 && error as u32 == ETrackedPropertyError_TrackedProp_Success as u32 {
206            let ptr = result.as_ptr() as *mut u8;
207            unsafe {
208                String::from(str::from_utf8(slice::from_raw_parts(ptr, size as usize)).unwrap_or(""))
209            }
210        } else {
211            "".into()
212        }
213    }
214
215    fn get_float_property(&self, name: openvr::ETrackedDeviceProperty) -> Option<f32> {
216        let mut error = ETrackedPropertyError_TrackedProp_Success;
217        let result = unsafe {
218            (*self.system).GetFloatTrackedDeviceProperty.unwrap()(self.index, name, &mut error)
219        };
220        if error as u32 == ETrackedPropertyError_TrackedProp_Success as u32 {
221            Some(result)
222        } else {
223            None
224        }
225    }
226
227    fn fetch_capabilities(capabilities: &mut VRDisplayCapabilities) {
228        capabilities.can_present = true;
229        capabilities.has_orientation = true;
230        capabilities.has_external_display = true;
231        capabilities.has_position = true;
232    }
233
234    fn fetch_field_of_view(&self, eye: openvr::EVREye, fov: &mut VRFieldOfView) {
235        let (mut up, mut right, mut down, mut left) = (0.0f32, 0.0f32, 0.0f32, 0.0f32);
236        unsafe {
237            (*self.system).GetProjectionRaw.unwrap()(eye, &mut left, &mut right, &mut up, &mut down);
238        }
239        // OpenVR returns clipping plane coordinates in raw tangent units
240        // WebVR expects degrees, so we have to convert tangent units to degrees
241        fov.up_degrees = -up.atan().to_degrees() as f64;
242        fov.right_degrees = right.atan().to_degrees() as f64;
243        fov.down_degrees = down.atan().to_degrees() as f64;
244        fov.left_degrees = -left.atan().to_degrees() as f64;
245    }
246
247    fn is_connected(&self) -> bool {
248        unsafe {
249            (*self.system).IsTrackedDeviceConnected.unwrap()(self.index)
250        }
251    }
252
253    fn fetch_eye_parameters(&self, left: &mut VREyeParameters, right: &mut VREyeParameters) {
254        self.fetch_field_of_view(EVREye_Eye_Left, &mut left.field_of_view);
255        self.fetch_field_of_view(EVREye_Eye_Right, &mut right.field_of_view);
256
257        let (left_matrix, right_matrix) = unsafe {
258            ((*self.system).GetEyeToHeadTransform.unwrap()(EVREye_Eye_Left),
259             (*self.system).GetEyeToHeadTransform.unwrap()(EVREye_Eye_Right))
260        };
261        
262        left.offset = [left_matrix.m[0][3], left_matrix.m[1][3], left_matrix.m[2][3]];
263        right.offset = [right_matrix.m[0][3], right_matrix.m[1][3], right_matrix.m[2][3]];
264
265        let (mut width, mut height) = (0, 0);
266        unsafe {
267            (*self.system).GetRecommendedRenderTargetSize.unwrap()(&mut width, &mut height);
268        }
269        left.render_width = width;
270        left.render_height = height;
271        right.render_width = width;
272        right.render_height = height;
273    }
274
275    fn fetch_stage_parameters(&self, data: &mut VRDisplayData) {
276        // Play area size
277        let mut size_x = 0f32;
278        let mut size_z = 0f32;
279
280        unsafe {
281            (*self.chaperone).GetPlayAreaSize.unwrap()(&mut size_x, &mut size_z);
282        }
283
284        if size_x > 0.0 && size_z > 0.0 {
285            let matrix: openvr::HmdMatrix34_t = unsafe {
286                (*self.system).GetSeatedZeroPoseToStandingAbsoluteTrackingPose.unwrap()()
287            };
288
289            data.stage_parameters = Some(VRStageParameters {
290                sitting_to_standing_transform: openvr_matrix34_to_array(&matrix),
291                size_x: size_x,
292                size_z: size_z
293            });
294        } else {
295            
296            // Chaperone data not ready yet. HMD might be deactivated.
297            // Use some default average transform until data is ready.
298            let matrix = [1.0, 0.0, 0.0, 0.0,
299                          0.0, 1.0, 0.0, 0.0,
300                          0.0, 0.0, 1.0, 0.0,
301                          0.0, 0.75, 0.0, 1.0];
302
303            data.stage_parameters = Some(VRStageParameters {
304                sitting_to_standing_transform: matrix,
305                size_x: 2.0,
306                size_z: 2.0
307            });
308        }
309    }
310
311    fn fetch_frame_data(&self,
312                        near_z: f32,
313                        far_z: f32,
314                        display_pose: &openvr::TrackedDevicePose_t,
315                        out: &mut VRFrameData) {
316        let near_z = near_z as f32;
317        let far_z = far_z as f32;
318        OpenVRDisplay::fetch_pose(&display_pose, &mut out.pose);
319        self.fetch_projection_matrix(EVREye_Eye_Left, near_z, far_z, &mut out.left_projection_matrix);
320        self.fetch_projection_matrix(EVREye_Eye_Right, near_z, far_z, &mut out.right_projection_matrix);
321
322        let mut view_matrix: [f32; 16] = unsafe { mem::uninitialized() };
323        self.fetch_view_matrix(&display_pose, &mut view_matrix);
324
325        let mut left_eye:[f32; 16] = unsafe { mem::uninitialized() };
326        let mut right_eye:[f32; 16] = unsafe { mem::uninitialized() };
327        
328        // Fech the transform of each eye
329        self.fetch_eye_to_head_matrix(EVREye_Eye_Left, &mut left_eye);
330        self.fetch_eye_to_head_matrix(EVREye_Eye_Right, &mut right_eye);
331
332        // View matrix must by multiplied by each eye_to_head transformation matrix
333        utils::multiply_matrix(&view_matrix, &left_eye, &mut out.left_view_matrix);
334        utils::multiply_matrix(&view_matrix, &right_eye, &mut out.right_view_matrix);
335        // Invert matrices
336        utils::inverse_matrix(&out.left_view_matrix, &mut view_matrix);
337        out.left_view_matrix = view_matrix;
338        utils::inverse_matrix(&out.right_view_matrix, &mut view_matrix);
339        out.right_view_matrix = view_matrix;
340
341        out.timestamp = utils::timestamp();
342    }
343
344    fn fetch_projection_matrix(&self, eye: openvr::EVREye, near: f32, far: f32, out: &mut [f32; 16]) {
345        let matrix = unsafe {
346            (*self.system).GetProjectionMatrix.unwrap()(eye, near, far, EGraphicsAPIConvention_API_OpenGL)
347        };
348        *out = openvr_matrix44_to_array(&matrix);
349    }
350
351    fn fetch_eye_to_head_matrix(&self, eye: openvr::EVREye, out: &mut [f32; 16]) {
352        let matrix = unsafe {
353            (*self.system).GetEyeToHeadTransform.unwrap()(eye)
354        };
355        *out = openvr_matrix34_to_array(&matrix);
356    }
357
358    pub fn fetch_pose(display_pose:&openvr::TrackedDevicePose_t, out:&mut VRPose) {
359        if !display_pose.bPoseIsValid {
360            // For some reason the pose may not be valid, return a empty one
361            return;
362        }
363
364        // OpenVR returns a transformation matrix
365        // WebVR expects a quaternion, we have to decompose the transformation matrix
366        out.orientation = Some(openvr_matrix_to_quat(&display_pose.mDeviceToAbsoluteTracking));
367
368        // Decompose position from transformation matrix
369        out.position = Some(openvr_matrix_to_position(&display_pose.mDeviceToAbsoluteTracking));
370
371        // Copy linear velocity and angular velocity
372        out.linear_velocity = Some([display_pose.vVelocity.v[0], 
373                                     display_pose.vVelocity.v[1], 
374                                     display_pose.vVelocity.v[2]]);
375        out.angular_velocity = Some([display_pose.vAngularVelocity.v[0], 
376                                      display_pose.vAngularVelocity.v[1], 
377                                      display_pose.vAngularVelocity.v[2]]);
378
379        // TODO: OpenVR doesn't expose linear and angular acceleration
380        // Derive them from GetDeviceToAbsoluteTrackingPose with different predicted seconds_photons?
381    }
382
383    fn fetch_view_matrix(&self, display_pose: &openvr::TrackedDevicePose_t, out: &mut [f32; 16]) {
384        if !display_pose.bPoseIsValid {
385            *out = [1.0, 0.0, 0.0, 0.0,  0.0, 1.0, 0.0, 0.0,  0.0, 0.0, 1.0, 0.0,  0.0, 0.0, 0.0, 1.0];
386        } else {
387            *out = openvr_matrix34_to_array(&display_pose.mDeviceToAbsoluteTracking);
388        }
389    }
390
391    pub fn index(&self) -> openvr::TrackedDeviceIndex_t {
392        self.index
393    }
394
395    // Computing seconds to photons
396    // More info: https://github.com/ValveSoftware/openvr/wiki/IVRSystem::GetDeviceToAbsoluteTrackingPose
397    fn get_seconds_to_photons(&self) -> f32 {
398        let mut seconds_last_vsync = 0f32;
399        let average_value = 0.04f32;
400
401        unsafe {
402            if !(*self.system).GetTimeSinceLastVsync.unwrap()(&mut seconds_last_vsync, ptr::null_mut()) {
403                // no vsync times are available, return a default average value
404                return average_value;
405            }
406        }
407        let display_freq = self.get_float_property(ETrackedDeviceProperty_Prop_DisplayFrequency_Float).unwrap_or(90.0);
408        let frame_duration = 1.0 / display_freq;
409        if let Some(vsync_to_photons) = self.get_float_property(ETrackedDeviceProperty_Prop_SecondsFromVsyncToPhotons_Float) {
410            frame_duration - seconds_last_vsync + vsync_to_photons
411        } else {
412            0.04f32
413        }
414    }
415
416    fn ensure_compositor_ready(&mut self)-> bool {
417        if self.compositor != ptr::null_mut() {
418            return true;
419        }
420
421        unsafe {
422            let mut error = EVRInitError_VRInitError_None;
423            let name = CString::new(format!("FnTable:{}", constants::IVRCompositor_Version)).unwrap();
424            self.compositor = (*(*self.lib).get_interface)(name.as_ptr(), &mut error)
425                          as *mut openvr::VR_IVRCompositor_FnTable;
426            if error as u32 == EVRInitError_VRInitError_None as u32 && self.compositor != ptr::null_mut() {
427                // Set seated tracking space (default in WebVR)
428                (*self.compositor).SetTrackingSpace.unwrap()(ETrackingUniverseOrigin_TrackingUniverseSeated);
429                true
430            } else {
431                error!("Error initializing OpenVR compositor: {:?}", error as u32);
432                self.compositor = ptr::null_mut();
433                false
434            }
435        }
436    }
437}
438
439// Helper functions
440 
441#[inline]
442fn openvr_matrix34_to_array(matrix: &openvr::HmdMatrix34_t) -> [f32; 16] {
443    [matrix.m[0][0], matrix.m[1][0], matrix.m[2][0], 0.0,
444     matrix.m[0][1], matrix.m[1][1], matrix.m[2][1], 0.0,
445     matrix.m[0][2], matrix.m[1][2], matrix.m[2][2], 0.0,
446     matrix.m[0][3], matrix.m[1][3], matrix.m[2][3], 1.0]
447}
448
449#[inline]
450fn openvr_matrix44_to_array(matrix: &openvr::HmdMatrix44_t) -> [f32; 16] {
451    [matrix.m[0][0], matrix.m[1][0], matrix.m[2][0], matrix.m[3][0],
452     matrix.m[0][1], matrix.m[1][1], matrix.m[2][1], matrix.m[3][1],
453     matrix.m[0][2], matrix.m[1][2], matrix.m[2][2], matrix.m[3][2],
454     matrix.m[0][3], matrix.m[1][3], matrix.m[2][3], matrix.m[3][3]]
455}
456
457#[inline]
458fn openvr_matrix_to_position(matrix: &openvr::HmdMatrix34_t) -> [f32; 3] {
459    [matrix.m[0][3], matrix.m[1][3], matrix.m[2][3]]
460}
461
462// Adapted from http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/index.htm
463#[inline]
464fn openvr_matrix_to_quat(matrix: &openvr::HmdMatrix34_t) -> [f32; 4] {
465    let m = matrix.m;
466    let w = f32::max(0.0, 1.0 + m[0][0] + m[1][1] + m[2][2]).sqrt() * 0.5;
467    let mut x = f32::max(0.0, 1.0 + m[0][0] - m[1][1] - m[2][2]).sqrt() * 0.5;
468    let mut y = f32::max(0.0, 1.0 - m[0][0] + m[1][1] - m[2][2]).sqrt() * 0.5;
469    let mut z = f32::max(0.0, 1.0 - m[0][0] - m[1][1] + m[2][2]).sqrt() * 0.5;
470
471    x = utils::copysign(x, m[2][1] - m[1][2]);
472    y = utils::copysign(y, m[0][2] - m[2][0]);
473    z = utils::copysign(z, m[1][0] - m[0][1]);
474
475    [x, y, z, w]
476}
477
478fn texture_bounds_to_openvr(bounds: &[f32; 4]) -> openvr::VRTextureBounds_t {
479    let mut result: openvr::VRTextureBounds_t = unsafe { mem::uninitialized() };
480    // WebVR uses uMin, vMin, uWidth and vHeight bounds
481    result.uMin = bounds[0];
482    result.vMin = bounds[1];
483    result.uMax = result.uMin + bounds[2];
484    result.vMax = result.vMin + bounds[3]; 
485    result
486}