use serde::{Deserialize, Serialize};

// Likely needed because this crate does not itself enable serde's `derive` feature:
// inside a workspace, feature unification can pull it in via another crate, but the
// isolated build done by `cargo publish` sees only this crate's own features.
#[macro_use]
extern crate serde_derive;

use std::net::SocketAddr;

/// Represents one 'player' or 'seat' in the VR system.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub enum KitProfile {
	/// Output presented on a locally attached display.
	Local {
		video: VideoOutputParams,
		audio: AudioOutputParams,
		input: Vec<InputProfile>,
	},
	/// Output streamed to a remote client over WebRTC.
	WebRtc {
		video: VideoOutputParams,
		audio: AudioOutputParams,
		input: Vec<InputProfile>,
		/// Socket address of the WebRTC signalling server.
		signalling_socket: SocketAddr,
		connection_id: String,
		video_encoder: VideoEncoder,
		framerate: (i32, i32),
	},
}

impl KitProfile {
	/// Returns the video output parameters, regardless of variant.
	pub fn video(&self) -> &VideoOutputParams {
		use crate::KitProfile::*;
		match self {
			Local { video, .. } => video,
			WebRtc { video, .. } => video,
		}
	}

	/// Returns a mutable reference to the video output parameters, regardless of variant.
	pub fn video_mut(&mut self) -> &mut VideoOutputParams {
		use crate::KitProfile::*;
		match self {
			Local { video, .. } => video,
			WebRtc { video, .. } => video,
		}
	}
}

/// Represents the encoder used when transmitting video over WebRTC.
/// Hardware-accelerated encoders generally offer lower latency than software encoding.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub enum VideoEncoder {
	/// Software VP8 Encoder
	Vp8Enc,
	/// Hardware-accelerated (VAAPI) VP8 Encoder
	VaapiVp8Enc,
}

/// Represents the 'video' aspect of the output; e.g. information about the head mount
/// and the display viewed through it.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub struct VideoOutputParams {
	pub display: VideoDisplayParams,
	pub head_mount: HeadMountParams,
	/// Dimensions of the render target texture, in pixels.
	pub render_texture_size: [u32; 2],
}

/// Represents the 'display' aspect of a video output device; e.g. info about a phone.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub struct VideoDisplayParams {
	pub width_px: u32,
	pub height_px: u32,
	pub width_m: f32,
	pub height_m: f32,
	pub bottom_bezel_offset_m: f32,
}

/// Represents the physical/optical layer of the head mount display.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub struct HeadMountParams {
	/// Distance between the screen and the lens, in metres.
	pub screen_lens_distance_m: f32,
	/// Distance between the optical centre of both lenses, in metres. Assumed to be horizontal.
	pub inter_lens_distance_m: f32,
	/// Field of view angle, measured from the optical centre, in degrees.
	pub fov_angle_degrees: f32,
	/// Distance between the bottom of the tray and the optical centre of the lens, in metres.
	pub tray_lens_distance_m: f32,
	/// Coefficients \( K_i \) for the pincushion distortion function (mapping real screen
	/// position to virtual screen position, both relative to the optical centre):
	/// \[ p' = p \left(1 + \sum_{i=1}^{n} K_{i} r^{2i}\right) \]
	/// where \( r \) is the distance of the point from the optical centre, in tan-angle units.
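	///
	/// A minimal illustrative sketch (not part of this crate's API, shown only to make the
	/// formula concrete) of applying these coefficients to a radial distance `r` in
	/// tan-angle units:
	///
	/// ```
	/// fn distort(coefficients: &[f32], r: f32) -> f32 {
	///     // p' = p * (1 + K_1 r^2 + K_2 r^4 + ...), applied to the scalar radius r.
	///     let r2 = r * r;
	///     let mut factor = 1.0_f32;
	///     let mut power = r2;
	///     for &k in coefficients {
	///         factor += k * power;
	///         power *= r2;
	///     }
	///     r * factor
	/// }
	/// // With no coefficients the mapping is the identity.
	/// assert_eq!(distort(&[], 1.0), 1.0);
	/// ```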
	pub pincushion_distortion_coefficients: Vec<f32>,
}

/// Represents an input source available to the kit.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub enum InputProfile {
	Gyroscope {},
}

/// Represents the 'audio' aspect of the output; currently carries no parameters.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub struct AudioOutputParams {}
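
// A small test module sketching how a `KitProfile` might be constructed and how the
// `video`/`video_mut` accessors behave across variants. Every concrete value below
// (resolution, physical dimensions, distortion coefficients, socket address,
// connection id, framerate) is a hypothetical placeholder, not a recommendation
// for any particular headset.
#[cfg(test)]
mod tests {
	use super::*;

	fn example_video() -> VideoOutputParams {
		VideoOutputParams {
			display: VideoDisplayParams {
				width_px: 1920,
				height_px: 1080,
				width_m: 0.110,
				height_m: 0.062,
				bottom_bezel_offset_m: 0.004,
			},
			head_mount: HeadMountParams {
				screen_lens_distance_m: 0.039,
				inter_lens_distance_m: 0.064,
				fov_angle_degrees: 50.0,
				tray_lens_distance_m: 0.035,
				pincushion_distortion_coefficients: vec![0.34, 0.55],
			},
			render_texture_size: [1024, 1024],
		}
	}

	#[test]
	fn video_accessors_work_for_both_variants() {
		let mut local = KitProfile::Local {
			video: example_video(),
			audio: AudioOutputParams {},
			input: vec![InputProfile::Gyroscope {}],
		};
		assert_eq!(local.video().render_texture_size, [1024, 1024]);

		// `video_mut` allows adjusting the parameters in place, whatever the variant.
		local.video_mut().render_texture_size = [2048, 2048];
		assert_eq!(local.video().render_texture_size, [2048, 2048]);

		let webrtc = KitProfile::WebRtc {
			video: example_video(),
			audio: AudioOutputParams {},
			input: vec![],
			signalling_socket: "127.0.0.1:8443".parse().unwrap(),
			connection_id: "example-connection".to_string(),
			video_encoder: VideoEncoder::Vp8Enc,
			// Assumed here to be a (numerator, denominator) fraction, i.e. 30 fps.
			framerate: (30, 1),
		};
		assert_eq!(webrtc.video().display.width_px, 1920);
	}
}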