pub struct Camera {
pub pos: Vec3,
pub z: Range<f32>,
pub vertical_fov: f32,
pub pitch: f32,
pub yaw: f32,
}

Expand description
A camera.
Fields§
§pos: Vec3 — The position of the camera.
z: Range<f32> — The z (near–far) range of the camera.
vertical_fov: f32 — The vertical FOV, in radians (the examples pass 60f32.to_radians()).
pitch: f32 — The pitch.
yaw: f32 — The yaw.
Implementations§
Source§impl Camera
impl Camera
Source

pub const PITCH_LIMIT: Range<f32>
pub const PITCH_LIMIT: Range<f32>
The pitch limit.
Source

pub fn new(z: Range<f32>, vertical_fov: f32) -> Self
pub fn new(z: Range<f32>, vertical_fov: f32) -> Self
Create a new camera.
Examples found in repository?
examples/simple.rs (line 163)
110 async fn init(window: Arc<Window>, args: &Args) -> Self {
111 let model_path = &args.model;
112 let size = window.inner_size();
113
114 log::debug!("Creating wgpu instance");
115 let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor::default());
116
117 log::debug!("Creating window surface");
118 let surface = instance.create_surface(window.clone()).expect("surface");
119
120 log::debug!("Requesting adapter");
121 let adapter = instance
122 .request_adapter(&wgpu::RequestAdapterOptions {
123 power_preference: wgpu::PowerPreference::HighPerformance,
124 compatible_surface: Some(&surface),
125 force_fallback_adapter: false,
126 })
127 .await
128 .expect("adapter");
129
130 log::debug!("Requesting device");
131 let (device, queue) = adapter
132 .request_device(&wgpu::DeviceDescriptor {
133 label: Some("Device"),
134 required_limits: adapter.limits(),
135 ..Default::default()
136 })
137 .await
138 .expect("device");
139
140 let surface_caps = surface.get_capabilities(&adapter);
141 let surface_format = surface_caps.formats[0];
142 let config = wgpu::SurfaceConfiguration {
143 usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
144 format: surface_format,
145 width: size.width.max(1),
146 height: size.height.max(1),
147 present_mode: surface_caps.present_modes[0],
148 alpha_mode: surface_caps.alpha_modes[0],
149 view_formats: vec![surface_format.remove_srgb_suffix()],
150 desired_maximum_frame_latency: 2,
151 };
152
153 log::debug!("Configuring surface");
154 surface.configure(&device, &config);
155
156 log::debug!("Creating gaussians");
157 let gaussians = [GaussiansSource::Ply, GaussiansSource::Spz]
158 .into_iter()
159 .find_map(|source| gs::core::Gaussians::read_from_file(model_path, source).ok())
160 .expect("gaussians");
161
162 log::debug!("Creating camera");
163 let camera = gs::Camera::new(0.1..1e4, 60f32.to_radians());
164
165 log::debug!("Creating viewer");
166 let mut viewer =
167 gs::Viewer::new(&device, config.view_formats[0], &gaussians).expect("viewer");
168 viewer.update_model_transform(
169 &queue,
170 Vec3::ZERO,
171 Quat::from_axis_angle(Vec3::Z, 180f32.to_radians()),
172 Vec3::ONE,
173 );
174
175 viewer.update_gaussian_transform(
176 &queue,
177 args.size,
178 match args.mode {
179 DisplayMode::Splat => gs::core::GaussianDisplayMode::Splat,
180 DisplayMode::Ellipse => gs::core::GaussianDisplayMode::Ellipse,
181 DisplayMode::Point => gs::core::GaussianDisplayMode::Point,
182 },
183 gs::core::GaussianShDegree::new(args.sh_degree).expect("sh degree"),
184 args.no_sh0,
185 GaussianMaxStdDev::new(args.std_dev).expect("max std dev"),
186 );
187
188 log::info!("System initialized");
189
190 Self {
191 surface,
192 device,
193 queue,
194 config,
195
196 camera,
197 gaussians,
198 viewer,
199 }
200 }

More examples
examples/multi_model.rs (line 151)
79 async fn init(window: Arc<Window>, args: &Args) -> Self {
80 let model_paths = &args.models;
81 let model_offset = Vec3::from_slice(&args.offset);
82 let size = window.inner_size();
83
84 log::debug!("Creating wgpu instance");
85 let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor::default());
86
87 log::debug!("Creating window surface");
88 let surface = instance.create_surface(window.clone()).expect("surface");
89
90 log::debug!("Requesting adapter");
91 let adapter = instance
92 .request_adapter(&wgpu::RequestAdapterOptions {
93 power_preference: wgpu::PowerPreference::HighPerformance,
94 compatible_surface: Some(&surface),
95 force_fallback_adapter: false,
96 })
97 .await
98 .expect("adapter");
99
100 log::debug!("Requesting device");
101 let (device, queue) = adapter
102 .request_device(&wgpu::DeviceDescriptor {
103 label: Some("Device"),
104 required_limits: adapter.limits(),
105 ..Default::default()
106 })
107 .await
108 .expect("device");
109
110 let surface_caps = surface.get_capabilities(&adapter);
111 let surface_format = surface_caps.formats[0];
112 let config = wgpu::SurfaceConfiguration {
113 usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
114 format: surface_format,
115 width: size.width.max(1),
116 height: size.height.max(1),
117 present_mode: surface_caps.present_modes[0],
118 alpha_mode: surface_caps.alpha_modes[0],
119 view_formats: vec![surface_format.remove_srgb_suffix()],
120 desired_maximum_frame_latency: 2,
121 };
122
123 log::debug!("Configuring surface");
124 surface.configure(&device, &config);
125
126 log::debug!("Creating gaussians");
127 let gaussians = model_paths
128 .iter()
129 .map(|model_path| {
130 log::debug!("Reading model from {model_path}");
131 [GaussiansSource::Ply, GaussiansSource::Spz]
132 .into_iter()
133 .find_map(|source| gs::core::Gaussians::read_from_file(model_path, source).ok())
134 .expect("gaussians")
135 })
136 .collect::<Vec<_>>();
137
138 log::debug!("Computing gaussian centroids");
139 let mut gaussian_centroids = gaussians
140 .iter()
141 .map(|g| {
142 let mut centroid = Vec3::ZERO;
143 for gaussian in g.iter_gaussian() {
144 centroid += gaussian.pos;
145 }
146 centroid / g.len() as f32
147 })
148 .collect::<Vec<_>>();
149
150 log::debug!("Creating camera");
151 let camera = gs::Camera::new(0.1..1e4, 60f32.to_radians());
152
153 log::debug!("Creating viewer");
154 let mut viewer =
155 gs::MultiModelViewer::new(&device, config.view_formats[0]).expect("viewer");
156
157 let quat = Quat::from_axis_angle(Vec3::Z, 180f32.to_radians());
158 for (i, gaussians) in gaussians.iter().enumerate() {
159 let offset = model_offset * i as f32;
160
161 log::debug!("Pushing model {i}");
162
163 viewer.insert_model(&device, i, gaussians);
164 viewer
165 .update_model_transform(&queue, &i, offset, quat, Vec3::ONE)
166 .expect("update model");
167
168 gaussian_centroids[i] = quat.mul_vec3(gaussian_centroids[i]) + offset;
169 }
170
171 log::info!("System initialized");
172
173 Self {
174 surface,
175 device,
176 queue,
177 config,
178
179 camera,
180 gaussians,
181 gaussian_centroids,
182 viewer,
183 }
184 }

examples/selection.rs (line 151)
96 async fn init(window: Arc<Window>, args: &Args) -> Self {
97 let model_path = &args.model;
98 let filter = args.filter;
99 let immediate = args.immediate;
100 let size = window.inner_size();
101
102 log::debug!("Creating wgpu instance");
103 let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor::default());
104
105 log::debug!("Creating window surface");
106 let surface = instance.create_surface(window.clone()).expect("surface");
107
108 log::debug!("Requesting adapter");
109 let adapter = instance
110 .request_adapter(&wgpu::RequestAdapterOptions {
111 power_preference: wgpu::PowerPreference::HighPerformance,
112 compatible_surface: Some(&surface),
113 force_fallback_adapter: false,
114 })
115 .await
116 .expect("adapter");
117
118 log::debug!("Requesting device");
119 let (device, queue) = adapter
120 .request_device(&wgpu::DeviceDescriptor {
121 label: Some("Device"),
122 required_limits: adapter.limits(),
123 ..Default::default()
124 })
125 .await
126 .expect("device");
127
128 let surface_caps = surface.get_capabilities(&adapter);
129 let surface_format = surface_caps.formats[0];
130 let config = wgpu::SurfaceConfiguration {
131 usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
132 format: surface_format,
133 width: size.width.max(1),
134 height: size.height.max(1),
135 present_mode: surface_caps.present_modes[0],
136 alpha_mode: surface_caps.alpha_modes[0],
137 view_formats: vec![surface_format.remove_srgb_suffix()],
138 desired_maximum_frame_latency: 2,
139 };
140
141 log::debug!("Configuring surface");
142 surface.configure(&device, &config);
143
144 log::debug!("Creating gaussians");
145 let gaussians = [GaussiansSource::Ply, GaussiansSource::Spz]
146 .into_iter()
147 .find_map(|source| gs::core::Gaussians::read_from_file(model_path, source).ok())
148 .expect("gaussians");
149
150 log::debug!("Creating camera");
151 let camera = gs::Camera::new(0.1..1e4, 60f32.to_radians());
152
153 log::debug!("Creating viewer");
154 let mut viewer = gs::Viewer::new_with_options(
155 &device,
156 config.view_formats[0],
157 &gaussians,
158 gs::ViewerCreateOptions {
159 gaussians_buffer_usage:
160 gs::core::GaussiansBuffer::<gs::DefaultGaussianPod>::DEFAULT_USAGES
161 | wgpu::BufferUsages::COPY_SRC,
162 ..Default::default()
163 },
164 )
165 .expect("viewer");
166 viewer.update_model_transform(
167 &queue,
168 Vec3::ZERO,
169 Quat::from_axis_angle(Vec3::Z, 180f32.to_radians()),
170 Vec3::ONE,
171 );
172
173 log::debug!("Creating selector");
174 let mut selector = gs::selection::ViewportSelector::new(
175 &device,
176 &queue,
177 UVec2::new(size.width, size.height),
178 &viewer.camera_buffer,
179 )
180 .expect("selector");
181 selector.selector_type = gs::selection::ViewportSelectorType::Brush;
182
183 log::debug!("Creating selection viewport selection modifier");
184 let mut viewport_selection_modifier = gs::editor::NonDestructiveModifier::new(
185 &device,
186 &queue,
187 gs::editor::BasicSelectionModifier::new_with_basic_modifier(
188 &device,
189 &viewer.gaussians_buffer,
190 &viewer.model_transform_buffer,
191 &viewer.gaussian_transform_buffer,
192 vec![gs::selection::create_viewport_bundle::<
193 gs::DefaultGaussianPod,
194 >(&device)],
195 ),
196 &viewer.gaussians_buffer,
197 )
198 .expect("modifier");
199
200 let viewport_selection_bind_group = viewport_selection_modifier.modifier.selection.bundles
201 [0]
202 .create_bind_group(
203 &device,
204 // index 0 is the Gaussians buffer, so we use 1,
205 // see docs of create_viewport_bundle
206 1,
207 [
208 viewer.camera_buffer.buffer().as_entire_binding(),
209 wgpu::BindingResource::TextureView(selector.texture().view()),
210 ],
211 )
212 .expect("bind group");
213
214 viewport_selection_modifier.modifier.selection_expr =
215 gs::editor::SelectionExpr::Selection(0, vec![viewport_selection_bind_group]);
216
217 viewport_selection_modifier // Non destructive modifier
218 .modifier // Selection modifier
219 .modifier // Basic modifier
220 .basic_color_modifiers_buffer
221 .update_with_pod(
222 &queue,
223 &gs::editor::BasicColorModifiersPod {
224 rgb_or_hsv: BasicColorRgbOverrideOrHsvModifiersPod::new_rgb_override(
225 Vec3::new(1.0, 1.0, 0.0),
226 ),
227 ..Default::default()
228 },
229 );
230
231 log::debug!("Creating selection viewport texture overlay renderer");
232 let viewport_texture_overlay_renderer =
233 utils::selection::ViewportTextureOverlayRenderer::new(
234 &device,
235 config.view_formats[0],
236 selector.texture(),
237 );
238
239 log::info!("System initialized");
240
241 Self {
242 surface,
243 device,
244 queue,
245 config,
246
247 filter,
248 immediate,
249 inverted: filter,
250 selector_type: None,
251
252 camera,
253 gaussians,
254 viewer,
255 selector,
256
257 viewport_selection_modifier,
258 viewport_texture_overlay_renderer,
259 }
260 }

Source

pub fn move_by(&mut self, forward: f32, right: f32)
pub fn move_by(&mut self, forward: f32, right: f32)
Move the camera.
Examples found in repository?
examples/selection.rs (line 472)
452 fn update_movement(&mut self, input: &core::Input, delta_time: f32) {
453 // Camera movement
454 const SPEED: f32 = 1.0;
455
456 let mut forward = 0.0;
457 if input.held_keys.contains(&KeyCode::KeyW) {
458 forward += SPEED * delta_time;
459 }
460 if input.held_keys.contains(&KeyCode::KeyS) {
461 forward -= SPEED * delta_time;
462 }
463
464 let mut right = 0.0;
465 if input.held_keys.contains(&KeyCode::KeyD) {
466 right += SPEED * delta_time;
467 }
468 if input.held_keys.contains(&KeyCode::KeyA) {
469 right -= SPEED * delta_time;
470 }
471
472 self.camera.move_by(forward, right);
473
474 let mut up = 0.0;
475 if input.held_keys.contains(&KeyCode::Space) {
476 up += SPEED * delta_time;
477 }
478 if input.held_keys.contains(&KeyCode::ShiftLeft) {
479 up -= SPEED * delta_time;
480 }
481
482 self.camera.move_up(up);
483
484 // Camera rotation
485 const SENSITIVITY: f32 = 0.15;
486
487 let yaw = input.mouse_diff.x * SENSITIVITY * delta_time;
488 let pitch = input.mouse_diff.y * SENSITIVITY * delta_time;
489
490 self.camera.pitch_by(-pitch);
491 self.camera.yaw_by(-yaw);
492 }

More examples
examples/multi_model.rs (line 205)
186 fn update(&mut self, input: &core::Input, delta_time: f32) {
187 const SPEED: f32 = 1.0;
188
189 let mut forward = 0.0;
190 if input.held_keys.contains(&KeyCode::KeyW) {
191 forward += SPEED * delta_time;
192 }
193 if input.held_keys.contains(&KeyCode::KeyS) {
194 forward -= SPEED * delta_time;
195 }
196
197 let mut right = 0.0;
198 if input.held_keys.contains(&KeyCode::KeyD) {
199 right += SPEED * delta_time;
200 }
201 if input.held_keys.contains(&KeyCode::KeyA) {
202 right -= SPEED * delta_time;
203 }
204
205 self.camera.move_by(forward, right);
206
207 let mut up = 0.0;
208 if input.held_keys.contains(&KeyCode::Space) {
209 up += SPEED * delta_time;
210 }
211 if input.held_keys.contains(&KeyCode::ShiftLeft) {
212 up -= SPEED * delta_time;
213 }
214
215 self.camera.move_up(up);
216
217 // Camera rotation
218 const SENSITIVITY: f32 = 0.15;
219
220 let yaw = input.mouse_diff.x * SENSITIVITY * delta_time;
221 let pitch = input.mouse_diff.y * SENSITIVITY * delta_time;
222
223 self.camera.pitch_by(-pitch);
224 self.camera.yaw_by(-yaw);
225
226 // Update the viewer
227 self.viewer.update_camera(
228 &self.queue,
229 &self.camera,
230 uvec2(self.config.width, self.config.height),
231 );
232 }

examples/simple.rs (line 222)
202 fn update(&mut self, input: &core::Input, delta_time: f32) {
203 // Camera movement
204 const SPEED: f32 = 1.0;
205
206 let mut forward = 0.0;
207 if input.held_keys.contains(&KeyCode::KeyW) {
208 forward += SPEED * delta_time;
209 }
210 if input.held_keys.contains(&KeyCode::KeyS) {
211 forward -= SPEED * delta_time;
212 }
213
214 let mut right = 0.0;
215 if input.held_keys.contains(&KeyCode::KeyD) {
216 right += SPEED * delta_time;
217 }
218 if input.held_keys.contains(&KeyCode::KeyA) {
219 right -= SPEED * delta_time;
220 }
221
222 self.camera.move_by(forward, right);
223
224 let mut up = 0.0;
225 if input.held_keys.contains(&KeyCode::Space) {
226 up += SPEED * delta_time;
227 }
228 if input.held_keys.contains(&KeyCode::ShiftLeft) {
229 up -= SPEED * delta_time;
230 }
231
232 self.camera.move_up(up);
233
234 // Camera rotation
235 const SENSITIVITY: f32 = 0.15;
236
237 let yaw = input.mouse_diff.x * SENSITIVITY * delta_time;
238 let pitch = input.mouse_diff.y * SENSITIVITY * delta_time;
239
240 self.camera.pitch_by(-pitch);
241 self.camera.yaw_by(-yaw);
242
243 // Update the viewer
244 self.viewer.update_camera(
245 &self.queue,
246 &self.camera,
247 uvec2(self.config.width, self.config.height),
248 );
249 }

Source

pub fn move_up(&mut self, up: f32)
pub fn move_up(&mut self, up: f32)
Move the camera up. (Note: the original text said "forward", but this method applies vertical movement — the examples drive it from the Space/ShiftLeft up/down input, while forward movement goes through move_by.)
Examples found in repository?
examples/selection.rs (line 482)
452 fn update_movement(&mut self, input: &core::Input, delta_time: f32) {
453 // Camera movement
454 const SPEED: f32 = 1.0;
455
456 let mut forward = 0.0;
457 if input.held_keys.contains(&KeyCode::KeyW) {
458 forward += SPEED * delta_time;
459 }
460 if input.held_keys.contains(&KeyCode::KeyS) {
461 forward -= SPEED * delta_time;
462 }
463
464 let mut right = 0.0;
465 if input.held_keys.contains(&KeyCode::KeyD) {
466 right += SPEED * delta_time;
467 }
468 if input.held_keys.contains(&KeyCode::KeyA) {
469 right -= SPEED * delta_time;
470 }
471
472 self.camera.move_by(forward, right);
473
474 let mut up = 0.0;
475 if input.held_keys.contains(&KeyCode::Space) {
476 up += SPEED * delta_time;
477 }
478 if input.held_keys.contains(&KeyCode::ShiftLeft) {
479 up -= SPEED * delta_time;
480 }
481
482 self.camera.move_up(up);
483
484 // Camera rotation
485 const SENSITIVITY: f32 = 0.15;
486
487 let yaw = input.mouse_diff.x * SENSITIVITY * delta_time;
488 let pitch = input.mouse_diff.y * SENSITIVITY * delta_time;
489
490 self.camera.pitch_by(-pitch);
491 self.camera.yaw_by(-yaw);
492 }

More examples
examples/multi_model.rs (line 215)
186 fn update(&mut self, input: &core::Input, delta_time: f32) {
187 const SPEED: f32 = 1.0;
188
189 let mut forward = 0.0;
190 if input.held_keys.contains(&KeyCode::KeyW) {
191 forward += SPEED * delta_time;
192 }
193 if input.held_keys.contains(&KeyCode::KeyS) {
194 forward -= SPEED * delta_time;
195 }
196
197 let mut right = 0.0;
198 if input.held_keys.contains(&KeyCode::KeyD) {
199 right += SPEED * delta_time;
200 }
201 if input.held_keys.contains(&KeyCode::KeyA) {
202 right -= SPEED * delta_time;
203 }
204
205 self.camera.move_by(forward, right);
206
207 let mut up = 0.0;
208 if input.held_keys.contains(&KeyCode::Space) {
209 up += SPEED * delta_time;
210 }
211 if input.held_keys.contains(&KeyCode::ShiftLeft) {
212 up -= SPEED * delta_time;
213 }
214
215 self.camera.move_up(up);
216
217 // Camera rotation
218 const SENSITIVITY: f32 = 0.15;
219
220 let yaw = input.mouse_diff.x * SENSITIVITY * delta_time;
221 let pitch = input.mouse_diff.y * SENSITIVITY * delta_time;
222
223 self.camera.pitch_by(-pitch);
224 self.camera.yaw_by(-yaw);
225
226 // Update the viewer
227 self.viewer.update_camera(
228 &self.queue,
229 &self.camera,
230 uvec2(self.config.width, self.config.height),
231 );
232 }

examples/simple.rs (line 232)
202 fn update(&mut self, input: &core::Input, delta_time: f32) {
203 // Camera movement
204 const SPEED: f32 = 1.0;
205
206 let mut forward = 0.0;
207 if input.held_keys.contains(&KeyCode::KeyW) {
208 forward += SPEED * delta_time;
209 }
210 if input.held_keys.contains(&KeyCode::KeyS) {
211 forward -= SPEED * delta_time;
212 }
213
214 let mut right = 0.0;
215 if input.held_keys.contains(&KeyCode::KeyD) {
216 right += SPEED * delta_time;
217 }
218 if input.held_keys.contains(&KeyCode::KeyA) {
219 right -= SPEED * delta_time;
220 }
221
222 self.camera.move_by(forward, right);
223
224 let mut up = 0.0;
225 if input.held_keys.contains(&KeyCode::Space) {
226 up += SPEED * delta_time;
227 }
228 if input.held_keys.contains(&KeyCode::ShiftLeft) {
229 up -= SPEED * delta_time;
230 }
231
232 self.camera.move_up(up);
233
234 // Camera rotation
235 const SENSITIVITY: f32 = 0.15;
236
237 let yaw = input.mouse_diff.x * SENSITIVITY * delta_time;
238 let pitch = input.mouse_diff.y * SENSITIVITY * delta_time;
239
240 self.camera.pitch_by(-pitch);
241 self.camera.yaw_by(-yaw);
242
243 // Update the viewer
244 self.viewer.update_camera(
245 &self.queue,
246 &self.camera,
247 uvec2(self.config.width, self.config.height),
248 );
249 }

Source

pub fn pitch_by(&mut self, delta: f32)
pub fn pitch_by(&mut self, delta: f32)
Apply pitch.
Examples found in repository?
examples/selection.rs (line 490)
452 fn update_movement(&mut self, input: &core::Input, delta_time: f32) {
453 // Camera movement
454 const SPEED: f32 = 1.0;
455
456 let mut forward = 0.0;
457 if input.held_keys.contains(&KeyCode::KeyW) {
458 forward += SPEED * delta_time;
459 }
460 if input.held_keys.contains(&KeyCode::KeyS) {
461 forward -= SPEED * delta_time;
462 }
463
464 let mut right = 0.0;
465 if input.held_keys.contains(&KeyCode::KeyD) {
466 right += SPEED * delta_time;
467 }
468 if input.held_keys.contains(&KeyCode::KeyA) {
469 right -= SPEED * delta_time;
470 }
471
472 self.camera.move_by(forward, right);
473
474 let mut up = 0.0;
475 if input.held_keys.contains(&KeyCode::Space) {
476 up += SPEED * delta_time;
477 }
478 if input.held_keys.contains(&KeyCode::ShiftLeft) {
479 up -= SPEED * delta_time;
480 }
481
482 self.camera.move_up(up);
483
484 // Camera rotation
485 const SENSITIVITY: f32 = 0.15;
486
487 let yaw = input.mouse_diff.x * SENSITIVITY * delta_time;
488 let pitch = input.mouse_diff.y * SENSITIVITY * delta_time;
489
490 self.camera.pitch_by(-pitch);
491 self.camera.yaw_by(-yaw);
492 }

More examples
examples/multi_model.rs (line 223)
186 fn update(&mut self, input: &core::Input, delta_time: f32) {
187 const SPEED: f32 = 1.0;
188
189 let mut forward = 0.0;
190 if input.held_keys.contains(&KeyCode::KeyW) {
191 forward += SPEED * delta_time;
192 }
193 if input.held_keys.contains(&KeyCode::KeyS) {
194 forward -= SPEED * delta_time;
195 }
196
197 let mut right = 0.0;
198 if input.held_keys.contains(&KeyCode::KeyD) {
199 right += SPEED * delta_time;
200 }
201 if input.held_keys.contains(&KeyCode::KeyA) {
202 right -= SPEED * delta_time;
203 }
204
205 self.camera.move_by(forward, right);
206
207 let mut up = 0.0;
208 if input.held_keys.contains(&KeyCode::Space) {
209 up += SPEED * delta_time;
210 }
211 if input.held_keys.contains(&KeyCode::ShiftLeft) {
212 up -= SPEED * delta_time;
213 }
214
215 self.camera.move_up(up);
216
217 // Camera rotation
218 const SENSITIVITY: f32 = 0.15;
219
220 let yaw = input.mouse_diff.x * SENSITIVITY * delta_time;
221 let pitch = input.mouse_diff.y * SENSITIVITY * delta_time;
222
223 self.camera.pitch_by(-pitch);
224 self.camera.yaw_by(-yaw);
225
226 // Update the viewer
227 self.viewer.update_camera(
228 &self.queue,
229 &self.camera,
230 uvec2(self.config.width, self.config.height),
231 );
232 }

examples/simple.rs (line 240)
202 fn update(&mut self, input: &core::Input, delta_time: f32) {
203 // Camera movement
204 const SPEED: f32 = 1.0;
205
206 let mut forward = 0.0;
207 if input.held_keys.contains(&KeyCode::KeyW) {
208 forward += SPEED * delta_time;
209 }
210 if input.held_keys.contains(&KeyCode::KeyS) {
211 forward -= SPEED * delta_time;
212 }
213
214 let mut right = 0.0;
215 if input.held_keys.contains(&KeyCode::KeyD) {
216 right += SPEED * delta_time;
217 }
218 if input.held_keys.contains(&KeyCode::KeyA) {
219 right -= SPEED * delta_time;
220 }
221
222 self.camera.move_by(forward, right);
223
224 let mut up = 0.0;
225 if input.held_keys.contains(&KeyCode::Space) {
226 up += SPEED * delta_time;
227 }
228 if input.held_keys.contains(&KeyCode::ShiftLeft) {
229 up -= SPEED * delta_time;
230 }
231
232 self.camera.move_up(up);
233
234 // Camera rotation
235 const SENSITIVITY: f32 = 0.15;
236
237 let yaw = input.mouse_diff.x * SENSITIVITY * delta_time;
238 let pitch = input.mouse_diff.y * SENSITIVITY * delta_time;
239
240 self.camera.pitch_by(-pitch);
241 self.camera.yaw_by(-yaw);
242
243 // Update the viewer
244 self.viewer.update_camera(
245 &self.queue,
246 &self.camera,
247 uvec2(self.config.width, self.config.height),
248 );
249 }

Source

pub fn yaw_by(&mut self, delta: f32)
pub fn yaw_by(&mut self, delta: f32)
Apply yaw.
Examples found in repository?
examples/selection.rs (line 491)
452 fn update_movement(&mut self, input: &core::Input, delta_time: f32) {
453 // Camera movement
454 const SPEED: f32 = 1.0;
455
456 let mut forward = 0.0;
457 if input.held_keys.contains(&KeyCode::KeyW) {
458 forward += SPEED * delta_time;
459 }
460 if input.held_keys.contains(&KeyCode::KeyS) {
461 forward -= SPEED * delta_time;
462 }
463
464 let mut right = 0.0;
465 if input.held_keys.contains(&KeyCode::KeyD) {
466 right += SPEED * delta_time;
467 }
468 if input.held_keys.contains(&KeyCode::KeyA) {
469 right -= SPEED * delta_time;
470 }
471
472 self.camera.move_by(forward, right);
473
474 let mut up = 0.0;
475 if input.held_keys.contains(&KeyCode::Space) {
476 up += SPEED * delta_time;
477 }
478 if input.held_keys.contains(&KeyCode::ShiftLeft) {
479 up -= SPEED * delta_time;
480 }
481
482 self.camera.move_up(up);
483
484 // Camera rotation
485 const SENSITIVITY: f32 = 0.15;
486
487 let yaw = input.mouse_diff.x * SENSITIVITY * delta_time;
488 let pitch = input.mouse_diff.y * SENSITIVITY * delta_time;
489
490 self.camera.pitch_by(-pitch);
491 self.camera.yaw_by(-yaw);
492 }

More examples
examples/multi_model.rs (line 224)
186 fn update(&mut self, input: &core::Input, delta_time: f32) {
187 const SPEED: f32 = 1.0;
188
189 let mut forward = 0.0;
190 if input.held_keys.contains(&KeyCode::KeyW) {
191 forward += SPEED * delta_time;
192 }
193 if input.held_keys.contains(&KeyCode::KeyS) {
194 forward -= SPEED * delta_time;
195 }
196
197 let mut right = 0.0;
198 if input.held_keys.contains(&KeyCode::KeyD) {
199 right += SPEED * delta_time;
200 }
201 if input.held_keys.contains(&KeyCode::KeyA) {
202 right -= SPEED * delta_time;
203 }
204
205 self.camera.move_by(forward, right);
206
207 let mut up = 0.0;
208 if input.held_keys.contains(&KeyCode::Space) {
209 up += SPEED * delta_time;
210 }
211 if input.held_keys.contains(&KeyCode::ShiftLeft) {
212 up -= SPEED * delta_time;
213 }
214
215 self.camera.move_up(up);
216
217 // Camera rotation
218 const SENSITIVITY: f32 = 0.15;
219
220 let yaw = input.mouse_diff.x * SENSITIVITY * delta_time;
221 let pitch = input.mouse_diff.y * SENSITIVITY * delta_time;
222
223 self.camera.pitch_by(-pitch);
224 self.camera.yaw_by(-yaw);
225
226 // Update the viewer
227 self.viewer.update_camera(
228 &self.queue,
229 &self.camera,
230 uvec2(self.config.width, self.config.height),
231 );
232 }

examples/simple.rs (line 241)
202 fn update(&mut self, input: &core::Input, delta_time: f32) {
203 // Camera movement
204 const SPEED: f32 = 1.0;
205
206 let mut forward = 0.0;
207 if input.held_keys.contains(&KeyCode::KeyW) {
208 forward += SPEED * delta_time;
209 }
210 if input.held_keys.contains(&KeyCode::KeyS) {
211 forward -= SPEED * delta_time;
212 }
213
214 let mut right = 0.0;
215 if input.held_keys.contains(&KeyCode::KeyD) {
216 right += SPEED * delta_time;
217 }
218 if input.held_keys.contains(&KeyCode::KeyA) {
219 right -= SPEED * delta_time;
220 }
221
222 self.camera.move_by(forward, right);
223
224 let mut up = 0.0;
225 if input.held_keys.contains(&KeyCode::Space) {
226 up += SPEED * delta_time;
227 }
228 if input.held_keys.contains(&KeyCode::ShiftLeft) {
229 up -= SPEED * delta_time;
230 }
231
232 self.camera.move_up(up);
233
234 // Camera rotation
235 const SENSITIVITY: f32 = 0.15;
236
237 let yaw = input.mouse_diff.x * SENSITIVITY * delta_time;
238 let pitch = input.mouse_diff.y * SENSITIVITY * delta_time;
239
240 self.camera.pitch_by(-pitch);
241 self.camera.yaw_by(-yaw);
242
243 // Update the viewer
244 self.viewer.update_camera(
245 &self.queue,
246 &self.camera,
247 uvec2(self.config.width, self.config.height),
248 );
249 }

Source

pub fn get_forward(&self) -> Vec3
pub fn get_forward(&self) -> Vec3
Get the forward vector.
Trait Implementations§
Auto Trait Implementations§
impl Freeze for Camera
impl RefUnwindSafe for Camera
impl Send for Camera
impl Sync for Camera
impl Unpin for Camera
impl UnsafeUnpin for Camera
impl UnwindSafe for Camera
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T where
T: ?Sized,
impl<T> BorrowMut<T> for T where
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> CloneToUninit for T where
T: Clone,
impl<T> CloneToUninit for T where
T: Clone,
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts
self into a Left variant of Either<Self, Self>
if into_left is true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more

Source§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts
self into a Left variant of Either<Self, Self>
if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more