use crate::device::GpuContext;
use threecrate_core::{PointCloud, Point3f, ColoredPoint3f, Error, Result};
use nalgebra::{Matrix4, Vector3};
use bytemuck::{Pod, Zeroable};
use wgpu::util::DeviceExt;
use winit::window::Window;

/// Per-point vertex data uploaded to the GPU: position, RGB color, point size, and normal.
#[repr(C)]
#[derive(Copy, Clone, Pod, Zeroable)]
pub struct PointVertex {
    pub position: [f32; 3],
    pub color: [f32; 3],
    pub size: f32,
    pub normal: [f32; 3],
}

impl PointVertex {
    pub fn from_point(point: &Point3f, color: [f32; 3], size: f32, normal: [f32; 3]) -> Self {
        Self {
            position: [point.x, point.y, point.z],
            color,
            size,
            normal,
        }
    }

    pub fn from_colored_point(point: &ColoredPoint3f, size: f32, normal: [f32; 3]) -> Self {
        Self {
            position: [point.position.x, point.position.y, point.position.z],
            color: [
                point.color[0] as f32 / 255.0,
                point.color[1] as f32 / 255.0,
                point.color[2] as f32 / 255.0,
            ],
            size,
            normal,
        }
    }

    pub fn desc<'a>() -> wgpu::VertexBufferLayout<'a> {
        wgpu::VertexBufferLayout {
            array_stride: std::mem::size_of::<PointVertex>() as wgpu::BufferAddress,
            step_mode: wgpu::VertexStepMode::Vertex,
            attributes: &[
                // position
                wgpu::VertexAttribute {
                    offset: 0,
                    shader_location: 0,
                    format: wgpu::VertexFormat::Float32x3,
                },
                // color
                wgpu::VertexAttribute {
                    offset: std::mem::size_of::<[f32; 3]>() as wgpu::BufferAddress,
                    shader_location: 1,
                    format: wgpu::VertexFormat::Float32x3,
                },
                // size
                wgpu::VertexAttribute {
                    offset: std::mem::size_of::<[f32; 6]>() as wgpu::BufferAddress,
                    shader_location: 2,
                    format: wgpu::VertexFormat::Float32,
                },
                // normal
                wgpu::VertexAttribute {
                    offset: std::mem::size_of::<[f32; 7]>() as wgpu::BufferAddress,
                    shader_location: 3,
                    format: wgpu::VertexFormat::Float32x3,
                },
            ],
        }
    }
}
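
// A minimal sanity check (added sketch, not part of the original module): the attribute
// offsets above assume `PointVertex` has no implicit padding, so its stride is exactly
// 10 f32s (40 bytes). `#[repr(C)]` with all-f32 fields guarantees this.
#[cfg(test)]
mod vertex_layout_tests {
    use super::*;

    #[test]
    fn point_vertex_is_tightly_packed() {
        // 3 (position) + 3 (color) + 1 (size) + 3 (normal) = 10 floats.
        assert_eq!(std::mem::size_of::<PointVertex>(), 10 * std::mem::size_of::<f32>());
        // The `size` attribute starts right after position + color (6 floats in).
        assert_eq!(std::mem::size_of::<[f32; 6]>(), 24);
    }
}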

/// Camera uniform: combined view-projection matrix and camera position.
/// The trailing `_padding` keeps the size a multiple of 16 bytes, as WGSL uniform layout expects.
#[repr(C)]
#[derive(Copy, Clone, Pod, Zeroable)]
pub struct CameraUniform {
    pub view_proj: [[f32; 4]; 4],
    pub view_pos: [f32; 3],
    pub _padding: f32,
}

/// Shading and splatting parameters passed to the shader as a uniform.
/// Boolean toggles are encoded as f32 (0.0 = off, 1.0 = on).
#[repr(C)]
#[derive(Copy, Clone, Debug, Pod, Zeroable)]
pub struct RenderParams {
    pub point_size: f32,
    pub alpha_threshold: f32,
    pub enable_splatting: f32,
    pub enable_lighting: f32,
    pub ambient_strength: f32,
    pub diffuse_strength: f32,
    pub specular_strength: f32,
    pub shininess: f32,
}

impl Default for RenderParams {
    fn default() -> Self {
        Self {
            point_size: 4.0,
            alpha_threshold: 0.1,
            enable_splatting: 1.0,
            enable_lighting: 1.0,
            ambient_strength: 0.3,
            diffuse_strength: 0.7,
            specular_strength: 0.5,
            shininess: 32.0,
        }
    }
}

/// Renderer configuration: shader parameters plus surface and pipeline options.
#[derive(Debug, Clone)]
pub struct RenderConfig {
    pub render_params: RenderParams,
    pub background_color: [f64; 4],
    pub enable_depth_test: bool,
    pub enable_alpha_blending: bool,
    pub enable_multisampling: bool,
}

impl Default for RenderConfig {
    fn default() -> Self {
        Self {
            render_params: RenderParams::default(),
            background_color: [0.1, 0.1, 0.1, 1.0],
            enable_depth_test: true,
            enable_alpha_blending: true,
            enable_multisampling: true,
        }
    }
}
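
// Example (added sketch): overriding selected defaults with struct-update syntax.
// The values here are illustrative, not recommendations from the original code.
#[allow(dead_code)]
fn high_contrast_config() -> RenderConfig {
    RenderConfig {
        render_params: RenderParams {
            point_size: 2.0,
            enable_splatting: 0.0,
            ..RenderParams::default()
        },
        background_color: [1.0, 1.0, 1.0, 1.0],
        ..RenderConfig::default()
    }
}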

/// GPU point cloud renderer bound to a winit window surface.
pub struct PointCloudRenderer<'window> {
    pub gpu_context: GpuContext,
    pub surface: wgpu::Surface<'window>,
    pub surface_config: wgpu::SurfaceConfiguration,
    pub render_pipeline: wgpu::RenderPipeline,
    pub camera_uniform: CameraUniform,
    pub camera_buffer: wgpu::Buffer,
    pub render_params: RenderParams,
    pub render_params_buffer: wgpu::Buffer,
    pub bind_group: wgpu::BindGroup,
    pub bind_group_layout: wgpu::BindGroupLayout,
    pub config: RenderConfig,
    pub msaa_texture: Option<wgpu::Texture>,
    pub msaa_view: Option<wgpu::TextureView>,
}

impl<'window> PointCloudRenderer<'window> {
    pub async fn new(window: &'window Window, config: RenderConfig) -> Result<Self> {
        let gpu_context = GpuContext::new().await?;

        let surface = gpu_context.instance.create_surface(window)
            .map_err(|e| Error::Gpu(format!("Failed to create surface: {:?}", e)))?;

        // Prefer an sRGB surface format when the adapter offers one.
        let surface_caps = surface.get_capabilities(&gpu_context.adapter);
        let surface_format = surface_caps.formats.iter()
            .copied()
            .find(|f| f.is_srgb())
            .unwrap_or(surface_caps.formats[0]);

        let size = window.inner_size();
        let sample_count = if config.enable_multisampling { 4 } else { 1 };

        let surface_config = wgpu::SurfaceConfiguration {
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
            format: surface_format,
            width: size.width,
            height: size.height,
            present_mode: surface_caps.present_modes[0],
            alpha_mode: surface_caps.alpha_modes[0],
            view_formats: vec![],
            desired_maximum_frame_latency: 2,
        };
        surface.configure(&gpu_context.device, &surface_config);

        // Create the multisampled color target when MSAA is enabled.
        let (msaa_texture, msaa_view) = if config.enable_multisampling {
            let msaa_texture = gpu_context.device.create_texture(&wgpu::TextureDescriptor {
                label: Some("MSAA Texture"),
                size: wgpu::Extent3d {
                    width: size.width,
                    height: size.height,
                    depth_or_array_layers: 1,
                },
                mip_level_count: 1,
                sample_count,
                dimension: wgpu::TextureDimension::D2,
                format: surface_format,
                usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
                view_formats: &[],
            });
            let msaa_view = msaa_texture.create_view(&wgpu::TextureViewDescriptor::default());
            (Some(msaa_texture), Some(msaa_view))
        } else {
            (None, None)
        };

        // Camera and shading-parameter uniforms, uploaded once here and rewritten
        // via `queue.write_buffer` when they change.
        let camera_uniform = CameraUniform {
            view_proj: Matrix4::identity().into(),
            view_pos: [0.0, 0.0, 0.0],
            _padding: 0.0,
        };

        let camera_buffer = gpu_context.device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
            label: Some("Camera Buffer"),
            contents: bytemuck::bytes_of(&camera_uniform),
            usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
        });

        let render_params = config.render_params;
        let render_params_buffer = gpu_context.device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
            label: Some("Render Params Buffer"),
            contents: bytemuck::bytes_of(&render_params),
            usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
        });

        // Both uniforms are visible to the vertex and fragment stages.
        let bind_group_layout = gpu_context.device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
            entries: &[
                wgpu::BindGroupLayoutEntry {
                    binding: 0,
                    visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Buffer {
                        ty: wgpu::BufferBindingType::Uniform,
                        has_dynamic_offset: false,
                        min_binding_size: None,
                    },
                    count: None,
                },
                wgpu::BindGroupLayoutEntry {
                    binding: 1,
                    visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Buffer {
                        ty: wgpu::BufferBindingType::Uniform,
                        has_dynamic_offset: false,
                        min_binding_size: None,
                    },
                    count: None,
                },
            ],
            label: Some("point_cloud_bind_group_layout"),
        });

        let bind_group = gpu_context.device.create_bind_group(&wgpu::BindGroupDescriptor {
            layout: &bind_group_layout,
            entries: &[
                wgpu::BindGroupEntry {
                    binding: 0,
                    resource: camera_buffer.as_entire_binding(),
                },
                wgpu::BindGroupEntry {
                    binding: 1,
                    resource: render_params_buffer.as_entire_binding(),
                },
            ],
            label: Some("point_cloud_bind_group"),
        });

        let shader = gpu_context.device.create_shader_module(wgpu::ShaderModuleDescriptor {
            label: Some("Point Cloud Shader"),
            source: wgpu::ShaderSource::Wgsl(include_str!("shaders/point_cloud.wgsl").into()),
        });

        let render_pipeline_layout = gpu_context.device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
            label: Some("Point Cloud Render Pipeline Layout"),
            bind_group_layouts: &[&bind_group_layout],
            push_constant_ranges: &[],
        });

        let render_pipeline = gpu_context.device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
            label: Some("Point Cloud Render Pipeline"),
            layout: Some(&render_pipeline_layout),
            vertex: wgpu::VertexState {
                module: &shader,
                entry_point: "vs_main",
                buffers: &[PointVertex::desc()],
                compilation_options: wgpu::PipelineCompilationOptions::default(),
            },
            fragment: Some(wgpu::FragmentState {
                module: &shader,
                entry_point: "fs_main",
                targets: &[Some(wgpu::ColorTargetState {
                    format: surface_config.format,
                    blend: if config.enable_alpha_blending {
                        Some(wgpu::BlendState::ALPHA_BLENDING)
                    } else {
                        Some(wgpu::BlendState::REPLACE)
                    },
                    write_mask: wgpu::ColorWrites::ALL,
                })],
                compilation_options: wgpu::PipelineCompilationOptions::default(),
            }),
            primitive: wgpu::PrimitiveState {
                topology: wgpu::PrimitiveTopology::TriangleList,
                strip_index_format: None,
                front_face: wgpu::FrontFace::Ccw,
                cull_mode: None,
                unclipped_depth: false,
                polygon_mode: wgpu::PolygonMode::Fill,
                conservative: false,
            },
            depth_stencil: if config.enable_depth_test {
                Some(wgpu::DepthStencilState {
                    format: wgpu::TextureFormat::Depth32Float,
                    depth_write_enabled: true,
                    depth_compare: wgpu::CompareFunction::Less,
                    stencil: wgpu::StencilState::default(),
                    bias: wgpu::DepthBiasState::default(),
                })
            } else {
                None
            },
            multisample: wgpu::MultisampleState {
                count: sample_count,
                mask: !0,
                alpha_to_coverage_enabled: false,
            },
            multiview: None,
        });

        Ok(Self {
            gpu_context,
            surface,
            surface_config,
            render_pipeline,
            camera_uniform,
            camera_buffer,
            render_params,
            render_params_buffer,
            bind_group,
            bind_group_layout,
            config,
            msaa_texture,
            msaa_view,
        })
    }

    /// Upload a new combined view-projection matrix and camera position.
    pub fn update_camera(&mut self, view_matrix: Matrix4<f32>, proj_matrix: Matrix4<f32>, camera_pos: Vector3<f32>) {
        self.camera_uniform.view_proj = (proj_matrix * view_matrix).into();
        self.camera_uniform.view_pos = camera_pos.into();

        self.gpu_context.queue.write_buffer(
            &self.camera_buffer,
            0,
            bytemuck::bytes_of(&self.camera_uniform),
        );
    }

    /// Upload new shading and splatting parameters.
    pub fn update_render_params(&mut self, params: RenderParams) {
        self.render_params = params;
        self.gpu_context.queue.write_buffer(
            &self.render_params_buffer,
            0,
            bytemuck::bytes_of(&self.render_params),
        );
    }

    /// Reconfigure the surface (and MSAA target) after a window resize.
    pub fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>) {
        // Configuring a zero-sized surface is invalid; ignore minimize events.
        if new_size.width == 0 || new_size.height == 0 {
            return;
        }

        self.surface_config.width = new_size.width;
        self.surface_config.height = new_size.height;
        self.surface.configure(&self.gpu_context.device, &self.surface_config);

        if self.config.enable_multisampling {
            let msaa_texture = self.gpu_context.device.create_texture(&wgpu::TextureDescriptor {
                label: Some("MSAA Texture"),
                size: wgpu::Extent3d {
                    width: new_size.width,
                    height: new_size.height,
                    depth_or_array_layers: 1,
                },
                mip_level_count: 1,
                sample_count: 4,
                dimension: wgpu::TextureDimension::D2,
                format: self.surface_config.format,
                usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
                view_formats: &[],
            });
            let msaa_view = msaa_texture.create_view(&wgpu::TextureViewDescriptor::default());
            self.msaa_texture = Some(msaa_texture);
            self.msaa_view = Some(msaa_view);
        }
    }

    pub fn create_vertex_buffer(&self, vertices: &[PointVertex]) -> wgpu::Buffer {
        self.gpu_context.device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
            label: Some("Point Cloud Vertex Buffer"),
            contents: bytemuck::cast_slice(vertices),
            usage: wgpu::BufferUsages::VERTEX,
        })
    }

    pub fn create_depth_texture(&self) -> wgpu::Texture {
        let sample_count = if self.config.enable_multisampling { 4 } else { 1 };

        self.gpu_context.device.create_texture(&wgpu::TextureDescriptor {
            label: Some("Depth Texture"),
            size: wgpu::Extent3d {
                width: self.surface_config.width,
                height: self.surface_config.height,
                depth_or_array_layers: 1,
            },
            mip_level_count: 1,
            sample_count,
            dimension: wgpu::TextureDimension::D2,
            format: wgpu::TextureFormat::Depth32Float,
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
            view_formats: &[],
        })
    }

    /// Draw one frame: upload the vertices, clear the target, and render all points.
    pub fn render(&self, vertices: &[PointVertex]) -> Result<()> {
        let vertex_buffer = self.create_vertex_buffer(vertices);
        let depth_texture = self.create_depth_texture();
        let depth_view = depth_texture.create_view(&wgpu::TextureViewDescriptor::default());

        let output = self.surface.get_current_texture()
            .map_err(|e| Error::Gpu(format!("Failed to get surface texture: {:?}", e)))?;

        let view = output.texture.create_view(&wgpu::TextureViewDescriptor::default());

        let mut encoder = self.gpu_context.device.create_command_encoder(&wgpu::CommandEncoderDescriptor {
            label: Some("Point Cloud Render Encoder"),
        });

        // With MSAA enabled, draw into the multisampled texture and resolve into the surface.
        let (color_attachment, resolve_target) = if let Some(ref msaa_view) = self.msaa_view {
            (msaa_view, Some(&view))
        } else {
            (&view, None)
        };

        {
            let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
                label: Some("Point Cloud Render Pass"),
                color_attachments: &[Some(wgpu::RenderPassColorAttachment {
                    view: color_attachment,
                    resolve_target,
                    ops: wgpu::Operations {
                        load: wgpu::LoadOp::Clear(wgpu::Color {
                            r: self.config.background_color[0],
                            g: self.config.background_color[1],
                            b: self.config.background_color[2],
                            a: self.config.background_color[3],
                        }),
                        store: wgpu::StoreOp::Store,
                    },
                })],
                depth_stencil_attachment: if self.config.enable_depth_test {
                    Some(wgpu::RenderPassDepthStencilAttachment {
                        view: &depth_view,
                        depth_ops: Some(wgpu::Operations {
                            load: wgpu::LoadOp::Clear(1.0),
                            store: wgpu::StoreOp::Store,
                        }),
                        stencil_ops: None,
                    })
                } else {
                    None
                },
                timestamp_writes: None,
                occlusion_query_set: None,
            });

            render_pass.set_pipeline(&self.render_pipeline);
            render_pass.set_bind_group(0, &self.bind_group, &[]);
            render_pass.set_vertex_buffer(0, vertex_buffer.slice(..));
            render_pass.draw(0..vertices.len() as u32, 0..1);
        }

        self.gpu_context.queue.submit(std::iter::once(encoder.finish()));
        output.present();

        Ok(())
    }
}

/// Convert a point cloud into GPU vertices with a uniform color and point size.
/// Normals are estimated on the CPU via `estimate_point_normals`.
pub fn point_cloud_to_vertices(point_cloud: &PointCloud<Point3f>, color: [f32; 3], size: f32) -> Vec<PointVertex> {
    let normals = estimate_point_normals(&point_cloud.points);
    point_cloud.points.iter()
        .zip(normals.iter())
        .map(|(point, normal)| PointVertex::from_point(point, color, size, *normal))
        .collect()
}

/// Convert a point cloud into GPU vertices, deriving a color from each point's position.
pub fn point_cloud_to_vertices_colored(point_cloud: &PointCloud<Point3f>, size: f32) -> Vec<PointVertex> {
    let normals = estimate_point_normals(&point_cloud.points);
    point_cloud.points.iter()
        .zip(normals.iter())
        .map(|(point, normal)| {
            // Map x/y/z from roughly [-1, 1] into [0, 1] RGB.
            let color = [
                (point.x * 0.5 + 0.5).clamp(0.0, 1.0),
                (point.y * 0.5 + 0.5).clamp(0.0, 1.0),
                (point.z * 0.5 + 0.5).clamp(0.0, 1.0),
            ];

            PointVertex::from_point(point, color, size, *normal)
        })
        .collect()
}

/// Convert a colored point cloud into GPU vertices, keeping each point's RGB color.
pub fn colored_point_cloud_to_vertices(point_cloud: &PointCloud<ColoredPoint3f>, size: f32) -> Vec<PointVertex> {
    let positions: Vec<Point3f> = point_cloud.points.iter()
        .map(|p| Point3f::new(p.position.x, p.position.y, p.position.z))
        .collect();
    let normals = estimate_point_normals(&positions);

    point_cloud.points.iter()
        .zip(normals.iter())
        .map(|(point, normal)| PointVertex::from_colored_point(point, size, *normal))
        .collect()
}

/// Brute-force CPU normal estimation: for each point, gather up to 10 neighbors
/// within a fixed search radius and build a normal from the first two neighbor offsets.
/// O(n^2), so it is only suitable for small clouds or quick previews.
fn estimate_point_normals(points: &[Point3f]) -> Vec<[f32; 3]> {
    let mut normals = vec![[0.0, 0.0, 1.0]; points.len()];

    for (i, point) in points.iter().enumerate() {
        let mut neighbors = Vec::new();
        let search_radius = 0.1;

        for (j, other_point) in points.iter().enumerate() {
            if i != j {
                let distance = ((point.x - other_point.x).powi(2) +
                    (point.y - other_point.y).powi(2) +
                    (point.z - other_point.z).powi(2)).sqrt();

                if distance < search_radius && neighbors.len() < 10 {
                    neighbors.push(*other_point);
                }
            }
        }

        if neighbors.len() >= 3 {
            let mut normal = estimate_normal_from_neighbors(point, &neighbors);

            // Normalize to unit length before storing.
            let length = (normal[0].powi(2) + normal[1].powi(2) + normal[2].powi(2)).sqrt();
            if length > 0.0 {
                normal[0] /= length;
                normal[1] /= length;
                normal[2] /= length;
            }

            normals[i] = normal;
        }
    }

    normals
}

/// Cross product of the offsets to the first two neighbors; falls back to +Z
/// when there are not enough neighbors.
fn estimate_normal_from_neighbors(center: &Point3f, neighbors: &[Point3f]) -> [f32; 3] {
    if neighbors.len() < 2 {
        return [0.0, 0.0, 1.0];
    }

    let v1 = [
        neighbors[0].x - center.x,
        neighbors[0].y - center.y,
        neighbors[0].z - center.z,
    ];

    let v2 = [
        neighbors[1].x - center.x,
        neighbors[1].y - center.y,
        neighbors[1].z - center.z,
    ];

    [
        v1[1] * v2[2] - v1[2] * v2[1],
        v1[2] * v2[0] - v1[0] * v2[2],
        v1[0] * v2[1] - v1[1] * v2[0],
    ]
}
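
// Minimal usage sketch (added example, not part of the original module). It assumes an
// async context that already owns a winit `Window` and a loaded `PointCloud<Point3f>`;
// the identity matrices stand in for a real camera.
#[allow(dead_code)]
async fn example_render_once(window: &Window, cloud: &PointCloud<Point3f>) -> Result<()> {
    let mut renderer = PointCloudRenderer::new(window, RenderConfig::default()).await?;

    // Exercise the uniform-upload path; a real caller would pass view/projection
    // matrices built from its camera state.
    renderer.update_camera(Matrix4::identity(), Matrix4::identity(), Vector3::zeros());

    // Uniform light-grey points with the default 4.0 base size; normals are estimated on the CPU.
    let vertices = point_cloud_to_vertices(cloud, [0.8, 0.8, 0.8], 4.0);
    renderer.render(&vertices)
}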