//! GPU-accelerated point cloud rendering built on `wgpu` and `winit`.

use crate::device::GpuContext;
use threecrate_core::{PointCloud, Point3f, ColoredPoint3f, Error, Result};
use nalgebra::{Matrix4, Vector3};
use bytemuck::{Pod, Zeroable};
use wgpu::util::DeviceExt;
use winit::window::Window;

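/// GPU vertex for a single point: position, RGB color, point size, and normal.
/// Laid out `#[repr(C)]` so it can be uploaded directly into a vertex buffer.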
#[repr(C)]
#[derive(Copy, Clone, Pod, Zeroable)]
pub struct PointVertex {
    pub position: [f32; 3],
    pub color: [f32; 3],
    pub size: f32,
    pub normal: [f32; 3],
}

impl PointVertex {
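    /// Build a vertex from a plain point with an explicit color, size, and normal.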
    pub fn from_point(point: &Point3f, color: [f32; 3], size: f32, normal: [f32; 3]) -> Self {
        Self {
            position: [point.x, point.y, point.z],
            color,
            size,
            normal,
        }
    }

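    /// Build a vertex from a colored point, converting 8-bit RGB to normalized floats.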
    pub fn from_colored_point(point: &ColoredPoint3f, size: f32, normal: [f32; 3]) -> Self {
        Self {
            position: [point.position.x, point.position.y, point.position.z],
            color: [
                point.color[0] as f32 / 255.0,
                point.color[1] as f32 / 255.0,
                point.color[2] as f32 / 255.0,
            ],
            size,
            normal,
        }
    }

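    /// Vertex buffer layout matching this struct's field order:
    /// position (location 0), color (1), size (2), normal (3).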
    pub fn desc<'a>() -> wgpu::VertexBufferLayout<'a> {
        wgpu::VertexBufferLayout {
            array_stride: std::mem::size_of::<PointVertex>() as wgpu::BufferAddress,
            step_mode: wgpu::VertexStepMode::Vertex,
            attributes: &[
                wgpu::VertexAttribute {
                    offset: 0,
                    shader_location: 0,
                    format: wgpu::VertexFormat::Float32x3,
                },
                wgpu::VertexAttribute {
                    offset: std::mem::size_of::<[f32; 3]>() as wgpu::BufferAddress,
                    shader_location: 1,
                    format: wgpu::VertexFormat::Float32x3,
                },
                wgpu::VertexAttribute {
                    offset: std::mem::size_of::<[f32; 6]>() as wgpu::BufferAddress,
                    shader_location: 2,
                    format: wgpu::VertexFormat::Float32,
                },
                wgpu::VertexAttribute {
                    offset: std::mem::size_of::<[f32; 7]>() as wgpu::BufferAddress,
                    shader_location: 3,
                    format: wgpu::VertexFormat::Float32x3,
                },
            ],
        }
    }
}

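/// Camera data uploaded as a uniform: combined view-projection matrix and
/// camera position, padded to a 16-byte boundary for WGSL layout rules.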
#[repr(C)]
#[derive(Copy, Clone, Pod, Zeroable)]
pub struct CameraUniform {
    pub view_proj: [[f32; 4]; 4],
    pub view_pos: [f32; 3],
    pub _padding: f32,
}

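/// Shading and splatting parameters uploaded as a uniform. Boolean toggles are
/// stored as `f32` (0.0 / 1.0) so the struct stays `Pod` and maps cleanly to WGSL.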
#[repr(C)]
#[derive(Copy, Clone, Debug, Pod, Zeroable)]
pub struct RenderParams {
    pub point_size: f32,
    pub alpha_threshold: f32,
    pub enable_splatting: f32,
    pub enable_lighting: f32,
    pub ambient_strength: f32,
    pub diffuse_strength: f32,
    pub specular_strength: f32,
    pub shininess: f32,
}

impl Default for RenderParams {
    fn default() -> Self {
        Self {
            point_size: 4.0,
            alpha_threshold: 0.1,
            enable_splatting: 1.0,
            enable_lighting: 1.0,
            ambient_strength: 0.3,
            diffuse_strength: 0.7,
            specular_strength: 0.5,
            shininess: 32.0,
        }
    }
}

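/// CPU-side renderer configuration: shading parameters, clear color, and
/// depth / blending / MSAA toggles.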
#[derive(Debug, Clone)]
pub struct RenderConfig {
    pub render_params: RenderParams,
    pub background_color: [f64; 4],
    pub enable_depth_test: bool,
    pub enable_alpha_blending: bool,
    pub enable_multisampling: bool,
}

impl Default for RenderConfig {
    fn default() -> Self {
        Self {
            render_params: RenderParams::default(),
            background_color: [0.1, 0.1, 0.1, 1.0],
            enable_depth_test: true,
            enable_alpha_blending: true,
            enable_multisampling: true,
        }
    }
}

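/// Point cloud renderer that owns the surface, pipeline, uniform buffers, and
/// an optional 4x MSAA color target for a single `winit` window.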
pub struct PointCloudRenderer<'window> {
    pub gpu_context: GpuContext,
    pub surface: wgpu::Surface<'window>,
    pub surface_config: wgpu::SurfaceConfiguration,
    pub render_pipeline: wgpu::RenderPipeline,
    pub camera_uniform: CameraUniform,
    pub camera_buffer: wgpu::Buffer,
    pub render_params: RenderParams,
    pub render_params_buffer: wgpu::Buffer,
    pub bind_group: wgpu::BindGroup,
    pub bind_group_layout: wgpu::BindGroupLayout,
    pub config: RenderConfig,
    pub msaa_texture: Option<wgpu::Texture>,
    pub msaa_view: Option<wgpu::TextureView>,
}

impl<'window> PointCloudRenderer<'window> {
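    /// Create a renderer for the given window, configuring the surface,
    /// uniform buffers, bind group, and render pipeline.
    ///
    /// A minimal usage sketch (assumes a `winit` window and an async runtime
    /// are already set up elsewhere):
    ///
    /// ```ignore
    /// let renderer = PointCloudRenderer::new(&window, RenderConfig::default()).await?;
    /// ```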
    pub async fn new(window: &'window Window, config: RenderConfig) -> Result<Self> {
        let gpu_context = GpuContext::new().await?;

        let surface = gpu_context.instance.create_surface(window)
            .map_err(|e| Error::Gpu(format!("Failed to create surface: {:?}", e)))?;

        // Prefer an sRGB surface format when the adapter offers one.
        let surface_caps = surface.get_capabilities(&gpu_context.adapter);
        let surface_format = surface_caps.formats.iter()
            .copied()
            .find(|f| f.is_srgb())
            .unwrap_or(surface_caps.formats[0]);

        let size = window.inner_size();
        let sample_count = if config.enable_multisampling { 4 } else { 1 };

        let surface_config = wgpu::SurfaceConfiguration {
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
            format: surface_format,
            width: size.width,
            height: size.height,
            present_mode: surface_caps.present_modes[0],
            alpha_mode: surface_caps.alpha_modes[0],
            view_formats: vec![],
            desired_maximum_frame_latency: 2,
        };
        surface.configure(&gpu_context.device, &surface_config);

        // Multisampled color target that gets resolved into the swapchain texture.
        let (msaa_texture, msaa_view) = if config.enable_multisampling {
            let msaa_texture = gpu_context.device.create_texture(&wgpu::TextureDescriptor {
                label: Some("MSAA Texture"),
                size: wgpu::Extent3d {
                    width: size.width,
                    height: size.height,
                    depth_or_array_layers: 1,
                },
                mip_level_count: 1,
                sample_count,
                dimension: wgpu::TextureDimension::D2,
                format: surface_format,
                usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
                view_formats: &[],
            });
            let msaa_view = msaa_texture.create_view(&wgpu::TextureViewDescriptor::default());
            (Some(msaa_texture), Some(msaa_view))
        } else {
            (None, None)
        };

        let camera_uniform = CameraUniform {
            view_proj: Matrix4::identity().into(),
            view_pos: [0.0, 0.0, 0.0],
            _padding: 0.0,
        };

        let camera_buffer = gpu_context.device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
            label: Some("Camera Buffer"),
            contents: bytemuck::bytes_of(&camera_uniform),
            usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
        });

        let render_params = config.render_params;
        let render_params_buffer = gpu_context.device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
            label: Some("Render Params Buffer"),
            contents: bytemuck::bytes_of(&render_params),
            usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
        });

        let bind_group_layout = gpu_context.device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
            entries: &[
                wgpu::BindGroupLayoutEntry {
                    binding: 0,
                    visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Buffer {
                        ty: wgpu::BufferBindingType::Uniform,
                        has_dynamic_offset: false,
                        min_binding_size: None,
                    },
                    count: None,
                },
                wgpu::BindGroupLayoutEntry {
                    binding: 1,
                    visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Buffer {
                        ty: wgpu::BufferBindingType::Uniform,
                        has_dynamic_offset: false,
                        min_binding_size: None,
                    },
                    count: None,
                },
            ],
            label: Some("point_cloud_bind_group_layout"),
        });

        let bind_group = gpu_context.device.create_bind_group(&wgpu::BindGroupDescriptor {
            layout: &bind_group_layout,
            entries: &[
                wgpu::BindGroupEntry {
                    binding: 0,
                    resource: camera_buffer.as_entire_binding(),
                },
                wgpu::BindGroupEntry {
                    binding: 1,
                    resource: render_params_buffer.as_entire_binding(),
                },
            ],
            label: Some("point_cloud_bind_group"),
        });

        let shader = gpu_context.device.create_shader_module(wgpu::ShaderModuleDescriptor {
            label: Some("Point Cloud Shader"),
            source: wgpu::ShaderSource::Wgsl(include_str!("shaders/point_cloud.wgsl").into()),
        });

        let render_pipeline_layout = gpu_context.device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
            label: Some("Point Cloud Render Pipeline Layout"),
            bind_group_layouts: &[&bind_group_layout],
            push_constant_ranges: &[],
        });

        let render_pipeline = gpu_context.device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
            label: Some("Point Cloud Render Pipeline"),
            layout: Some(&render_pipeline_layout),
            vertex: wgpu::VertexState {
                module: &shader,
                entry_point: Some("vs_main"),
                buffers: &[PointVertex::desc()],
                compilation_options: wgpu::PipelineCompilationOptions::default(),
            },
            fragment: Some(wgpu::FragmentState {
                module: &shader,
                entry_point: Some("fs_main"),
                targets: &[Some(wgpu::ColorTargetState {
                    format: surface_config.format,
                    blend: if config.enable_alpha_blending {
                        Some(wgpu::BlendState::ALPHA_BLENDING)
                    } else {
                        Some(wgpu::BlendState::REPLACE)
                    },
                    write_mask: wgpu::ColorWrites::ALL,
                })],
                compilation_options: wgpu::PipelineCompilationOptions::default(),
            }),
            primitive: wgpu::PrimitiveState {
                topology: wgpu::PrimitiveTopology::TriangleList,
                strip_index_format: None,
                front_face: wgpu::FrontFace::Ccw,
                cull_mode: None,
                unclipped_depth: false,
                polygon_mode: wgpu::PolygonMode::Fill,
                conservative: false,
            },
            depth_stencil: if config.enable_depth_test {
                Some(wgpu::DepthStencilState {
                    format: wgpu::TextureFormat::Depth32Float,
                    depth_write_enabled: true,
                    depth_compare: wgpu::CompareFunction::Less,
                    stencil: wgpu::StencilState::default(),
                    bias: wgpu::DepthBiasState::default(),
                })
            } else {
                None
            },
            multisample: wgpu::MultisampleState {
                count: sample_count,
                mask: !0,
                alpha_to_coverage_enabled: false,
            },
            multiview: None,
            cache: None,
        });

        Ok(Self {
            gpu_context,
            surface,
            surface_config,
            render_pipeline,
            camera_uniform,
            camera_buffer,
            render_params,
            render_params_buffer,
            bind_group,
            bind_group_layout,
            config,
            msaa_texture,
            msaa_view,
        })
    }

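    /// Update the camera uniform from the view/projection matrices and camera
    /// position, and upload it to the GPU.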
    pub fn update_camera(&mut self, view_matrix: Matrix4<f32>, proj_matrix: Matrix4<f32>, camera_pos: Vector3<f32>) {
        self.camera_uniform.view_proj = (proj_matrix * view_matrix).into();
        self.camera_uniform.view_pos = camera_pos.into();

        self.gpu_context.queue.write_buffer(
            &self.camera_buffer,
            0,
            bytemuck::bytes_of(&self.camera_uniform),
        );
    }

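    /// Replace the shading parameters and upload them to the GPU.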
    pub fn update_render_params(&mut self, params: RenderParams) {
        self.render_params = params;
        self.gpu_context.queue.write_buffer(
            &self.render_params_buffer,
            0,
            bytemuck::bytes_of(&self.render_params),
        );
    }

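    /// Reconfigure the surface (and recreate the MSAA target) after a window resize.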
    pub fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>) {
        // Ignore zero-sized resizes (e.g. a minimized window); configuring a
        // surface with a zero dimension is invalid.
        if new_size.width == 0 || new_size.height == 0 {
            return;
        }

        self.surface_config.width = new_size.width;
        self.surface_config.height = new_size.height;
        self.surface.configure(&self.gpu_context.device, &self.surface_config);

        if self.config.enable_multisampling {
            let msaa_texture = self.gpu_context.device.create_texture(&wgpu::TextureDescriptor {
                label: Some("MSAA Texture"),
                size: wgpu::Extent3d {
                    width: new_size.width,
                    height: new_size.height,
                    depth_or_array_layers: 1,
                },
                mip_level_count: 1,
                sample_count: 4,
                dimension: wgpu::TextureDimension::D2,
                format: self.surface_config.format,
                usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
                view_formats: &[],
            });
            let msaa_view = msaa_texture.create_view(&wgpu::TextureViewDescriptor::default());
            self.msaa_texture = Some(msaa_texture);
            self.msaa_view = Some(msaa_view);
        }
    }

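    /// Upload a slice of vertices into a new GPU vertex buffer.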
    pub fn create_vertex_buffer(&self, vertices: &[PointVertex]) -> wgpu::Buffer {
        self.gpu_context.device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
            label: Some("Point Cloud Vertex Buffer"),
            contents: bytemuck::cast_slice(vertices),
            usage: wgpu::BufferUsages::VERTEX,
        })
    }

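    /// Create a depth texture matching the current surface size and MSAA sample count.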
    pub fn create_depth_texture(&self) -> wgpu::Texture {
        let sample_count = if self.config.enable_multisampling { 4 } else { 1 };

        self.gpu_context.device.create_texture(&wgpu::TextureDescriptor {
            label: Some("Depth Texture"),
            size: wgpu::Extent3d {
                width: self.surface_config.width,
                height: self.surface_config.height,
                depth_or_array_layers: 1,
            },
            mip_level_count: 1,
            sample_count,
            dimension: wgpu::TextureDimension::D2,
            format: wgpu::TextureFormat::Depth32Float,
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
            view_formats: &[],
        })
    }

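    /// Render one frame of the given vertices to the window surface. A fresh
    /// vertex buffer and depth texture are created on each call.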
    pub fn render(&self, vertices: &[PointVertex]) -> Result<()> {
        let vertex_buffer = self.create_vertex_buffer(vertices);
        let depth_texture = self.create_depth_texture();
        let depth_view = depth_texture.create_view(&wgpu::TextureViewDescriptor::default());

        let output = self.surface.get_current_texture()
            .map_err(|e| Error::Gpu(format!("Failed to get surface texture: {:?}", e)))?;

        let view = output.texture.create_view(&wgpu::TextureViewDescriptor::default());

        let mut encoder = self.gpu_context.device.create_command_encoder(&wgpu::CommandEncoderDescriptor {
            label: Some("Point Cloud Render Encoder"),
        });

        // With MSAA enabled, draw into the multisampled target and resolve into
        // the swapchain view; otherwise draw into the swapchain view directly.
        let (color_attachment, resolve_target) = if let Some(ref msaa_view) = self.msaa_view {
            (msaa_view, Some(&view))
        } else {
            (&view, None)
        };

        {
            let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
                label: Some("Point Cloud Render Pass"),
                color_attachments: &[Some(wgpu::RenderPassColorAttachment {
                    view: color_attachment,
                    resolve_target,
                    ops: wgpu::Operations {
                        load: wgpu::LoadOp::Clear(wgpu::Color {
                            r: self.config.background_color[0],
                            g: self.config.background_color[1],
                            b: self.config.background_color[2],
                            a: self.config.background_color[3],
                        }),
                        store: wgpu::StoreOp::Store,
                    },
                })],
                depth_stencil_attachment: if self.config.enable_depth_test {
                    Some(wgpu::RenderPassDepthStencilAttachment {
                        view: &depth_view,
                        depth_ops: Some(wgpu::Operations {
                            load: wgpu::LoadOp::Clear(1.0),
                            store: wgpu::StoreOp::Store,
                        }),
                        stencil_ops: None,
                    })
                } else {
                    None
                },
                timestamp_writes: None,
                occlusion_query_set: None,
            });

            render_pass.set_pipeline(&self.render_pipeline);
            render_pass.set_bind_group(0, &self.bind_group, &[]);
            render_pass.set_vertex_buffer(0, vertex_buffer.slice(..));
            render_pass.draw(0..vertices.len() as u32, 0..1);
        }

        self.gpu_context.queue.submit(std::iter::once(encoder.finish()));
        output.present();

        Ok(())
    }
}

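/// Convert a point cloud to GPU vertices with a uniform color and point size.
/// Normals are estimated with `estimate_point_normals`.
///
/// A minimal usage sketch (assumes `cloud` is an existing `PointCloud<Point3f>`
/// and `renderer` is a `PointCloudRenderer`):
///
/// ```ignore
/// let vertices = point_cloud_to_vertices(&cloud, [1.0, 1.0, 1.0], 4.0);
/// renderer.render(&vertices)?;
/// ```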
pub fn point_cloud_to_vertices(point_cloud: &PointCloud<Point3f>, color: [f32; 3], size: f32) -> Vec<PointVertex> {
    let normals = estimate_point_normals(&point_cloud.points);
    point_cloud.points.iter()
        .zip(normals.iter())
        .map(|(point, normal)| PointVertex::from_point(point, color, size, *normal))
        .collect()
}

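/// Convert a point cloud to GPU vertices, deriving each vertex color from its
/// position (XYZ mapped into the [0, 1] RGB range).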
pub fn point_cloud_to_vertices_colored(point_cloud: &PointCloud<Point3f>, size: f32) -> Vec<PointVertex> {
    let normals = estimate_point_normals(&point_cloud.points);
    point_cloud.points.iter()
        .zip(normals.iter())
        .map(|(point, normal)| {
            let color = [
                (point.x * 0.5 + 0.5).clamp(0.0, 1.0),
                (point.y * 0.5 + 0.5).clamp(0.0, 1.0),
                (point.z * 0.5 + 0.5).clamp(0.0, 1.0),
            ];

            PointVertex::from_point(point, color, size, *normal)
        })
        .collect()
}

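/// Convert a colored point cloud to GPU vertices, keeping the stored per-point
/// colors and estimating normals from the point positions.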
pub fn colored_point_cloud_to_vertices(point_cloud: &PointCloud<ColoredPoint3f>, size: f32) -> Vec<PointVertex> {
    let positions: Vec<Point3f> = point_cloud.points.iter()
        .map(|p| Point3f::new(p.position.x, p.position.y, p.position.z))
        .collect();
    let normals = estimate_point_normals(&positions);

    point_cloud.points.iter()
        .zip(normals.iter())
        .map(|(point, normal)| PointVertex::from_colored_point(point, size, *normal))
        .collect()
}

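/// Rough per-point normal estimation: for each point, gather up to 10 neighbors
/// within a fixed radius using a brute-force O(n^2) search, then take the
/// normalized cross product of two neighbor offsets. Points with fewer than
/// 3 neighbors keep a default +Z normal.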
fn estimate_point_normals(points: &[Point3f]) -> Vec<[f32; 3]> {
    let mut normals = vec![[0.0, 0.0, 1.0]; points.len()];

    for (i, point) in points.iter().enumerate() {
        // Collect up to 10 neighbors within a fixed search radius.
        let mut neighbors = Vec::new();
        let search_radius = 0.1;

        for (j, other_point) in points.iter().enumerate() {
            if i != j {
                let distance = ((point.x - other_point.x).powi(2) +
                    (point.y - other_point.y).powi(2) +
                    (point.z - other_point.z).powi(2)).sqrt();

                if distance < search_radius && neighbors.len() < 10 {
                    neighbors.push(*other_point);
                }
            }
        }

        if neighbors.len() >= 3 {
            let mut normal = estimate_normal_from_neighbors(point, &neighbors);

            // Normalize the estimated normal before storing it.
            let length = (normal[0].powi(2) + normal[1].powi(2) + normal[2].powi(2)).sqrt();
            if length > 0.0 {
                normal[0] /= length;
                normal[1] /= length;
                normal[2] /= length;
            }

            normals[i] = normal;
        }
    }

    normals
}

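/// Estimate a (non-normalized) surface normal at `center` as the cross product
/// of the offsets to its first two neighbors.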
fn estimate_normal_from_neighbors(center: &Point3f, neighbors: &[Point3f]) -> [f32; 3] {
    if neighbors.len() < 2 {
        return [0.0, 0.0, 1.0];
    }

    let v1 = [
        neighbors[0].x - center.x,
        neighbors[0].y - center.y,
        neighbors[0].z - center.z,
    ];

    let v2 = [
        neighbors[1].x - center.x,
        neighbors[1].y - center.y,
        neighbors[1].z - center.z,
    ];

    // Cross product v1 x v2 gives a vector perpendicular to both offsets.
    let normal = [
        v1[1] * v2[2] - v1[2] * v2[1],
        v1[2] * v2[0] - v1[0] * v2[2],
        v1[0] * v2[1] - v1[1] * v2[0],
    ];

    normal
}