runmat_plot/gui/window_impl.rs

//! Implementation methods for the GUI plot window

#[cfg(feature = "gui")]
use super::plot_overlay::{OverlayConfig, OverlayMetrics};
#[cfg(feature = "gui")]
use super::{PlotWindow, WindowConfig};
#[cfg(feature = "gui")]
use crate::core::PipelineType;
#[cfg(feature = "gui")]
use egui_winit::State as EguiState;
#[cfg(feature = "gui")]
use glam::{Mat4, Vec2, Vec3, Vec4};
#[cfg(feature = "gui")]
use std::sync::Arc;
#[cfg(feature = "gui")]
use winit::{dpi::PhysicalSize, event::Event, event_loop::EventLoop, window::WindowBuilder};

#[cfg(feature = "gui")]
impl<'window> PlotWindow<'window> {
    /// Create a new interactive plot window
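    ///
    /// A minimal construction sketch (hedged: `WindowConfig::default()` and the
    /// `pollster` executor are assumptions for illustration, not part of this API):
    ///
    /// ```ignore
    /// let config = WindowConfig::default();
    /// let window = pollster::block_on(PlotWindow::new(config))?;
    /// ```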
    pub async fn new(config: WindowConfig) -> Result<Self, Box<dyn std::error::Error>> {
        // Create a new EventLoop (assumes this is the only EventLoop creation)
        let event_loop =
            EventLoop::new().map_err(|e| format!("Failed to create EventLoop: {e}"))?;
        let window = WindowBuilder::new()
            .with_title(&config.title)
            .with_inner_size(PhysicalSize::new(config.width, config.height))
            .with_resizable(config.resizable)
            .with_maximized(config.maximized)
            .build(&event_loop)?;
        let window = Arc::new(window);

        // Create WGPU instance and surface
        let instance = wgpu::Instance::new(wgpu::InstanceDescriptor {
            backends: wgpu::Backends::all(),
            ..Default::default()
        });

        let surface = instance.create_surface(window.clone())?;

        // Request adapter and device
        let adapter = instance
            .request_adapter(&wgpu::RequestAdapterOptions {
                power_preference: wgpu::PowerPreference::HighPerformance,
                compatible_surface: Some(&surface),
                force_fallback_adapter: false,
            })
            .await
            .ok_or("Failed to request adapter")?;

        let (device, queue) = adapter
            .request_device(
                &wgpu::DeviceDescriptor {
                    label: Some("RunMat Plot Device"),
                    required_features: wgpu::Features::empty(),
                    required_limits: wgpu::Limits::default(),
                },
                None,
            )
            .await?;

        let device = Arc::new(device);
        let queue = Arc::new(queue);

        // Configure surface
        let surface_caps = surface.get_capabilities(&adapter);
        let surface_format = surface_caps
            .formats
            .iter()
            .find(|f| f.is_srgb())
            .copied()
            .unwrap_or(surface_caps.formats[0]);

        let surface_config = wgpu::SurfaceConfiguration {
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
            format: surface_format,
            width: config.width,
            height: config.height,
            present_mode: if config.vsync {
                wgpu::PresentMode::AutoVsync
            } else {
                wgpu::PresentMode::AutoNoVsync
            },
            alpha_mode: surface_caps.alpha_modes[0],
            view_formats: vec![],
            desired_maximum_frame_latency: 2,
        };
        surface.configure(&device, &surface_config);

        // Create depth texture
        let depth_texture = device.create_texture(&wgpu::TextureDescriptor {
            label: Some("Depth Texture"),
            size: wgpu::Extent3d {
                width: config.width,
                height: config.height,
                depth_or_array_layers: 1,
            },
            mip_level_count: 1,
            sample_count: 1,
            dimension: wgpu::TextureDimension::D2,
            format: wgpu::TextureFormat::Depth32Float,
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::TEXTURE_BINDING,
            view_formats: &[],
        });

        let depth_view = depth_texture.create_view(&wgpu::TextureViewDescriptor::default());

        // Create unified plot renderer
        let plot_renderer =
            crate::core::PlotRenderer::new(device.clone(), queue.clone(), surface_config).await?;
        let plot_overlay = crate::gui::PlotOverlay::new();

        // Set up egui and apply our modern dark theme
        let egui_ctx = egui::Context::default();
        let theme = crate::styling::ModernDarkTheme::default();
        theme.apply_to_egui(&egui_ctx);

        let egui_state = EguiState::new(
            egui_ctx.clone(),
            egui::viewport::ViewportId::ROOT,
            &window,
            Some(window.scale_factor() as f32),
            None,
        );

        let egui_renderer = egui_wgpu::Renderer::new(
            &device,
            surface_format,
            None, // egui doesn't need depth buffer
            1,
        );

        Ok(Self {
            window,
            event_loop: Some(event_loop),
            plot_renderer,
            plot_overlay,
            surface,
            depth_texture,
            depth_view,
            egui_ctx,
            egui_state,
            egui_renderer,
            config,
            mouse_position: Vec2::ZERO,
            is_mouse_over_plot: false, // becomes true while the left button is held for panning
        })
    }

    /// Add a simple line plot to the scene for testing
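    ///
    /// Plots y = sin(x) over 100 samples of x in [0.0, 9.9].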
    pub fn add_test_plot(&mut self) {
        use crate::core::vertex_utils;

        // Create some test data
        let x_data: Vec<f64> = (0..100).map(|i| i as f64 * 0.1).collect();
        let y_data: Vec<f64> = x_data.iter().map(|x| x.sin()).collect();

        // Create vertices for the line plot
        let vertices =
            vertex_utils::create_line_plot(&x_data, &y_data, Vec4::new(0.0, 0.5, 1.0, 1.0));

        // Create a scene node
        let mut render_data = crate::core::RenderData {
            pipeline_type: PipelineType::Lines,
            vertices,
            indices: None,
            material: crate::core::Material::default(),
            draw_calls: vec![crate::core::DrawCall {
                vertex_offset: 0,
                vertex_count: (x_data.len() - 1) * 2, // Each line segment has 2 vertices
                index_offset: None,
                index_count: None,
                instance_count: 1,
            }],
        };

        // Set material color
        render_data.material.albedo = Vec4::new(0.0, 0.5, 1.0, 1.0);

        let node = crate::core::SceneNode {
            id: 0, // Will be set by scene
            name: "Test Line Plot".to_string(),
            transform: Mat4::IDENTITY,
            visible: true,
            cast_shadows: false,
            receive_shadows: false,
            parent: None,
            children: Vec::new(),
            render_data: Some(render_data),
            bounds: crate::core::BoundingBox::from_points(
                &x_data
                    .iter()
                    .zip(y_data.iter())
                    .map(|(&x, &y)| Vec3::new(x as f32, y as f32, 0.0))
                    .collect::<Vec<_>>(),
            ),
            lod_levels: Vec::new(),
            current_lod: 0,
        };

        self.plot_renderer.scene.add_node(node);

        // Fit camera to show the plot
        let bounds_min = Vec3::new(-1.0, -1.5, -1.0);
        let bounds_max = Vec3::new(10.0, 1.5, 1.0);
        self.plot_renderer.camera.fit_bounds(bounds_min, bounds_max);
    }

    /// Set the figure to display in this window (clears existing content)
    pub fn set_figure(&mut self, figure: crate::plots::Figure) {
        // Use the unified plot renderer
        self.plot_renderer.set_figure(figure);
    }

    /// Run the interactive plot window event loop
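    ///
    /// Consumes the window's event loop and blocks until the window is closed;
    /// calling `run` a second time returns an "Event loop already consumed" error.
    ///
    /// A minimal usage sketch (hedged: `config`/`figure` construction is elided and
    /// the `pollster` executor is an assumption for illustration):
    ///
    /// ```ignore
    /// let mut window = pollster::block_on(PlotWindow::new(config))?;
    /// window.set_figure(figure);
    /// pollster::block_on(window.run())?;
    /// ```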
    pub async fn run(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        let event_loop = self
            .event_loop
            .take()
            .ok_or("Event loop already consumed")?;
        let window = self.window.clone();
        let mut last_render_time = std::time::Instant::now();

        event_loop.run(move |event, target| {
            target.set_control_flow(winit::event_loop::ControlFlow::Poll);

            // Handle egui events
            let mut repaint = false;
            if let Event::WindowEvent { ref event, .. } = event {
                let response = self.egui_state.on_window_event(&window, event);
                repaint = response.repaint;
            }
            if repaint {
                window.request_redraw();
            }

            match event {
                winit::event::Event::WindowEvent {
                    window_id,
                    event: winit::event::WindowEvent::CloseRequested,
                } if window_id == window.id() => {
                    target.exit();
                }

                winit::event::Event::WindowEvent {
                    window_id,
                    event: winit::event::WindowEvent::Resized(new_size),
                } if window_id == window.id() => {
                    // Resize surface and depth texture
                    if new_size.width > 0 && new_size.height > 0 {
                        self.resize(new_size.width, new_size.height);
                    }
                }

                winit::event::Event::WindowEvent {
                    window_id,
                    event: winit::event::WindowEvent::RedrawRequested,
                } if window_id == window.id() => {
                    let now = std::time::Instant::now();
                    let dt = now - last_render_time;
                    last_render_time = now;

                    match self.render(dt) {
                        Ok(_) => {}
                        Err(wgpu::SurfaceError::Lost) => {
                            self.resize(self.config.width, self.config.height)
                        }
                        Err(wgpu::SurfaceError::OutOfMemory) => target.exit(),
                        Err(e) => eprintln!("Render error: {e:?}"),
                    }
                }

                winit::event::Event::WindowEvent {
                    window_id,
                    event: winit::event::WindowEvent::MouseInput { button, state, .. },
                } if window_id == window.id() => {
                    self.handle_mouse_input(button, state);
                }

                winit::event::Event::WindowEvent {
                    window_id,
                    event: winit::event::WindowEvent::CursorMoved { position, .. },
                } if window_id == window.id() => {
                    self.handle_mouse_move(position);
                }

                winit::event::Event::WindowEvent {
                    window_id,
                    event: winit::event::WindowEvent::MouseWheel { delta, .. },
                } if window_id == window.id() => {
                    self.handle_mouse_scroll(delta);
                }

                winit::event::Event::AboutToWait => {
                    // Request redraw only when interaction occurs - prevents infinite loop
                    if repaint {
                        window.request_redraw();
                    }
                }

                _ => {}
            }
        })?;

        Ok(())
    }

    /// Handle window resize
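    ///
    /// Reconfigures the surface, recreates the depth texture at the new size,
    /// and updates the camera's aspect ratio. Zero-sized dimensions are ignored.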
    fn resize(&mut self, width: u32, height: u32) {
        if width == 0 || height == 0 {
            return; // Skip invalid sizes that could cause crashes
        }

        self.config.width = width;
        self.config.height = height;

        // Recreate the surface configuration at the new size
        let surface_config = wgpu::SurfaceConfiguration {
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
            format: self.plot_renderer.wgpu_renderer.surface_config.format,
            width,
            height,
            present_mode: if self.config.vsync {
                wgpu::PresentMode::AutoVsync
            } else {
                wgpu::PresentMode::AutoNoVsync
            },
            alpha_mode: wgpu::CompositeAlphaMode::Auto,
            view_formats: vec![],
            desired_maximum_frame_latency: 2,
        };

        // Update renderer's surface config
        self.plot_renderer.wgpu_renderer.surface_config = surface_config.clone();
        self.surface
            .configure(&self.plot_renderer.wgpu_renderer.device, &surface_config);

        // Recreate depth texture
        self.depth_texture =
            self.plot_renderer
                .wgpu_renderer
                .device
                .create_texture(&wgpu::TextureDescriptor {
                    label: Some("Depth Texture"),
                    size: wgpu::Extent3d {
                        width,
                        height,
                        depth_or_array_layers: 1,
                    },
                    mip_level_count: 1,
                    sample_count: 1,
                    dimension: wgpu::TextureDimension::D2,
                    format: wgpu::TextureFormat::Depth32Float,
                    usage: wgpu::TextureUsages::RENDER_ATTACHMENT
                        | wgpu::TextureUsages::TEXTURE_BINDING,
                    view_formats: &[],
                });

        self.depth_view = self
            .depth_texture
            .create_view(&wgpu::TextureViewDescriptor::default());

        // Update camera aspect ratio
        self.plot_renderer
            .camera
            .update_aspect_ratio(width as f32 / height as f32);
    }

    /// Render a frame
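    ///
    /// Draws the egui overlay first, then renders the plot geometry into the
    /// plot area reported by the overlay, and finally presents the frame.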
    fn render(&mut self, _dt: std::time::Duration) -> Result<(), wgpu::SurfaceError> {
        // Get the next frame
        let output = self.surface.get_current_texture()?;
        let view = output
            .texture
            .create_view(&wgpu::TextureViewDescriptor::default());

        // Camera updates are driven by the mouse-interaction handlers

        // Create command encoder
        let mut encoder = self
            .plot_renderer
            .wgpu_renderer
            .device
            .create_command_encoder(&wgpu::CommandEncoderDescriptor {
                label: Some("Render Encoder"),
            });

        // Render egui
        let raw_input = self.egui_state.take_egui_input(&self.window);

        // Get UI data before borrowing
        let scene_stats = self.plot_renderer.scene.statistics();
        let _camera_pos = self.plot_renderer.camera.position;

        // Track the plot area for WGPU rendering
        let mut plot_area: Option<egui::Rect> = None;

        let full_output = self.egui_ctx.run(raw_input, |ctx| {
            // Use PlotOverlay for unified UI rendering - no more duplicate sidebar code!
            let overlay_config = OverlayConfig {
                show_grid: true,
                show_axes: true,
                ..Default::default()
            };
            let overlay_metrics = OverlayMetrics {
                vertex_count: scene_stats.total_vertices,
                triangle_count: scene_stats.total_triangles,
                render_time_ms: 0.0, // TODO: Add timing
                fps: 60.0,           // TODO: Calculate actual FPS
            };

            let frame_info = self.plot_overlay.render(
                ctx,
                &self.plot_renderer,
                &overlay_config,
                overlay_metrics,
            );
            plot_area = frame_info.plot_area;
        });

        // Calculate data bounds for viewport transformation
        let data_bounds = self.plot_renderer.calculate_data_bounds();

        // Now that the plot area is known, update the camera and WGPU rendering accordingly
        if let Some(plot_rect) = plot_area {
            // Update camera aspect ratio to match the plot area
            let plot_width = plot_rect.width();
            let plot_height = plot_rect.height();
            if plot_width > 0.0 && plot_height > 0.0 {
                self.plot_renderer
                    .camera
                    .update_aspect_ratio(plot_width / plot_height);
            }
        }

        self.egui_state
            .handle_platform_output(&self.window, full_output.platform_output);

        let tris = self
            .egui_ctx
            .tessellate(full_output.shapes, full_output.pixels_per_point);
        for (id, image_delta) in &full_output.textures_delta.set {
            self.egui_renderer.update_texture(
                &self.plot_renderer.wgpu_renderer.device,
                &self.plot_renderer.wgpu_renderer.queue,
                *id,
                image_delta,
            );
        }

        self.egui_renderer.update_buffers(
            &self.plot_renderer.wgpu_renderer.device,
            &self.plot_renderer.wgpu_renderer.queue,
            &mut encoder,
            &tris,
            &egui_wgpu::ScreenDescriptor {
                size_in_pixels: [self.config.width, self.config.height],
                pixels_per_point: self.window.scale_factor() as f32,
            },
        );

        {
            let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
                label: Some("Egui Render Pass"),
                color_attachments: &[Some(wgpu::RenderPassColorAttachment {
                    view: &view,
                    resolve_target: None,
                    ops: wgpu::Operations {
                        load: wgpu::LoadOp::Load,
                        store: wgpu::StoreOp::Store,
                    },
                })],
                depth_stencil_attachment: None,
                occlusion_query_set: None,
                timestamp_writes: None,
            });

            self.egui_renderer.render(
                &mut render_pass,
                &tris,
                &egui_wgpu::ScreenDescriptor {
                    size_in_pixels: [self.config.width, self.config.height],
                    pixels_per_point: self.window.scale_factor() as f32,
                },
            );

            // End the egui render pass to avoid borrowing conflicts
            drop(render_pass);

            // Render WGPU plot data on top of egui content using the unified renderer
            if let Some(plot_rect) = plot_area {
                let scale_factor = self.window.scale_factor() as f32;

                let viewport = (
                    plot_rect.min.x * scale_factor,
                    plot_rect.min.y * scale_factor,
                    plot_rect.width() * scale_factor,
                    plot_rect.height() * scale_factor,
                );

                // Execute optimized direct viewport rendering
                if let Some(bounds) = data_bounds {
                    let _ = self.plot_renderer.render_direct_to_viewport(
                        &mut encoder,
                        &view,
                        viewport,
                        bounds,
                        false, // Don't clear background, preserve egui content
                        None,  // No custom background color
                    );
                }
            }
        }

        for id in &full_output.textures_delta.free {
            self.egui_renderer.free_texture(id);
        }

        // Submit commands
        self.plot_renderer
            .wgpu_renderer
            .queue
            .submit(std::iter::once(encoder.finish()));
        output.present();

        Ok(())
    }

    /// Handle mouse input
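    ///
    /// A left-button press starts drag panning; releasing the button ends it.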
    fn handle_mouse_input(
        &mut self,
        button: winit::event::MouseButton,
        state: winit::event::ElementState,
    ) {
        use winit::event::{ElementState, MouseButton};

        match (button, state) {
            (MouseButton::Left, ElementState::Pressed) => {
                self.is_mouse_over_plot = true; // For panning
            }
            (MouseButton::Left, ElementState::Released) => {
                self.is_mouse_over_plot = false;
            }
            _ => {}
        }
    }

    /// Handle mouse movement
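    ///
    /// Tracks the cursor position and, while the left button is held, pans the
    /// camera by the (negated, scaled) cursor delta.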
    fn handle_mouse_move(&mut self, position: winit::dpi::PhysicalPosition<f64>) {
        let new_position = glam::Vec2::new(position.x as f32, position.y as f32);
        let delta = new_position - self.mouse_position;
        self.mouse_position = new_position;

        // Pan when left mouse button is held down
        if self.is_mouse_over_plot && delta.length() > 0.0 {
            self.plot_renderer.camera.pan(-delta * 0.01); // Negative for natural feel
        }
    }

    /// Handle mouse scroll
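    ///
    /// Scrolling up zooms in: the orthographic frustum is scaled about its
    /// current center by a factor of `1.0 + scroll_delta * 0.1`.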
    fn handle_mouse_scroll(&mut self, delta: winit::event::MouseScrollDelta) {
        let scroll_delta = match delta {
            winit::event::MouseScrollDelta::LineDelta(_, y) => y,
            winit::event::MouseScrollDelta::PixelDelta(pos) => pos.y as f32 / 100.0,
        };

        // Zoom in/out by scaling the orthographic projection
        if let crate::core::camera::ProjectionType::Orthographic {
            ref mut left,
            ref mut right,
            ref mut bottom,
            ref mut top,
            ..
        } = self.plot_renderer.camera.projection
        {
            let zoom_factor = 1.0 + scroll_delta * 0.1;
            let center_x = (*left + *right) / 2.0;
            let center_y = (*bottom + *top) / 2.0;
            let width = (*right - *left) / zoom_factor;
            let height = (*top - *bottom) / zoom_factor;

            *left = center_x - width / 2.0;
            *right = center_x + width / 2.0;
            *bottom = center_y - height / 2.0;
            *top = center_y + height / 2.0;
        }
    }
}