1use image::ImageBuffer;
2use mraphics_core::{
3 Animation, Camera, Color, MeshHandle, MeshLike, MeshPool, MraphicsID, RenderInstance, Renderer,
4 Scene, Timeline,
5};
6use std::{
7 cell::RefCell, collections::HashMap, marker::PhantomData, rc::Rc, sync::Arc, time::Duration,
8};
9use wgpu::{Surface, SurfaceConfiguration, Texture, TextureFormat};
10use winit::{dpi::LogicalSize, event::WindowEvent, event_loop::EventLoop, window::Window};
11
/// Bundles the winit window with its wgpu surface and the configuration the
/// surface was last configured with, so the three are created and dropped
/// together.
struct WindowContext {
    /// Shared handle to the OS window; the `Arc` also keeps the surface's
    /// render target alive for the `'static` surface below.
    pub window: Arc<Window>,
    /// wgpu surface created from the `Arc<Window>` above.
    pub surface: Surface<'static>,
    /// Last configuration applied to `surface` (size, format, present mode, ...).
    pub surface_config: SurfaceConfiguration,
}
17
/// Pixel format for offscreen (snapshot) render targets; RGBA8 so the GPU
/// readback maps directly onto `image::Rgba<u8>` buffers.
const OFFSCREEN_TEXTURE_FORMAT: TextureFormat = wgpu::TextureFormat::Rgba8Unorm;
19
/// Top-level drawing context: owns the scene, mesh pool, camera, and timeline,
/// and can render either to a preview window (via `run`) or to an offscreen
/// texture (via `snapshot`/`render_offscreen`).
pub struct Canvas<'res, T: Timeline<'res>, C: Camera> {
    // Window/surface state; `None` until `resumed` creates the window
    // (offscreen-only usage never sets it).
    window_ctx: Option<WindowContext>,

    /// Current render-target size in pixels (width, height).
    pub size: (u32, u32),

    // Lazily created on first windowed or offscreen render.
    pub(crate) renderer: Option<Renderer>,

    // Cached color target for offscreen rendering; created on demand.
    pub(crate) offscreen_texture: Option<Texture>,

    /// Camera used for every render; its aspect ratio is kept in sync with `size`.
    pub camera: C,

    /// User hook invoked after the built-in handling of each window event.
    pub on_window_event: Box<dyn FnMut(&winit::event_loop::ActiveEventLoop, &WindowEvent, &mut C)>,

    /// Animation timeline driving the scene.
    pub timeline: T,
    /// Current scheduling position (seconds); `queue_animation` appends here.
    pub playhead: f32,

    /// Scene graph shared with queued animation actions.
    pub scene: Rc<RefCell<Scene>>,

    /// Mesh storage shared with queued animation actions.
    pub mesh_pool: Rc<RefCell<MeshPool>>,

    /// Per-mesh dirty flags; `true` means re-upload on the next `update_meshes`.
    pub update_flags: HashMap<MraphicsID, bool>,

    /// Background color used to clear each frame.
    pub clear_color: Color<f64>,

    // Ties the unused `'res` lifetime parameter to the struct.
    _marker: PhantomData<&'res ()>,
}
54
55impl<'res, T: Timeline<'res>, C: Camera> Canvas<'res, T, C> {
56 pub fn new(timeline: T, mut camera: C) -> Self {
57 let (width, height) = (960, 540);
58 camera.set_aspect(width as f32 / height as f32);
59 Self {
60 window_ctx: None,
61
62 size: (width, height),
63
64 renderer: None,
65
66 offscreen_texture: None,
67
68 camera,
69
70 on_window_event: Box::new(|_, _, _| {}),
71
72 timeline,
73 playhead: 0.0,
74
75 scene: Rc::new(RefCell::new(Scene::new())),
76 mesh_pool: Rc::new(RefCell::new(MeshPool::new())),
77 update_flags: HashMap::new(),
78
79 clear_color: Color::from_hex_str(mraphics_core::constants::GRAY_E).unwrap(),
80
81 _marker: PhantomData,
82 }
83 }
84
85 pub fn queue_animation<Ani: Animation<'res>>(&mut self, animation: Ani, duration: &Duration) {
87 let mut action = animation.into_action(self.mesh_pool.clone(), self.scene.clone());
88 action.duration = duration.as_secs_f32();
89 action.start_time = self.playhead;
90
91 self.playhead += action.duration;
92
93 self.timeline.add_action(action);
94 }
95
96 pub fn advance_playhead(&mut self, step: &Duration) {
98 self.playhead += step.as_secs_f32();
99 }
100
101 pub fn add_mesh<Mesh: MeshLike + 'static>(&mut self, mut mesh: Mesh) -> MeshHandle<Mesh> {
102 self.scene.borrow_mut().add_mesh(&mut mesh);
103
104 self.scene
105 .borrow_mut()
106 .acquire_instance_mut_unchecked(mesh.identifier())
107 .sync_matrix_data();
108
109 let mesh_handle = self.mesh_pool.borrow_mut().add_mesh(mesh);
110
111 self.update_flags.insert(mesh_handle.identifier(), false);
112
113 mesh_handle
114 }
115
116 pub fn update_meshes(&mut self) {
125 for (&id, needs_update) in &mut self.update_flags {
126 if !*needs_update {
127 continue;
128 }
129
130 self.mesh_pool.borrow_mut().update_mesh(id);
131 self.mesh_pool.borrow_mut().update_instance(
132 id,
133 self.scene.borrow_mut().acquire_instance_mut_unchecked(id),
134 );
135 *needs_update = false;
136 }
137 }
138
139 pub fn mark_for_update<Mesh: MeshLike + 'static>(&mut self, mesh_handle: &MeshHandle<Mesh>) {
141 self.update_flags.insert(mesh_handle.identifier(), true);
142 }
143
144 pub fn with_instance<F: FnMut(Option<&mut RenderInstance>), Mesh: MeshLike>(
145 &self,
146 mesh_handle: &MeshHandle<Mesh>,
147 mut closure: F,
148 ) {
149 closure(
150 self.scene
151 .borrow_mut()
152 .acquire_instance_mut(mesh_handle.identifier()),
153 );
154 }
155
156 pub fn with_instance_unchecked<F: FnMut(&mut RenderInstance), Mesh: MeshLike>(
157 &self,
158 mesh_handle: &MeshHandle<Mesh>,
159 mut closure: F,
160 ) {
161 closure(
162 self.scene
163 .borrow_mut()
164 .acquire_instance_mut_unchecked(mesh_handle.identifier()),
165 );
166 }
167
168 pub fn with_mesh<Mesh: MeshLike + 'static, F: FnMut(Option<&mut Mesh>)>(
169 &mut self,
170 mesh_handle: &MeshHandle<Mesh>,
171 mut closure: F,
172 ) {
173 self.mark_for_update(mesh_handle);
174 closure(
175 self.mesh_pool
176 .borrow_mut()
177 .acquire_mesh_mut::<Mesh>(mesh_handle.identifier()),
178 )
179 }
180
181 pub fn with_mesh_unchecked<Mesh: MeshLike + 'static, F: FnMut(&mut Mesh)>(
182 &mut self,
183 mesh_handle: &MeshHandle<Mesh>,
184 mut closure: F,
185 ) {
186 self.mark_for_update(mesh_handle);
187 closure(
188 self.mesh_pool
189 .borrow_mut()
190 .acquire_mesh_mut_unchecked::<Mesh>(mesh_handle.identifier()),
191 )
192 }
193
194 pub fn resize(&mut self, size: (u32, u32)) {
195 self.size = size;
196
197 self.camera.set_aspect(size.0 as f32 / size.1 as f32);
198
199 if self.window_ctx.is_some() {
200 self.resize_surface(size.0, size.1);
201 }
202 }
203
204 pub fn run(&mut self) {
205 let event_loop = EventLoop::new().unwrap();
206 event_loop.run_app(self).unwrap();
207 }
208
209 pub fn snapshot(&mut self, time: f32, path: &str) {
210 self.timeline.seek(time);
211 self.timeline.process();
212
213 self.render_offscreen();
214
215 let raw_image = self
216 .renderer
217 .as_ref()
218 .unwrap()
219 .read_texture_rgbau8(self.offscreen_texture.as_mut().unwrap(), self.size);
220
221 ImageBuffer::<image::Rgba<u8>, Vec<u8>>::from_raw(self.size.0, self.size.1, raw_image)
222 .unwrap()
223 .save(path)
224 .unwrap();
225 }
226
227 pub fn render_offscreen(&mut self) {
228 self.prepare_offscreen_rendering();
229 self.update_meshes();
230 self.renderer.as_mut().unwrap().render(
231 self.offscreen_texture.as_mut().unwrap(),
232 OFFSCREEN_TEXTURE_FORMAT,
233 &mut self.scene.borrow_mut().instances,
234 &self.camera,
235 &self.clear_color,
236 );
237 }
238
239 fn prepare_offscreen_rendering(&mut self) {
240 if self.offscreen_texture.is_some() {
241 return;
242 }
243
244 if self.renderer.is_none() {
245 let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor {
246 backends: wgpu::Backends::PRIMARY,
247 ..Default::default()
248 });
249
250 pollster::block_on(async {
251 let adapter = instance
252 .request_adapter(&wgpu::RequestAdapterOptions {
253 force_fallback_adapter: false,
254 ..Default::default()
255 })
256 .await
257 .unwrap();
258
259 let (device, queue) = adapter
260 .request_device(&wgpu::DeviceDescriptor::default())
261 .await
262 .unwrap();
263
264 self.renderer = Some(Renderer::new(device, queue));
265 })
266 }
267
268 self.offscreen_texture = Some(self.renderer.as_ref().unwrap().device.create_texture(
269 &wgpu::wgt::TextureDescriptor {
270 label: Some("Offline texture"),
271 size: wgpu::Extent3d {
272 width: self.size.0,
273 height: self.size.1,
274 depth_or_array_layers: 1,
275 },
276 mip_level_count: 1,
277 sample_count: 1,
278 dimension: wgpu::TextureDimension::D2,
279 format: OFFSCREEN_TEXTURE_FORMAT,
280 usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC,
281 view_formats: &[],
282 },
283 ));
284 }
285
286 fn resize_surface(&mut self, width: u32, height: u32) {
287 let mut window_ctx = self.window_ctx.take().unwrap();
288 let surface_config = &mut window_ctx.surface_config;
289
290 surface_config.width = width;
291 surface_config.height = height;
292
293 window_ctx
294 .surface
295 .configure(&self.renderer.as_ref().unwrap().device, surface_config);
296
297 self.window_ctx = Some(window_ctx);
298
299 self.size = (width, height);
300 }
301}
302
303impl<'res, T: Timeline<'res>, C: Camera> winit::application::ApplicationHandler
304 for Canvas<'res, T, C>
305{
306 fn resumed(&mut self, event_loop: &winit::event_loop::ActiveEventLoop) {
307 let window = Arc::new(
308 event_loop
309 .create_window(
310 Window::default_attributes()
311 .with_inner_size(LogicalSize::new(self.size.0, self.size.1))
312 .with_title("Mraphics Preview"),
313 )
314 .unwrap(),
315 );
316
317 let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor {
318 backends: wgpu::Backends::PRIMARY,
319 ..Default::default()
320 });
321
322 let surface = instance.create_surface(Arc::clone(&window)).unwrap();
323
324 pollster::block_on(async {
325 let adapter = instance
326 .request_adapter(&wgpu::RequestAdapterOptions {
327 force_fallback_adapter: false,
328 compatible_surface: Some(&surface),
329 ..Default::default()
330 })
331 .await
332 .unwrap();
333
334 let (device, queue) = adapter
335 .request_device(&wgpu::DeviceDescriptor::default())
336 .await
337 .unwrap();
338
339 let surface_caps = surface.get_capabilities(&adapter);
340 let surface_config = wgpu::SurfaceConfiguration {
341 width: self.size.0,
342 height: self.size.1,
343 usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
344 format: wgpu::TextureFormat::Rgba8Unorm,
345 present_mode: surface_caps.present_modes[0],
346 alpha_mode: surface_caps.alpha_modes[0],
347 view_formats: vec![],
348 desired_maximum_frame_latency: 2,
349 };
350
351 surface.configure(&device, &surface_config);
352
353 self.window_ctx = Some(WindowContext {
354 window,
355 surface,
356 surface_config,
357 });
358 self.renderer = Some(Renderer::new(device, queue));
359 });
360
361 self.timeline.start();
362 }
363
364 fn window_event(
365 &mut self,
366 event_loop: &winit::event_loop::ActiveEventLoop,
367 _window_id: winit::window::WindowId,
368 event: WindowEvent,
369 ) {
370 match event {
371 WindowEvent::CloseRequested => {
372 event_loop.exit();
373 }
374 WindowEvent::Resized(size) => {
375 self.camera
376 .set_aspect(size.width as f32 / size.height as f32);
377 self.resize_surface(size.width, size.height);
378 }
379 WindowEvent::RedrawRequested => {
380 let window_ctx = self.window_ctx.take().unwrap();
381
382 self.timeline.forward();
383
384 self.update_meshes();
385
386 let texture = match window_ctx.surface.get_current_texture() {
387 Ok(texture) => texture,
388 Err(_) => {
389 return;
391 }
392 };
393
394 self.renderer.as_mut().unwrap().render(
395 &texture.texture,
396 window_ctx.surface_config.format,
397 &mut self.scene.borrow_mut().instances,
398 &self.camera,
399 &self.clear_color,
400 );
401
402 texture.present();
403
404 window_ctx.window.request_redraw();
405
406 self.window_ctx = Some(window_ctx);
407 }
408 _ => {}
409 }
410
411 (self.on_window_event)(event_loop, &event, &mut self.camera);
412 }
413}