1use nalgebra::{Matrix4, Point3, Vector3};
2use serde::{Deserialize, Serialize};
3use serde_wasm_bindgen::to_value;
4use wasm_bindgen::prelude::*;
5use wasm_bindgen_futures::spawn_local;
6use web_sys::HtmlCanvasElement;
7use wgpu::util::DeviceExt;
8use winit::{
9 dpi::LogicalSize,
10 event::*,
11 event_loop::{self, ControlFlow, EventLoop},
12 window::{Window, WindowBuilder},
13};
14
15use bytemuck::{Pod, Zeroable};
16use std::cell::RefCell;
17use std::rc::Rc;
18use wasm_bindgen::prelude::*;
19use wasm_bindgen::JsCast;
20use web_sys::window;
21
22use gloo_utils::format::JsValueSerdeExt;
23use gltf::buffer::{Source, View};
24use gltf::Glb;
25use gltf::Gltf;
26use std::ops::{Deref, DerefMut};
27use std::sync::atomic::{AtomicBool, Ordering};
28use std::sync::Mutex;
29use std::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard};
30use wasm_bindgen_futures::future_to_promise;
31
32use crate::renderer::shapes::Pyramid::Pyramid;
33use crate::renderer::Grid::Grid;
34use crate::renderer::Landscape::Landscape;
35use crate::renderer::Model::{Mesh, Model};
36use crate::renderer::SimpleCamera::SimpleCamera;
37
#[wasm_bindgen]
extern "C" {
    /// Binding to the Tauri `invoke` function exposed at
    /// `window.__TAURI__.tauri`. Sends `cmd` plus serialized `args` to the
    /// Tauri backend and resolves with whatever JS value the command returns.
    #[wasm_bindgen(js_namespace = ["window", "__TAURI__", "tauri"])]
    pub async fn invoke(cmd: &str, args: JsValue) -> JsValue;
}
44
45#[derive(Serialize)]
46pub struct ReadModelParams {
47 pub projectId: String,
48 pub modelFilename: String,
49}
50
51#[derive(Serialize)]
52pub struct GetLandscapeParams {
53 pub projectId: String,
54 pub landscapeFilename: String,
55}
56
/// CPU-side vertex format shared by the meshes this module renders.
///
/// `#[repr(C)]` pins the field layout so a `&[Vertex]` can be byte-cast
/// (via bytemuck) straight into a wgpu vertex buffer; see `Vertex::desc`
/// for the matching GPU-side attribute layout.
#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct Vertex {
    pub position: [f32; 3],   // shader location 0
    pub normal: [f32; 3],     // shader location 1
    pub tex_coords: [f32; 2], // shader location 2
    pub color: [f32; 3],      // shader location 3
}

// SAFETY: `Vertex` is `#[repr(C)]` and contains only `f32` arrays, so it has
// no padding, no pointers, and every bit pattern is a valid value — the
// requirements for treating it as plain old data.
unsafe impl Pod for Vertex {}
// SAFETY: the all-zero bit pattern is a valid `Vertex` (all fields 0.0).
unsafe impl Zeroable for Vertex {}
69
70impl Vertex {
71 const ATTRIBS: [wgpu::VertexAttribute; 4] =
72 wgpu::vertex_attr_array![0 => Float32x3, 1 => Float32x3, 2 => Float32x2, 3 => Float32x3];
73
74 pub fn desc<'a>() -> wgpu::VertexBufferLayout<'a> {
75 wgpu::VertexBufferLayout {
76 array_stride: std::mem::size_of::<Vertex>() as wgpu::BufferAddress,
77 step_mode: wgpu::VertexStepMode::Vertex,
78 attributes: &Self::ATTRIBS,
79 }
80 }
81}
82
// Lazily-initialized global camera. `static mut` forces `unsafe` at every
// access; NOTE(review): this pattern presumably relies on the wasm build
// being single-threaded — a `thread_local!` + `RefCell` would be safer.
static mut CAMERA: Option<SimpleCamera> = None;

thread_local! {
    // Set to `true` once `get_camera` has installed the camera above.
    static CAMERA_INIT: std::cell::Cell<bool> = std::cell::Cell::new(false);
}
88
/// Returns a mutable reference to the global camera, creating it with a
/// default position/orientation on first call.
///
/// NOTE(review): this hands out `&'static mut` references from a
/// `static mut`; two live references obtained from separate calls would be
/// aliasing UB. The callers in this file keep the borrows short-lived, but
/// this deserves confirming/refactoring.
pub fn get_camera() -> &'static mut SimpleCamera {
    CAMERA_INIT.with(|init| {
        if !init.get() {
            unsafe {
                // First access: install the default camera.
                CAMERA = Some(SimpleCamera::new(
                    Point3::new(0.0, 0.0, 5.0),   // eye position
                    Vector3::new(0.0, 0.0, -1.0), // looking down -Z
                    Vector3::new(0.0, 1.0, 0.0),  // +Y is up
                    45.0f32.to_radians(),         // field of view
                    0.1,                          // near plane
                    100.0,                        // far plane
                ));
            }
            init.set(true);
        }
    });

    unsafe { CAMERA.as_mut().unwrap() }
}
108
/// Everything the render loop draws, plus the shared GPU handles needed to
/// create new scene objects at runtime (models, landscapes).
struct RendererState {
    // Scene contents.
    pyramids: Vec<Pyramid>,
    grids: Vec<Grid>,
    models: Vec<Model>,
    landscapes: Vec<Landscape>,

    // GPU handles cloned from `start_render_loop` and shared via `Arc`.
    device: Arc<wgpu::Device>,
    queue: Arc<wgpu::Queue>,
    model_bind_group_layout: Arc<wgpu::BindGroupLayout>,
    texture_bind_group_layout: Arc<wgpu::BindGroupLayout>,
    // Uniform buffers selecting the fragment render mode: the texture buffer
    // is initialized with 1, the color buffer with 0 (see `start_render_loop`).
    texture_render_mode_buffer: Arc<wgpu::Buffer>,
    color_render_mode_buffer: Arc<wgpu::Buffer>,
}
125
126impl RendererState {
128 async fn new(
129 device: Arc<wgpu::Device>,
130 queue: Arc<wgpu::Queue>,
131 model_bind_group_layout: Arc<wgpu::BindGroupLayout>,
132 texture_bind_group_layout: Arc<wgpu::BindGroupLayout>,
133 texture_render_mode_buffer: Arc<wgpu::Buffer>,
134 color_render_mode_buffer: Arc<wgpu::Buffer>,
135 ) -> Self {
136 let mut grids = Vec::new();
138 grids.push(Grid::new(
139 &device,
140 &model_bind_group_layout,
141 &color_render_mode_buffer,
142 ));
143
144 let mut pyramids = Vec::new();
145 let mut models = Vec::new();
149
150 let mut landscapes = Vec::new();
151
152 Self {
153 pyramids,
154 grids,
155 models,
156 landscapes,
157
158 device,
159 queue,
160 model_bind_group_layout,
161 texture_bind_group_layout,
162 texture_render_mode_buffer,
163 color_render_mode_buffer,
164 }
165 }
166
167 async fn add_model(&mut self, bytes: &Vec<u8>) {
168 let model = Model::from_glb(
169 bytes,
170 &self.device,
171 &self.queue,
172 &self.model_bind_group_layout,
173 &self.texture_bind_group_layout,
174 &self.texture_render_mode_buffer,
175 &self.color_render_mode_buffer,
176 )
177 .await;
178
179 self.models.push(model);
180 }
181
182 fn add_landscape(&mut self, data: &LandscapeData) {
183 let landscape = Landscape::new(
184 data,
185 &self.device,
186 &self.queue,
187 &self.model_bind_group_layout,
188 &self.texture_bind_group_layout,
189 &self.color_render_mode_buffer,
191 );
192
193 self.landscapes.push(landscape);
194 }
195}
196
/// Global pause flag polled by the render loop on every animation frame;
/// while `true`, frames are skipped (set around async asset loads that
/// mutate the renderer state).
static RENDERING_PAUSED: AtomicBool = AtomicBool::new(false);

/// Shared writer for the pause flag.
fn set_rendering_paused(paused: bool) {
    RENDERING_PAUSED.store(paused, Ordering::SeqCst);
}

/// Suspend frame rendering until `resume_rendering` is called.
fn pause_rendering() {
    set_rendering_paused(true);
}

/// Allow the render loop to draw frames again.
fn resume_rendering() {
    set_rendering_paused(false);
}

/// Whether the render loop should currently skip drawing.
fn is_rendering_paused() -> bool {
    RENDERING_PAUSED.load(Ordering::SeqCst)
}
311
// Global renderer state, installed once by `initialize_renderer_state`.
// The `Mutex` serializes access between the frame closure and the async
// asset-loading handlers.
static mut RENDERER_STATE: Option<Mutex<RendererState>> = None;

thread_local! {
    // Set once `RENDERER_STATE` has been installed; checked by `get_renderer_state`.
    static RENDERER_STATE_INIT: std::cell::Cell<bool> = std::cell::Cell::new(false);
}
320
/// Installs the global `RendererState` (called once from
/// `start_render_loop`) and flips the init flag that `get_renderer_state`
/// checks before dereferencing the static.
fn initialize_renderer_state(state: RendererState) {
    unsafe {
        RENDERER_STATE = Some(Mutex::new(state));
    }
    RENDERER_STATE_INIT.with(|init| {
        init.set(true);
    });
}
330
/// Returns the global renderer-state mutex.
///
/// # Panics
/// Panics if `initialize_renderer_state` has not been called yet (i.e. the
/// render loop has not been started).
pub fn get_renderer_state() -> &'static Mutex<RendererState> {
    RENDERER_STATE_INIT.with(|init| {
        if !init.get() {
            panic!("RendererState not initialized");
        }
    });

    unsafe { RENDERER_STATE.as_ref().unwrap() }
}
341
/// Entry point called from JS: initializes wgpu on the `scene-canvas`
/// element, builds the render pipeline and the global `RendererState`, then
/// starts a self-rescheduling `requestAnimationFrame` loop that draws every
/// frame via `render_frame`.
#[wasm_bindgen]
pub async fn start_render_loop() {
    // Locate the canvas the surface will present into.
    let window = web_sys::window().unwrap();
    let document = window.document().unwrap();
    let canvas = document
        .get_element_by_id("scene-canvas")
        .unwrap()
        .dyn_into::<HtmlCanvasElement>()
        .unwrap();

    // DX12 compiler selection only matters on native Windows backends;
    // with both paths `None` wgpu uses its defaults. Harmless on wasm.
    let dx12_compiler = wgpu::Dx12Compiler::Dxc {
        dxil_path: None, dxc_path: None, };

    let instance = wgpu::Instance::new(wgpu::InstanceDescriptor {
        backends: wgpu::Backends::PRIMARY,
        dx12_shader_compiler: dx12_compiler,
        flags: wgpu::InstanceFlags::empty(),
        gles_minor_version: wgpu::Gles3MinorVersion::Version2,
    });

    let height = canvas.height();
    let width = canvas.width();

    // winit owns the surface's window; on wasm it is attached to the canvas.
    let event_loop = event_loop::EventLoop::new().unwrap();
    let builder = WindowBuilder::new().with_inner_size(LogicalSize::new(width, height));
    #[cfg(target_arch = "wasm32")] let builder = {
        use winit::platform::web::WindowBuilderExtWebSys;
        builder.with_canvas(Some(canvas))
    };
    let winit_window = builder.build(&event_loop).unwrap();

    let surface = unsafe {
        instance
            .create_surface(winit_window)
            .expect("Couldn't create GPU Surface")
    };

    let adapter = instance
        .request_adapter(&wgpu::RequestAdapterOptions {
            power_preference: wgpu::PowerPreference::default(),
            compatible_surface: Some(&surface),
            force_fallback_adapter: false,
        })
        .await
        .ok_or("Failed to find an appropriate adapter")
        .unwrap();

    let (device, queue) = adapter
        .request_device(
            &wgpu::DeviceDescriptor {
                label: None,
                required_features: wgpu::Features::empty(),
                required_limits: wgpu::Limits::default(),
            },
            None,
        )
        .await
        .expect("Failed to create device");

    // Shared with `RendererState` and the per-frame closure below.
    let device = Arc::new(device);
    let queue = Arc::new(queue);

    let swapchain_capabilities = surface.get_capabilities(&adapter);
    // First supported surface format doubles as the pipeline's color target.
    let swap_chain_format = swapchain_capabilities.formats[0]; let mut config = surface.get_default_config(&adapter, width, height).unwrap();
    surface.configure(&device, &config);

    let vertex_shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
        label: Some("Vertex Shader"),
        source: wgpu::ShaderSource::Wgsl(include_str!("./shaders/primary_vertex.wgsl").into()),
    });

    let fragment_shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
        label: Some("Fragment Shader"),
        source: wgpu::ShaderSource::Wgsl(include_str!("./shaders/primary_fragment.wgsl").into()),
    });

    // Fix the camera's aspect ratio to the canvas and compute the initial
    // view-projection matrix before uploading it as a uniform.
    let camera = get_camera();

    camera.update_aspect_ratio(config.width as f32 / config.height as f32);
    camera.update_view_projection_matrix();

    let camera_matrix = camera.view_projection_matrix;
    let camera_uniform_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
        label: Some("Camera Uniform Buffer"),
        contents: bytemuck::cast_slice(camera_matrix.as_slice()),
        usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
    });

    // Bind group 0: camera view-projection uniform (vertex stage).
    let camera_bind_group_layout =
        device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
            label: Some("Bind Group Layout"),
            entries: &[wgpu::BindGroupLayoutEntry {
                binding: 0,
                visibility: wgpu::ShaderStages::VERTEX,
                ty: wgpu::BindingType::Buffer {
                    ty: wgpu::BufferBindingType::Uniform,
                    has_dynamic_offset: false,
                    min_binding_size: None,
                },
                count: None,
            }],
        });

    // Bind group 1: per-object transform uniform (vertex stage).
    let model_bind_group_layout =
        device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
            entries: &[wgpu::BindGroupLayoutEntry {
                binding: 0,
                visibility: wgpu::ShaderStages::VERTEX,
                ty: wgpu::BindingType::Buffer {
                    ty: wgpu::BufferBindingType::Uniform,
                    has_dynamic_offset: false,
                    min_binding_size: None,
                },
                count: None,
            }],
            label: Some("model_bind_group_layout"),
        });

    let model_bind_group_layout = Arc::new(model_bind_group_layout);

    // Bind group 2: texture + sampler + render-mode uniform (fragment stage).
    let texture_bind_group_layout =
        device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
            entries: &[
                wgpu::BindGroupLayoutEntry {
                    binding: 0,
                    visibility: wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Texture {
                        multisampled: false,
                        view_dimension: wgpu::TextureViewDimension::D2,
                        sample_type: wgpu::TextureSampleType::Float { filterable: true },
                    },
                    count: None,
                },
                wgpu::BindGroupLayoutEntry {
                    binding: 1,
                    visibility: wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
                    count: None,
                },
                wgpu::BindGroupLayoutEntry {
                    binding: 2,
                    visibility: wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Buffer {
                        ty: wgpu::BufferBindingType::Uniform,
                        has_dynamic_offset: false,
                        min_binding_size: None,
                    },
                    count: None,
                },
            ],
            label: Some("Model Bind Group Layout"),
        });

    let texture_bind_group_layout = Arc::new(texture_bind_group_layout);

    let camera_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
        layout: &camera_bind_group_layout,
        entries: &[wgpu::BindGroupEntry {
            binding: 0,
            resource: camera_uniform_buffer.as_entire_binding(),
        }],
        label: Some("Bind Group"),
    });

    // Render-mode uniforms: 0 selects flat color, 1 selects textured.
    let color_render_mode_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
        label: Some("Color Render Mode Buffer"),
        contents: bytemuck::cast_slice(&[0i32]), usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
    });

    let color_render_mode_buffer = Arc::new(color_render_mode_buffer);

    let texture_render_mode_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
        label: Some("Texture Render Mode Buffer"),
        contents: bytemuck::cast_slice(&[1i32]), usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
    });

    let texture_render_mode_buffer = Arc::new(texture_render_mode_buffer);

    let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
        label: Some("Render Pipeline Layout"),
        bind_group_layouts: &[
            &camera_bind_group_layout,
            &model_bind_group_layout,
            &texture_bind_group_layout,
        ],
        push_constant_ranges: &[],
    });

    // Depth buffer sized to the canvas. NOTE(review): nothing recreates this
    // on resize — worth confirming whether the canvas size can change.
    let depth_texture = device.create_texture(&wgpu::TextureDescriptor {
        size: wgpu::Extent3d {
            width: width,
            height: height,
            depth_or_array_layers: 1,
        },
        mip_level_count: 1,
        sample_count: 1,
        dimension: wgpu::TextureDimension::D2,
        format: wgpu::TextureFormat::Depth24Plus,
        usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::TEXTURE_BINDING,
        label: Some("Depth Texture"),
        view_formats: &[],
    });

    let depth_view = depth_texture.create_view(&wgpu::TextureViewDescriptor::default());

    let depth_stencil_state = wgpu::DepthStencilState {
        format: wgpu::TextureFormat::Depth24Plus,
        depth_write_enabled: true,
        depth_compare: wgpu::CompareFunction::Less,
        stencil: wgpu::StencilState::default(),
        bias: wgpu::DepthBiasState::default(),
    };

    let render_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
        label: Some("Render Pipeline"),
        layout: Some(&pipeline_layout),
        vertex: wgpu::VertexState {
            module: &vertex_shader,
            entry_point: "main",
            buffers: &[Vertex::desc()],
            compilation_options: wgpu::PipelineCompilationOptions {
                ..Default::default()
            },
        },
        fragment: Some(wgpu::FragmentState {
            module: &fragment_shader,
            entry_point: "main",
            targets: &[Some(wgpu::ColorTargetState {
                format: swap_chain_format,
                blend: Some(wgpu::BlendState::REPLACE),
                write_mask: wgpu::ColorWrites::ALL,
            })],
            compilation_options: wgpu::PipelineCompilationOptions {
                ..Default::default()
            },
        }),
        primitive: wgpu::PrimitiveState {
            topology: wgpu::PrimitiveTopology::TriangleList,
            strip_index_format: None,
            front_face: wgpu::FrontFace::Ccw,
            cull_mode: Some(wgpu::Face::Back),
            polygon_mode: wgpu::PolygonMode::Fill,
            unclipped_depth: false,
            conservative: false,
        },
        depth_stencil: Some(depth_stencil_state),
        multisample: wgpu::MultisampleState {
            count: 1,
            mask: !0,
            alpha_to_coverage_enabled: false,
        },
        multiview: None,
    });

    let state = RendererState::new(
        device.clone(),
        queue.clone(),
        model_bind_group_layout.clone(),
        texture_bind_group_layout.clone(),
        texture_render_mode_buffer.clone(),
        color_render_mode_buffer.clone(),
    )
    .await;

    initialize_renderer_state(state);

    let state = get_renderer_state();
    // Standard wasm-bindgen requestAnimationFrame pattern: the closure holds
    // an Rc to itself (via `f`) so it can re-register for the next frame.
    let f = Rc::new(RefCell::new(None));
    let g = f.clone();

    let closure = Closure::wrap(Box::new(move || {
        if !is_rendering_paused() {
            let device = device.clone();
            let state_guard = state.lock().unwrap();

            render_frame(
                &state_guard,
                &surface,
                &device,
                &queue,
                &render_pipeline,
                &depth_view,
                &camera_bind_group,
                &camera_uniform_buffer,
            );

            drop(state_guard);
        }

        // Always reschedule, even while paused, so rendering resumes
        // automatically once the pause flag clears.
        request_animation_frame(f.borrow().as_ref().unwrap());
    }) as Box<dyn FnMut()>);

    *g.borrow_mut() = Some(closure);

    // Kick off the first frame.
    request_animation_frame(g.borrow().as_ref().unwrap());
}
778
779fn request_animation_frame(f: &Closure<dyn FnMut()>) {
780 window()
781 .unwrap()
782 .request_animation_frame(f.as_ref().unchecked_ref())
783 .expect("should register `requestAnimationFrame` OK");
784}
785
786fn render_frame(
787 state: &RendererState,
788 surface: &wgpu::Surface,
789 device: &wgpu::Device,
790 queue: &wgpu::Queue,
791 render_pipeline: &wgpu::RenderPipeline,
794 depth_view: &wgpu::TextureView,
795 camera_bind_group: &wgpu::BindGroup,
796 camera_uniform_buffer: &wgpu::Buffer,
797) {
798 let mut camera = get_camera();
800
801 let frame = surface
803 .get_current_texture()
804 .expect("Failed to acquire next swap chain texture");
805 let view = frame
806 .texture
807 .create_view(&wgpu::TextureViewDescriptor::default());
808
809 let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {
810 label: Some("Render Encoder"),
811 });
812
813 {
814 let color = wgpu::Color {
815 r: 0.1,
816 g: 0.2,
817 b: 0.3,
818 a: 1.0,
819 };
820 let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
821 label: Some("Render Pass"),
822 color_attachments: &[Some(wgpu::RenderPassColorAttachment {
823 view: &view,
824 resolve_target: None,
825 ops: wgpu::Operations {
826 load: wgpu::LoadOp::Clear(color),
827 store: wgpu::StoreOp::Store,
828 },
829 })],
830 depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachment {
832 view: &depth_view, depth_ops: Some(wgpu::Operations {
834 load: wgpu::LoadOp::Clear(1.0), store: wgpu::StoreOp::Store,
836 }),
837 stencil_ops: None, }),
839 timestamp_writes: None,
840 occlusion_query_set: None,
841 });
842
843 render_pass.set_pipeline(&render_pipeline);
845
846 camera.update();
847 let camera_matrix = camera.view_projection_matrix;
848 queue.write_buffer(
849 &camera_uniform_buffer,
850 0,
851 bytemuck::cast_slice(camera_matrix.as_slice()),
852 );
853
854 for landscape in &state.landscapes {
897 landscape.transform.update_uniform_buffer(&queue);
898 render_pass.set_bind_group(0, &camera_bind_group, &[]);
899 render_pass.set_bind_group(1, &landscape.bind_group, &[]);
900 render_pass.set_bind_group(2, &landscape.texture_bind_group, &[]);
901
902 render_pass.set_vertex_buffer(0, landscape.vertex_buffer.slice(..));
903 render_pass
904 .set_index_buffer(landscape.index_buffer.slice(..), wgpu::IndexFormat::Uint32);
905
906 render_pass.draw_indexed(0..landscape.index_count as u32, 0, 0..1);
907 }
908 }
909
910 queue.submit(Some(encoder.finish()));
911 frame.present();
912}
913
914#[wasm_bindgen]
915pub fn handle_key_press(key_code: String, is_pressed: bool) {
916 let camera = get_camera();
917 let state = get_renderer_state();
918 let mut state_guard = state.lock().unwrap();
919
920 web_sys::console::log_1(&format!("Key pressed (2): {}", key_code).into());
921
922 match key_code.as_str() {
923 "w" => {
924 if is_pressed {
925 web_sys::console::log_1(&"Key W pressed".into());
927 camera.position += camera.direction * 0.1;
928 }
929 }
930 "s" => {
931 if is_pressed {
932 web_sys::console::log_1(&"Key S pressed".into());
934 camera.position -= camera.direction * 0.1;
935 }
936 }
937 "a" => {
938 if is_pressed {
939 web_sys::console::log_1(&"Key A pressed".into());
941 let right = camera.direction.cross(&camera.up).normalize();
942 camera.position -= right * 0.1;
943 }
944 }
945 "d" => {
946 if is_pressed {
947 web_sys::console::log_1(&"Key D pressed".into());
949 let right = camera.direction.cross(&camera.up).normalize();
950 camera.position += right * 0.1;
951 }
952 }
953 "ArrowUp" => {
954 if is_pressed {
955 web_sys::console::log_1(&"Key ArrowUp pressed".into());
957 if state_guard.models.len() > 0 {
964 state_guard.models[0].meshes[0]
965 .transform
966 .translate(Vector3::new(0.0, 0.1, 0.0));
967 }
968 }
969 }
970 "ArrowDown" => {
971 if is_pressed {
972 web_sys::console::log_1(&"Key ArrowDown pressed".into());
974 }
976 }
977 "ArrowLeft" => {
978 if is_pressed {
979 web_sys::console::log_1(&"Key ArrowLeft pressed".into());
981 }
983 }
984 "ArrowRight" => {
985 if is_pressed {
986 web_sys::console::log_1(&"Key ArrowRight pressed".into());
988 }
990 }
991 _ => {
992 }
994 }
995
996 camera.update();
997}
998
999#[wasm_bindgen]
1000pub fn handle_mouse_move(dx: f32, dy: f32) {
1001 let camera = get_camera();
1002 let sensitivity = 0.005;
1003
1004 let dx = -dx * sensitivity;
1005 let dy = dy * sensitivity;
1006
1007 camera.rotate(dx, dy);
1008
1009 camera.update();
1010}
1011
1012#[wasm_bindgen]
1013pub fn handle_add_model(projectId: String, modelFilename: String) {
1014 pause_rendering();
1015
1016 let state = get_renderer_state();
1017 let mut state_guard = state.lock().unwrap();
1018
1019 spawn_local(async move {
1021 let params = to_value(&ReadModelParams {
1024 projectId,
1025 modelFilename,
1026 })
1027 .unwrap();
1028 let bytes = invoke("read_model", params).await;
1030 let bytes = bytes
1031 .into_serde()
1032 .expect("Failed to transform byte string to value");
1033
1034 state_guard.add_model(&bytes).await;
1035
1036 drop(state_guard);
1037
1038 resume_rendering();
1039 });
1040}
1041
/// Heightmap payload returned by the Tauri `get_landscape_pixels` command
/// and consumed by `Landscape::new`.
#[derive(Serialize, Deserialize)]
pub struct LandscapeData {
    // Grid dimensions — presumably pixels/samples per axis of `pixel_data`;
    // TODO confirm against the backend command.
    pub width: usize,
    pub height: usize,
    // Row-major grid of per-pixel samples (outer Vec = rows — verify).
    pub pixel_data: Vec<Vec<PixelData>>,
}
1051
/// One heightmap sample within `LandscapeData::pixel_data`.
#[derive(Serialize, Deserialize)]
pub struct PixelData {
    // Raw height sample; units/range are defined by the backend — confirm.
    pub height_value: f32,
    // Precomputed world-space-ish position and UVs for the landscape mesh
    // (assumed; verify against `Landscape::new`).
    pub position: [f32; 3],
    pub tex_coords: [f32; 2],
}
1058
1059#[wasm_bindgen]
1060pub fn handle_add_landscape(projectId: String, landscapeFilename: String) {
1061 pause_rendering();
1062
1063 let state = get_renderer_state();
1064 let mut state_guard = state.lock().unwrap();
1065
1066 spawn_local(async move {
1068 let params = to_value(&GetLandscapeParams {
1071 projectId,
1072 landscapeFilename,
1073 })
1074 .unwrap();
1075 let js_data = invoke("get_landscape_pixels", params).await;
1077 let data: LandscapeData = js_data
1078 .into_serde()
1079 .expect("Failed to transform byte string to value");
1080
1081 state_guard.add_landscape(&data);
1082
1083 drop(state_guard);
1084
1085 resume_rendering();
1086 });
1087}