// witchcraft_renderer/types/renderer.rs
use crate::{types, Vertex};
2use wgpu::util::DeviceExt;
3use winit::{event::WindowEvent, window::Window};
4
5use crate::util::*;
6
/// GPU renderer: owns the wgpu surface/device/queue, the camera state and
/// its GPU-side uniform data, plus the list of render pipelines drawn each
/// frame.
pub struct Renderer {
    // Color used by the clear (load) op at the start of every render pass.
    clear_color: wgpu::Color,
    surface: wgpu::Surface,
    device: wgpu::Device,
    queue: wgpu::Queue,
    // Surface configuration (format, size, present mode) chosen in `new`.
    config: wgpu::SurfaceConfiguration,
    // Last-known window size in physical pixels.
    size: winit::dpi::PhysicalSize<u32>,

    // CPU-side camera plus its uniform mirror, the GPU buffer holding it,
    // and the bind group bound at group index 1 during rendering.
    camera: types::Camera,
    camera_uniform: types::CameraUniform,
    camera_buffer: wgpu::Buffer,
    camera_bind_group: wgpu::BindGroup,

    // Shader module compiled from `shader.wgsl` in `new`; presumably used
    // by callers when constructing pipelines — not referenced again here.
    shader: wgpu::ShaderModule,
    // Pipelines are drawn in insertion order each frame (see `render`).
    render_pipelines: Vec<Box<dyn types::RenderPipeline>>,
}
24impl Renderer {
25 pub async fn new(window: &Window) -> anyhow::Result<Self> {
30 let size = window.inner_size();
31
32 let instance = wgpu::Instance::new(wgpu::Backends::all());
33 let surface = unsafe { instance.create_surface(&window) };
34
35 let adapter = instance
36 .request_adapter(&wgpu::RequestAdapterOptions {
37 power_preference: wgpu::PowerPreference::HighPerformance,
38 compatible_surface: Some(&surface),
39 force_fallback_adapter: false,
40 })
41 .await
42 .unwrap();
43
44 let (device, queue) = adapter
45 .request_device(
46 &wgpu::DeviceDescriptor {
47 label: Some("rendering device"),
48 features: wgpu::Features::all_native_mask(),
49 limits: wgpu::Limits::default(),
50 },
51 None,
52 )
53 .await?;
54
55 let config = wgpu::SurfaceConfiguration {
56 usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
57 format: surface.get_preferred_format(&adapter).unwrap(),
58 width: size.width,
59 height: size.height,
60 present_mode: wgpu::PresentMode::Fifo,
61 };
62
63 let mut camera = types::Camera {
65 eye: (0.0, 0.0, 2.0).into(),
66 target: (0.0, 0.0, 0.0).into(),
67 up: cgmath::Vector3::unit_y(),
68 aspect: config.width as f32 / config.height as f32,
69 fovy: 45.0,
70 znear: 0.1,
71 zfar: 100.0,
72 };
73
74 let mut camera_uniform = types::CameraUniform::new();
75 camera_uniform.update_view_proj(&camera);
76
77 let camera_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
78 label: Some("camera buffer"),
79 contents: bytemuck::cast_slice(&[camera_uniform]),
80 usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
81 });
82
83 let camera_bind_group_layout =
84 device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
85 label: Some("camera bind group layout"),
86 entries: &[wgpu::BindGroupLayoutEntry {
87 binding: 0,
88 visibility: wgpu::ShaderStages::VERTEX,
89 ty: wgpu::BindingType::Buffer {
90 ty: wgpu::BufferBindingType::Uniform,
91 has_dynamic_offset: false,
92 min_binding_size: None,
93 },
94 count: None,
95 }],
96 });
97
98 let camera_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
99 label: Some("camera bind group"),
100 layout: &camera_bind_group_layout,
101 entries: &[wgpu::BindGroupEntry {
102 binding: 0,
103 resource: camera_buffer.as_entire_binding(),
104 }],
105 });
106
107 let shader = device.create_shader_module(&wgpu::ShaderModuleDescriptor {
108 label: Some("shader"),
109 source: wgpu::ShaderSource::Wgsl(read_file_to_str("shader.wgsl").unwrap().into()),
110 });
111
112 Ok(Self {
113 clear_color: wgpu::Color {
114 r: 0.0,
115 g: 0.0,
116 b: 0.0,
117 a: 1.0,
118 },
119 size,
120 surface,
121 device,
122 queue,
123 config,
124
125 camera,
126 camera_uniform,
127 camera_buffer,
128 camera_bind_group,
129
130 shader,
131 render_pipelines: Vec::new(),
132 })
133 }
134
135 pub fn with_camera(mut self, camera: types::Camera) -> Self {
137 self.camera = camera;
138 self.update();
139 self
140 }
141
142 pub fn add_pipeline(&mut self, pipeline: Box<dyn types::RenderPipeline>) {
143 self.render_pipelines.push(pipeline);
144 }
145
146 pub fn get_texture(
148 &self,
149 label: &str,
150 texture_path: &std::path::Path,
151 ) -> anyhow::Result<types::Texture> {
152 use std::fs::File;
153 use std::io::BufReader;
154
155 let file = File::open(texture_path)?;
156 let reader = BufReader::new(file);
157 let bytes = reader.buffer();
158
159 types::Texture::from_bytes(label, &self.device, &self.queue, bytes)
160 }
161
162 pub fn update(&mut self) {
164 self.camera_uniform.update_view_proj(&self.camera);
165 self.queue.write_buffer(
166 &self.camera_buffer,
167 0,
168 bytemuck::cast_slice(&[self.camera_uniform]),
169 );
170 }
171
172 pub async fn render(&mut self) -> Result<(), wgpu::SurfaceError> {
175 let output = self.surface.get_current_texture()?;
177 let view = output
178 .texture
179 .create_view(&wgpu::TextureViewDescriptor::default());
180
181 let mut encoder = self
183 .device
184 .create_command_encoder(&wgpu::CommandEncoderDescriptor {
185 label: Some("renderer command encoder"),
186 });
187
188 {
189 let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
190 label: Some("renderer render pass"),
191 color_attachments: &[wgpu::RenderPassColorAttachment {
192 view: &view,
193 resolve_target: None,
194 ops: wgpu::Operations {
195 load: wgpu::LoadOp::Clear(self.clear_color.clone()),
196 store: true,
197 },
198 }],
199 depth_stencil_attachment: None, });
201
202 render_pass.set_bind_group(1, &self.camera_bind_group, &[]);
203
204 for render_pipeline in &self.render_pipelines {
205 match render_pipeline.get_texture() {
207 Some(texture) => {
208 render_pass.set_pipeline(render_pipeline.get_pipeline());
209 render_pass
210 .set_vertex_buffer(1, render_pipeline.get_instance_buffer().slice(..));
211
212 render_pass.set_bind_group(0, render_pipeline.get_bind_group(), &[]);
213 render_pass
214 .set_vertex_buffer(0, render_pipeline.get_vertex_buffer().slice(..));
215 render_pass.set_index_buffer(
216 render_pipeline.get_index_buffer().slice(..),
217 wgpu::IndexFormat::Uint32,
218 );
219
220 render_pass.draw_indexed(
221 0..render_pipeline.get_num_indices(),
222 0,
223 render_pipeline.get_instances(),
224 );
225 }
226 None => {}
227 }
228 }
229 }
230
231 self.queue.submit(std::iter::once(encoder.finish()));
232 output.present();
233
234 Ok(())
235 }
236}