//! `arcane_engine::renderer` — top-level rendering module.
//!
//! Owns the GPU context, sprite pipeline, texture/shader stores, camera,
//! lighting state, and post-processing chain, and re-exports their types.
mod gpu;
mod lighting;
mod sprite;
mod texture;
mod tilemap;
pub mod camera;
pub mod font;
pub mod postprocess;
pub mod shader;

pub use camera::Camera2D;
pub use gpu::GpuContext;
pub use lighting::{LightData, LightingState, LightingUniform, PointLight, MAX_LIGHTS};
pub use postprocess::PostProcessPipeline;
pub use shader::ShaderStore;
pub use sprite::{SpriteCommand, SpritePipeline};
pub use texture::{TextureId, TextureStore};
pub use tilemap::{Tilemap, TilemapStore};

use anyhow::Result;

22/// Top-level renderer that owns the GPU context, sprite pipeline, and textures.
23pub struct Renderer {
24    pub gpu: GpuContext,
25    pub sprites: SpritePipeline,
26    pub shaders: ShaderStore,
27    pub postprocess: PostProcessPipeline,
28    pub textures: TextureStore,
29    pub camera: Camera2D,
30    pub lighting: LightingState,
31    /// Sprite commands queued for the current frame.
32    pub frame_commands: Vec<SpriteCommand>,
33    /// Display scale factor (e.g. 2.0 on Retina). Used to convert physical → logical pixels.
34    pub scale_factor: f32,
35    /// Clear color for the render pass background. Default: dark blue-gray.
36    pub clear_color: [f32; 4],
37}
38
39impl Renderer {
40    /// Create a new renderer attached to a winit window.
41    pub fn new(window: std::sync::Arc<winit::window::Window>) -> Result<Self> {
42        let scale_factor = window.scale_factor() as f32;
43        let gpu = GpuContext::new(window)?;
44        let sprites = SpritePipeline::new(&gpu);
45        let shaders = ShaderStore::new(&gpu);
46        let postprocess = PostProcessPipeline::new(&gpu);
47        let textures = TextureStore::new();
48        // Set camera viewport to logical pixels so world units are DPI-independent
49        let logical_w = gpu.config.width as f32 / scale_factor;
50        let logical_h = gpu.config.height as f32 / scale_factor;
51        let camera = Camera2D {
52            viewport_size: [logical_w, logical_h],
53            ..Camera2D::default()
54        };
55        Ok(Self {
56            gpu,
57            sprites,
58            shaders,
59            postprocess,
60            textures,
61            camera,
62            lighting: LightingState::default(),
63            frame_commands: Vec::new(),
64            scale_factor,
65            clear_color: [0.1, 0.1, 0.15, 1.0],
66        })
67    }
68
69    /// Render the current frame's sprite commands and present.
70    pub fn render_frame(&mut self) -> Result<()> {
71        let output = self.gpu.surface.get_current_texture()?;
72        let view = output.texture.create_view(&wgpu::TextureViewDescriptor::default());
73
74        let mut encoder = self.gpu.device.create_command_encoder(
75            &wgpu::CommandEncoderDescriptor { label: Some("frame_encoder") },
76        );
77
78        // Sort by layer → shader_id → blend_mode → texture_id for batching
79        self.frame_commands.sort_by(|a, b| {
80            a.layer
81                .cmp(&b.layer)
82                .then(a.shader_id.cmp(&b.shader_id))
83                .then(a.blend_mode.cmp(&b.blend_mode))
84                .then(a.texture_id.cmp(&b.texture_id))
85        });
86
87        // Flush dirty custom shader uniforms
88        self.shaders.flush(&self.gpu);
89
90        let lighting_uniform = self.lighting.to_uniform();
91        let clear_color = wgpu::Color {
92            r: self.clear_color[0] as f64,
93            g: self.clear_color[1] as f64,
94            b: self.clear_color[2] as f64,
95            a: self.clear_color[3] as f64,
96        };
97
98        if self.postprocess.has_effects() {
99            // Render sprites to offscreen target, then apply effects to surface
100            let sprite_target = self.postprocess.sprite_target(&self.gpu);
101            self.sprites.render(
102                &self.gpu,
103                &self.textures,
104                &self.shaders,
105                &self.camera,
106                &lighting_uniform,
107                &self.frame_commands,
108                sprite_target,
109                &mut encoder,
110                clear_color,
111            );
112            self.postprocess.apply(&self.gpu, &mut encoder, &view);
113        } else {
114            // No effects — render directly to surface
115            self.sprites.render(
116                &self.gpu,
117                &self.textures,
118                &self.shaders,
119                &self.camera,
120                &lighting_uniform,
121                &self.frame_commands,
122                &view,
123                &mut encoder,
124                clear_color,
125            );
126        }
127
128        self.gpu.queue.submit(std::iter::once(encoder.finish()));
129        output.present();
130
131        self.frame_commands.clear();
132        Ok(())
133    }
134
135    /// Resize the surface when the window size changes.
136    /// GPU surface uses physical pixels; camera viewport uses logical pixels.
137    pub fn resize(&mut self, physical_width: u32, physical_height: u32, scale_factor: f32) {
138        if physical_width > 0 && physical_height > 0 {
139            self.scale_factor = scale_factor;
140            self.gpu.config.width = physical_width;
141            self.gpu.config.height = physical_height;
142            self.gpu.surface.configure(&self.gpu.device, &self.gpu.config);
143            // Camera uses logical pixels so 1 world unit ≈ 1 logical pixel at zoom 1
144            self.camera.viewport_size = [
145                physical_width as f32 / scale_factor,
146                physical_height as f32 / scale_factor,
147            ];
148        }
149    }
150}