
arcane_engine/renderer/mod.rs

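//! 2D rendering layer built on wgpu: batched sprites, tilemaps, MSDF text,
//! custom shaders, post-processing, and radiance cascade global illumination.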
mod gpu;
mod sprite;
mod texture;
pub mod camera;
mod tilemap;
mod lighting;
pub mod font;
pub mod msdf;
pub mod shader;
pub mod postprocess;
pub mod radiance;

pub use gpu::GpuContext;
pub use sprite::{SpriteCommand, SpritePipeline};
pub use texture::{TextureId, TextureStore};
pub use camera::Camera2D;
pub use tilemap::{Tilemap, TilemapStore};
pub use lighting::{LightingState, LightingUniform, PointLight, LightData, MAX_LIGHTS};
pub use msdf::{MsdfFont, MsdfFontStore, MsdfGlyph};
pub use shader::ShaderStore;
pub use postprocess::PostProcessPipeline;
pub use radiance::{RadiancePipeline, RadianceState, EmissiveSurface, Occluder, DirectionalLight, SpotLight};

use anyhow::Result;

/// Top-level renderer that owns the GPU context, the rendering pipelines, the texture store,
/// the camera, and the lighting/GI state.
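///
/// A minimal per-frame sketch (assuming a `renderer: Renderer` and a prepared
/// `cmd: SpriteCommand` built elsewhere):
///
/// ```ignore
/// renderer.frame_commands.push(cmd);
/// renderer.render_frame()?;
/// ```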
pub struct Renderer {
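    /// wgpu device, queue, surface, and surface configuration.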
    pub gpu: GpuContext,
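    /// Pipeline that draws the queued sprite commands each frame.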
    pub sprites: SpritePipeline,
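    /// Custom shader store; dirty uniforms are flushed before each frame is rendered.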
    pub shaders: ShaderStore,
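    /// Post-processing chain applied after sprite rendering when effects are active.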
    pub postprocess: PostProcessPipeline,
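    /// Texture storage used by the sprite pipeline.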
    pub textures: TextureStore,
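    /// 2D camera; its viewport is kept in logical pixels.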
    pub camera: Camera2D,
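    /// Lighting state converted to a GPU uniform each frame.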
    pub lighting: LightingState,
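    /// Radiance cascade global-illumination compute pipeline.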
    pub radiance: RadiancePipeline,
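    /// CPU-side state consumed by the radiance cascade pass.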
    pub radiance_state: RadianceState,
    /// Sprite commands queued for the current frame.
    pub frame_commands: Vec<SpriteCommand>,
    /// Display scale factor (e.g. 2.0 on Retina). Used to convert physical → logical pixels.
    pub scale_factor: f32,
    /// Clear color for the render pass background. Default: dark blue-gray.
    pub clear_color: [f32; 4],
}

impl Renderer {
    /// Create a new renderer attached to a winit window.
    pub fn new(window: std::sync::Arc<winit::window::Window>) -> Result<Self> {
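        // Read the DPI scale factor before the window Arc is moved into the GPU context.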
        let scale_factor = window.scale_factor() as f32;
        let gpu = GpuContext::new(window)?;
        let sprites = SpritePipeline::new(&gpu);
        let shaders = ShaderStore::new(&gpu);
        let postprocess = PostProcessPipeline::new(&gpu);
        let radiance_pipeline = RadiancePipeline::new(&gpu);
        let textures = TextureStore::new();
        // Set camera viewport to logical pixels so world units are DPI-independent
        let logical_w = gpu.config.width as f32 / scale_factor;
        let logical_h = gpu.config.height as f32 / scale_factor;
        let camera = Camera2D {
            viewport_size: [logical_w, logical_h],
            ..Camera2D::default()
        };
        Ok(Self {
            gpu,
            sprites,
            shaders,
            postprocess,
            radiance: radiance_pipeline,
            radiance_state: RadianceState::new(),
            textures,
            camera,
            lighting: LightingState::default(),
            frame_commands: Vec::new(),
            scale_factor,
            clear_color: [0.1, 0.1, 0.15, 1.0],
        })
    }

    /// Render the current frame's sprite commands and present.
    pub fn render_frame(&mut self) -> Result<()> {
        let output = self.gpu.surface.get_current_texture()?;
        let view = output.texture.create_view(&wgpu::TextureViewDescriptor::default());

        let mut encoder = self.gpu.device.create_command_encoder(
            &wgpu::CommandEncoderDescriptor { label: Some("frame_encoder") },
        );

        // Sort by layer → shader_id → blend_mode → texture_id for batching
        self.frame_commands.sort_by(|a, b| {
            a.layer
                .cmp(&b.layer)
                .then(a.shader_id.cmp(&b.shader_id))
                .then(a.blend_mode.cmp(&b.blend_mode))
                .then(a.texture_id.cmp(&b.texture_id))
        });

        // Flush dirty custom shader uniforms
        self.shaders.flush(&self.gpu);

        let lighting_uniform = self.lighting.to_uniform();
        let clear_color = wgpu::Color {
            r: self.clear_color[0] as f64,
            g: self.clear_color[1] as f64,
            b: self.clear_color[2] as f64,
            a: self.clear_color[3] as f64,
        };

        // Run radiance cascade GI compute pass (if enabled)
        let gi_active = self.radiance.compute(
            &self.gpu,
            &mut encoder,
            &self.radiance_state,
            &self.lighting,
            self.camera.x,
            self.camera.y,
            self.camera.viewport_size[0],
            self.camera.viewport_size[1],
        );
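        // `gi_active` is false when the radiance pass is disabled, so the GI
        // compose steps below are skipped in that case.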

        if self.postprocess.has_effects() {
            // Render sprites to offscreen target, then apply effects to surface
            {
                let sprite_target = self.postprocess.sprite_target(&self.gpu);
                self.sprites.render(
                    &self.gpu,
                    &self.textures,
                    &self.shaders,
                    &self.camera,
                    &lighting_uniform,
                    &self.frame_commands,
                    sprite_target,
                    &mut encoder,
                    clear_color,
                );
            }
            // Apply GI light texture to the offscreen target before post-processing
            if gi_active {
                let sprite_target = self.postprocess.sprite_target(&self.gpu);
                self.radiance.compose(&mut encoder, sprite_target);
            }
            self.postprocess.apply(&self.gpu, &mut encoder, &view);
        } else {
            // No effects — render directly to surface
            self.sprites.render(
                &self.gpu,
                &self.textures,
                &self.shaders,
                &self.camera,
                &lighting_uniform,
                &self.frame_commands,
                &view,
                &mut encoder,
                clear_color,
            );
            // Apply GI light texture to the surface
            if gi_active {
                self.radiance.compose(&mut encoder, &view);
            }
        }

        self.gpu.queue.submit(std::iter::once(encoder.finish()));
        output.present();

        self.frame_commands.clear();
        Ok(())
    }

    /// Resize the surface when the window size changes.
    /// GPU surface uses physical pixels; camera viewport uses logical pixels.
    pub fn resize(&mut self, physical_width: u32, physical_height: u32, scale_factor: f32) {
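        // Ignore zero-sized updates; configuring a zero-width/height surface is invalid.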
        if physical_width > 0 && physical_height > 0 {
            self.scale_factor = scale_factor;
            self.gpu.config.width = physical_width;
            self.gpu.config.height = physical_height;
            self.gpu.surface.configure(&self.gpu.device, &self.gpu.config);
            // Camera uses logical pixels so 1 world unit ≈ 1 logical pixel at zoom 1
            self.camera.viewport_size = [
                physical_width as f32 / scale_factor,
                physical_height as f32 / scale_factor,
            ];
        }
    }
}