//! `egui_backend` crate primarily provides traits to abstract away Window and Rendering parts of egui backends.
//! This allows us to use any window backend with any gfx backend crate.
//!
//! egui is an immediate mode gui library. The lifecycle of egui in every frame goes like this:
//! 1. takes input from the window backend. eg: mouse position, keyboard events, resize..
//! 2. constructs gui objects like windows / panels / buttons etc.. and deals with any input interactions.
//! 3. outputs those gui objects as gpu friendly data to be drawn by a gfx backend.
//!
//! So, we need a WindowBackend to provide input to egui and a GfxBackend to draw egui's output.
//! egui already provides official backends for winit, wgpu and glow, along with a higher level wrapper crate called `eframe`.
//! eframe uses `winit` on desktop, a custom backend on web, and `wgpu`/`glow` for rendering.
//! If that serves your use case, then it is recommended to keep using it.
//!
//! `egui_backend` crate instead tries to enable separation of window + gfx concerns using traits.
//!
//! This crate provides 3 traits:
//! 1. [`WindowBackend`]: implemented by window backends like [winit](https://docs.rs/winit), [glfw](https://docs.rs/glfw), [sdl2](https://docs.rs/sdl2) etc..
//! 2. [`GfxBackend`]: implemented by rendering backends like [wgpu](https://docs.rs/wgpu), [glow](https://docs.rs/glow), [three-d](https://docs.rs/three-d) etc..
//! 3. [`UserApp`]: implemented by the end user's struct, which holds the app data as well as the egui context and the renderer.
//!
//! This crate will also try to provide functions or structs which are useful across all backends.
//! 1. [`BackendConfig`]: has some configuration which needs to be provided at startup.
//!
//! Look at the docs of the relevant trait to learn more. A minimal wiring sketch is shown below.
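//!
//! The sketch below is only illustrative. `MyWindowBackend` and `MyGfxBackend` are hypothetical
//! implementations of [`WindowBackend`] and [`GfxBackend`] (substitute whichever backend crates you
//! actually use), and `Counter` is the sample app sketched in the [`UserApp`] docs:
//!
//! ```ignore
//! use egui_backend::{BackendConfig, GfxBackend, UserApp, WindowBackend};
//!
//! fn main() {
//!     // create the window first, because the renderer needs a (raw) window handle.
//!     let mut window_backend = MyWindowBackend::new(Default::default(), BackendConfig::default());
//!     // create the renderer using the window handle and backend config.
//!     let gfx_backend = MyGfxBackend::new(&mut window_backend, Default::default());
//!     let app = Counter {
//!         window_backend,
//!         gfx_backend,
//!         egui_context: egui::Context::default(),
//!         count: 0,
//!     };
//!     // hand control to the window backend; it drives the `UserApp` every frame.
//!     MyWindowBackend::run_event_loop(app);
//! }
//! ```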

// #[cfg(target_feature = "egui")]
pub use egui;
// #[cfg(target_feature = "egui")]
use egui::{ClippedPrimitive, FullOutput, PlatformOutput, RawInput, TexturesDelta};
pub use raw_window_handle;
use raw_window_handle::{HasRawDisplayHandle, HasRawWindowHandle};
use std::time::Duration;

/// Intended to provide a common struct which all window backends accept as their configuration.
/// To set size/position/title etc., just use the [`WindowBackend`] trait functions after you have created the window.
/// This struct is primarily intended for settings which must be specified *before* creating a window, like opengl or transparency.
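///
/// A small sketch of requesting a transparent OpenGL (ES) window. `MyWindowBackend` and the
/// specific version numbers are only illustrative:
///
/// ```ignore
/// let backend_config = BackendConfig {
///     is_opengl: true,
///     opengl_config: Some(OpenGlConfig {
///         es: Some(true),
///         major: Some(3),
///         minor: Some(0),
///         srgb: Some(true),
///         ..Default::default()
///     }),
///     transparent: Some(true),
/// };
/// // window-backend-specific settings go into that backend's own `Configuration` type.
/// let window_backend = MyWindowBackend::new(Default::default(), backend_config);
/// ```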
#[derive(Debug, Clone)]
pub struct BackendConfig {
    /// Whether the window should be created with an OpenGL context.
    /// true by default.
    pub is_opengl: bool,
    /// OpenGL specific settings like version, srgb, depth/stencil bits etc..
    /// Only relevant when `is_opengl` is true.
    pub opengl_config: Option<OpenGlConfig>,
    /// Whether to request a transparent window. `None` means use the window backend's default.
    pub transparent: Option<bool>,
}

impl Default for BackendConfig {
    fn default() -> Self {
        // let is_opengl = cfg!(target_arch = "wasm32");
        let is_opengl = true;
        Self {
            is_opengl,
            transparent: None,
            opengl_config: Default::default(),
        }
    }
}
/// Implement this trait for your windowing backend. The main responsibilities of a
/// windowing backend are to:
/// 1. run the event loop and call the necessary functions of the Gfx backend and UserApp every frame (see the sketch below).
/// 2. poll and gather events.
/// 3. convert the events to egui's event types before submitting raw input.
/// 4. provide details like window size or window handle to the Gfx backend for creation/update of the surface/swapchain.
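///
/// A rough, illustrative sketch of the per-frame flow, assuming a hypothetical `MyWindowBackend`;
/// the details of event polling/conversion depend on the windowing library you wrap:
///
/// ```ignore
/// fn run_event_loop<U: UserApp<UserWindowBackend = MyWindowBackend> + 'static>(mut user_app: U) {
///     loop {
///         // 1. poll OS events, convert them into `egui::Event`s and store them in the
///         //    pending `RawInput` that `take_raw_input` will hand out.
///         //    On resize, call `user_app.resize_framebuffer()`; on android lifecycle
///         //    events, call `user_app.resume()` / `user_app.suspend()`.
///
///         // 2. run one egui frame. The default `UserApp::run` impl takes the raw input,
///         //    prepares the gfx frame, builds the gui and presents it.
///         let logical_size = user_app
///             .get_all()
///             .0
///             .get_window_size()
///             .unwrap_or([800.0, 600.0]);
///         if let Some((_platform_output, _repaint_after)) = user_app.run(logical_size) {
///             // 3. act on the platform output (clipboard, cursor icon, open urls..)
///             //    and schedule the next repaint.
///         }
///     }
/// }
/// ```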
pub trait WindowBackend: Sized {
    /// This will be the WindowBackend's configuration. If necessary, just add boxed closures as its
    /// fields and run them before window creation, after window creation etc., to provide maximum
    /// configurability to users.
    type Configuration: Default + Sized;
    /// This type is used by GfxBackend to create/manage swapchain/surfaces. We use an associated type,
    /// because impl Trait is not yet supported in return positions of trait functions.
    /// For now, we only support a single window.
    type WindowType: HasRawDisplayHandle + HasRawWindowHandle + Sized;
    /// Create a new window backend.
    /// `config` is the custom configuration of a specific window backend,
    /// while `backend_config` is a general config struct for settings common to all backends (opengl, transparency..).
    fn new(config: Self::Configuration, backend_config: BackendConfig) -> Self;
    /// Extracts all the events of this frame as egui's `RawInput`.
    fn take_raw_input(&mut self) -> RawInput;
    /// This gives us the "Window" struct of this particular backend. should implement raw window handle apis.
    /// if this is None, it means window hasn't been created, or has been destroyed for some reason.
    /// usually on android, this means the app is suspended. Other platforms always return a live window.
    fn get_window(&mut self) -> Option<&mut Self::WindowType>;
    /// Sometimes, the framebuffer size might have changed while the resize event has not been received yet.
    /// In those cases, wgpu/vulkan-like render apis will throw an error if you try to acquire a swapchain image
    /// with an outdated size, so you will need to provide the *latest* size.
    /// If the return value is `None`, the window doesn't exist yet. eg: on android, after the suspend but before the resume event.
    fn get_live_physical_size_framebuffer(&mut self) -> Option<[u32; 2]>;
    /// Run the event loop
    /// Window Backend must call the relevant UserApp functions at the right time.
    fn run_event_loop<U: UserApp<UserWindowBackend = Self> + 'static>(user_app: U);
    /// The backend config, in case the GfxBackend needs it. It usually tells the GfxBackend whether we have an opengl or non-opengl window.
    /// For example, if a vulkan backend gets a window with opengl, it can gracefully panic instead of segfaulting.
    /// This also serves as an indicator for opengl gfx backends on whether this backend supports the `swap_buffers` or `get_proc_address` functions.
    fn get_config(&self) -> &BackendConfig;
    /// Optional. Only implemented by gl windowing libraries like glfw/sdl2 which hold the gl context along with the Window.
    /// Gfx backends like glow (or raw opengl) will call this if needed.
    /// panic! if your WindowBackend doesn't implement this functionality (eg: winit).
    fn swap_buffers(&mut self) {
        unimplemented!("swap buffers is not implemented for this window backend");
    }
    /// A direct helper function to tell us if the window is backed by opengl or non-opengl (vk/dx/mtl).
    fn is_opengl(&self) -> bool;

    /// Get OpenGL function addresses. Optional, just like [`Self::swap_buffers`].
    /// panic! if it doesn't apply to your WindowBackend. eg: winit.
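    ///
    /// A small sketch of how an opengl based gfx backend (glow here, as an example) might use this,
    /// assuming the window was created with an opengl context:
    ///
    /// ```ignore
    /// // build a glow context by resolving gl function pointers via the window backend.
    /// let glow_context = unsafe {
    ///     glow::Context::from_loader_function(|symbol| window_backend.get_proc_address(symbol))
    /// };
    /// ```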
    fn get_proc_address(&mut self, symbol: &str) -> *const core::ffi::c_void {
        unimplemented!(
            "get_proc_address is not implemented for this window backend. called with {symbol}"
        );
    }
    /// To change the title of the window
    fn set_window_title(&mut self, title: &str);
    /// The position of the window relative to top left of the monitor/screen/workspace.
    fn get_window_position(&mut self) -> Option<[f32; 2]>;
    /// set the position of the window relative to top left of the monitor/screen/workspace.
    fn set_window_position(&mut self, pos: [f32; 2]);
    /// Get the size of the window in logical pixels. Use [`Self::get_live_physical_size_framebuffer`] for the physical surface size.
    fn get_window_size(&mut self) -> Option<[f32; 2]>;
    /// set the window size in logical pixels.
    fn set_window_size(&mut self, size: [f32; 2]);
    /// Check if the window is minimized.
    /// Warning: on some platforms, the size of a minimized window might be reported as zero.
    fn get_window_minimized(&mut self) -> Option<bool>;
    /// To minimize/restore a window.
    fn set_minimize_window(&mut self, min: bool);
    /// If window is maximized.
    fn get_window_maximized(&mut self) -> Option<bool>;
    /// To maximize/restore the window.
    fn set_maximize_window(&mut self, max: bool);
    /// If the window is visible on screen.
    fn get_window_visibility(&mut self) -> Option<bool>;
    /// To show/hide the window. Usually, you will want to hide the window until you have prepared/drawn
    /// to the surface at least once, and then show the window. Otherwise, users might see garbage until the first frame.
    fn set_window_visibility(&mut self, vis: bool);
    /// If the window will always stay on top of other windows
    fn get_always_on_top(&mut self) -> Option<bool>;
    /// To make the window always stay on top of other windows. Usually used for Overlays.
    fn set_always_on_top(&mut self, always_on_top: bool);
    /// If the window is "passthrough".
    /// Passthrough means the window is still rendered/visible, but input goes to whatever is behind/below the window.
    fn get_passthrough(&mut self) -> Option<bool>;
    /// To make the window passthrough or non-passthrough. Used by overlays.
    /// By checking whether your application gui (egui) wants the input or not, you can toggle this to act as an overlay.
    fn set_passthrough(&mut self, passthrough: bool);
}

/// Trait for Gfx backends. These could be Gfx APIs like opengl, vulkan or wgpu etc..
/// or higher level renderers like three-d, rend3 or custom renderers etc..
/// A skeleton implementation is sketched below.
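///
/// The sketch below only shows where each method fits into a frame. `MyGfxBackend` is a
/// hypothetical wgpu-style renderer and all bodies are placeholders:
///
/// ```ignore
/// use egui::{ClippedPrimitive, TexturesDelta};
///
/// struct MyGfxBackend { /* device, queue, surface, egui renderer.. */ }
///
/// impl GfxBackend for MyGfxBackend {
///     type Configuration = ();
///
///     fn new(window_backend: &mut impl WindowBackend, _config: Self::Configuration) -> Self {
///         // create instance/device and a surface from the raw window handle.
///         // check `window_backend.get_config()` to bail out early on an incompatible (opengl) window.
///         todo!()
///     }
///     fn resize_framebuffer(&mut self, window_backend: &mut impl WindowBackend) {
///         // reconfigure the surface using `get_live_physical_size_framebuffer`.
///         todo!()
///     }
///     fn prepare_frame(&mut self, window_backend: &mut impl WindowBackend) {
///         // acquire the next swapchain image, resizing first if it is out of date.
///         todo!()
///     }
///     fn render_egui(
///         &mut self,
///         meshes: Vec<ClippedPrimitive>,
///         textures_delta: TexturesDelta,
///         logical_screen_size: [f32; 2],
///     ) {
///         // upload texture deltas, then record draw calls for the clipped meshes.
///         todo!()
///     }
///     fn present(&mut self, window_backend: &mut impl WindowBackend) {
///         // submit command buffers and present the swapchain image
///         // (or call `window_backend.swap_buffers()` on opengl).
///         todo!()
///     }
/// }
/// ```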
pub trait GfxBackend {
    /// Similar to [`WindowBackend::Configuration`]. A custom config struct for the creation of the GfxBackend.
    type Configuration: Default;

    /// Create a new GfxBackend using info from the window backend and a custom config struct.
    /// The `WindowBackend` trait provides the backend config, which can be used by the renderer to check
    /// for compatibility.
    ///
    /// For example, a glow renderer might want an opengl context, but if the window was created without one,
    /// the glow renderer should panic.
    fn new(window_backend: &mut impl WindowBackend, config: Self::Configuration) -> Self;

    /// Android only. Called on app suspension, which destroys the window.
    /// So, you will need to destroy the `Surface` here and recreate it during the resume event.
    /// ### Panics
    /// Panics on other platforms.
    fn suspend(&mut self, _window_backend: &mut impl WindowBackend) {
        unimplemented!("This window backend doesn't implement suspend event");
    }
    /// Android only. Called when the app is resumed after suspension.
    /// On Android, the window can only be created on the resume event, so you cannot create a `Surface` before entering the event loop.
    /// When this fn is called, we can create a new surface (swapchain) for the window.
    /// Doesn't apply on other platforms.
    fn resume(&mut self, _window_backend: &mut impl WindowBackend) {}
    /// Called when the framebuffer has been resized. Use this to reconfigure your swapchain/surface/viewport.
    fn resize_framebuffer(&mut self, window_backend: &mut impl WindowBackend);
    /// Prepare the surface / swapchain etc.. by acquiring an image for the current frame.
    /// Use the `WindowBackend::get_live_physical_size_framebuffer` fn to resize your swapchain if it is out of date.
    fn prepare_frame(&mut self, window_backend: &mut impl WindowBackend);

    /// This is where the renderer creates renderpasses, issues draw calls etc.. using the data previously prepared.
    fn render_egui(
        &mut self,
        meshes: Vec<ClippedPrimitive>,
        textures_delta: TexturesDelta,
        logical_screen_size: [f32; 2],
    );

    /// This is called at the end of the frame. After everything is drawn, you can now present.
    /// On opengl, the renderer might call `WindowBackend::swap_buffers`.
    /// On wgpu / vulkan, the renderer might submit commands to queues, present the swapchain image etc..
    fn present(&mut self, window_backend: &mut impl WindowBackend);
}

/// This is the trait most users care about.
/// Just have a struct with the WindowBackend, GfxBackend and egui context as fields,
/// and implement the `get_all` and `gui_run` fns for a simple app.
/// Or you can override the `run` fn for more advanced stuff like filtering input events etc..
/// A minimal sketch is shown below.
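///
/// A minimal sketch, assuming hypothetical `MyWindowBackend` / `MyGfxBackend` backends
/// (see the crate level docs for the wiring in `main`):
///
/// ```ignore
/// struct Counter {
///     window_backend: MyWindowBackend,
///     gfx_backend: MyGfxBackend,
///     egui_context: egui::Context,
///     count: u64,
/// }
///
/// impl UserApp for Counter {
///     type UserWindowBackend = MyWindowBackend;
///     type UserGfxBackend = MyGfxBackend;
///
///     fn get_all(&mut self) -> (&mut MyWindowBackend, &mut MyGfxBackend, &egui::Context) {
///         (&mut self.window_backend, &mut self.gfx_backend, &self.egui_context)
///     }
///
///     fn gui_run(&mut self) {
///         // clone the context so the closure below can borrow `self` mutably.
///         let ctx = self.egui_context.clone();
///         egui::CentralPanel::default().show(&ctx, |ui| {
///             if ui.button("click me").clicked() {
///                 self.count += 1;
///             }
///             ui.label(format!("clicked {} times", self.count));
///         });
///     }
/// }
/// ```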
pub trait UserApp {
    type UserGfxBackend: GfxBackend;
    type UserWindowBackend: WindowBackend;
    /// A shortcut function to get the window and gfx backends as well as the egui context.
    /// Primarily used to provide default implementations of `resize_framebuffer`, `resume`, `suspend` and `run` fns.
    fn get_all(
        &mut self,
    ) -> (
        &mut Self::UserWindowBackend,
        &mut Self::UserGfxBackend,
        &egui::Context,
    );

    fn resize_framebuffer(&mut self) {
        let (wb, gb, _) = self.get_all();
        gb.resize_framebuffer(wb);
    }
    fn resume(&mut self) {
        let (wb, gb, _) = self.get_all();
        gb.resume(wb);
    }
    fn suspend(&mut self) {
        let (wb, gb, _) = self.get_all();
        gb.suspend(wb);
    }
    fn run(&mut self, logical_size: [f32; 2]) -> Option<(PlatformOutput, Duration)> {
        let (wb, gb, egui_context) = self.get_all();
        let egui_context = egui_context.clone();
        // don't bother doing anything if there's no window
        if let Some(full_output) = if wb.get_window().is_some() {
            let input = wb.take_raw_input();
            gb.prepare_frame(wb);
            egui_context.begin_frame(input);
            self.gui_run();
            Some(egui_context.end_frame())
        } else {
            None
        } {
            let FullOutput {
                platform_output,
                repaint_after,
                textures_delta,
                shapes,
            } = full_output;
            let (wb, gb, egui_context) = self.get_all();
            let egui_context = egui_context.clone();

            gb.render_egui(
                egui_context.tessellate(shapes),
                textures_delta,
                logical_size,
            );
            gb.present(wb);
            return Some((platform_output, repaint_after));
        }
        None
    }
    /// This is the only function the user needs to implement. It will be called every frame by the default implementation of the `run` fn.
    /// Just use the egui context to build the user interface; the default `run` impl will take care of drawing egui afterwards.
    fn gui_run(&mut self);
}

/// Some nice util functions commonly used by egui backends.
pub mod util {

    /// Input: a clip rectangle in logical pixels, the scale factor, and the framebuffer size in physical pixels.
    /// Returns [x, y, width, height] of the scissor rectangle, or `None` if the scissor region is empty.
    ///
    /// Internally, it will
    /// 1. multiply the clip rect by the scale to convert the logical rectangle to a physical rectangle in framebuffer space.
    /// 2. clamp the rectangle between 0..width and 0..height of the framebuffer, making sure that width/height are positive or zero.
    /// 3. return Some only if the width/height of the scissor region are not zero.
    ///
    /// This fn is for wgpu/metal/directx (see the example below).
    /// For opengl, use [`scissor_from_clip_rect_opengl`].
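    ///
    /// For example (the numbers are only illustrative):
    ///
    /// ```ignore
    /// // a 100x50 logical clip rect at (10, 10), scale factor 2.0, 800x600 physical framebuffer.
    /// let clip_rect = egui::Rect::from_min_max(egui::pos2(10.0, 10.0), egui::pos2(110.0, 60.0));
    /// let scissor = scissor_from_clip_rect(&clip_rect, 2.0, [800, 600]);
    /// // x = 20, y = 20 (measured from the top), width = 200, height = 100.
    /// assert_eq!(scissor, Some([20, 20, 200, 100]));
    /// ```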
    pub fn scissor_from_clip_rect(
        clip_rect: &egui::Rect,
        scale: f32,
        physical_framebuffer_size: [u32; 2],
    ) -> Option<[u32; 4]> {
        // copy paste from official egui impl because i have no idea what this is :D

        // first, we turn the clip rectangle into physical framebuffer coordinates
        // clip_min is top left point and clip_max is bottom right.
        let clip_min_x = scale * clip_rect.min.x;
        let clip_min_y = scale * clip_rect.min.y;
        let clip_max_x = scale * clip_rect.max.x;
        let clip_max_y = scale * clip_rect.max.y;

        // round to integers
        let clip_min_x = clip_min_x.round() as i32;
        let clip_min_y = clip_min_y.round() as i32;
        let clip_max_x = clip_max_x.round() as i32;
        let clip_max_y = clip_max_y.round() as i32;

        // clamp top_left of clip rect to be within framebuffer bounds
        let clip_min_x = clip_min_x.clamp(0, physical_framebuffer_size[0] as i32);
        let clip_min_y = clip_min_y.clamp(0, physical_framebuffer_size[1] as i32);
        // clamp bottom right of clip rect to be between top_left of clip rect and framebuffer bottom right bounds
        let clip_max_x = clip_max_x.clamp(clip_min_x, physical_framebuffer_size[0] as i32);
        let clip_max_y = clip_max_y.clamp(clip_min_y, physical_framebuffer_size[1] as i32);
        // x,y are simply top left coords
        let x = clip_min_x as u32;
        let y = clip_min_y as u32;
        // width/height are obtained by subtracting the top left coords from the bottom right coords.
        let width = (clip_max_x - clip_min_x) as u32;
        let height = (clip_max_y - clip_min_y) as u32;
        // return only if scissor width/height are not zero. otherwise, no need for a scissor rect at all
        (width != 0 && height != 0).then_some([x, y, width, height])
    }
    /// For wgpu, dx, metal, use [`scissor_from_clip_rect`] instead.
    ///
    /// **NOTE**:
    /// egui coordinates are in logical window space with the top left being [0, 0],
    /// while in opengl, [0, 0] is the bottom left.
    /// So, we need to measure the scissor rect's `y` from the bottom border instead of the top:
    /// 1. the clip rect's bottom edge is `y + height` units from the top border.
    /// 2. for any horizontal line, (distance from the top) + (distance from the bottom) = framebuffer height.
    /// 3. so, the bottom edge is `framebuffer_height - (y + height)` units from the bottom border, which is the `y` that opengl expects.
    ///
    /// The math is surprisingly hard to write down; just draw it on paper and it makes sense. A worked example is shown below.
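    ///
    /// For example, continuing the numbers from the [`scissor_from_clip_rect`] example
    /// (again, only illustrative):
    ///
    /// ```ignore
    /// // same clip rect: a 200x100 physical scissor at (20, 20) from the top left, inside an
    /// // 800x600 framebuffer. the opengl y is 600 - (20 + 100) = 480, measured from the bottom.
    /// let clip_rect = egui::Rect::from_min_max(egui::pos2(10.0, 10.0), egui::pos2(110.0, 60.0));
    /// let scissor = scissor_from_clip_rect_opengl(&clip_rect, 2.0, [800, 600]);
    /// assert_eq!(scissor, Some([20, 480, 200, 100]));
    /// ```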
    pub fn scissor_from_clip_rect_opengl(
        clip_rect: &egui::Rect,
        scale: f32,
        physical_framebuffer_size: [u32; 2],
    ) -> Option<[u32; 4]> {
        scissor_from_clip_rect(clip_rect, scale, physical_framebuffer_size).map(|mut arr| {
            arr[1] = physical_framebuffer_size[1] - (arr[1] + arr[3]);
            arr
        })
    }
}

/// OpenGL specific settings, used when requesting an opengl window. See [`BackendConfig::opengl_config`].
#[derive(Debug, Clone, Default)]
pub struct OpenGlConfig {
    /// minimum major opengl version
    /// 2 or 3 is common
    pub major: Option<u8>,
    /// minor version.
    pub minor: Option<u8>,
    /// If we want an ES context
    /// false is default
    pub es: Option<bool>,
    /// try creating srgb surface for window
    pub srgb: Option<bool>,
    /// depth bits
    pub depth: Option<u8>,
    /// stencil bits
    pub stencil: Option<u8>,
    /// The number of bits per each color channel.
    /// default should be rgba with 8 bits each.
    pub color_bits: Option<[u8; 4]>,
    /// Must be a power of 2
    pub multi_samples: Option<u8>,
    /// If false, we request a compatibility context.
    /// If true, a core context.
    /// true is default.
    pub core: Option<bool>,
}