Skip to main content

fyrox_impl/scene/
camera.rs

1// Copyright (c) 2019-present Dmitry Stepanov and Fyrox Engine contributors.
2//
3// Permission is hereby granted, free of charge, to any person obtaining a copy
4// of this software and associated documentation files (the "Software"), to deal
5// in the Software without restriction, including without limitation the rights
6// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7// copies of the Software, and to permit persons to whom the Software is
8// furnished to do so, subject to the following conditions:
9//
10// The above copyright notice and this permission notice shall be included in all
11// copies or substantial portions of the Software.
12//
13// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19// SOFTWARE.
20
21//! Contains all methods and structures to create and manage cameras. See [`Camera`] docs for more info.
22
23use crate::{
24    asset::{state::LoadError, untyped::ResourceKind},
25    core::{
26        algebra::{Matrix4, Point3, Vector2, Vector3, Vector4},
27        color::Color,
28        math::{aabb::AxisAlignedBoundingBox, frustum::Frustum, ray::Ray, Rect},
29        pool::Handle,
30        reflect::prelude::*,
31        type_traits::prelude::*,
32        uuid::{uuid, Uuid},
33        uuid_provider,
34        variable::InheritableVariable,
35        visitor::{Visit, VisitResult, Visitor},
36    },
37    graph::SceneGraph,
38    resource::texture::{
39        TextureKind, TexturePixelKind, TextureResource, TextureResourceExtension, TextureWrapMode,
40    },
41    scene::{
42        base::{Base, BaseBuilder},
43        debug::SceneDrawingContext,
44        graph::Graph,
45        node::constructor::NodeConstructor,
46        node::{Node, NodeTrait, UpdateContext},
47    },
48};
49use fyrox_graph::constructor::ConstructorProvider;
50use serde::{Deserialize, Serialize};
51use std::{
52    fmt::{Display, Formatter},
53    ops::{Deref, DerefMut},
54};
55use strum_macros::{AsRefStr, EnumString, VariantNames};
56
/// Perspective projection makes parallel lines converge at some point. Objects will be smaller
/// with increasing distance. This is the projection type "used" by human eyes, photographic lenses;
/// it looks most realistic.
#[derive(Reflect, Clone, Debug, PartialEq, Visit, Serialize, Deserialize)]
pub struct PerspectiveProjection {
    /// Vertical angle at the top of viewing frustum, in radians. Larger values will increase field
    /// of view and create fish-eye effect, smaller values could be used to create "binocular" effect
    /// or scope effect.
    #[reflect(min_value = 0.0, max_value = 6.28, step = 0.1)]
    pub fov: f32,
    /// Location of the near clipping plane. If it is larger than [`Self::z_far`] then it will be
    /// treated like far clipping plane.
    #[reflect(min_value = 0.0, step = 0.1)]
    pub z_near: f32,
    /// Location of the far clipping plane. If it is less than [`Self::z_near`] then it will be
    /// treated like near clipping plane.
    #[reflect(min_value = 0.0, step = 0.1)]
    pub z_far: f32,
}
76
77impl Default for PerspectiveProjection {
78    fn default() -> Self {
79        Self {
80            fov: 75.0f32.to_radians(),
81            z_near: 0.025,
82            z_far: 2048.0,
83        }
84    }
85}
86
87impl PerspectiveProjection {
88    /// Returns perspective projection matrix.
89    #[inline]
90    pub fn matrix(&self, frame_size: Vector2<f32>) -> Matrix4<f32> {
91        let limit = 10.0 * f32::EPSILON;
92
93        let z_near = self.z_far.min(self.z_near);
94        let mut z_far = self.z_far.max(self.z_near);
95
96        // Prevent planes from superimposing which could cause panic.
97        if z_far - z_near < limit {
98            z_far += limit;
99        }
100
101        Matrix4::new_perspective(
102            (frame_size.x / frame_size.y).max(limit),
103            self.fov,
104            z_near,
105            z_far,
106        )
107    }
108}
109
/// Parallel projection. Object's size won't be affected by distance from the viewer, it can be
/// used for 2D games.
#[derive(Reflect, Clone, Debug, PartialEq, Visit, Serialize, Deserialize)]
pub struct OrthographicProjection {
    /// Location of the near clipping plane. If it is larger than [`Self::z_far`] then it will be
    /// treated like far clipping plane.
    #[reflect(min_value = 0.0, step = 0.1)]
    pub z_near: f32,
    /// Location of the far clipping plane. If it is less than [`Self::z_near`] then it will be
    /// treated like near clipping plane.
    #[reflect(min_value = 0.0, step = 0.1)]
    pub z_far: f32,
    /// Vertical size of the "view box". Horizontal size is derived value and depends on the aspect
    /// ratio of the viewport. Any values very close to zero (from both sides) will be clamped to
    /// some minimal value to prevent singularities from occurring.
    #[reflect(step = 0.1)]
    pub vertical_size: f32,
}
128
129impl Default for OrthographicProjection {
130    fn default() -> Self {
131        Self {
132            z_near: 0.0,
133            z_far: 2048.0,
134            vertical_size: 5.0,
135        }
136    }
137}
138
139impl OrthographicProjection {
140    /// Returns orthographic projection matrix.
141    #[inline]
142    pub fn matrix(&self, frame_size: Vector2<f32>) -> Matrix4<f32> {
143        fn clamp_to_limit_signed(value: f32, limit: f32) -> f32 {
144            if value < 0.0 && -value < limit {
145                -limit
146            } else if value >= 0.0 && value < limit {
147                limit
148            } else {
149                value
150            }
151        }
152
153        let limit = 10.0 * f32::EPSILON;
154
155        let aspect = (frame_size.x / frame_size.y).max(limit);
156
157        // Prevent collapsing projection "box" into a point, which could cause panic.
158        let vertical_size = clamp_to_limit_signed(self.vertical_size, limit);
159        let horizontal_size = clamp_to_limit_signed(aspect * vertical_size, limit);
160
161        let z_near = self.z_far.min(self.z_near);
162        let mut z_far = self.z_far.max(self.z_near);
163
164        // Prevent planes from superimposing which could cause panic.
165        if z_far - z_near < limit {
166            z_far += limit;
167        }
168
169        let left = -horizontal_size;
170        let top = vertical_size;
171        let right = horizontal_size;
172        let bottom = -vertical_size;
173        Matrix4::new_orthographic(left, right, bottom, top, z_near, z_far)
174    }
175}
176
/// A method of projection. Different projection types suitable for different purposes:
///
/// 1) Perspective projection is most useful for 3D games, it makes a scene look most natural,
/// objects will look smaller with increasing distance.
/// 2) Orthographic projection is most useful for 2D games, objects won't look smaller with
/// increasing distance.
#[derive(
    Reflect,
    Clone,
    Debug,
    PartialEq,
    Visit,
    AsRefStr,
    EnumString,
    VariantNames,
    Serialize,
    Deserialize,
)]
pub enum Projection {
    /// See [`PerspectiveProjection`] docs.
    Perspective(PerspectiveProjection),
    /// See [`OrthographicProjection`] docs.
    Orthographic(OrthographicProjection),
}

// Stable type id for serialization/reflection; must never change.
uuid_provider!(Projection = "0eb5bec0-fc4e-4945-99b6-e6c5392ad971");
203
204impl Projection {
205    /// Sets the new value for the near clipping plane.
206    #[inline]
207    #[must_use]
208    pub fn with_z_near(mut self, z_near: f32) -> Self {
209        match self {
210            Projection::Perspective(ref mut v) => v.z_near = z_near,
211            Projection::Orthographic(ref mut v) => v.z_near = z_near,
212        }
213        self
214    }
215
216    /// Sets the new value for the far clipping plane.
217    #[inline]
218    #[must_use]
219    pub fn with_z_far(mut self, z_far: f32) -> Self {
220        match self {
221            Projection::Perspective(ref mut v) => v.z_far = z_far,
222            Projection::Orthographic(ref mut v) => v.z_far = z_far,
223        }
224        self
225    }
226
227    /// Sets the new value for the near clipping plane.
228    #[inline]
229    pub fn set_z_near(&mut self, z_near: f32) {
230        match self {
231            Projection::Perspective(v) => v.z_near = z_near,
232            Projection::Orthographic(v) => v.z_near = z_near,
233        }
234    }
235
236    /// Sets the new value for the far clipping plane.
237    #[inline]
238    pub fn set_z_far(&mut self, z_far: f32) {
239        match self {
240            Projection::Perspective(v) => v.z_far = z_far,
241            Projection::Orthographic(v) => v.z_far = z_far,
242        }
243    }
244
245    /// Returns near clipping plane distance.
246    #[inline]
247    pub fn z_near(&self) -> f32 {
248        match self {
249            Projection::Perspective(v) => v.z_near,
250            Projection::Orthographic(v) => v.z_near,
251        }
252    }
253
254    /// Returns far clipping plane distance.
255    #[inline]
256    pub fn z_far(&self) -> f32 {
257        match self {
258            Projection::Perspective(v) => v.z_far,
259            Projection::Orthographic(v) => v.z_far,
260        }
261    }
262
263    /// Returns projection matrix.
264    #[inline]
265    pub fn matrix(&self, frame_size: Vector2<f32>) -> Matrix4<f32> {
266        match self {
267            Projection::Perspective(v) => v.matrix(frame_size),
268            Projection::Orthographic(v) => v.matrix(frame_size),
269        }
270    }
271
272    /// Returns `true` if the current projection is perspective.
273    #[inline]
274    pub fn is_perspective(&self) -> bool {
275        matches!(self, Projection::Perspective(_))
276    }
277
278    /// Returns `true` if the current projection is orthographic.
279    #[inline]
280    pub fn is_orthographic(&self) -> bool {
281        matches!(self, Projection::Orthographic(_))
282    }
283}
284
285impl Default for Projection {
286    fn default() -> Self {
287        Self::Perspective(PerspectiveProjection::default())
288    }
289}
290
/// Exposure is a parameter that describes how much light should be collected for one
/// frame. The higher the value, the brighter the final frame will be and vice versa.
#[derive(
    Visit,
    Copy,
    Clone,
    PartialEq,
    Debug,
    Reflect,
    AsRefStr,
    EnumString,
    VariantNames,
    Serialize,
    Deserialize,
)]
pub enum Exposure {
    /// Automatic exposure based on the frame luminance. High luminance values will result
    /// in lower exposure levels and vice versa.
    Auto {
        /// A min luminance value. The lower the value, the higher exposure values will be used for
        /// dark images. The default value is 0.035.
        #[reflect(min_value = 0.0, step = 0.1)]
        min_luminance: f32,
        /// A max luminance value. The higher the value, the lower exposure values will be used for
        /// bright images. The default value is 10.0.
        #[reflect(min_value = 0.0, step = 0.1)]
        max_luminance: f32,
    },

    /// Specific exposure level. To "disable" any HDR effects use 1.0 as a value. This is the default
    /// option.
    Manual(f32),
}

// Stable type id for serialization/reflection; must never change.
uuid_provider!(Exposure = "0e35ee3d-8baa-4b0c-b3dd-6c31a08c121e");
326
327impl Default for Exposure {
328    fn default() -> Self {
329        Self::Manual(1.0)
330    }
331}
332
/// Camera allows you to see world from specific point in world. You must have at least one camera in
/// your scene to see anything.
///
/// ## Projection
///
/// There are two main projection modes supported by Camera node: perspective and orthogonal projections.
/// Perspective projection is used primarily to display 3D scenes, while orthogonal projection could be
/// used for both 3D and 2D. Orthogonal projection could also be used in CAD software.
///
/// ## Skybox
///
/// Skybox is a cube around the camera with six textures forming seamless "sky". It could be anything,
/// starting from simple blue sky and ending with outer space.
///
/// ## Multiple cameras
///
/// Fyrox supports multiple cameras per scene, it means that you can create split screen games, make
/// picture-in-picture insertions in your main camera view and any other combinations you need.
///
/// ## Performance
///
/// Each camera forces engine to re-render same scene one more time, which may cause almost double load
/// of your GPU.
//
// NOTE: fields below use `//` comments (not `///`) deliberately, to avoid feeding extra
// doc attributes to the derive macros.
#[derive(Debug, Visit, Reflect, Clone, ComponentProvider)]
#[reflect(derived_type = "Node")]
pub struct Camera {
    base: Base,

    // Active projection mode; see [`Projection`].
    #[reflect(setter = "set_projection")]
    projection: InheritableVariable<Projection>,

    // Normalized (resolution-independent) viewport rectangle; see [`Camera::set_viewport`].
    #[reflect(setter = "set_viewport")]
    viewport: InheritableVariable<Rect<f32>>,

    // Disabled cameras are ignored during rendering.
    #[reflect(setter = "set_enabled")]
    enabled: InheritableVariable<bool>,

    // Optional environment (skybox) texture.
    #[reflect(setter = "set_environment")]
    environment: InheritableVariable<Option<TextureResource>>,

    // Exposure mode used for HDR rendering; see [`Exposure`].
    #[reflect(setter = "set_exposure")]
    exposure: InheritableVariable<Exposure>,

    // Optional color grading look-up table.
    #[reflect(setter = "set_color_grading_lut")]
    color_grading_lut: InheritableVariable<Option<ColorGradingLut>>,

    // Whether color grading is applied during rendering.
    #[reflect(setter = "set_color_grading_enabled")]
    color_grading_enabled: InheritableVariable<bool>,

    // Speed of automatic luminance adaptation; see [`Camera::set_hdr_adaptation_speed`].
    #[reflect(setter = "set_hdr_adaptation_speed")]
    hdr_adaptation_speed: InheritableVariable<f32>,

    // Optional off-screen render target; non-serializable (see [`Camera::set_render_target`]).
    #[reflect(setter = "set_render_target")]
    #[visit(skip)]
    render_target: Option<TextureResource>,

    // Cached view matrix; recomputed each frame by `calculate_matrices`.
    #[visit(skip)]
    #[reflect(hidden)]
    view_matrix: Matrix4<f32>,

    // Cached projection matrix; recomputed each frame by `calculate_matrices`.
    #[visit(skip)]
    #[reflect(hidden)]
    projection_matrix: Matrix4<f32>,
}
397
// Allows transparent read access to the shared `Base` node data.
impl Deref for Camera {
    type Target = Base;

    fn deref(&self) -> &Self::Target {
        &self.base
    }
}
405
// Allows transparent mutable access to the shared `Base` node data.
impl DerefMut for Camera {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.base
    }
}
411
412impl Default for Camera {
413    fn default() -> Self {
414        CameraBuilder::new(BaseBuilder::new()).build_camera()
415    }
416}
417
// Stable type id for serialization/reflection; must never change.
impl TypeUuidProvider for Camera {
    fn type_uuid() -> Uuid {
        uuid!("198d3aca-433c-4ce1-bb25-3190699b757f")
    }
}
423
/// A set of camera fitting parameters for different projection modes. You should take these parameters
/// and modify camera position and projection accordingly. In case of perspective projection all you need
/// to do is to set new world-space position of the camera. In case of orthographic projection, do previous
/// step and also modify vertical size of orthographic projection (see [`OrthographicProjection`] for more
/// info).
pub enum FitParameters {
    /// Fitting parameters for perspective projection.
    Perspective {
        /// New world-space position of the camera.
        position: Vector3<f32>,
        /// Distance from the center of an AABB of the object to the `position`.
        distance: f32,
    },
    /// Fitting parameters for orthographic projection.
    Orthographic {
        /// New world-space position of the camera.
        position: Vector3<f32>,
        /// New vertical size for orthographic projection.
        vertical_size: f32,
    },
}
445
446impl FitParameters {
447    fn fallback_perspective() -> Self {
448        Self::Perspective {
449            position: Default::default(),
450            distance: 1.0,
451        }
452    }
453}
454
impl Camera {
    /// Explicitly calculates view and projection matrices. Normally, you should not call
    /// this method, it will be called automatically when new frame starts.
    #[inline]
    pub fn calculate_matrices(&mut self, frame_size: Vector2<f32>) {
        let pos = self.base.global_position();
        let look = self.base.look_vector();
        let up = self.base.up_vector();

        // Right-handed view matrix looking along the node's look vector.
        self.view_matrix = Matrix4::look_at_rh(&Point3::from(pos), &Point3::from(pos + look), &up);
        self.projection_matrix = self.projection.matrix(frame_size);
    }

    /// Sets new viewport in resolution-independent format. In other words
    /// each parameter of viewport defines portion of your current resolution
    /// in percents. In example viewport (0.0, 0.0, 0.5, 1.0) will force camera
    /// to use left half of your screen and (0.5, 0.0, 0.5, 1.0) - right half.
    /// Why not just use pixels directly? Because you can change resolution while
    /// your application is running and you'd be forced to manually recalculate
    /// pixel values every time the resolution changes.
    pub fn set_viewport(&mut self, mut viewport: Rect<f32>) -> Rect<f32> {
        // Clamp all components to the normalized [0; 1] range so the viewport
        // always stays inside the frame.
        viewport.position.x = viewport.position.x.clamp(0.0, 1.0);
        viewport.position.y = viewport.position.y.clamp(0.0, 1.0);
        viewport.size.x = viewport.size.x.clamp(0.0, 1.0);
        viewport.size.y = viewport.size.y.clamp(0.0, 1.0);
        self.viewport.set_value_and_mark_modified(viewport)
    }

    /// Returns current viewport.
    pub fn viewport(&self) -> Rect<f32> {
        *self.viewport
    }

    /// Calculates viewport rectangle in pixels based on internal resolution-independent
    /// viewport. It is useful when you need to get real viewport rectangle in pixels.
    ///
    /// # Notes
    ///
    /// Viewport cannot be less than 1x1 pixel in size, so the method clamps values to
    /// range `[1; infinity]`. This is strictly needed because having viewport of 0 in size
    /// will cause panics in various places. It happens because viewport size is used as
    /// divisor in math formulas, but you cannot divide by zero.
    #[inline]
    pub fn viewport_pixels(&self, frame_size: Vector2<f32>) -> Rect<i32> {
        Rect::new(
            (self.viewport.x() * frame_size.x) as i32,
            (self.viewport.y() * frame_size.y) as i32,
            ((self.viewport.w() * frame_size.x) as i32).max(1),
            ((self.viewport.h() * frame_size.y) as i32).max(1),
        )
    }

    /// Returns current view-projection matrix.
    #[inline]
    pub fn view_projection_matrix(&self) -> Matrix4<f32> {
        self.projection_matrix * self.view_matrix
    }

    /// Returns current projection matrix.
    #[inline]
    pub fn projection_matrix(&self) -> Matrix4<f32> {
        self.projection_matrix
    }

    /// Returns current view matrix.
    #[inline]
    pub fn view_matrix(&self) -> Matrix4<f32> {
        self.view_matrix
    }

    /// Returns inverse view matrix, or `None` if the view matrix is singular.
    #[inline]
    pub fn inv_view_matrix(&self) -> Option<Matrix4<f32>> {
        self.view_matrix.try_inverse()
    }

    /// Returns current projection mode.
    #[inline]
    pub fn projection(&self) -> &Projection {
        &self.projection
    }

    /// Returns a clone of the current projection mode.
    #[inline]
    pub fn projection_value(&self) -> Projection {
        (*self.projection).clone()
    }

    /// Returns current projection mode as mutable reference.
    #[inline]
    pub fn projection_mut(&mut self) -> &mut Projection {
        self.projection.get_value_mut_and_mark_modified()
    }

    /// Sets current projection mode. Returns the previous projection.
    #[inline]
    pub fn set_projection(&mut self, projection: Projection) -> Projection {
        self.projection.set_value_and_mark_modified(projection)
    }

    /// Returns state of camera: enabled or not.
    #[inline]
    pub fn is_enabled(&self) -> bool {
        *self.enabled
    }

    /// Enables or disables camera. Disabled cameras will be ignored during
    /// rendering. This allows you to exclude views from specific cameras from
    /// final picture. Returns the previous state.
    #[inline]
    pub fn set_enabled(&mut self, enabled: bool) -> bool {
        self.enabled.set_value_and_mark_modified(enabled)
    }

    /// Sets new environment. Returns the previous environment.
    pub fn set_environment(
        &mut self,
        environment: Option<TextureResource>,
    ) -> Option<TextureResource> {
        self.environment.set_value_and_mark_modified(environment)
    }

    /// Return optional mutable reference to current environment.
    pub fn environment_mut(&mut self) -> Option<&mut TextureResource> {
        self.environment.get_value_mut_and_mark_modified().as_mut()
    }

    /// Return optional shared reference to current environment.
    pub fn environment_ref(&self) -> Option<&TextureResource> {
        self.environment.as_ref()
    }

    /// Return current environment map.
    pub fn environment_map(&self) -> Option<TextureResource> {
        (*self.environment).clone()
    }

    /// Sets the speed of automatic adaptation for the current frame luminance. In other words,
    /// it defines how fast the reaction to the new frame brightness will be. The lower the value,
    /// the longer it will take to adjust the exposure for the new brightness level.
    /// Returns the previous speed.
    pub fn set_hdr_adaptation_speed(&mut self, speed: f32) -> f32 {
        self.hdr_adaptation_speed.set_value_and_mark_modified(speed)
    }

    /// The speed of automatic adaptation for the current frame luminance.
    pub fn hdr_adaptation_speed(&self) -> f32 {
        *self.hdr_adaptation_speed
    }

    /// Creates picking ray from given screen coordinates.
    pub fn make_ray(&self, screen_coord: Vector2<f32>, screen_size: Vector2<f32>) -> Ray {
        let viewport = self.viewport_pixels(screen_size);
        // Map screen coordinates into normalized device coordinates [-1; 1].
        let nx = screen_coord.x / (viewport.w() as f32) * 2.0 - 1.0;
        // Invert y here because OpenGL has origin at left bottom corner,
        // but window coordinates starts from left *upper* corner.
        let ny = (viewport.h() as f32 - screen_coord.y) / (viewport.h() as f32) * 2.0 - 1.0;
        let inv_view_proj = self
            .view_projection_matrix()
            .try_inverse()
            .unwrap_or_default();
        // Unproject points on the near (z = -1) and far (z = 1) clipping planes.
        let near = inv_view_proj * Vector4::new(nx, ny, -1.0, 1.0);
        let far = inv_view_proj * Vector4::new(nx, ny, 1.0, 1.0);
        // Perspective divide to get world-space positions.
        let begin = near.xyz().scale(1.0 / near.w);
        let end = far.xyz().scale(1.0 / far.w);
        Ray::from_two_points(begin, end)
    }

    /// Calculates new fitting parameters for the given axis-aligned bounding box using current camera's
    /// global transform and provided aspect ratio. See [`FitParameters`] docs for more info.
    ///
    /// This method returns fitting parameters and **do not** modify camera's state. It is needed, because in
    /// some cases your camera could be attached to some sort of a hinge node and setting its local position
    /// in order to fit it to the given AABB would break the preset spatial relations between nodes. Instead,
    /// the method returns a set of parameters that can be used as you want.
    #[inline]
    #[must_use]
    pub fn fit(
        &self,
        aabb: &AxisAlignedBoundingBox,
        aspect_ratio: f32,
        scale: f32,
    ) -> FitParameters {
        if aabb.is_invalid_or_degenerate() {
            return FitParameters::fallback_perspective();
        }

        let look_vector = self
            .look_vector()
            .try_normalize(f32::EPSILON)
            .unwrap_or_default();

        match self.projection.deref() {
            Projection::Perspective(perspective) => {
                // Bounding sphere radius approximated by the largest half-extent.
                let radius = aabb.half_extents().max();

                // sin(fov/2) relates the sphere radius to the required viewing distance;
                // zero means a degenerate FOV, so bail out with fallback parameters.
                let denominator = (perspective.fov * 0.5).sin();
                if denominator == 0.0 {
                    return FitParameters::fallback_perspective();
                }

                let distance = radius / denominator * scale;
                FitParameters::Perspective {
                    position: aabb.center() - look_vector.scale(distance),
                    distance,
                }
            }
            Projection::Orthographic(_) => {
                // Project AABB corners into camera-local space and find their 2D extents.
                let mut min_x = f32::MAX;
                let mut min_y = f32::MAX;
                let mut max_x = -f32::MAX;
                let mut max_y = -f32::MAX;
                let inv = self.global_transform().try_inverse().unwrap_or_default();
                for point in aabb.corners() {
                    let local = inv.transform_point(&Point3::from(point));
                    if local.x < min_x {
                        min_x = local.x;
                    }
                    if local.y < min_y {
                        min_y = local.y;
                    }
                    if local.x > max_x {
                        max_x = local.x;
                    }
                    if local.y > max_y {
                        max_y = local.y;
                    }
                }

                FitParameters::Orthographic {
                    // Back the camera away along the look vector by the AABB diagonal length.
                    position: aabb.center()
                        - look_vector.scale((aabb.max - aabb.min).norm() * scale),
                    vertical_size: (max_y - min_y).max((max_x - min_x) * aspect_ratio) * scale,
                }
            }
        }
    }

    /// Returns current frustum of the camera.
    #[inline]
    pub fn frustum(&self) -> Frustum {
        Frustum::from_view_projection_matrix(self.view_projection_matrix()).unwrap_or_default()
    }

    /// Projects given world space point on screen plane. Returns `None` when the point
    /// is behind the camera or the projection is degenerate (`w == 0`).
    pub fn project(
        &self,
        world_pos: Vector3<f32>,
        screen_size: Vector2<f32>,
    ) -> Option<Vector2<f32>> {
        let viewport = self.viewport_pixels(screen_size);
        let proj = self.view_projection_matrix()
            * Vector4::new(world_pos.x, world_pos.y, world_pos.z, 1.0);
        if proj.w != 0.0 && proj.z >= 0.0 {
            // Perspective divide combined with mapping from NDC to viewport pixels.
            let k = (1.0 / proj.w) * 0.5;
            Some(Vector2::new(
                viewport.x() as f32 + viewport.w() as f32 * (proj.x * k + 0.5),
                // Flip y: screen coordinates grow downward.
                viewport.h() as f32
                    - (viewport.y() as f32 + viewport.h() as f32 * (proj.y * k + 0.5)),
            ))
        } else {
            None
        }
    }

    /// Sets new color grading LUT. Returns the previous LUT.
    pub fn set_color_grading_lut(
        &mut self,
        lut: Option<ColorGradingLut>,
    ) -> Option<ColorGradingLut> {
        self.color_grading_lut.set_value_and_mark_modified(lut)
    }

    /// Returns current color grading map.
    pub fn color_grading_lut(&self) -> Option<ColorGradingLut> {
        (*self.color_grading_lut).clone()
    }

    /// Returns current color grading map by ref.
    pub fn color_grading_lut_ref(&self) -> Option<&ColorGradingLut> {
        self.color_grading_lut.as_ref()
    }

    /// Enables or disables color grading. Returns the previous state.
    pub fn set_color_grading_enabled(&mut self, enable: bool) -> bool {
        self.color_grading_enabled
            .set_value_and_mark_modified(enable)
    }

    /// Whether color grading enabled or not.
    pub fn color_grading_enabled(&self) -> bool {
        *self.color_grading_enabled
    }

    /// Sets new exposure. See [`Exposure`] struct docs for more info. Returns the previous exposure.
    pub fn set_exposure(&mut self, exposure: Exposure) -> Exposure {
        self.exposure.set_value_and_mark_modified(exposure)
    }

    /// Returns current exposure value.
    pub fn exposure(&self) -> Exposure {
        *self.exposure
    }

    /// Sets a new render target of the camera. If set, the camera will render to the specified
    /// render target and will not appear in the final frame. Typical usage is something like this:
    ///
    /// ```rust
    /// # use fyrox_impl::scene::camera::Camera;
    /// # use fyrox_texture::{TextureResource, TextureResourceExtension};
    /// fn set_render_target(camera: &mut Camera) {
    ///     // Create a render target of 256x256 pixels. The size of the render target can be changed
    ///     // at runtime, and the engine will automatically adjust GPU resources for you. The render
    ///     // target is a resource, thus it can be shared across multiple "users". For instance, you
    ///     // can apply this render target texture to a quad in your game world, and it will make a
    ///     // sort of virtual camera (surveillance camera).
    ///     let render_target = TextureResource::new_render_target(256, 256);
    ///     camera.set_render_target(Some(render_target));
    /// }
    /// ```
    ///
    /// # Serialization
    ///
    /// The render target is non-serializable, and you have to re-create it after deserialization.
    pub fn set_render_target(
        &mut self,
        render_target: Option<TextureResource>,
    ) -> Option<TextureResource> {
        std::mem::replace(&mut self.render_target, render_target)
    }

    /// Returns a reference to the current render target (if any).
    pub fn render_target(&self) -> Option<&TextureResource> {
        self.render_target.as_ref()
    }
}
790
791impl ConstructorProvider<Node, Graph> for Camera {
792    fn constructor() -> NodeConstructor {
793        NodeConstructor::new::<Self>().with_variant("Camera", |_| {
794            CameraBuilder::new(BaseBuilder::new().with_name("Camera"))
795                .build_node()
796                .into()
797        })
798    }
799}
800
801impl NodeTrait for Camera {
802    /// Returns current **local-space** bounding box.
803    #[inline]
804    fn local_bounding_box(&self) -> AxisAlignedBoundingBox {
805        // TODO: Maybe calculate AABB using frustum corners?
806        self.base.local_bounding_box()
807    }
808
809    /// Returns current **world-space** bounding box.
810    fn world_bounding_box(&self) -> AxisAlignedBoundingBox {
811        self.base.world_bounding_box()
812    }
813
814    fn id(&self) -> Uuid {
815        Self::type_uuid()
816    }
817
818    fn update(&mut self, context: &mut UpdateContext) {
819        let frame_size = if let Some(TextureKind::Rectangle { width, height }) = self
820            .render_target
821            .as_ref()
822            .and_then(|rt| rt.data_ref().as_loaded_ref().map(|rt| rt.kind()))
823        {
824            Vector2::new(width as f32, height as f32)
825        } else {
826            context.frame_size
827        };
828
829        self.calculate_matrices(frame_size);
830    }
831
832    fn debug_draw(&self, ctx: &mut SceneDrawingContext) {
833        let transform = self.global_transform_without_scaling();
834        ctx.draw_pyramid(
835            self.frustum().center(),
836            self.frustum().right_top_front_corner(),
837            self.frustum().left_top_front_corner(),
838            self.frustum().left_bottom_front_corner(),
839            self.frustum().right_bottom_front_corner(),
840            Color::GREEN,
841            transform,
842        );
843    }
844}
845
/// All possible errors that may occur during color grading look-up table creation.
#[derive(Debug)]
pub enum ColorGradingLutCreationError {
    /// There is not enough data in provided texture to build LUT.
    ///
    /// Note: this is also returned when the texture holds *more* data than a
    /// 16x16x16 LUT needs — the data size must match exactly.
    NotEnoughData {
        /// Required amount of bytes.
        required: usize,
        /// Actual data size.
        current: usize,
    },

    /// Pixel format is not supported. It must be either RGB8 or RGBA8.
    InvalidPixelFormat(TexturePixelKind),

    /// Texture error.
    Texture(LoadError),
}
863
864impl Display for ColorGradingLutCreationError {
865    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
866        match self {
867            ColorGradingLutCreationError::NotEnoughData { required, current } => {
868                write!(
869                    f,
870                    "There is not enough data in provided \
871                texture to build LUT. Required: {required}, current: {current}.",
872                )
873            }
874            ColorGradingLutCreationError::InvalidPixelFormat(v) => {
875                write!(
876                    f,
877                    "Pixel format is not supported. It must be either RGB8 \
878                or RGBA8, but texture has {v:?} pixel format"
879                )
880            }
881            ColorGradingLutCreationError::Texture(v) => {
882                write!(f, "Texture load error: {v}")
883            }
884        }
885    }
886}
887
/// Color grading look up table (LUT). Color grading is used to modify color space of the
/// rendered frame; it maps one color space to another. It is widely used effect in games,
/// you've probably noticed either "warmness" or "coldness" in colors in various scenes in
/// games - this is achieved by color grading.
///
/// See [more info in Unreal engine docs](https://docs.unrealengine.com/4.26/en-US/RenderingAndGraphics/PostProcessEffects/UsingLUTs/)
#[derive(Visit, Clone, Default, PartialEq, Debug, Reflect, Eq)]
pub struct ColorGradingLut {
    // The original 2D strip texture the 3D LUT was built from. This is the only
    // field that is serialized.
    unwrapped_lut: Option<TextureResource>,

    // The generated 16x16x16 volume texture. Skipped by serialization and hidden
    // from reflection; presumably rebuilt from `unwrapped_lut` after loading —
    // TODO confirm against the deserialization path.
    #[visit(skip)]
    #[reflect(hidden)]
    lut: Option<TextureResource>,
}
902
// Stable type UUID for `ColorGradingLut`; presumably wires it into the engine's
// type-UUID/reflection machinery (see `core::type_traits`).
uuid_provider!(ColorGradingLut = "bca9c90a-7cde-4960-8814-c132edfc9614");
904
905impl ColorGradingLut {
906    /// Creates 3D look-up texture from 2D strip.
907    ///
908    /// # Input Texture Requirements
909    ///
910    /// Width: 1024px
911    /// Height: 16px
912    /// Pixel Format: RGB8/RGBA8
913    ///
914    /// # Usage
915    ///
916    /// Typical usage would be:
917    ///
918    /// ```no_run
919    /// # use fyrox_impl::scene::camera::ColorGradingLut;
920    /// # use fyrox_impl::asset::manager::{ResourceManager};
921    /// # use fyrox_impl::resource::texture::Texture;
922    ///
923    /// async fn create_lut(resource_manager: ResourceManager) -> ColorGradingLut {
924    ///     ColorGradingLut::new(resource_manager.request::<Texture>(
925    ///         "your_lut.jpg",
926    ///     ))
927    ///     .await
928    ///     .unwrap()
929    /// }
930    /// ```
931    ///
932    /// Then pass LUT to either CameraBuilder or to camera instance, and don't forget to enable
933    /// color grading.
934    pub async fn new(unwrapped_lut: TextureResource) -> Result<Self, ColorGradingLutCreationError> {
935        match unwrapped_lut.await {
936            Ok(unwrapped_lut) => {
937                let data = unwrapped_lut.data_ref();
938
939                if data.pixel_kind() != TexturePixelKind::RGBA8
940                    && data.pixel_kind() != TexturePixelKind::RGB8
941                {
942                    return Err(ColorGradingLutCreationError::InvalidPixelFormat(
943                        data.pixel_kind(),
944                    ));
945                }
946
947                let bytes = data.data();
948
949                const RGBA8_SIZE: usize = 16 * 16 * 16 * 4;
950                const RGB8_SIZE: usize = 16 * 16 * 16 * 3;
951
952                if data.pixel_kind() == TexturePixelKind::RGBA8 {
953                    if bytes.len() != RGBA8_SIZE {
954                        return Err(ColorGradingLutCreationError::NotEnoughData {
955                            required: RGBA8_SIZE,
956                            current: bytes.len(),
957                        });
958                    }
959                } else if bytes.len() != RGB8_SIZE {
960                    return Err(ColorGradingLutCreationError::NotEnoughData {
961                        required: RGB8_SIZE,
962                        current: bytes.len(),
963                    });
964                }
965
966                let pixel_size = if data.pixel_kind() == TexturePixelKind::RGBA8 {
967                    4
968                } else {
969                    3
970                };
971
972                let mut lut_bytes = Vec::with_capacity(16 * 16 * 16 * 3);
973
974                for z in 0..16 {
975                    for y in 0..16 {
976                        for x in 0..16 {
977                            let pixel_index = z * 16 + y * 16 * 16 + x;
978                            let pixel_byte_pos = pixel_index * pixel_size;
979
980                            lut_bytes.push(bytes[pixel_byte_pos]); // R
981                            lut_bytes.push(bytes[pixel_byte_pos + 1]); // G
982                            lut_bytes.push(bytes[pixel_byte_pos + 2]); // B
983                        }
984                    }
985                }
986
987                let lut = TextureResource::from_bytes(
988                    Uuid::new_v4(),
989                    TextureKind::Volume {
990                        width: 16,
991                        height: 16,
992                        depth: 16,
993                    },
994                    TexturePixelKind::RGB8,
995                    lut_bytes,
996                    ResourceKind::Embedded,
997                )
998                .unwrap();
999
1000                let mut lut_ref = lut.data_ref();
1001
1002                lut_ref.set_s_wrap_mode(TextureWrapMode::ClampToEdge);
1003                lut_ref.set_t_wrap_mode(TextureWrapMode::ClampToEdge);
1004
1005                drop(lut_ref);
1006                drop(data);
1007
1008                Ok(Self {
1009                    lut: Some(lut),
1010                    unwrapped_lut: Some(unwrapped_lut),
1011                })
1012            }
1013            Err(e) => Err(ColorGradingLutCreationError::Texture(e)),
1014        }
1015    }
1016
1017    /// Returns color grading unwrapped look-up table. This is initial texture that was
1018    /// used to create the look-up table.
1019    pub fn unwrapped_lut(&self) -> TextureResource {
1020        self.unwrapped_lut.clone().unwrap()
1021    }
1022
1023    /// Returns 3D color grading look-up table ready for use on GPU.
1024    pub fn lut(&self) -> TextureResource {
1025        self.lut.clone().unwrap()
1026    }
1027
1028    /// Returns 3D color grading look-up table by ref ready for use on GPU.
1029    pub fn lut_ref(&self) -> &TextureResource {
1030        self.lut.as_ref().unwrap()
1031    }
1032}
1033
/// Camera builder is used to create new camera in declarative manner.
/// This is typical implementation of Builder pattern.
pub struct CameraBuilder {
    // Builder for the base node the camera is built on.
    base_builder: BaseBuilder,
    // Field of view in radians.
    fov: f32,
    // Near projection plane distance.
    z_near: f32,
    // Far projection plane distance.
    z_far: f32,
    // Normalized viewport rectangle; (0, 0, 1, 1) covers the whole frame.
    viewport: Rect<f32>,
    // Initial enabled state of the camera.
    enabled: bool,
    // Optional environment map texture.
    environment: Option<TextureResource>,
    // Exposure options.
    exposure: Exposure,
    // Optional color grading look-up table.
    color_grading_lut: Option<ColorGradingLut>,
    // Whether color grading is enabled.
    color_grading_enabled: bool,
    // Projection mode.
    projection: Projection,
    // Optional render target for the camera.
    render_target: Option<TextureResource>,
    // Speed of automatic adaptation for the current frame luminance.
    hdr_adaptation_speed: f32,
}
1051
1052impl CameraBuilder {
1053    /// Creates new camera builder using given base node builder.
1054    pub fn new(base_builder: BaseBuilder) -> Self {
1055        Self {
1056            enabled: true,
1057            base_builder,
1058            fov: 75.0f32.to_radians(),
1059            z_near: 0.025,
1060            z_far: 2048.0,
1061            viewport: Rect::new(0.0, 0.0, 1.0, 1.0),
1062            environment: None,
1063            exposure: Default::default(),
1064            color_grading_lut: None,
1065            color_grading_enabled: false,
1066            projection: Projection::default(),
1067            render_target: None,
1068            hdr_adaptation_speed: 0.5,
1069        }
1070    }
1071
1072    /// Sets desired field of view in radians.
1073    pub fn with_fov(mut self, fov: f32) -> Self {
1074        self.fov = fov;
1075        self
1076    }
1077
1078    /// Sets desired near projection plane.
1079    pub fn with_z_near(mut self, z_near: f32) -> Self {
1080        self.z_near = z_near;
1081        self
1082    }
1083
1084    /// Sets desired far projection plane.
1085    pub fn with_z_far(mut self, z_far: f32) -> Self {
1086        self.z_far = z_far;
1087        self
1088    }
1089
1090    /// Sets desired viewport.
1091    pub fn with_viewport(mut self, viewport: Rect<f32>) -> Self {
1092        self.viewport = viewport;
1093        self
1094    }
1095
1096    /// Sets desired initial state of camera: enabled or disabled.
1097    pub fn enabled(mut self, enabled: bool) -> Self {
1098        self.enabled = enabled;
1099        self
1100    }
1101
1102    /// Sets desired environment map.
1103    pub fn with_environment(mut self, environment: TextureResource) -> Self {
1104        self.environment = Some(environment);
1105        self
1106    }
1107
1108    /// Sets desired color grading LUT.
1109    pub fn with_color_grading_lut(mut self, lut: ColorGradingLut) -> Self {
1110        self.color_grading_lut = Some(lut);
1111        self
1112    }
1113
1114    /// Sets whether color grading should be enabled or not.
1115    pub fn with_color_grading_enabled(mut self, enabled: bool) -> Self {
1116        self.color_grading_enabled = enabled;
1117        self
1118    }
1119
1120    /// Sets desired exposure options.
1121    pub fn with_exposure(mut self, exposure: Exposure) -> Self {
1122        self.exposure = exposure;
1123        self
1124    }
1125
1126    /// Sets desired projection mode.
1127    pub fn with_projection(mut self, projection: Projection) -> Self {
1128        self.projection = projection;
1129        self
1130    }
1131
1132    /// Sets desired render target for the camera.
1133    pub fn with_render_target(mut self, render_target: Option<TextureResource>) -> Self {
1134        self.render_target = render_target;
1135        self
1136    }
1137
1138    /// Sets the speed of automatic adaptation for the current frame luminance.
1139    pub fn with_hdr_adaptation_speed(mut self, speed: f32) -> Self {
1140        self.hdr_adaptation_speed = speed;
1141        self
1142    }
1143
1144    /// Creates new instance of camera.
1145    pub fn build_camera(self) -> Camera {
1146        Camera {
1147            enabled: self.enabled.into(),
1148            base: self.base_builder.build_base(),
1149            projection: self.projection.into(),
1150            viewport: self.viewport.into(),
1151            // No need to calculate these matrices - they'll be automatically
1152            // recalculated before rendering.
1153            view_matrix: Matrix4::identity(),
1154            projection_matrix: Matrix4::identity(),
1155            environment: self.environment.into(),
1156            exposure: self.exposure.into(),
1157            color_grading_lut: self.color_grading_lut.into(),
1158            color_grading_enabled: self.color_grading_enabled.into(),
1159            hdr_adaptation_speed: self.hdr_adaptation_speed.into(),
1160            render_target: self.render_target,
1161        }
1162    }
1163
1164    /// Creates new instance of camera node.
1165    pub fn build_node(self) -> Node {
1166        Node::new(self.build_camera())
1167    }
1168
1169    /// Creates new instance of camera node and adds it to the graph.
1170    pub fn build(self, graph: &mut Graph) -> Handle<Camera> {
1171        graph.add_node(self.build_node()).to_variant()
1172    }
1173}