fyrox_impl/renderer/observer.rs
1// Copyright (c) 2019-present Dmitry Stepanov and Fyrox Engine contributors.
2//
3// Permission is hereby granted, free of charge, to any person obtaining a copy
4// of this software and associated documentation files (the "Software"), to deal
5// in the Software without restriction, including without limitation the rights
6// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7// copies of the Software, and to permit persons to whom the Software is
8// furnished to do so, subject to the following conditions:
9//
10// The above copyright notice and this permission notice shall be included in all
11// copies or substantial portions of the Software.
12//
13// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19// SOFTWARE.
20
21//! An observer holds all the information required to render a scene from a particular point of view.
22//! Contains all information for rendering, effectively decouples rendering entities from scene
23//! entities. See [`Observer`] docs for more info.
24
25use crate::{
26 core::{
27 algebra::{Matrix4, Point3, Vector2, Vector3},
28 math::{frustum::Frustum, Rect},
29 pool::Handle,
30 },
31 graphics::gpu_texture::CubeMapFace,
32 renderer::utils::CubeMapFaceDescriptor,
33 scene::{
34 camera::{Camera, ColorGradingLut, Exposure, PerspectiveProjection, Projection},
35 collider::BitMask,
36 node::Node,
37 probe::ReflectionProbe,
38 EnvironmentLightingSource, Scene,
39 },
40};
41use fyrox_core::color::Color;
42use fyrox_texture::TextureResource;
43
/// Observer position contains all the data, that describes an observer position in 3D space. It
/// could be a real camera, light source's "virtual camera" that is used for shadow mapping, etc.
#[derive(Clone, Default)]
pub struct ObserverPosition {
    /// World-space position of the observer.
    pub translation: Vector3<f32>,
    /// Position of the near clipping plane.
    pub z_near: f32,
    /// Position of the far clipping plane.
    pub z_far: f32,
    /// The view matrix of the observer (world-to-view-space transform).
    pub view_matrix: Matrix4<f32>,
    /// Projection matrix of the observer (view-to-clip-space transform).
    pub projection_matrix: Matrix4<f32>,
    /// Pre-multiplied combination of the projection and view matrices.
    pub view_projection_matrix: Matrix4<f32>,
}
61
62impl ObserverPosition {
63 /// Creates a new observer position from a scene camera.
64 pub fn from_camera(camera: &Camera) -> Self {
65 Self {
66 translation: camera.global_position(),
67 z_near: camera.projection().z_near(),
68 z_far: camera.projection().z_far(),
69 view_matrix: camera.view_matrix(),
70 projection_matrix: camera.projection_matrix(),
71 view_projection_matrix: camera.view_projection_matrix(),
72 }
73 }
74}
75
/// Collections of observers in a scene.
#[derive(Default)]
pub struct ObserversCollection {
    /// Camera observers.
    pub cameras: Vec<Observer>,
    /// Reflection probe observers; these are rendered before cameras.
    pub reflection_probes: Vec<Observer>,
}
84
85impl ObserversCollection {
86 /// Creates a new observers collection from a scene. This method collects all observers that
87 /// need to render the scene (which includes camera and reflection probes).
88 pub fn from_scene(scene: &Scene, frame_size: Vector2<f32>) -> Self {
89 let mut observers = Self::default();
90 for node in scene.graph.linear_iter() {
91 if node.is_globally_enabled() {
92 if let Some(camera) = node.cast::<Camera>() {
93 if camera.is_enabled() {
94 observers
95 .cameras
96 .push(Observer::from_camera(camera, frame_size));
97 }
98 } else if let Some(probe) = node.cast::<ReflectionProbe>() {
99 if probe.updated.get() {
100 continue;
101 }
102 probe.updated.set(true);
103
104 let projection = Projection::Perspective(PerspectiveProjection {
105 fov: 90.0f32.to_radians(),
106 z_near: *probe.z_near,
107 z_far: *probe.z_far,
108 });
109 let resolution = probe.resolution() as f32;
110 let cube_size = Vector2::repeat(probe.resolution() as f32);
111 let projection_matrix = projection.matrix(cube_size);
112
113 for cube_face in CubeMapFaceDescriptor::cube_faces() {
114 let translation = probe.global_rendering_position();
115 let view_matrix = Matrix4::look_at_rh(
116 &Point3::from(translation),
117 &Point3::from(translation + cube_face.look),
118 &cube_face.up,
119 );
120 let view_projection_matrix = projection_matrix * view_matrix;
121 observers.reflection_probes.push(Observer {
122 handle: node.handle(),
123 reflection_probe_data: Some(ReflectionProbeData {
124 cube_map_face: cube_face.face,
125 environment_lighting_source: *probe.environment_lighting_source,
126 ambient_lighting_color: *probe.ambient_lighting_color,
127 }),
128 render_target: Some(probe.render_target().clone()),
129 position: ObserverPosition {
130 translation,
131 z_near: *probe.z_near,
132 z_far: *probe.z_far,
133 view_matrix,
134 projection_matrix,
135 view_projection_matrix,
136 },
137 environment_map: None,
138 render_mask: *probe.render_mask,
139 projection: projection.clone(),
140 color_grading_lut: None,
141 color_grading_enabled: false,
142 exposure: Default::default(),
143 viewport: Rect::new(0, 0, resolution as i32, resolution as i32),
144 frustum: Frustum::from_view_projection_matrix(view_projection_matrix)
145 .unwrap_or_default(),
146 hdr_adaptation_speed: 1.0,
147 })
148 }
149 }
150 }
151 }
152 observers
153 }
154}
155
/// The data used by the renderer when it's rendering a reflection probe.
pub struct ReflectionProbeData {
    /// Cube map face of a cube render target to which to render the scene.
    pub cube_map_face: CubeMapFace,
    /// Environment lighting source of the reflection probe. See [`EnvironmentLightingSource`] docs
    /// for more info.
    pub environment_lighting_source: EnvironmentLightingSource,
    /// Ambient lighting color of the reflection probe.
    pub ambient_lighting_color: Color,
}
166
/// An observer holds all the information required to render a scene from a particular point of view.
/// Contains all information for rendering, effectively decouples rendering entities from scene
/// entities. Observer can be constructed from an arbitrary set of data or from scene entities,
/// such as cameras, reflection probes.
pub struct Observer {
    /// The handle of a scene node (camera, reflection probe, etc.) that was used to create this
    /// Observer.
    pub handle: Handle<Node>,
    /// Additional data used by reflection probes only; `None` for camera observers.
    pub reflection_probe_data: Option<ReflectionProbeData>,
    /// Render target to which to render the scene.
    pub render_target: Option<TextureResource>,
    /// Position of the observer. See [`ObserverPosition`] docs for more info.
    pub position: ObserverPosition,
    /// Environment map which will be used for IBL and reflections. If not set, then scene's skybox
    /// will be used as an environment map.
    pub environment_map: Option<TextureResource>,
    /// A set of switches that defines which "layers" of the scene will be rendered.
    pub render_mask: BitMask,
    /// Projection mode that will be used to project the scene on screen's 2D plane.
    pub projection: Projection,
    /// Optional color grading lookup table. See [`ColorGradingLut`] docs for more info.
    pub color_grading_lut: Option<ColorGradingLut>,
    /// A flag, that defines whether the color grading is enabled or not.
    pub color_grading_enabled: bool,
    /// Exposure settings that will be applied to scene's HDR image to convert it to the final
    /// low dynamic range image that will be shown on a display.
    pub exposure: Exposure,
    /// Viewport rectangle in screen space. Defines a portion of the screen that needs to be rendered.
    pub viewport: Rect<i32>,
    /// Frustum of the observer, it can be used for frustum culling.
    pub frustum: Frustum,
    /// Defines the speed of automatic adaptation for the current frame luminance. In other words,
    /// it defines how fast the reaction to the new frame brightness will be. The lower the value,
    /// the longer it will take to adjust the exposure for the new brightness level.
    pub hdr_adaptation_speed: f32,
}
204
205impl Observer {
206 /// Creates a new observer from a scene camera.
207 pub fn from_camera(camera: &Camera, mut frame_size: Vector2<f32>) -> Self {
208 if let Some(render_target) = camera.render_target() {
209 if let Some(size) = render_target
210 .data_ref()
211 .as_loaded_ref()
212 .and_then(|rt| rt.kind().rectangle_size().map(|size| size.cast::<f32>()))
213 {
214 frame_size = size;
215 }
216 }
217 Observer {
218 handle: camera.handle(),
219 environment_map: camera.environment_map(),
220 render_mask: *camera.render_mask,
221 projection: camera.projection().clone(),
222 position: ObserverPosition::from_camera(camera),
223 render_target: camera.render_target().cloned(),
224 color_grading_lut: camera.color_grading_lut(),
225 color_grading_enabled: camera.color_grading_enabled(),
226 exposure: camera.exposure(),
227 viewport: camera.viewport_pixels(frame_size),
228 frustum: camera.frustum(),
229 reflection_probe_data: None,
230 hdr_adaptation_speed: camera.hdr_adaptation_speed(),
231 }
232 }
233}