rapier3d/geometry/
broad_phase_bvh.rs

1use crate::dynamics::{IntegrationParameters, RigidBodySet};
2use crate::geometry::{Aabb, BroadPhasePairEvent, ColliderHandle, ColliderPair, ColliderSet};
3use crate::math::Real;
4use parry::partitioning::{Bvh, BvhWorkspace};
5use parry::utils::hashmap::{Entry, HashMap};
6
7/// The broad-phase collision detector that quickly filters out distant object pairs.
8///
9/// The broad-phase is the "first pass" of collision detection. It uses a hierarchical
10/// bounding volume tree (BVH) to quickly identify which collider pairs are close enough
11/// to potentially collide, avoiding expensive narrow-phase checks for distant objects.
12///
13/// Think of it as a "spatial index" that answers: "Which objects are near each other?"
14///
15/// You typically don't interact with this directly - it's managed by [`PhysicsPipeline`](crate::pipeline::PhysicsPipeline).
16/// However, you can use it to create a [`QueryPipeline`](crate::pipeline::QueryPipeline) for spatial queries.
#[derive(Default, Clone)]
#[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))]
pub struct BroadPhaseBvh {
    /// The BVH over all collider AABBs, keyed by the index part of each
    /// collider handle (`handle.into_raw_parts().0`).
    pub(crate) tree: Bvh,
    /// Scratch buffers reused across updates. Purely transient state, hence
    /// skipped during serialization.
    #[cfg_attr(feature = "serde-serialize", serde(skip))]
    workspace: BvhWorkspace,
    /// Currently active broad-phase pairs. Each (index-ordered) collider pair maps
    /// to the `frame_index` at which it was last seen during BVTT traversal; pairs
    /// with stale timestamps are candidates for removal.
    pairs: HashMap<(ColliderHandle, ColliderHandle), u32>,
    /// Update counter (wraps on overflow) used as the timestamp stored in `pairs`.
    frame_index: u32,
    /// Strategy applied for incremental BVH optimization at each update.
    optimization_strategy: BvhOptimizationStrategy,
}
27
// TODO: would be interesting to try out:
// "Fast Insertion-Based Optimization of Bounding Volume Hierarchies"
// by Bittner et al.
/// Selection of strategies for keeping the broad-phase BVH, over time, in a shape
/// that remains efficient for collision-detection and scene queries.
#[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))]
#[derive(Default, PartialEq, Eq, Copy, Clone)]
pub enum BvhOptimizationStrategy {
    /// Different sub-trees of the BVH will be optimized at each frame.
    #[default]
    SubtreeOptimizer,
    /// Disables incremental BVH optimization (discouraged).
    ///
    /// This should not be used except for debugging purposes.
    None,
}
44
// Debug switch: when `true`, expensive BVH well-formedness assertions run after each
// update stage. Keep `false` outside of debugging sessions.
const ENABLE_TREE_VALIDITY_CHECK: bool = false;
46
47impl BroadPhaseBvh {
48    const CHANGE_DETECTION_ENABLED: bool = true;
49    const CHANGE_DETECTION_FACTOR: Real = 1.0e-2;
50
51    /// Initializes a new empty broad-phase.
52    pub fn new() -> Self {
53        Self::default()
54    }
55
56    /// Initializes a new empty broad-phase with the specified strategy for incremental
57    /// BVH optimization.
58    pub fn with_optimization_strategy(optimization_strategy: BvhOptimizationStrategy) -> Self {
59        Self {
60            optimization_strategy,
61            ..Default::default()
62        }
63    }
64
65    /// Updates the broad-phase.
66    ///
67    /// The results are output through the `events` struct. The broad-phase algorithm is only
68    /// required to generate new events (i.e. no need to re-send an `AddPair` event if it was already
69    /// sent previously and no `RemovePair` happened since then). Sending redundant events is allowed
70    /// but can result in a slight computational overhead.
71    ///
72    /// The `colliders` set is mutable only to provide access to
73    /// [`collider.set_internal_broad_phase_proxy_index`]. Other properties of the collider should
74    /// **not** be modified during the broad-phase update.
75    ///
76    /// # Parameters
77    /// - `params`: the integration parameters governing the simulation.
78    /// - `colliders`: the set of colliders. Change detection with `collider.needs_broad_phase_update()`
79    ///   can be relied on at this stage.
80    /// - `modified_colliders`: colliders that are know to be modified since the last update.
81    /// - `removed_colliders`: colliders that got removed since the last update. Any associated data
82    ///   in the broad-phase should be removed by this call to `update`.
83    /// - `events`: the broad-phase’s output. They indicate what collision pairs need to be created
84    ///   and what pairs need to be removed. It is OK to create pairs for colliders that don’t
85    ///   actually collide (though this can increase computational overhead in the narrow-phase)
86    ///   but it is important not to indicate removal of a collision pair if the underlying colliders
87    ///   are still touching or closer than `prediction_distance`.
88    pub fn update(
89        &mut self,
90        params: &IntegrationParameters,
91        colliders: &ColliderSet,
92        bodies: &RigidBodySet,
93        modified_colliders: &[ColliderHandle],
94        removed_colliders: &[ColliderHandle],
95        events: &mut Vec<BroadPhasePairEvent>,
96    ) {
97        self.frame_index = self.frame_index.overflowing_add(1).0;
98
99        // Removals must be handled first, in case another collider in
100        // `modified_colliders` shares the same index.
101        for handle in removed_colliders {
102            self.tree.remove(handle.into_raw_parts().0);
103        }
104
105        // if modified_colliders.is_empty() {
106        //     return;
107        // }
108
109        let first_pass = self.tree.is_empty();
110
111        // let t0 = std::time::Instant::now();
112        for modified in modified_colliders {
113            if let Some(collider) = colliders.get(*modified) {
114                if !collider.is_enabled() || !collider.changes.needs_broad_phase_update() {
115                    continue;
116                }
117
118                let aabb = collider.compute_broad_phase_aabb(params, bodies);
119
120                let change_detection_skin = if Self::CHANGE_DETECTION_ENABLED {
121                    Self::CHANGE_DETECTION_FACTOR * params.length_unit
122                } else {
123                    0.0
124                };
125
126                self.tree.insert_or_update_partially(
127                    aabb,
128                    modified.into_raw_parts().0,
129                    change_detection_skin,
130                );
131            }
132        }
133
134        if ENABLE_TREE_VALIDITY_CHECK {
135            if first_pass {
136                self.tree.assert_well_formed();
137            }
138
139            self.tree.assert_well_formed_topology_only();
140        }
141
142        // let t0 = std::time::Instant::now();
143        match self.optimization_strategy {
144            BvhOptimizationStrategy::SubtreeOptimizer => {
145                self.tree.optimize_incremental(&mut self.workspace);
146            }
147            BvhOptimizationStrategy::None => {}
148        };
149        // println!(
150        //     "Incremental optimization: {}",
151        //     t0.elapsed().as_secs_f32() * 1000.0
152        // );
153
154        // NOTE: we run refit after optimization so we can skip updating internal nodes during
155        //       optimization, and so we can reorder the tree in memory (in depth-first order)
156        //       to make it more cache friendly after the rebuild shuffling everything around.
157        // let t0 = std::time::Instant::now();
158        self.tree.refit(&mut self.workspace);
159
160        if ENABLE_TREE_VALIDITY_CHECK {
161            self.tree.assert_well_formed();
162        }
163
164        // println!("Refit: {}", t0.elapsed().as_secs_f32() * 1000.0);
165        // println!(
166        //     "leaf count: {}/{} (changed: {})",
167        //     self.tree.leaf_count(),
168        //     self.tree.reachable_leaf_count(0),
169        //     self.tree.changed_leaf_count(0),
170        // );
171        // self.tree.assert_is_depth_first();
172        // self.tree.assert_well_formed();
173        // println!(
174        //     "Is well formed. Tree height: {}",
175        //     self.tree.subtree_height(0),
176        // );
177        // // println!("Tree quality: {}", self.tree.quality_metric());
178
179        let mut pairs_collector = |co1: u32, co2: u32| {
180            assert_ne!(co1, co2);
181
182            let Some((_, mut handle1)) = colliders.get_unknown_gen(co1) else {
183                return;
184            };
185            let Some((_, mut handle2)) = colliders.get_unknown_gen(co2) else {
186                return;
187            };
188
189            if co1 > co2 {
190                std::mem::swap(&mut handle1, &mut handle2);
191            }
192
193            match self.pairs.entry((handle1, handle2)) {
194                Entry::Occupied(e) => *e.into_mut() = self.frame_index,
195                Entry::Vacant(e) => {
196                    e.insert(self.frame_index);
197                    events.push(BroadPhasePairEvent::AddPair(ColliderPair::new(
198                        handle1, handle2,
199                    )));
200                }
201            }
202        };
203
204        // let t0 = std::time::Instant::now();
205        self.tree
206            .traverse_bvtt_single_tree::<{ Self::CHANGE_DETECTION_ENABLED }>(
207                &mut self.workspace,
208                &mut pairs_collector,
209            );
210        // println!("Detection: {}", t0.elapsed().as_secs_f32() * 1000.0);
211        // println!(">>>>>> Num events: {}", events.iter().len());
212
213        // Find outdated entries.
214        // TODO PERF:
215        // Currently, the narrow-phase isn’t capable of removing its own outdated
216        // collision pairs. So we need to run a pass here to find aabbs that are
217        // no longer overlapping. This, and the pair deduplication happening in
218        // the `pairs_collector` is expensive and should be done more efficiently
219        // by the narrow-phase itself (or islands) once we rework it.
220        //
221        // let t0 = std::time::Instant::now();
222        self.pairs.retain(|(h0, h1), timestamp| {
223            if *timestamp != self.frame_index {
224                if !colliders.contains(*h0) || !colliders.contains(*h1) {
225                    // At least one of the colliders no longer exist, don’t retain the pair.
226                    return false;
227                }
228
229                let Some(node0) = self.tree.leaf_node(h0.into_raw_parts().0) else {
230                    return false;
231                };
232                let Some(node1) = self.tree.leaf_node(h1.into_raw_parts().0) else {
233                    return false;
234                };
235
236                if (!Self::CHANGE_DETECTION_ENABLED || node0.is_changed() || node1.is_changed())
237                    && !node0.intersects(node1)
238                {
239                    events.push(BroadPhasePairEvent::DeletePair(ColliderPair::new(*h0, *h1)));
240                    false
241                } else {
242                    true
243                }
244            } else {
245                // If the timestamps match, we already saw this pair during traversal.
246                // There can be rare occurrences where the timestamp will be equal
247                // even though we didn’t see the pair during traversal. This happens
248                // if the frame index overflowed. But this is fine, we’ll catch it
249                // in another frame.
250                true
251            }
252        });
253
254        // println!(
255        //     "Post-filtering: {} (added pairs: {}, removed pairs: {})",
256        //     t0.elapsed().as_secs_f32() * 1000.0,
257        //     added_pairs,
258        //     removed_pairs
259        // );
260    }
261
262    /// Sets the AABB associated to the given collider.
263    ///
264    /// The AABB change will be immediately applied and propagated through the underlying BVH.
265    /// Change detection will automatically take it into account during the next broad-phase update.
266    pub fn set_aabb(&mut self, params: &IntegrationParameters, handle: ColliderHandle, aabb: Aabb) {
267        let change_detection_skin = if Self::CHANGE_DETECTION_ENABLED {
268            Self::CHANGE_DETECTION_FACTOR * params.length_unit
269        } else {
270            0.0
271        };
272        self.tree.insert_with_change_detection(
273            aabb,
274            handle.into_raw_parts().0,
275            change_detection_skin,
276        );
277    }
278}