wgpu_core/command/memory_init.rs

use std::{collections::hash_map::Entry, ops::Range, sync::Arc, vec::Drain};

use crate::{
    device::Device,
    init_tracker::*,
    resource::{DestroyedResourceError, ParentDevice, Texture, Trackable},
    snatch::SnatchGuard,
    track::{DeviceTracker, TextureTracker},
    FastHashMap,
};

use super::{clear::clear_texture, BakedCommands, ClearError};
13
/// Surface that was discarded by `StoreOp::Discard` of a preceding render pass.
/// Any read access to this surface needs to be preceded by a texture initialization.
#[derive(Clone)]
pub(crate) struct TextureSurfaceDiscard {
    pub texture: Arc<Texture>,
    pub mip_level: u32,
    pub layer: u32,
}

pub(crate) type SurfacesInDiscardState = Vec<TextureSurfaceDiscard>;
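// A rough sketch of where such an entry comes from (hypothetical caller, not
// code from this module): a render pass whose attachment uses
// `StoreOp::Discard` would record one entry per affected mip/layer, e.g.:
//
//     texture_memory_actions.discard(TextureSurfaceDiscard {
//         texture: view.parent.clone(),
//         mip_level: view.selector.mips.start,
//         layer: view.selector.layers.start,
//     });
//
// `view.parent`/`view.selector` are illustrative assumptions; the real call
// sites live in the render pass handling code.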
24
#[derive(Default)]
pub(crate) struct CommandBufferTextureMemoryActions {
    /// The tracker actions that need to be executed before the command buffer
    /// is executed.
    init_actions: Vec<TextureInitTrackerAction>,
    /// All the discards that haven't been followed by init again within the
    /// command buffer, i.e. everything in this list resets the texture init
    /// state *after* the command buffer's execution.
    discards: Vec<TextureSurfaceDiscard>,
}
35
impl CommandBufferTextureMemoryActions {
    pub(crate) fn drain_init_actions(&mut self) -> Drain<TextureInitTrackerAction> {
        self.init_actions.drain(..)
    }

    pub(crate) fn discard(&mut self, discard: TextureSurfaceDiscard) {
        self.discards.push(discard);
    }
44
    // Registers a TextureInitTrackerAction.
    // Returns previously discarded surfaces that need to be initialized *immediately* now.
    // Only returns a non-empty list if the action is MemoryInitKind::NeedsInitializedMemory.
    #[must_use]
    pub(crate) fn register_init_action(
        &mut self,
        action: &TextureInitTrackerAction,
    ) -> SurfacesInDiscardState {
        let mut immediately_necessary_clears = SurfacesInDiscardState::new();

        // Note that within a command buffer we may stack arbitrary memory init
        // actions on the same texture. Since we react to them in sequence, they
        // are going to be dropped again at queue submit.
        //
        // We don't need to add MemoryInitKind::NeedsInitializedMemory to
        // init_actions if a surface is part of the discard list. But that would
        // mean splitting up the action, which is more effort than it would save
        // here.
        self.init_actions.extend(
            action
                .texture
                .initialization_status
                .read()
                .check_action(action),
        );
69
        // We expect very few discarded surfaces at any point in time, which is
        // why a simple linear search is likely best. (I.e. most of the time
        // self.discards is empty!)
        let init_actions = &mut self.init_actions;
        self.discards.retain(|discarded_surface| {
            if discarded_surface.texture.is_equal(&action.texture)
                && action.range.layer_range.contains(&discarded_surface.layer)
                && action
                    .range
                    .mip_range
                    .contains(&discarded_surface.mip_level)
            {
                if let MemoryInitKind::NeedsInitializedMemory = action.kind {
                    immediately_necessary_clears.push(discarded_surface.clone());

                    // Mark the surface as implicitly initialized (this is
                    // relevant because it might have been uninitialized prior
                    // to discarding).
                    init_actions.push(TextureInitTrackerAction {
                        texture: discarded_surface.texture.clone(),
                        range: TextureInitRange {
                            mip_range: discarded_surface.mip_level
                                ..(discarded_surface.mip_level + 1),
                            layer_range: discarded_surface.layer..(discarded_surface.layer + 1),
                        },
                        kind: MemoryInitKind::ImplicitlyInitialized,
                    });
                }
                false
            } else {
                true
            }
        });

        immediately_necessary_clears
    }
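    // To make the flow above concrete: if an earlier pass discarded mip 0 /
    // layer 0 of a texture and a later command reads it (a
    // NeedsInitializedMemory action covering that subresource), the surface is
    // removed from `discards`, returned so the caller can zero-initialize it
    // on the spot, and re-registered as ImplicitlyInitialized so the init
    // tracker reflects the clear that is about to be recorded.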
106
    // Shortcut for register_init_action when it is known that the action is an
    // implicit init, not requiring any immediate resource init.
    pub(crate) fn register_implicit_init(
        &mut self,
        texture: &Arc<Texture>,
        range: TextureInitRange,
    ) {
        let must_be_empty = self.register_init_action(&TextureInitTrackerAction {
            texture: texture.clone(),
            range,
            kind: MemoryInitKind::ImplicitlyInitialized,
        });
        assert!(must_be_empty.is_empty());
    }
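    // A minimal usage sketch (hypothetical caller): a transfer command that
    // overwrites a whole subresource can register the destination as
    // implicitly initialized, e.g.:
    //
    //     texture_memory_actions.register_implicit_init(
    //         &dst_texture,
    //         TextureInitRange { mip_range: 0..1, layer_range: 0..1 },
    //     );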
}

// Utility function that takes discarded surfaces from (several calls to)
// register_init_action and initializes them on the spot.
//
// Takes care of barriers as well!
pub(crate) fn fixup_discarded_surfaces<InitIter: Iterator<Item = TextureSurfaceDiscard>>(
    inits: InitIter,
    encoder: &mut dyn hal::DynCommandEncoder,
    texture_tracker: &mut TextureTracker,
    device: &Device,
    snatch_guard: &SnatchGuard<'_>,
) {
    for init in inits {
        clear_texture(
            &init.texture,
            TextureInitRange {
                mip_range: init.mip_level..(init.mip_level + 1),
                layer_range: init.layer..(init.layer + 1),
            },
            encoder,
            texture_tracker,
            &device.alignments,
            device.zero_buffer.as_ref(),
            snatch_guard,
        )
        .unwrap();
    }
}
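// A sketch of how this pairs with register_init_action (variable names are
// illustrative, not from this module):
//
//     let mut pending_fixups = SurfacesInDiscardState::new();
//     pending_fixups.extend(texture_memory_actions.register_init_action(&action));
//     // ...once a raw encoder is available:
//     fixup_discarded_surfaces(
//         pending_fixups.into_iter(),
//         raw_encoder,
//         &mut tracker.textures,
//         &device,
//         &snatch_guard,
//     );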
150
impl BakedCommands {
    // Inserts all buffer initializations that are going to be needed for
    // executing the commands and updates resource init states accordingly.
    pub(crate) fn initialize_buffer_memory(
        &mut self,
        device_tracker: &mut DeviceTracker,
        snatch_guard: &SnatchGuard<'_>,
    ) -> Result<(), DestroyedResourceError> {
        profiling::scope!("initialize_buffer_memory");

        // Gather init ranges for each buffer so we can collapse them.
        // It is not possible to do this at an earlier point since previously
        // executed command buffers change the resource init state.
        let mut uninitialized_ranges_per_buffer = FastHashMap::default();
        for buffer_use in self.buffer_memory_init_actions.drain(..) {
            let mut initialization_status = buffer_use.buffer.initialization_status.write();

            // Align the end of the range up to COPY_BUFFER_ALIGNMENT (4).
            let end_remainder = buffer_use.range.end % wgt::COPY_BUFFER_ALIGNMENT;
            let end = if end_remainder == 0 {
                buffer_use.range.end
            } else {
                buffer_use.range.end + wgt::COPY_BUFFER_ALIGNMENT - end_remainder
            };
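            // E.g. with COPY_BUFFER_ALIGNMENT == 4, a range ending at 6 is
            // widened to end at 8 (remainder 2, so 6 + 4 - 2 = 8), while a
            // range ending at 8 is left as-is.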
            let uninitialized_ranges = initialization_status.drain(buffer_use.range.start..end);

            match buffer_use.kind {
                MemoryInitKind::ImplicitlyInitialized => {}
                MemoryInitKind::NeedsInitializedMemory => {
                    match uninitialized_ranges_per_buffer.entry(buffer_use.buffer.tracker_index()) {
                        Entry::Vacant(e) => {
                            e.insert((
                                buffer_use.buffer.clone(),
                                uninitialized_ranges.collect::<Vec<Range<wgt::BufferAddress>>>(),
                            ));
                        }
                        Entry::Occupied(mut e) => {
                            e.get_mut().1.extend(uninitialized_ranges);
                        }
                    }
                }
            }
        }

        for (buffer, mut ranges) in uninitialized_ranges_per_buffer.into_values() {
            // Collapse touching ranges.
            ranges.sort_by_key(|r| r.start);
            for i in (1..ranges.len()).rev() {
                // The memory init tracker made sure of this!
                assert!(ranges[i - 1].end <= ranges[i].start);
                if ranges[i].start == ranges[i - 1].end {
                    ranges[i - 1].end = ranges[i].end;
                    ranges.swap_remove(i); // Ordering not important at this point
                }
            }
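            // For example, sorted ranges [0..4, 4..8, 12..16] collapse to
            // [0..8, 12..16]: iterating from the back, 4..8 touches 0..4 and
            // is merged into it, while 12..16 leaves a gap and stays separate.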

            // Don't do use_replace since the buffer may no longer have a
            // ref_count.
            //
            // However, we *know* that it is currently in use, so the tracker
            // must already know about it.
            let transition = device_tracker
                .buffers
                .set_single(&buffer, hal::BufferUses::COPY_DST);

            let raw_buf = buffer.try_raw(snatch_guard)?;

            unsafe {
                self.encoder.raw.transition_buffers(
                    transition
                        .map(|pending| pending.into_hal(&buffer, snatch_guard))
                        .as_slice(),
                );
            }

            for range in ranges.iter() {
                assert!(
                    range.start % wgt::COPY_BUFFER_ALIGNMENT == 0,
                    "Buffer {:?} has an uninitialized range with a start \
                         not aligned to 4 (start was {})",
                    raw_buf,
                    range.start
                );
                assert!(
                    range.end % wgt::COPY_BUFFER_ALIGNMENT == 0,
                    "Buffer {:?} has an uninitialized range with an end \
                         not aligned to 4 (end was {})",
                    raw_buf,
                    range.end
                );

                unsafe {
                    self.encoder.raw.clear_buffer(raw_buf, range.clone());
                }
            }
        }
        Ok(())
    }
249
    // Inserts all texture initializations that are going to be needed for
    // executing the commands and updates resource init states accordingly.
    // Any textures that are left discarded by this command buffer will be
    // marked as uninitialized.
    pub(crate) fn initialize_texture_memory(
        &mut self,
        device_tracker: &mut DeviceTracker,
        device: &Device,
        snatch_guard: &SnatchGuard<'_>,
    ) -> Result<(), DestroyedResourceError> {
        profiling::scope!("initialize_texture_memory");

        let mut ranges: Vec<TextureInitRange> = Vec::new();
        for texture_use in self.texture_memory_actions.drain_init_actions() {
            let mut initialization_status = texture_use.texture.initialization_status.write();
            let use_range = texture_use.range;
            let affected_mip_trackers = initialization_status
                .mips
                .iter_mut()
                .enumerate()
                .skip(use_range.mip_range.start as usize)
                .take((use_range.mip_range.end - use_range.mip_range.start) as usize);
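            // E.g. a mip_range of 1..3 visits the per-mip layer trackers for
            // mip levels 1 and 2, each paired with its level index by
            // enumerate().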

            match texture_use.kind {
                MemoryInitKind::ImplicitlyInitialized => {
                    for (_, mip_tracker) in affected_mip_trackers {
                        mip_tracker.drain(use_range.layer_range.clone());
                    }
                }
                MemoryInitKind::NeedsInitializedMemory => {
                    for (mip_level, mip_tracker) in affected_mip_trackers {
                        for layer_range in mip_tracker.drain(use_range.layer_range.clone()) {
                            ranges.push(TextureInitRange {
                                mip_range: (mip_level as u32)..(mip_level as u32 + 1),
                                layer_range,
                            });
                        }
                    }
                }
            }

            // TODO: Could we attempt some range collapsing here?
            for range in ranges.drain(..) {
                let clear_result = clear_texture(
                    &texture_use.texture,
                    range,
                    self.encoder.raw.as_mut(),
                    &mut device_tracker.textures,
                    &device.alignments,
                    device.zero_buffer.as_ref(),
                    snatch_guard,
                );

                // A texture can be destroyed between command recording and
                // now; this is out of our control, so we have to handle it
                // gracefully.
                if let Err(ClearError::DestroyedResource(e)) = clear_result {
                    return Err(e);
                }

                // Other errors are unexpected.
                if let Err(error) = clear_result {
                    panic!("{error}");
                }
            }
        }

        // Now that all buffers/textures have the proper init state as of the
        // start of the command buffer, we discard the init state of any
        // textures it left discarded after its execution.
        for surface_discard in self.texture_memory_actions.discards.iter() {
            surface_discard
                .texture
                .initialization_status
                .write()
                .discard(surface_discard.mip_level, surface_discard.layer);
        }

        Ok(())
    }
}
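// Note (an assumption about the surrounding code, not established in this
// file alone): initialize_buffer_memory and initialize_texture_memory are
// expected to run against the BakedCommands at queue submit, before the
// recorded commands themselves execute, so that every read registered as
// NeedsInitializedMemory observes zeroed memory.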