wgpu_core/command/memory_init.rs

use std::{collections::hash_map::Entry, ops::Range, sync::Arc, vec::Drain};

use crate::{
    device::Device,
    init_tracker::*,
    resource::{DestroyedResourceError, ParentDevice, Texture, Trackable},
    snatch::SnatchGuard,
    track::{DeviceTracker, TextureTracker},
    FastHashMap,
};

use super::{clear::clear_texture, BakedCommands, ClearError};

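/// A single texture surface (one mip level of one array layer) whose contents
/// have been discarded and must be initialized again before being read.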
#[derive(Clone)]
pub(crate) struct TextureSurfaceDiscard {
    pub texture: Arc<Texture>,
    pub mip_level: u32,
    pub layer: u32,
}

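/// Discarded surfaces that require an immediate clear before the command that
/// registered the corresponding init action may run.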
pub(crate) type SurfacesInDiscardState = Vec<TextureSurfaceDiscard>;

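/// Texture init/discard bookkeeping recorded while a command buffer is built.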
#[derive(Default)]
pub(crate) struct CommandBufferTextureMemoryActions {
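    /// Init actions that must be resolved before the command buffer executes.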
    init_actions: Vec<TextureInitTrackerAction>,
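    /// Discards that were not followed by another init within this command
    /// buffer; they reset the texture init state after execution.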
    discards: Vec<TextureSurfaceDiscard>,
}

impl CommandBufferTextureMemoryActions {
    pub(crate) fn drain_init_actions(&mut self) -> Drain<TextureInitTrackerAction> {
        self.init_actions.drain(..)
    }

    pub(crate) fn discard(&mut self, discard: TextureSurfaceDiscard) {
        self.discards.push(discard);
    }

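    /// Registers an init action for a texture and resolves any previously
    /// recorded discards that overlap the action's range. Returns the
    /// discarded surfaces that must be cleared *before* the registering
    /// command runs because it requires initialized memory.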
    #[must_use]
    pub(crate) fn register_init_action(
        &mut self,
        action: &TextureInitTrackerAction,
    ) -> SurfacesInDiscardState {
        let mut immediately_necessary_clears = SurfacesInDiscardState::new();

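        // Only record the action if the texture's init tracker reports that the
        // affected range still needs initialization.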
        self.init_actions.extend(
            action
                .texture
                .initialization_status
                .read()
                .check_action(action),
        );

        let init_actions = &mut self.init_actions;
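        // Any discard recorded earlier in this command buffer that overlaps the
        // action's mip/layer range is resolved here instead of being kept around.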
        self.discards.retain(|discarded_surface| {
            if discarded_surface.texture.is_equal(&action.texture)
                && action.range.layer_range.contains(&discarded_surface.layer)
                && action
                    .range
                    .mip_range
                    .contains(&discarded_surface.mip_level)
            {
                if let MemoryInitKind::NeedsInitializedMemory = action.kind {
                    immediately_necessary_clears.push(discarded_surface.clone());

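                    // The clear issued for this surface initializes it, so record
                    // it as implicitly initialized for the rest of the tracking.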
                    init_actions.push(TextureInitTrackerAction {
                        texture: discarded_surface.texture.clone(),
                        range: TextureInitRange {
                            mip_range: discarded_surface.mip_level
                                ..(discarded_surface.mip_level + 1),
                            layer_range: discarded_surface.layer..(discarded_surface.layer + 1),
                        },
                        kind: MemoryInitKind::ImplicitlyInitialized,
                    });
                }
                false
            } else {
                true
            }
        });

        immediately_necessary_clears
    }

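    /// Shortcut for registering an implicit init; such an action can never
    /// produce surfaces that need an immediate clear.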
    pub(crate) fn register_implicit_init(
        &mut self,
        texture: &Arc<Texture>,
        range: TextureInitRange,
    ) {
        let must_be_empty = self.register_init_action(&TextureInitTrackerAction {
            texture: texture.clone(),
            range,
            kind: MemoryInitKind::ImplicitlyInitialized,
        });
        assert!(must_be_empty.is_empty());
    }
}

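/// Immediately clears each given discarded surface via `clear_texture`.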
pub(crate) fn fixup_discarded_surfaces<InitIter: Iterator<Item = TextureSurfaceDiscard>>(
    inits: InitIter,
    encoder: &mut dyn hal::DynCommandEncoder,
    texture_tracker: &mut TextureTracker,
    device: &Device,
    snatch_guard: &SnatchGuard<'_>,
) {
    for init in inits {
        clear_texture(
            &init.texture,
            TextureInitRange {
                mip_range: init.mip_level..(init.mip_level + 1),
                layer_range: init.layer..(init.layer + 1),
            },
            encoder,
            texture_tracker,
            &device.alignments,
            device.zero_buffer.as_ref(),
            snatch_guard,
        )
        .unwrap();
    }
}

impl BakedCommands {
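    /// Walks the recorded buffer init actions and zero-fills every range that
    /// is required to be initialized but has not been written yet.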
    pub(crate) fn initialize_buffer_memory(
        &mut self,
        device_tracker: &mut DeviceTracker,
        snatch_guard: &SnatchGuard<'_>,
    ) -> Result<(), DestroyedResourceError> {
        profiling::scope!("initialize_buffer_memory");

        let mut uninitialized_ranges_per_buffer = FastHashMap::default();
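        // Gather the still-uninitialized ranges per buffer, keyed by tracker index.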
        for buffer_use in self.buffer_memory_init_actions.drain(..) {
            let mut initialization_status = buffer_use.buffer.initialization_status.write();

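            // Round the end of the range up to COPY_BUFFER_ALIGNMENT so the
            // clear below operates on an aligned range.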
            let end_remainder = buffer_use.range.end % wgt::COPY_BUFFER_ALIGNMENT;
            let end = if end_remainder == 0 {
                buffer_use.range.end
            } else {
                buffer_use.range.end + wgt::COPY_BUFFER_ALIGNMENT - end_remainder
            };
            let uninitialized_ranges = initialization_status.drain(buffer_use.range.start..end);

            match buffer_use.kind {
                MemoryInitKind::ImplicitlyInitialized => {}
                MemoryInitKind::NeedsInitializedMemory => {
                    match uninitialized_ranges_per_buffer.entry(buffer_use.buffer.tracker_index()) {
                        Entry::Vacant(e) => {
                            e.insert((
                                buffer_use.buffer.clone(),
                                uninitialized_ranges.collect::<Vec<Range<wgt::BufferAddress>>>(),
                            ));
                        }
                        Entry::Occupied(mut e) => {
                            e.get_mut().1.extend(uninitialized_ranges);
                        }
                    }
                }
            }
        }

        for (buffer, mut ranges) in uninitialized_ranges_per_buffer.into_values() {
            ranges.sort_by_key(|r| r.start);
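            // Merge touching ranges so we issue as few `clear_buffer` calls as
            // possible; the assert verifies the sorted ranges do not overlap.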
            for i in (1..ranges.len()).rev() {
                assert!(ranges[i - 1].end <= ranges[i].start);
                if ranges[i].start == ranges[i - 1].end {
                    ranges[i - 1].end = ranges[i].end;
                    ranges.swap_remove(i);
                }
            }

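            // Transition the buffer to COPY_DST so the `clear_buffer` calls
            // below are valid.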
            let transition = device_tracker
                .buffers
                .set_single(&buffer, hal::BufferUses::COPY_DST);

            let raw_buf = buffer.try_raw(snatch_guard)?;

            unsafe {
                self.encoder.raw.transition_buffers(
                    transition
                        .map(|pending| pending.into_hal(&buffer, snatch_guard))
                        .as_slice(),
                );
            }

            for range in ranges.iter() {
                assert!(
                    range.start % wgt::COPY_BUFFER_ALIGNMENT == 0,
                    "Buffer {:?} has an uninitialized range with a start \
                     not aligned to 4 (start was {})",
                    raw_buf,
                    range.start
                );
                assert!(
                    range.end % wgt::COPY_BUFFER_ALIGNMENT == 0,
                    "Buffer {:?} has an uninitialized range with an end \
                     not aligned to 4 (end was {})",
                    raw_buf,
                    range.end
                );

                unsafe {
                    self.encoder.raw.clear_buffer(raw_buf, range.clone());
                }
            }
        }
        Ok(())
    }

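    /// Drains the recorded texture init actions, zero-initializes any range
    /// that must be initialized but is not yet, and finally applies the
    /// remaining discards to the textures' init trackers.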
    pub(crate) fn initialize_texture_memory(
        &mut self,
        device_tracker: &mut DeviceTracker,
        device: &Device,
        snatch_guard: &SnatchGuard<'_>,
    ) -> Result<(), DestroyedResourceError> {
        profiling::scope!("initialize_texture_memory");

        let mut ranges: Vec<TextureInitRange> = Vec::new();
        for texture_use in self.texture_memory_actions.drain_init_actions() {
            let mut initialization_status = texture_use.texture.initialization_status.write();
            let use_range = texture_use.range;
            let affected_mip_trackers = initialization_status
                .mips
                .iter_mut()
                .enumerate()
                .skip(use_range.mip_range.start as usize)
                .take((use_range.mip_range.end - use_range.mip_range.start) as usize);

            match texture_use.kind {
                MemoryInitKind::ImplicitlyInitialized => {
                    for (_, mip_tracker) in affected_mip_trackers {
                        mip_tracker.drain(use_range.layer_range.clone());
                    }
                }
                MemoryInitKind::NeedsInitializedMemory => {
                    for (mip_level, mip_tracker) in affected_mip_trackers {
                        for layer_range in mip_tracker.drain(use_range.layer_range.clone()) {
                            ranges.push(TextureInitRange {
                                mip_range: (mip_level as u32)..(mip_level as u32 + 1),
                                layer_range,
                            });
                        }
                    }
                }
            }

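            // Clear the ranges gathered above; they are required to be
            // initialized but have not been written yet.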
            for range in ranges.drain(..) {
                let clear_result = clear_texture(
                    &texture_use.texture,
                    range,
                    self.encoder.raw.as_mut(),
                    &mut device_tracker.textures,
                    &device.alignments,
                    device.zero_buffer.as_ref(),
                    snatch_guard,
                );

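                // A destroyed texture is reported back to the caller; any other
                // clear error indicates an internal bug and panics below.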
                if let Err(ClearError::DestroyedResource(e)) = clear_result {
                    return Err(e);
                }

                if let Err(error) = clear_result {
                    panic!("{error}");
                }
            }
        }

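        // Discards that were never re-initialized within this command buffer
        // reset the affected surfaces to "uninitialized" for later submissions.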
        for surface_discard in self.texture_memory_actions.discards.iter() {
            surface_discard
                .texture
                .initialization_status
                .write()
                .discard(surface_discard.mip_level, surface_discard.layer);
        }

        Ok(())
    }
}