wgpu_core/resource.rs

1use alloc::{borrow::Cow, borrow::ToOwned as _, boxed::Box, string::String, sync::Arc, vec::Vec};
2use core::{
3    borrow::Borrow,
4    fmt,
5    mem::{self, ManuallyDrop},
6    num::NonZeroU64,
7    ops::Range,
8    ptr::NonNull,
9};
10
11use smallvec::SmallVec;
12use thiserror::Error;
13use wgt::TextureSelector;
14
15#[cfg(feature = "trace")]
16use crate::device::trace;
17use crate::{
18    binding_model::BindGroup,
19    device::{
20        queue, resource::DeferredDestroy, BufferMapPendingClosure, Device, DeviceError,
21        DeviceMismatch, HostMap, MissingDownlevelFlags, MissingFeatures,
22    },
23    global::Global,
24    hal_api::HalApi,
25    id::{
26        AdapterId, BufferId, CommandEncoderId, DeviceId, QueueId, SurfaceId, TextureId,
27        TextureViewId,
28    },
29    init_tracker::{BufferInitTracker, TextureInitTracker},
30    lock::{rank, Mutex, RwLock},
31    resource_log,
32    snatch::{SnatchGuard, Snatchable},
33    timestamp_normalization::TimestampNormalizationBindGroup,
34    track::{SharedTrackerIndexAllocator, TrackerIndex},
35    weak_vec::WeakVec,
36    Label, LabelHelpers, SubmissionIndex,
37};
38
39use crate::id::{BlasId, TlasId};
40
41/// Information about the wgpu-core resource.
42///
43/// Each type representing a `wgpu-core` resource, like [`Device`],
44/// [`Buffer`], etc., contains a `ResourceInfo` recording
45/// its latest submission index and label.
46///
47/// A resource may need to be retained for any of several reasons,
48/// and its lifetime logic is handled by the `Arc<Resource>` refcount:
49///
50/// - The user may hold a reference to it (via a `wgpu::Buffer`, say).
51///
52/// - Other resources may depend on it (a texture view's backing
53///   texture, for example).
54///
55/// - It may be used by commands sent to the GPU that have not yet
56///   finished execution.
57///
58/// [`Device`]: crate::device::resource::Device
59/// [`Buffer`]: crate::resource::Buffer
60#[derive(Debug)]
61pub(crate) struct TrackingData {
62    tracker_index: TrackerIndex,
63    tracker_indices: Arc<SharedTrackerIndexAllocator>,
64}
65
66impl Drop for TrackingData {
67    fn drop(&mut self) {
68        self.tracker_indices.free(self.tracker_index);
69    }
70}
71
72impl TrackingData {
73    pub(crate) fn new(tracker_indices: Arc<SharedTrackerIndexAllocator>) -> Self {
74        Self {
75            tracker_index: tracker_indices.alloc(),
76            tracker_indices,
77        }
78    }
79
80    pub(crate) fn tracker_index(&self) -> TrackerIndex {
81        self.tracker_index
82    }
83}
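
// Illustrative sketch (not part of this module; names borrowed from the code below): the
// round trip that `TrackingData` guarantees through its `Drop` impl.
//
//     let tracking = TrackingData::new(device.tracker_indices.buffers.clone());
//     let index = tracking.tracker_index(); // stable for the resource's whole lifetime
//     drop(tracking);                       // the index is returned to the shared allocator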
84
85#[derive(Clone, Debug)]
86#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
87pub struct ResourceErrorIdent {
88    r#type: Cow<'static, str>,
89    label: String,
90}
91
92impl fmt::Display for ResourceErrorIdent {
93    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
94        write!(f, "{} with '{}' label", self.r#type, self.label)
95    }
96}
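
// For reference, with `r#type: Cow::Borrowed("Buffer")` and an (illustrative) label of
// "vertices", the `Display` impl above renders:
//
//     Buffer with 'vertices' label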
97
98pub trait ParentDevice: Labeled {
99    fn device(&self) -> &Arc<Device>;
100
101    fn is_equal(self: &Arc<Self>, other: &Arc<Self>) -> bool {
102        Arc::ptr_eq(self, other)
103    }
104
105    fn same_device_as<O: ParentDevice>(&self, other: &O) -> Result<(), DeviceError> {
106        if Arc::ptr_eq(self.device(), other.device()) {
107            Ok(())
108        } else {
109            Err(DeviceError::DeviceMismatch(Box::new(DeviceMismatch {
110                res: self.error_ident(),
111                res_device: self.device().error_ident(),
112                target: Some(other.error_ident()),
113                target_device: other.device().error_ident(),
114            })))
115        }
116    }
117
118    fn same_device(&self, device: &Device) -> Result<(), DeviceError> {
119        if core::ptr::eq(&**self.device(), device) {
120            Ok(())
121        } else {
122            Err(DeviceError::DeviceMismatch(Box::new(DeviceMismatch {
123                res: self.error_ident(),
124                res_device: self.device().error_ident(),
125                target: None,
126                target_device: device.error_ident(),
127            })))
128        }
129    }
130}
131
132#[macro_export]
133macro_rules! impl_parent_device {
134    ($ty:ident) => {
135        impl $crate::resource::ParentDevice for $ty {
136            fn device(&self) -> &Arc<Device> {
137                &self.device
138            }
139        }
140    };
141}
142
143pub trait ResourceType {
144    const TYPE: &'static str;
145}
146
147#[macro_export]
148macro_rules! impl_resource_type {
149    ($ty:ident) => {
150        impl $crate::resource::ResourceType for $ty {
151            const TYPE: &'static str = stringify!($ty);
152        }
153    };
154}
155
156pub trait Labeled: ResourceType {
157    /// Returns a string identifying this resource for logging and errors.
158    ///
159    /// It may be a user-provided string or it may be a placeholder from wgpu.
160    ///
161    /// It is non-empty unless the user-provided string was empty.
162    fn label(&self) -> &str;
163
164    fn error_ident(&self) -> ResourceErrorIdent {
165        ResourceErrorIdent {
166            r#type: Cow::Borrowed(Self::TYPE),
167            label: self.label().to_owned(),
168        }
169    }
170}
171
172#[macro_export]
173macro_rules! impl_labeled {
174    ($ty:ident) => {
175        impl $crate::resource::Labeled for $ty {
176            fn label(&self) -> &str {
177                &self.label
178            }
179        }
180    };
181}
182
183pub(crate) trait Trackable {
184    fn tracker_index(&self) -> TrackerIndex;
185}
186
187#[macro_export]
188macro_rules! impl_trackable {
189    ($ty:ident) => {
190        impl $crate::resource::Trackable for $ty {
191            fn tracker_index(&self) -> $crate::track::TrackerIndex {
192                self.tracking_data.tracker_index()
193            }
194        }
195    };
196}
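
// Taken together, the macros above wire a resource type into the crate's shared plumbing.
// `Buffer` further down in this file uses the full set; the typical invocation block looks
// like this (see the real one after `CreateBufferError`):
//
//     crate::impl_resource_type!(Buffer);  // ResourceType::TYPE = "Buffer"
//     crate::impl_labeled!(Buffer);        // Labeled::label() -> &self.label
//     crate::impl_parent_device!(Buffer);  // ParentDevice::device() -> &self.device
//     crate::impl_storage_item!(Buffer);   // registry storage (macro defined elsewhere)
//     crate::impl_trackable!(Buffer);      // Trackable::tracker_index() via self.tracking_data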
197
198#[derive(Debug)]
199pub(crate) enum BufferMapState {
200    /// Mapped at creation.
201    Init { staging_buffer: StagingBuffer },
202    /// Waiting for GPU to be done before mapping
203    Waiting(BufferPendingMapping),
204    /// Mapped
205    Active {
206        mapping: hal::BufferMapping,
207        range: hal::MemoryRange,
208        host: HostMap,
209    },
210    /// Not mapped
211    Idle,
212}
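
// Informal sketch of the transitions driven by `map_async`, `map`, and `unmap_inner`
// further down in this file (arrows are not code):
//
//     Idle    --map_async-->              Waiting
//     Waiting --map (after submission)--> Active   (or back to Idle if mapping fails)
//     Waiting --unmap-->                  Idle     (callback fired with `MapAborted`)
//     Active  --unmap-->                  Idle     (flush if needed, then `unmap_buffer`)
//     Init    --unmap-->                  Idle     (staging buffer flushed and copied in)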
213
214#[cfg(send_sync)]
215unsafe impl Send for BufferMapState {}
216#[cfg(send_sync)]
217unsafe impl Sync for BufferMapState {}
218
219#[cfg(send_sync)]
220pub type BufferMapCallback = Box<dyn FnOnce(BufferAccessResult) + Send + 'static>;
221#[cfg(not(send_sync))]
222pub type BufferMapCallback = Box<dyn FnOnce(BufferAccessResult) + 'static>;
223
224pub struct BufferMapOperation {
225    pub host: HostMap,
226    pub callback: Option<BufferMapCallback>,
227}
228
229impl fmt::Debug for BufferMapOperation {
230    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
231        f.debug_struct("BufferMapOperation")
232            .field("host", &self.host)
233            .field("callback", &self.callback.as_ref().map(|_| "?"))
234            .finish()
235    }
236}
237
238#[derive(Clone, Debug, Error)]
239#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
240#[non_exhaustive]
241pub enum BufferAccessError {
242    #[error(transparent)]
243    Device(#[from] DeviceError),
244    #[error("Buffer map failed")]
245    Failed,
246    #[error(transparent)]
247    DestroyedResource(#[from] DestroyedResourceError),
248    #[error("Buffer is already mapped")]
249    AlreadyMapped,
250    #[error("Buffer map is pending")]
251    MapAlreadyPending,
252    #[error(transparent)]
253    MissingBufferUsage(#[from] MissingBufferUsageError),
254    #[error("Buffer is not mapped")]
255    NotMapped,
256    #[error(
257        "Buffer map range must start aligned to `MAP_ALIGNMENT` and end to `COPY_BUFFER_ALIGNMENT`"
258    )]
259    UnalignedRange,
260    #[error("Buffer offset invalid: offset {offset} must be multiple of 8")]
261    UnalignedOffset { offset: wgt::BufferAddress },
262    #[error("Buffer range size invalid: range_size {range_size} must be multiple of 4")]
263    UnalignedRangeSize { range_size: wgt::BufferAddress },
264    #[error("Buffer access out of bounds: index {index} would underrun the buffer (limit: {min})")]
265    OutOfBoundsUnderrun {
266        index: wgt::BufferAddress,
267        min: wgt::BufferAddress,
268    },
269    #[error(
270        "Buffer access out of bounds: last index {index} would overrun the buffer (limit: {max})"
271    )]
272    OutOfBoundsOverrun {
273        index: wgt::BufferAddress,
274        max: wgt::BufferAddress,
275    },
276    #[error("Buffer map range start {start} is greater than end {end}")]
277    NegativeRange {
278        start: wgt::BufferAddress,
279        end: wgt::BufferAddress,
280    },
281    #[error("Buffer map aborted")]
282    MapAborted,
283    #[error(transparent)]
284    InvalidResource(#[from] InvalidResourceError),
285}
286
287#[derive(Clone, Debug, Error)]
288#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
289#[error("Usage flags {actual:?} of {res} do not contain required usage flags {expected:?}")]
290pub struct MissingBufferUsageError {
291    pub(crate) res: ResourceErrorIdent,
292    pub(crate) actual: wgt::BufferUsages,
293    pub(crate) expected: wgt::BufferUsages,
294}
295
296#[derive(Clone, Debug, Error)]
297#[error("Usage flags {actual:?} of {res} do not contain required usage flags {expected:?}")]
298pub struct MissingTextureUsageError {
299    pub(crate) res: ResourceErrorIdent,
300    pub(crate) actual: wgt::TextureUsages,
301    pub(crate) expected: wgt::TextureUsages,
302}
303
304#[derive(Clone, Debug, Error)]
305#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
306#[error("{0} has been destroyed")]
307pub struct DestroyedResourceError(pub ResourceErrorIdent);
308
309#[derive(Clone, Debug, Error)]
310#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
311#[error("{0} is invalid")]
312pub struct InvalidResourceError(pub ResourceErrorIdent);
313
314pub enum Fallible<T: ParentDevice> {
315    Valid(Arc<T>),
316    Invalid(Arc<String>),
317}
318
319impl<T: ParentDevice> Fallible<T> {
320    pub fn get(self) -> Result<Arc<T>, InvalidResourceError> {
321        match self {
322            Fallible::Valid(v) => Ok(v),
323            Fallible::Invalid(label) => Err(InvalidResourceError(ResourceErrorIdent {
324                r#type: Cow::Borrowed(T::TYPE),
325                label: (*label).clone(),
326            })),
327        }
328    }
329}
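
// Hedged usage sketch (identifiers illustrative): registries store `Fallible` entries, and
// `get` converts an invalid entry into an `InvalidResourceError` carrying the stored label.
//
//     match hub.buffers.get(buffer_id).get() {
//         Ok(buffer) => { /* Arc<Buffer>, creation succeeded */ }
//         Err(e) => { /* e displays as "Buffer with '<label>' label is invalid" */ }
//     }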
330
331impl<T: ParentDevice> Clone for Fallible<T> {
332    fn clone(&self) -> Self {
333        match self {
334            Self::Valid(v) => Self::Valid(v.clone()),
335            Self::Invalid(l) => Self::Invalid(l.clone()),
336        }
337    }
338}
339
340impl<T: ParentDevice> ResourceType for Fallible<T> {
341    const TYPE: &'static str = T::TYPE;
342}
343
344impl<T: ParentDevice + crate::storage::StorageItem> crate::storage::StorageItem for Fallible<T> {
345    type Marker = T::Marker;
346}
347
348pub type BufferAccessResult = Result<(), BufferAccessError>;
349
350#[derive(Debug)]
351pub(crate) struct BufferPendingMapping {
352    pub(crate) range: Range<wgt::BufferAddress>,
353    pub(crate) op: BufferMapOperation,
354    // hold the parent alive while the mapping is active
355    pub(crate) _parent_buffer: Arc<Buffer>,
356}
357
358pub type BufferDescriptor<'a> = wgt::BufferDescriptor<Label<'a>>;
359
360#[derive(Debug)]
361pub struct Buffer {
362    pub(crate) raw: Snatchable<Box<dyn hal::DynBuffer>>,
363    pub(crate) device: Arc<Device>,
364    pub(crate) usage: wgt::BufferUsages,
365    pub(crate) size: wgt::BufferAddress,
366    pub(crate) initialization_status: RwLock<BufferInitTracker>,
367    /// The `label` from the descriptor used to create the resource.
368    pub(crate) label: String,
369    pub(crate) tracking_data: TrackingData,
370    pub(crate) map_state: Mutex<BufferMapState>,
371    pub(crate) bind_groups: Mutex<WeakVec<BindGroup>>,
372    pub(crate) timestamp_normalization_bind_group: Snatchable<TimestampNormalizationBindGroup>,
373    pub(crate) indirect_validation_bind_groups: Snatchable<crate::indirect_validation::BindGroups>,
374}
375
376impl Drop for Buffer {
377    fn drop(&mut self) {
378        if let Some(raw) = self.timestamp_normalization_bind_group.take() {
379            raw.dispose(self.device.raw());
380        }
381
382        if let Some(raw) = self.indirect_validation_bind_groups.take() {
383            raw.dispose(self.device.raw());
384        }
385
386        if let Some(raw) = self.raw.take() {
387            resource_log!("Destroy raw {}", self.error_ident());
388            unsafe {
389                self.device.raw().destroy_buffer(raw);
390            }
391        }
392    }
393}
394
395impl Buffer {
396    pub(crate) fn raw<'a>(&'a self, guard: &'a SnatchGuard) -> Option<&'a dyn hal::DynBuffer> {
397        self.raw.get(guard).map(|b| b.as_ref())
398    }
399
400    pub(crate) fn try_raw<'a>(
401        &'a self,
402        guard: &'a SnatchGuard,
403    ) -> Result<&'a dyn hal::DynBuffer, DestroyedResourceError> {
404        self.raw
405            .get(guard)
406            .map(|raw| raw.as_ref())
407            .ok_or_else(|| DestroyedResourceError(self.error_ident()))
408    }
409
410    pub(crate) fn check_destroyed<'a>(
411        &'a self,
412        guard: &'a SnatchGuard,
413    ) -> Result<(), DestroyedResourceError> {
414        self.raw
415            .get(guard)
416            .map(|_| ())
417            .ok_or_else(|| DestroyedResourceError(self.error_ident()))
418    }
419
420    /// Checks that this buffer's usage flags contain the required usage flags,
421    /// returning an error otherwise.
422    pub(crate) fn check_usage(
423        &self,
424        expected: wgt::BufferUsages,
425    ) -> Result<(), MissingBufferUsageError> {
426        if self.usage.contains(expected) {
427            Ok(())
428        } else {
429            Err(MissingBufferUsageError {
430                res: self.error_ident(),
431                actual: self.usage,
432                expected,
433            })
434        }
435    }
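
    // Illustrative example: a buffer created with `MAP_READ | COPY_DST` passes
    // `check_usage(wgt::BufferUsages::MAP_READ)`, while
    // `check_usage(wgt::BufferUsages::MAP_WRITE)` returns a `MissingBufferUsageError`
    // that renders roughly as "Usage flags BufferUsages(MAP_READ | COPY_DST) of Buffer
    // with '<label>' label do not contain required usage flags BufferUsages(MAP_WRITE)".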
436
437    /// Returns the mapping callback in case of error so that the callback can be fired outside
438    /// of the locks that are held in this function.
439    pub(crate) fn map_async(
440        self: &Arc<Self>,
441        offset: wgt::BufferAddress,
442        size: Option<wgt::BufferAddress>,
443        op: BufferMapOperation,
444    ) -> Result<SubmissionIndex, (BufferMapOperation, BufferAccessError)> {
445        let range_size = if let Some(size) = size {
446            size
447        } else {
448            self.size.saturating_sub(offset)
449        };
450
451        if offset % wgt::MAP_ALIGNMENT != 0 {
452            return Err((op, BufferAccessError::UnalignedOffset { offset }));
453        }
454        if range_size % wgt::COPY_BUFFER_ALIGNMENT != 0 {
455            return Err((op, BufferAccessError::UnalignedRangeSize { range_size }));
456        }
457
458        let range = offset..(offset + range_size);
459
460        if range.start % wgt::MAP_ALIGNMENT != 0 || range.end % wgt::COPY_BUFFER_ALIGNMENT != 0 {
461            return Err((op, BufferAccessError::UnalignedRange));
462        }
463
464        let (pub_usage, internal_use) = match op.host {
465            HostMap::Read => (wgt::BufferUsages::MAP_READ, wgt::BufferUses::MAP_READ),
466            HostMap::Write => (wgt::BufferUsages::MAP_WRITE, wgt::BufferUses::MAP_WRITE),
467        };
468
469        if let Err(e) = self.check_usage(pub_usage) {
470            return Err((op, e.into()));
471        }
472
473        if range.start > range.end {
474            return Err((
475                op,
476                BufferAccessError::NegativeRange {
477                    start: range.start,
478                    end: range.end,
479                },
480            ));
481        }
482        if range.end > self.size {
483            return Err((
484                op,
485                BufferAccessError::OutOfBoundsOverrun {
486                    index: range.end,
487                    max: self.size,
488                },
489            ));
490        }
491
492        let device = &self.device;
493        if let Err(e) = device.check_is_valid() {
494            return Err((op, e.into()));
495        }
496
497        {
498            let snatch_guard = device.snatchable_lock.read();
499            if let Err(e) = self.check_destroyed(&snatch_guard) {
500                return Err((op, e.into()));
501            }
502        }
503
504        {
505            let map_state = &mut *self.map_state.lock();
506            *map_state = match *map_state {
507                BufferMapState::Init { .. } | BufferMapState::Active { .. } => {
508                    return Err((op, BufferAccessError::AlreadyMapped));
509                }
510                BufferMapState::Waiting(_) => {
511                    return Err((op, BufferAccessError::MapAlreadyPending));
512                }
513                BufferMapState::Idle => BufferMapState::Waiting(BufferPendingMapping {
514                    range,
515                    op,
516                    _parent_buffer: self.clone(),
517                }),
518            };
519        }
520
521        // TODO: we are ignoring the transition here, I think we need to add a barrier
522        // at the end of the submission
523        device
524            .trackers
525            .lock()
526            .buffers
527            .set_single(self, internal_use);
528
529        let submit_index = if let Some(queue) = device.get_queue() {
530            queue.lock_life().map(self).unwrap_or(0) // '0' means no wait is necessary
531        } else {
532            // We can safely unwrap below since we just set the `map_state` to `BufferMapState::Waiting`.
533            let (mut operation, status) = self.map(&device.snatchable_lock.read()).unwrap();
534            if let Some(callback) = operation.callback.take() {
535                callback(status);
536            }
537            0
538        };
539
540        Ok(submit_index)
541    }
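
    // Hedged caller sketch (names illustrative): the error tuple exists so the callback can
    // be fired after this function's locks have been released.
    //
    //     match buffer.map_async(offset, size, op) {
    //         Ok(_submit_index) => { /* the callback fires later, from `Buffer::map` */ }
    //         Err((mut op, err)) => {
    //             if let Some(callback) = op.callback.take() {
    //                 callback(Err(err));
    //             }
    //         }
    //     }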
542
543    /// This function returns [`None`] only if [`Self::map_state`] is not [`BufferMapState::Waiting`].
544    #[must_use]
545    pub(crate) fn map(&self, snatch_guard: &SnatchGuard) -> Option<BufferMapPendingClosure> {
546        // This _cannot_ be inlined into the match. If it is, the lock will be held
547        // open through the whole match, resulting in a deadlock when we try to re-lock
548        // the buffer back to active.
549        let mapping = mem::replace(&mut *self.map_state.lock(), BufferMapState::Idle);
550        let pending_mapping = match mapping {
551            BufferMapState::Waiting(pending_mapping) => pending_mapping,
552            // Mapping cancelled
553            BufferMapState::Idle => return None,
554            // Mapping queued at least twice by map -> unmap -> map
555            // and was already successfully mapped below
556            BufferMapState::Active { .. } => {
557                *self.map_state.lock() = mapping;
558                return None;
559            }
560            _ => panic!("No pending mapping."),
561        };
562        let status = if pending_mapping.range.start != pending_mapping.range.end {
563            let host = pending_mapping.op.host;
564            let size = pending_mapping.range.end - pending_mapping.range.start;
565            match crate::device::map_buffer(
566                self,
567                pending_mapping.range.start,
568                size,
569                host,
570                snatch_guard,
571            ) {
572                Ok(mapping) => {
573                    *self.map_state.lock() = BufferMapState::Active {
574                        mapping,
575                        range: pending_mapping.range.clone(),
576                        host,
577                    };
578                    Ok(())
579                }
580                Err(e) => Err(e),
581            }
582        } else {
583            *self.map_state.lock() = BufferMapState::Active {
584                mapping: hal::BufferMapping {
585                    ptr: NonNull::dangling(),
586                    is_coherent: true,
587                },
588                range: pending_mapping.range,
589                host: pending_mapping.op.host,
590            };
591            Ok(())
592        };
593        Some((pending_mapping.op, status))
594    }
595
596    // Note: This must not be called while holding a lock.
597    pub(crate) fn unmap(
598        self: &Arc<Self>,
599        #[cfg(feature = "trace")] buffer_id: BufferId,
600    ) -> Result<(), BufferAccessError> {
601        if let Some((mut operation, status)) = self.unmap_inner(
602            #[cfg(feature = "trace")]
603            buffer_id,
604        )? {
605            if let Some(callback) = operation.callback.take() {
606                callback(status);
607            }
608        }
609
610        Ok(())
611    }
612
613    fn unmap_inner(
614        self: &Arc<Self>,
615        #[cfg(feature = "trace")] buffer_id: BufferId,
616    ) -> Result<Option<BufferMapPendingClosure>, BufferAccessError> {
617        let device = &self.device;
618        let snatch_guard = device.snatchable_lock.read();
619        let raw_buf = self.try_raw(&snatch_guard)?;
620        match mem::replace(&mut *self.map_state.lock(), BufferMapState::Idle) {
621            BufferMapState::Init { staging_buffer } => {
622                #[cfg(feature = "trace")]
623                if let Some(ref mut trace) = *device.trace.lock() {
624                    let data = trace.make_binary("bin", staging_buffer.get_data());
625                    trace.add(trace::Action::WriteBuffer {
626                        id: buffer_id,
627                        data,
628                        range: 0..self.size,
629                        queued: true,
630                    });
631                }
632
633                let staging_buffer = staging_buffer.flush();
634
635                if let Some(queue) = device.get_queue() {
636                    let region = wgt::BufferSize::new(self.size).map(|size| hal::BufferCopy {
637                        src_offset: 0,
638                        dst_offset: 0,
639                        size,
640                    });
641                    let transition_src = hal::BufferBarrier {
642                        buffer: staging_buffer.raw(),
643                        usage: hal::StateTransition {
644                            from: wgt::BufferUses::MAP_WRITE,
645                            to: wgt::BufferUses::COPY_SRC,
646                        },
647                    };
648                    let transition_dst = hal::BufferBarrier::<dyn hal::DynBuffer> {
649                        buffer: raw_buf,
650                        usage: hal::StateTransition {
651                            from: wgt::BufferUses::empty(),
652                            to: wgt::BufferUses::COPY_DST,
653                        },
654                    };
655                    let mut pending_writes = queue.pending_writes.lock();
656                    let encoder = pending_writes.activate();
657                    unsafe {
658                        encoder.transition_buffers(&[transition_src, transition_dst]);
659                        if self.size > 0 {
660                            encoder.copy_buffer_to_buffer(
661                                staging_buffer.raw(),
662                                raw_buf,
663                                region.as_slice(),
664                            );
665                        }
666                    }
667                    pending_writes.consume(staging_buffer);
668                    pending_writes.insert_buffer(self);
669                }
670            }
671            BufferMapState::Idle => {
672                return Err(BufferAccessError::NotMapped);
673            }
674            BufferMapState::Waiting(pending) => {
675                return Ok(Some((pending.op, Err(BufferAccessError::MapAborted))));
676            }
677            BufferMapState::Active {
678                mapping,
679                range,
680                host,
681            } => {
682                #[allow(clippy::collapsible_if)]
683                if host == HostMap::Write {
684                    #[cfg(feature = "trace")]
685                    if let Some(ref mut trace) = *device.trace.lock() {
686                        let size = range.end - range.start;
687                        let data = trace.make_binary("bin", unsafe {
688                            core::slice::from_raw_parts(mapping.ptr.as_ptr(), size as usize)
689                        });
690                        trace.add(trace::Action::WriteBuffer {
691                            id: buffer_id,
692                            data,
693                            range: range.clone(),
694                            queued: false,
695                        });
696                    }
697                    if !mapping.is_coherent {
698                        unsafe { device.raw().flush_mapped_ranges(raw_buf, &[range]) };
699                    }
700                }
701                unsafe { device.raw().unmap_buffer(raw_buf) };
702            }
703        }
704        Ok(None)
705    }
706
707    pub(crate) fn destroy(self: &Arc<Self>) -> Result<(), DestroyError> {
708        let device = &self.device;
709
710        let temp = {
711            let mut snatch_guard = device.snatchable_lock.write();
712
713            let raw = match self.raw.snatch(&mut snatch_guard) {
714                Some(raw) => raw,
715                None => {
716                    return Err(DestroyError::AlreadyDestroyed);
717                }
718            };
719
720            let timestamp_normalization_bind_group = self
721                .timestamp_normalization_bind_group
722                .snatch(&mut snatch_guard);
723
724            let indirect_validation_bind_groups = self
725                .indirect_validation_bind_groups
726                .snatch(&mut snatch_guard);
727
728            drop(snatch_guard);
729
730            let bind_groups = {
731                let mut guard = self.bind_groups.lock();
732                mem::take(&mut *guard)
733            };
734
735            queue::TempResource::DestroyedBuffer(DestroyedBuffer {
736                raw: ManuallyDrop::new(raw),
737                device: Arc::clone(&self.device),
738                label: self.label().to_owned(),
739                bind_groups,
740                timestamp_normalization_bind_group,
741                indirect_validation_bind_groups,
742            })
743        };
744
745        if let Some(queue) = device.get_queue() {
746            let mut pending_writes = queue.pending_writes.lock();
747            if pending_writes.contains_buffer(self) {
748                pending_writes.consume_temp(temp);
749            } else {
750                let mut life_lock = queue.lock_life();
751                let last_submit_index = life_lock.get_buffer_latest_submission_index(self);
752                if let Some(last_submit_index) = last_submit_index {
753                    life_lock.schedule_resource_destruction(temp, last_submit_index);
754                }
755            }
756        }
757
758        Ok(())
759    }
760}
761
762#[derive(Clone, Debug, Error)]
763#[non_exhaustive]
764pub enum CreateBufferError {
765    #[error(transparent)]
766    Device(#[from] DeviceError),
767    #[error("Failed to map buffer while creating: {0}")]
768    AccessError(#[from] BufferAccessError),
769    #[error("Buffers that are mapped at creation have to be aligned to `COPY_BUFFER_ALIGNMENT`")]
770    UnalignedSize,
771    #[error("Invalid usage flags {0:?}")]
772    InvalidUsage(wgt::BufferUsages),
773    #[error("`MAP` usage can only be combined with the opposite `COPY`, requested {0:?}")]
774    UsageMismatch(wgt::BufferUsages),
775    #[error("Buffer size {requested} is greater than the maximum buffer size ({maximum})")]
776    MaxBufferSize { requested: u64, maximum: u64 },
777    #[error(transparent)]
778    MissingDownlevelFlags(#[from] MissingDownlevelFlags),
779    #[error("Failed to create bind group for indirect buffer validation: {0}")]
780    IndirectValidationBindGroup(DeviceError),
781}
782
783crate::impl_resource_type!(Buffer);
784crate::impl_labeled!(Buffer);
785crate::impl_parent_device!(Buffer);
786crate::impl_storage_item!(Buffer);
787crate::impl_trackable!(Buffer);
788
789/// A buffer that has been marked as destroyed and is staged for actual deletion soon.
790#[derive(Debug)]
791pub struct DestroyedBuffer {
792    raw: ManuallyDrop<Box<dyn hal::DynBuffer>>,
793    device: Arc<Device>,
794    label: String,
795    bind_groups: WeakVec<BindGroup>,
796    timestamp_normalization_bind_group: Option<TimestampNormalizationBindGroup>,
797    indirect_validation_bind_groups: Option<crate::indirect_validation::BindGroups>,
798}
799
800impl DestroyedBuffer {
801    pub fn label(&self) -> &dyn fmt::Debug {
802        &self.label
803    }
804}
805
806impl Drop for DestroyedBuffer {
807    fn drop(&mut self) {
808        let mut deferred = self.device.deferred_destroy.lock();
809        deferred.push(DeferredDestroy::BindGroups(mem::take(
810            &mut self.bind_groups,
811        )));
812        drop(deferred);
813
814        if let Some(raw) = self.timestamp_normalization_bind_group.take() {
815            raw.dispose(self.device.raw());
816        }
817
818        if let Some(raw) = self.indirect_validation_bind_groups.take() {
819            raw.dispose(self.device.raw());
820        }
821
822        resource_log!("Destroy raw Buffer (destroyed) {:?}", self.label());
823        // SAFETY: We are in the Drop impl and we don't use self.raw anymore after this point.
824        let raw = unsafe { ManuallyDrop::take(&mut self.raw) };
825        unsafe {
826            hal::DynDevice::destroy_buffer(self.device.raw(), raw);
827        }
828    }
829}
830
831#[cfg(send_sync)]
832unsafe impl Send for StagingBuffer {}
833#[cfg(send_sync)]
834unsafe impl Sync for StagingBuffer {}
835
836/// A temporary buffer, consumed by the command that uses it.
837///
838/// A [`StagingBuffer`] is designed for one-shot uploads of data to the GPU. It
839/// is always created mapped, and the command that uses it destroys the buffer
840/// when it is done.
841///
842/// [`StagingBuffer`]s can be created with [`queue_create_staging_buffer`] and
843/// used with [`queue_write_staging_buffer`]. They are also used internally by
844/// operations like [`queue_write_texture`] that need to upload data to the GPU,
845/// but that don't belong to any particular wgpu command buffer.
846///
847/// Used `StagingBuffer`s are accumulated in [`Device::pending_writes`], to be
848/// freed once their associated operation's queue submission has finished
849/// execution.
850///
851/// [`queue_create_staging_buffer`]: Global::queue_create_staging_buffer
852/// [`queue_write_staging_buffer`]: Global::queue_write_staging_buffer
853/// [`queue_write_texture`]: Global::queue_write_texture
854/// [`Device::pending_writes`]: crate::device::Device
855#[derive(Debug)]
856pub struct StagingBuffer {
857    raw: Box<dyn hal::DynBuffer>,
858    device: Arc<Device>,
859    pub(crate) size: wgt::BufferSize,
860    is_coherent: bool,
861    ptr: NonNull<u8>,
862}
863
864impl StagingBuffer {
865    pub(crate) fn new(device: &Arc<Device>, size: wgt::BufferSize) -> Result<Self, DeviceError> {
866        profiling::scope!("StagingBuffer::new");
867        let stage_desc = hal::BufferDescriptor {
868            label: crate::hal_label(Some("(wgpu internal) Staging"), device.instance_flags),
869            size: size.get(),
870            usage: wgt::BufferUses::MAP_WRITE | wgt::BufferUses::COPY_SRC,
871            memory_flags: hal::MemoryFlags::TRANSIENT,
872        };
873
874        let raw = unsafe { device.raw().create_buffer(&stage_desc) }
875            .map_err(|e| device.handle_hal_error(e))?;
876        let mapping = unsafe { device.raw().map_buffer(raw.as_ref(), 0..size.get()) }
877            .map_err(|e| device.handle_hal_error(e))?;
878
879        let staging_buffer = StagingBuffer {
880            raw,
881            device: device.clone(),
882            size,
883            is_coherent: mapping.is_coherent,
884            ptr: mapping.ptr,
885        };
886
887        Ok(staging_buffer)
888    }
889
890    /// SAFETY: You must not call any functions of `self`
891    /// until you have stopped using the returned pointer.
892    pub(crate) unsafe fn ptr(&self) -> NonNull<u8> {
893        self.ptr
894    }
895
896    #[cfg(feature = "trace")]
897    pub(crate) fn get_data(&self) -> &[u8] {
898        unsafe { core::slice::from_raw_parts(self.ptr.as_ptr(), self.size.get() as usize) }
899    }
900
901    pub(crate) fn write_zeros(&mut self) {
902        unsafe { core::ptr::write_bytes(self.ptr.as_ptr(), 0, self.size.get() as usize) };
903    }
904
905    pub(crate) fn write(&mut self, data: &[u8]) {
906        assert!(data.len() >= self.size.get() as usize);
907        // SAFETY: With the assert above, all of `copy_nonoverlapping`'s
908        // requirements are satisfied.
909        unsafe {
910            core::ptr::copy_nonoverlapping(
911                data.as_ptr(),
912                self.ptr.as_ptr(),
913                self.size.get() as usize,
914            );
915        }
916    }
917
918    /// SAFETY: The offsets and size must be in-bounds.
919    pub(crate) unsafe fn write_with_offset(
920        &mut self,
921        data: &[u8],
922        src_offset: isize,
923        dst_offset: isize,
924        size: usize,
925    ) {
926        unsafe {
927            core::ptr::copy_nonoverlapping(
928                data.as_ptr().offset(src_offset),
929                self.ptr.as_ptr().offset(dst_offset),
930                size,
931            );
932        }
933    }
934
935    pub(crate) fn flush(self) -> FlushedStagingBuffer {
936        let device = self.device.raw();
937        if !self.is_coherent {
938            #[allow(clippy::single_range_in_vec_init)]
939            unsafe {
940                device.flush_mapped_ranges(self.raw.as_ref(), &[0..self.size.get()])
941            };
942        }
943        unsafe { device.unmap_buffer(self.raw.as_ref()) };
944
945        let StagingBuffer {
946            raw, device, size, ..
947        } = self;
948
949        FlushedStagingBuffer {
950            raw: ManuallyDrop::new(raw),
951            device,
952            size,
953        }
954    }
955}
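
// Hedged lifecycle sketch (`device` and `bytes` are placeholders): the one-shot upload path
// this type is designed for.
//
//     let size = wgt::BufferSize::new(bytes.len() as u64).unwrap();
//     let mut staging = StagingBuffer::new(&device, size)?;
//     staging.write(bytes);          // copy into the mapped allocation
//     let flushed = staging.flush(); // flush non-coherent ranges and unmap
//     // `flushed` is then recorded as a copy source and handed to the queue's pending writes.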
956
957crate::impl_resource_type!(StagingBuffer);
958crate::impl_storage_item!(StagingBuffer);
959
960#[derive(Debug)]
961pub struct FlushedStagingBuffer {
962    raw: ManuallyDrop<Box<dyn hal::DynBuffer>>,
963    device: Arc<Device>,
964    pub(crate) size: wgt::BufferSize,
965}
966
967impl FlushedStagingBuffer {
968    pub(crate) fn raw(&self) -> &dyn hal::DynBuffer {
969        self.raw.as_ref()
970    }
971}
972
973impl Drop for FlushedStagingBuffer {
974    fn drop(&mut self) {
975        resource_log!("Destroy raw StagingBuffer");
976        // SAFETY: We are in the Drop impl and we don't use self.raw anymore after this point.
977        let raw = unsafe { ManuallyDrop::take(&mut self.raw) };
978        unsafe { self.device.raw().destroy_buffer(raw) };
979    }
980}
981
982pub type TextureDescriptor<'a> = wgt::TextureDescriptor<Label<'a>, Vec<wgt::TextureFormat>>;
983
984#[derive(Debug)]
985pub(crate) enum TextureInner {
986    Native {
987        raw: Box<dyn hal::DynTexture>,
988    },
989    Surface {
990        raw: Box<dyn hal::DynSurfaceTexture>,
991    },
992}
993
994impl TextureInner {
995    pub(crate) fn raw(&self) -> &dyn hal::DynTexture {
996        match self {
997            Self::Native { raw } => raw.as_ref(),
998            Self::Surface { raw, .. } => raw.as_ref().borrow(),
999        }
1000    }
1001}
1002
1003#[derive(Debug)]
1004pub enum TextureClearMode {
1005    BufferCopy,
1006    // View for clear via RenderPass for every subsurface (mip/layer/slice)
1007    RenderPass {
1008        clear_views: SmallVec<[ManuallyDrop<Box<dyn hal::DynTextureView>>; 1]>,
1009        is_color: bool,
1010    },
1011    Surface {
1012        clear_view: ManuallyDrop<Box<dyn hal::DynTextureView>>,
1013    },
1014    // Texture can't be cleared; attempting to do so will cause a panic
1015    // (either because clearing is impossible for this type of texture, or because it is being destroyed).
1016    None,
1017}
1018
1019#[derive(Debug)]
1020pub struct Texture {
1021    pub(crate) inner: Snatchable<TextureInner>,
1022    pub(crate) device: Arc<Device>,
1023    pub(crate) desc: wgt::TextureDescriptor<(), Vec<wgt::TextureFormat>>,
1024    pub(crate) _hal_usage: wgt::TextureUses,
1025    pub(crate) format_features: wgt::TextureFormatFeatures,
1026    pub(crate) initialization_status: RwLock<TextureInitTracker>,
1027    pub(crate) full_range: TextureSelector,
1028    /// The `label` from the descriptor used to create the resource.
1029    pub(crate) label: String,
1030    pub(crate) tracking_data: TrackingData,
1031    pub(crate) clear_mode: TextureClearMode,
1032    pub(crate) views: Mutex<WeakVec<TextureView>>,
1033    pub(crate) bind_groups: Mutex<WeakVec<BindGroup>>,
1034}
1035
1036impl Texture {
1037    pub(crate) fn new(
1038        device: &Arc<Device>,
1039        inner: TextureInner,
1040        hal_usage: wgt::TextureUses,
1041        desc: &TextureDescriptor,
1042        format_features: wgt::TextureFormatFeatures,
1043        clear_mode: TextureClearMode,
1044        init: bool,
1045    ) -> Self {
1046        Texture {
1047            inner: Snatchable::new(inner),
1048            device: device.clone(),
1049            desc: desc.map_label(|_| ()),
1050            _hal_usage: hal_usage,
1051            format_features,
1052            initialization_status: RwLock::new(
1053                rank::TEXTURE_INITIALIZATION_STATUS,
1054                if init {
1055                    TextureInitTracker::new(desc.mip_level_count, desc.array_layer_count())
1056                } else {
1057                    TextureInitTracker::new(desc.mip_level_count, 0)
1058                },
1059            ),
1060            full_range: TextureSelector {
1061                mips: 0..desc.mip_level_count,
1062                layers: 0..desc.array_layer_count(),
1063            },
1064            label: desc.label.to_string(),
1065            tracking_data: TrackingData::new(device.tracker_indices.textures.clone()),
1066            clear_mode,
1067            views: Mutex::new(rank::TEXTURE_VIEWS, WeakVec::new()),
1068            bind_groups: Mutex::new(rank::TEXTURE_BIND_GROUPS, WeakVec::new()),
1069        }
1070    }
1071
1072    /// Checks that this texture's usage flags contain the required usage flags,
1073    /// returning an error otherwise.
1074    pub(crate) fn check_usage(
1075        &self,
1076        expected: wgt::TextureUsages,
1077    ) -> Result<(), MissingTextureUsageError> {
1078        if self.desc.usage.contains(expected) {
1079            Ok(())
1080        } else {
1081            Err(MissingTextureUsageError {
1082                res: self.error_ident(),
1083                actual: self.desc.usage,
1084                expected,
1085            })
1086        }
1087    }
1088}
1089
1090impl Drop for Texture {
1091    fn drop(&mut self) {
1092        match self.clear_mode {
1093            TextureClearMode::Surface {
1094                ref mut clear_view, ..
1095            } => {
1096                // SAFETY: We are in the Drop impl and we don't use clear_view anymore after this point.
1097                let raw = unsafe { ManuallyDrop::take(clear_view) };
1098                unsafe {
1099                    self.device.raw().destroy_texture_view(raw);
1100                }
1101            }
1102            TextureClearMode::RenderPass {
1103                ref mut clear_views,
1104                ..
1105            } => {
1106                clear_views.iter_mut().for_each(|clear_view| {
1107                    // SAFETY: We are in the Drop impl and we don't use clear_view anymore after this point.
1108                    let raw = unsafe { ManuallyDrop::take(clear_view) };
1109                    unsafe {
1110                        self.device.raw().destroy_texture_view(raw);
1111                    }
1112                });
1113            }
1114            _ => {}
1115        };
1116
1117        if let Some(TextureInner::Native { raw }) = self.inner.take() {
1118            resource_log!("Destroy raw {}", self.error_ident());
1119            unsafe {
1120                self.device.raw().destroy_texture(raw);
1121            }
1122        }
1123    }
1124}
1125
1126impl Texture {
1127    pub(crate) fn try_inner<'a>(
1128        &'a self,
1129        guard: &'a SnatchGuard,
1130    ) -> Result<&'a TextureInner, DestroyedResourceError> {
1131        self.inner
1132            .get(guard)
1133            .ok_or_else(|| DestroyedResourceError(self.error_ident()))
1134    }
1135
1136    pub(crate) fn raw<'a>(
1137        &'a self,
1138        snatch_guard: &'a SnatchGuard,
1139    ) -> Option<&'a dyn hal::DynTexture> {
1140        Some(self.inner.get(snatch_guard)?.raw())
1141    }
1142
1143    pub(crate) fn try_raw<'a>(
1144        &'a self,
1145        guard: &'a SnatchGuard,
1146    ) -> Result<&'a dyn hal::DynTexture, DestroyedResourceError> {
1147        self.inner
1148            .get(guard)
1149            .map(|t| t.raw())
1150            .ok_or_else(|| DestroyedResourceError(self.error_ident()))
1151    }
1152
1153    pub(crate) fn get_clear_view<'a>(
1154        clear_mode: &'a TextureClearMode,
1155        desc: &'a wgt::TextureDescriptor<(), Vec<wgt::TextureFormat>>,
1156        mip_level: u32,
1157        depth_or_layer: u32,
1158    ) -> &'a dyn hal::DynTextureView {
1159        match *clear_mode {
1160            TextureClearMode::BufferCopy => {
1161                panic!("Given texture is cleared with buffer copies, not render passes")
1162            }
1163            TextureClearMode::None => {
1164                panic!("Given texture can't be cleared")
1165            }
1166            TextureClearMode::Surface { ref clear_view, .. } => clear_view.as_ref(),
1167            TextureClearMode::RenderPass {
1168                ref clear_views, ..
1169            } => {
1170                let index = if desc.dimension == wgt::TextureDimension::D3 {
1171                    (0..mip_level).fold(0, |acc, mip| {
1172                        acc + (desc.size.depth_or_array_layers >> mip).max(1)
1173                    })
1174                } else {
1175                    mip_level * desc.size.depth_or_array_layers
1176                } + depth_or_layer;
1177                clear_views[index as usize].as_ref()
1178            }
1179        }
1180    }
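
    // Worked example of the D3 indexing above (numbers illustrative): for a 3D texture with
    // `depth_or_array_layers == 8`, clear views are laid out mip-major, so
    // `mip_level == 2, depth_or_layer == 1` resolves to
    // (8 >> 0).max(1) + (8 >> 1).max(1) + 1 = 8 + 4 + 1 = 13.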
1181
1182    pub(crate) fn destroy(self: &Arc<Self>) -> Result<(), DestroyError> {
1183        let device = &self.device;
1184
1185        let temp = {
1186            let raw = match self.inner.snatch(&mut device.snatchable_lock.write()) {
1187                Some(TextureInner::Native { raw }) => raw,
1188                Some(TextureInner::Surface { .. }) => {
1189                    return Ok(());
1190                }
1191                None => {
1192                    return Err(DestroyError::AlreadyDestroyed);
1193                }
1194            };
1195
1196            let views = {
1197                let mut guard = self.views.lock();
1198                mem::take(&mut *guard)
1199            };
1200
1201            let bind_groups = {
1202                let mut guard = self.bind_groups.lock();
1203                mem::take(&mut *guard)
1204            };
1205
1206            queue::TempResource::DestroyedTexture(DestroyedTexture {
1207                raw: ManuallyDrop::new(raw),
1208                views,
1209                bind_groups,
1210                device: Arc::clone(&self.device),
1211                label: self.label().to_owned(),
1212            })
1213        };
1214
1215        if let Some(queue) = device.get_queue() {
1216            let mut pending_writes = queue.pending_writes.lock();
1217            if pending_writes.contains_texture(self) {
1218                pending_writes.consume_temp(temp);
1219            } else {
1220                let mut life_lock = queue.lock_life();
1221                let last_submit_index = life_lock.get_texture_latest_submission_index(self);
1222                if let Some(last_submit_index) = last_submit_index {
1223                    life_lock.schedule_resource_destruction(temp, last_submit_index);
1224                }
1225            }
1226        }
1227
1228        Ok(())
1229    }
1230}
1231
1232impl Global {
1233    /// # Safety
1234    ///
1235    /// - The raw buffer handle must not be manually destroyed
1236    pub unsafe fn buffer_as_hal<A: HalApi, F: FnOnce(Option<&A::Buffer>) -> R, R>(
1237        &self,
1238        id: BufferId,
1239        hal_buffer_callback: F,
1240    ) -> R {
1241        profiling::scope!("Buffer::as_hal");
1242
1243        let hub = &self.hub;
1244
1245        if let Ok(buffer) = hub.buffers.get(id).get() {
1246            let snatch_guard = buffer.device.snatchable_lock.read();
1247            let hal_buffer = buffer
1248                .raw(&snatch_guard)
1249                .and_then(|b| b.as_any().downcast_ref());
1250            hal_buffer_callback(hal_buffer)
1251        } else {
1252            hal_buffer_callback(None)
1253        }
1254    }
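
    // Hedged usage sketch (assumes the Vulkan backend is enabled; identifiers illustrative):
    //
    //     unsafe {
    //         global.buffer_as_hal::<hal::api::Vulkan, _, _>(buffer_id, |raw| {
    //             if let Some(buffer) = raw { /* inspect the backend buffer */ }
    //         })
    //     }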
1255
1256    /// # Safety
1257    ///
1258    /// - The raw texture handle must not be manually destroyed
1259    pub unsafe fn texture_as_hal<A: HalApi, F: FnOnce(Option<&A::Texture>) -> R, R>(
1260        &self,
1261        id: TextureId,
1262        hal_texture_callback: F,
1263    ) -> R {
1264        profiling::scope!("Texture::as_hal");
1265
1266        let hub = &self.hub;
1267
1268        if let Ok(texture) = hub.textures.get(id).get() {
1269            let snatch_guard = texture.device.snatchable_lock.read();
1270            let hal_texture = texture.raw(&snatch_guard);
1271            let hal_texture = hal_texture
1272                .as_ref()
1273                .and_then(|it| it.as_any().downcast_ref());
1274            hal_texture_callback(hal_texture)
1275        } else {
1276            hal_texture_callback(None)
1277        }
1278    }
1279
1280    /// # Safety
1281    ///
1282    /// - The raw texture view handle must not be manually destroyed
1283    pub unsafe fn texture_view_as_hal<A: HalApi, F: FnOnce(Option<&A::TextureView>) -> R, R>(
1284        &self,
1285        id: TextureViewId,
1286        hal_texture_view_callback: F,
1287    ) -> R {
1288        profiling::scope!("TextureView::as_hal");
1289
1290        let hub = &self.hub;
1291
1292        if let Ok(texture_view) = hub.texture_views.get(id).get() {
1293            let snatch_guard = texture_view.device.snatchable_lock.read();
1294            let hal_texture_view = texture_view.raw(&snatch_guard);
1295            let hal_texture_view = hal_texture_view
1296                .as_ref()
1297                .and_then(|it| it.as_any().downcast_ref());
1298            hal_texture_view_callback(hal_texture_view)
1299        } else {
1300            hal_texture_view_callback(None)
1301        }
1302    }
1303
1304    /// # Safety
1305    ///
1306    /// - The raw adapter handle must not be manually destroyed
1307    pub unsafe fn adapter_as_hal<A: HalApi, F: FnOnce(Option<&A::Adapter>) -> R, R>(
1308        &self,
1309        id: AdapterId,
1310        hal_adapter_callback: F,
1311    ) -> R {
1312        profiling::scope!("Adapter::as_hal");
1313
1314        let hub = &self.hub;
1315        let adapter = hub.adapters.get(id);
1316        let hal_adapter = adapter.raw.adapter.as_any().downcast_ref();
1317
1318        hal_adapter_callback(hal_adapter)
1319    }
1320
1321    /// # Safety
1322    ///
1323    /// - The raw device handle must not be manually destroyed
1324    pub unsafe fn device_as_hal<A: HalApi, F: FnOnce(Option<&A::Device>) -> R, R>(
1325        &self,
1326        id: DeviceId,
1327        hal_device_callback: F,
1328    ) -> R {
1329        profiling::scope!("Device::as_hal");
1330
1331        let device = self.hub.devices.get(id);
1332        let hal_device = device.raw().as_any().downcast_ref();
1333
1334        hal_device_callback(hal_device)
1335    }
1336
1337    /// # Safety
1338    ///
1339    /// - The raw fence handle must not be manually destroyed
1340    pub unsafe fn device_fence_as_hal<A: HalApi, F: FnOnce(Option<&A::Fence>) -> R, R>(
1341        &self,
1342        id: DeviceId,
1343        hal_fence_callback: F,
1344    ) -> R {
1345        profiling::scope!("Device::fence_as_hal");
1346
1347        let device = self.hub.devices.get(id);
1348        let fence = device.fence.read();
1349        hal_fence_callback(fence.as_any().downcast_ref())
1350    }
1351
1352    /// # Safety
1353    /// - The raw surface handle must not be manually destroyed
1354    pub unsafe fn surface_as_hal<A: HalApi, F: FnOnce(Option<&A::Surface>) -> R, R>(
1355        &self,
1356        id: SurfaceId,
1357        hal_surface_callback: F,
1358    ) -> R {
1359        profiling::scope!("Surface::as_hal");
1360
1361        let surface = self.surfaces.get(id);
1362        let hal_surface = surface
1363            .raw(A::VARIANT)
1364            .and_then(|surface| surface.as_any().downcast_ref());
1365
1366        hal_surface_callback(hal_surface)
1367    }
1368
1369    /// # Safety
1370    ///
1371    /// - The raw command encoder handle must not be manually destroyed
1372    pub unsafe fn command_encoder_as_hal_mut<
1373        A: HalApi,
1374        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
1375        R,
1376    >(
1377        &self,
1378        id: CommandEncoderId,
1379        hal_command_encoder_callback: F,
1380    ) -> R {
1381        profiling::scope!("CommandEncoder::as_hal");
1382
1383        let hub = &self.hub;
1384
1385        let cmd_buf = hub.command_buffers.get(id.into_command_buffer_id());
1386        let mut cmd_buf_data = cmd_buf.data.lock();
1387        let cmd_buf_data_guard = cmd_buf_data.record();
1388
1389        if let Ok(mut cmd_buf_data_guard) = cmd_buf_data_guard {
1390            let cmd_buf_raw = cmd_buf_data_guard
1391                .encoder
1392                .open()
1393                .ok()
1394                .and_then(|encoder| encoder.as_any_mut().downcast_mut());
1395            let ret = hal_command_encoder_callback(cmd_buf_raw);
1396            cmd_buf_data_guard.mark_successful();
1397            ret
1398        } else {
1399            hal_command_encoder_callback(None)
1400        }
1401    }
1402
1403    /// # Safety
1404    ///
1405    /// - The raw queue handle must not be manually destroyed
1406    pub unsafe fn queue_as_hal<A: HalApi, F, R>(&self, id: QueueId, hal_queue_callback: F) -> R
1407    where
1408        F: FnOnce(Option<&A::Queue>) -> R,
1409    {
1410        profiling::scope!("Queue::as_hal");
1411
1412        let queue = self.hub.queues.get(id);
1413        let hal_queue = queue.raw().as_any().downcast_ref();
1414
1415        hal_queue_callback(hal_queue)
1416    }
1417
1418    /// # Safety
1419    ///
1420    /// - The raw blas handle must not be manually destroyed
1421    pub unsafe fn blas_as_hal<A: HalApi, F: FnOnce(Option<&A::AccelerationStructure>) -> R, R>(
1422        &self,
1423        id: BlasId,
1424        hal_blas_callback: F,
1425    ) -> R {
1426        profiling::scope!("Blas::as_hal");
1427
1428        let hub = &self.hub;
1429
1430        if let Ok(blas) = hub.blas_s.get(id).get() {
1431            let snatch_guard = blas.device.snatchable_lock.read();
1432            let hal_blas = blas
1433                .try_raw(&snatch_guard)
1434                .ok()
1435                .and_then(|b| b.as_any().downcast_ref());
1436            hal_blas_callback(hal_blas)
1437        } else {
1438            hal_blas_callback(None)
1439        }
1440    }
1441
1442    /// # Safety
1443    ///
1444    /// - The raw tlas handle must not be manually destroyed
1445    pub unsafe fn tlas_as_hal<A: HalApi, F: FnOnce(Option<&A::AccelerationStructure>) -> R, R>(
1446        &self,
1447        id: TlasId,
1448        hal_tlas_callback: F,
1449    ) -> R {
1450        profiling::scope!("Blas::as_hal");
1451
1452        let hub = &self.hub;
1453
1454        if let Ok(tlas) = hub.tlas_s.get(id).get() {
1455            let snatch_guard = tlas.device.snatchable_lock.read();
1456            let hal_tlas = tlas
1457                .try_raw(&snatch_guard)
1458                .ok()
1459                .and_then(|t| t.as_any().downcast_ref());
1460            hal_tlas_callback(hal_tlas)
1461        } else {
1462            hal_tlas_callback(None)
1463        }
1464    }
1465}
1466
1467/// A texture that has been marked as destroyed and is staged for actual deletion soon.
1468#[derive(Debug)]
1469pub struct DestroyedTexture {
1470    raw: ManuallyDrop<Box<dyn hal::DynTexture>>,
1471    views: WeakVec<TextureView>,
1472    bind_groups: WeakVec<BindGroup>,
1473    device: Arc<Device>,
1474    label: String,
1475}
1476
1477impl DestroyedTexture {
1478    pub fn label(&self) -> &dyn fmt::Debug {
1479        &self.label
1480    }
1481}
1482
1483impl Drop for DestroyedTexture {
1484    fn drop(&mut self) {
1485        let device = &self.device;
1486
1487        let mut deferred = device.deferred_destroy.lock();
1488        deferred.push(DeferredDestroy::TextureViews(mem::take(&mut self.views)));
1489        deferred.push(DeferredDestroy::BindGroups(mem::take(
1490            &mut self.bind_groups,
1491        )));
1492        drop(deferred);
1493
1494        resource_log!("Destroy raw Texture (destroyed) {:?}", self.label());
1495        // SAFETY: We are in the Drop impl and we don't use self.raw anymore after this point.
1496        let raw = unsafe { ManuallyDrop::take(&mut self.raw) };
1497        unsafe {
1498            self.device.raw().destroy_texture(raw);
1499        }
1500    }
1501}

#[derive(Clone, Copy, Debug)]
pub enum TextureErrorDimension {
    X,
    Y,
    Z,
}

#[derive(Clone, Debug, Error)]
#[non_exhaustive]
pub enum TextureDimensionError {
    #[error("Dimension {0:?} is zero")]
    Zero(TextureErrorDimension),
    #[error("Dimension {dim:?} value {given} exceeds the limit of {limit}")]
    LimitExceeded {
        dim: TextureErrorDimension,
        given: u32,
        limit: u32,
    },
    #[error("Sample count {0} is invalid")]
    InvalidSampleCount(u32),
    #[error("Width {width} is not a multiple of {format:?}'s block width ({block_width})")]
    NotMultipleOfBlockWidth {
        width: u32,
        block_width: u32,
        format: wgt::TextureFormat,
    },
    #[error("Height {height} is not a multiple of {format:?}'s block height ({block_height})")]
    NotMultipleOfBlockHeight {
        height: u32,
        block_height: u32,
        format: wgt::TextureFormat,
    },
    #[error(
        "Width {width} is not a multiple of {format:?}'s width multiple requirement ({multiple})"
    )]
    WidthNotMultipleOf {
        width: u32,
        multiple: u32,
        format: wgt::TextureFormat,
    },
    #[error("Height {height} is not a multiple of {format:?}'s height multiple requirement ({multiple})")]
    HeightNotMultipleOf {
        height: u32,
        multiple: u32,
        format: wgt::TextureFormat,
    },
    #[error("Multisampled texture depth or array layers must be 1, got {0}")]
    MultisampledDepthOrArrayLayer(u32),
}

#[derive(Clone, Debug, Error)]
#[non_exhaustive]
pub enum CreateTextureError {
    #[error(transparent)]
    Device(#[from] DeviceError),
    #[error(transparent)]
    CreateTextureView(#[from] CreateTextureViewError),
    #[error("Invalid usage flags {0:?}")]
    InvalidUsage(wgt::TextureUsages),
    #[error(transparent)]
    InvalidDimension(#[from] TextureDimensionError),
    #[error("Depth texture ({1:?}) can't be created as {0:?}")]
    InvalidDepthDimension(wgt::TextureDimension, wgt::TextureFormat),
    #[error("Compressed texture ({1:?}) can't be created as {0:?}")]
    InvalidCompressedDimension(wgt::TextureDimension, wgt::TextureFormat),
    #[error(
        "Texture descriptor mip level count {requested} is invalid, maximum allowed is {maximum}"
    )]
    InvalidMipLevelCount { requested: u32, maximum: u32 },
    #[error(
        "Texture usages {0:?} are not allowed on a texture of type {1:?}{downlevel_suffix}",
        downlevel_suffix = if *.2 { " due to downlevel restrictions" } else { "" }
    )]
    InvalidFormatUsages(wgt::TextureUsages, wgt::TextureFormat, bool),
    #[error("The view format {0:?} is not compatible with texture format {1:?}, only changing srgb-ness is allowed.")]
    InvalidViewFormat(wgt::TextureFormat, wgt::TextureFormat),
    #[error("Texture usages {0:?} are not allowed on a texture of dimensions {1:?}")]
    InvalidDimensionUsages(wgt::TextureUsages, wgt::TextureDimension),
    #[error("Texture usage STORAGE_BINDING is not allowed for multisampled textures")]
    InvalidMultisampledStorageBinding,
    #[error("Format {0:?} does not support multisampling")]
    InvalidMultisampledFormat(wgt::TextureFormat),
    #[error("Sample count {0} is not supported by format {1:?} on this device. The WebGPU spec guarantees {2:?} samples are supported by this format. With the TEXTURE_ADAPTER_SPECIFIC_FORMAT_FEATURES feature your device supports {3:?}.")]
    InvalidSampleCount(u32, wgt::TextureFormat, Vec<u32>, Vec<u32>),
    #[error("Multisampled textures must have RENDER_ATTACHMENT usage")]
    MultisampledNotRenderAttachment,
    #[error("Texture format {0:?} can't be used due to missing features")]
    MissingFeatures(wgt::TextureFormat, #[source] MissingFeatures),
    #[error(transparent)]
    MissingDownlevelFlags(#[from] MissingDownlevelFlags),
}

crate::impl_resource_type!(Texture);
crate::impl_labeled!(Texture);
crate::impl_parent_device!(Texture);
crate::impl_storage_item!(Texture);
crate::impl_trackable!(Texture);

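// Editor's note (an assumption, not stated in this file): this `Borrow` impl
// lets code that holds a `Texture` hand out the texture's full subresource
// range (`full_range`) wherever a `TextureSelector` is expected, e.g. in the
// resource trackers.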
impl Borrow<TextureSelector> for Texture {
    fn borrow(&self) -> &TextureSelector {
        &self.full_range
    }
}

/// Describes a [`TextureView`].
#[derive(Clone, Debug, Default, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "serde", serde(default))]
pub struct TextureViewDescriptor<'a> {
    /// Debug label of the texture view.
    ///
    /// This will show up in graphics debuggers for easy identification.
    pub label: Label<'a>,
    /// Format of the texture view, or `None` for the same format as the texture
    /// itself.
    ///
    /// At this time, it must be the same as the underlying format of the texture.
    pub format: Option<wgt::TextureFormat>,
    /// The dimension of the texture view.
    ///
    /// - For 1D textures, this must be `D1`.
    /// - For 2D textures it must be one of `D2`, `D2Array`, `Cube`, or `CubeArray`.
    /// - For 3D textures it must be `D3`.
    pub dimension: Option<wgt::TextureViewDimension>,
    /// The allowed usage(s) for the texture view. Must be a subset of the usage flags of the texture.
    /// If not provided, defaults to the full set of usage flags of the texture.
    pub usage: Option<wgt::TextureUsages>,
    /// Range within the texture that is accessible via this view.
    pub range: wgt::ImageSubresourceRange,
}
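
// Illustrative sketch (editorial addition, not part of the original source):
// building a descriptor for a plain 2D color view. The label text and the
// choice of fields are assumptions; every field left out falls back to the
// `Default` derive above.
//
// let view_desc = TextureViewDescriptor {
//     label: Some(alloc::borrow::Cow::Borrowed("color-view")),
//     dimension: Some(wgt::TextureViewDimension::D2),
//     usage: Some(wgt::TextureUsages::TEXTURE_BINDING),
//     ..Default::default()
// };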

#[derive(Debug)]
pub(crate) struct HalTextureViewDescriptor {
    pub texture_format: wgt::TextureFormat,
    pub format: wgt::TextureFormat,
    pub usage: wgt::TextureUsages,
    pub dimension: wgt::TextureViewDimension,
    pub range: wgt::ImageSubresourceRange,
}

impl HalTextureViewDescriptor {
    pub fn aspects(&self) -> hal::FormatAspects {
        hal::FormatAspects::new(self.texture_format, self.range.aspect)
    }
}
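
// Editorial example (hedged): `aspects()` narrows the aspects of the
// underlying texture format down to the aspect selected by the view's
// subresource range. For instance, a depth-stencil format viewed with
// `TextureAspect::DepthOnly` should report only the depth aspect:
//
// let aspects = hal::FormatAspects::new(
//     wgt::TextureFormat::Depth24PlusStencil8,
//     wgt::TextureAspect::DepthOnly,
// );
// assert_eq!(aspects, hal::FormatAspects::DEPTH);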

#[derive(Debug, Copy, Clone, Error)]
pub enum TextureViewNotRenderableReason {
    #[error("The texture this view references doesn't include the RENDER_ATTACHMENT usage. Provided usages: {0:?}")]
    Usage(wgt::TextureUsages),
    #[error("The dimension of this texture view is not 2D. View dimension: {0:?}")]
    Dimension(wgt::TextureViewDimension),
    #[error("This texture view has more than one mipmap level. View mipmap levels: {0:?}")]
    MipLevelCount(u32),
    #[error("This texture view has more than one array layer. View array layers: {0:?}")]
    ArrayLayerCount(u32),
    #[error(
        "The aspects of this texture view are a subset of the aspects in the original texture. Aspects: {0:?}"
    )]
    Aspects(hal::FormatAspects),
}

#[derive(Debug)]
pub struct TextureView {
    pub(crate) raw: Snatchable<Box<dyn hal::DynTextureView>>,
    // if it's a surface texture - it's none
    pub(crate) parent: Arc<Texture>,
    pub(crate) device: Arc<Device>,
    pub(crate) desc: HalTextureViewDescriptor,
    pub(crate) format_features: wgt::TextureFormatFeatures,
    /// This is `Err` only if the texture view is not renderable
    pub(crate) render_extent: Result<wgt::Extent3d, TextureViewNotRenderableReason>,
    pub(crate) samples: u32,
    pub(crate) selector: TextureSelector,
    /// The `label` from the descriptor used to create the resource.
    pub(crate) label: String,
    pub(crate) tracking_data: TrackingData,
}

impl Drop for TextureView {
    fn drop(&mut self) {
        if let Some(raw) = self.raw.take() {
            resource_log!("Destroy raw {}", self.error_ident());
            unsafe {
                self.device.raw().destroy_texture_view(raw);
            }
        }
    }
}

impl TextureView {
    pub(crate) fn raw<'a>(
        &'a self,
        snatch_guard: &'a SnatchGuard,
    ) -> Option<&'a dyn hal::DynTextureView> {
        self.raw.get(snatch_guard).map(|it| it.as_ref())
    }

    pub(crate) fn try_raw<'a>(
        &'a self,
        guard: &'a SnatchGuard,
    ) -> Result<&'a dyn hal::DynTextureView, DestroyedResourceError> {
        self.raw
            .get(guard)
            .map(|it| it.as_ref())
            .ok_or_else(|| DestroyedResourceError(self.error_ident()))
    }

    /// Checks that this texture view's usage contains the required texture usage,
    /// returning an error otherwise.
    pub(crate) fn check_usage(
        &self,
        expected: wgt::TextureUsages,
    ) -> Result<(), MissingTextureUsageError> {
        if self.desc.usage.contains(expected) {
            Ok(())
        } else {
            Err(MissingTextureUsageError {
                res: self.error_ident(),
                actual: self.desc.usage,
                expected,
            })
        }
    }
}
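
// Usage sketch for `try_raw`/`check_usage` above (editorial, hedged; the
// bindings `view` and `snatch_guard` are assumed, not defined here): command
// recording typically resolves the raw hal view and validates the usage
// before encoding work that samples from or renders to it.
//
// let raw_view = view.try_raw(&snatch_guard)?;
// view.check_usage(wgt::TextureUsages::RENDER_ATTACHMENT)?;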

#[derive(Clone, Debug, Error)]
#[non_exhaustive]
pub enum CreateTextureViewError {
    #[error(transparent)]
    Device(#[from] DeviceError),
    #[error(transparent)]
    DestroyedResource(#[from] DestroyedResourceError),
    #[error("Invalid texture view dimension `{view:?}` with texture of dimension `{texture:?}`")]
    InvalidTextureViewDimension {
        view: wgt::TextureViewDimension,
        texture: wgt::TextureDimension,
    },
    #[error("Texture view format `{0:?}` is not renderable")]
    TextureViewFormatNotRenderable(wgt::TextureFormat),
    #[error("Texture view format `{0:?}` is not storage bindable")]
    TextureViewFormatNotStorage(wgt::TextureFormat),
    #[error("Invalid texture view usage `{view:?}` with texture of usage `{texture:?}`")]
    InvalidTextureViewUsage {
        view: wgt::TextureUsages,
        texture: wgt::TextureUsages,
    },
    #[error("Invalid texture view dimension `{0:?}` of a multisampled texture")]
    InvalidMultisampledTextureViewDimension(wgt::TextureViewDimension),
    #[error("Invalid texture depth `{depth}` for texture view of dimension `Cubemap`. Cube views must be created from textures with exactly 6 array layers.")]
    InvalidCubemapTextureDepth { depth: u32 },
    #[error("Invalid texture depth `{depth}` for texture view of dimension `CubemapArray`. Cube array views must be created from textures whose array layer count is a multiple of 6.")]
    InvalidCubemapArrayTextureDepth { depth: u32 },
    #[error("Source texture width and height must be equal for a texture view of dimension `Cube`/`CubeArray`")]
    InvalidCubeTextureViewSize,
    #[error("Mip level count is 0")]
    ZeroMipLevelCount,
    #[error("Array layer count is 0")]
    ZeroArrayLayerCount,
    #[error(
        "TextureView mip level count + base mip level {requested} must be <= Texture mip level count {total}"
    )]
    TooManyMipLevels { requested: u32, total: u32 },
    #[error("TextureView array layer count + base array layer {requested} must be <= Texture depth/array layer count {total}")]
    TooManyArrayLayers { requested: u32, total: u32 },
    #[error("Requested array layer count {requested} is not valid for the target view dimension {dim:?}")]
    InvalidArrayLayerCount {
        requested: u32,
        dim: wgt::TextureViewDimension,
    },
    #[error("Aspect {requested_aspect:?} is not in the source texture format {texture_format:?}")]
    InvalidAspect {
        texture_format: wgt::TextureFormat,
        requested_aspect: wgt::TextureAspect,
    },
    #[error("Unable to view texture {texture:?} as {view:?}")]
    FormatReinterpretation {
        texture: wgt::TextureFormat,
        view: wgt::TextureFormat,
    },
    #[error(transparent)]
    InvalidResource(#[from] InvalidResourceError),
    #[error(transparent)]
    MissingFeatures(#[from] MissingFeatures),
}

#[derive(Clone, Debug, Error)]
#[non_exhaustive]
pub enum TextureViewDestroyError {}

crate::impl_resource_type!(TextureView);
crate::impl_labeled!(TextureView);
crate::impl_parent_device!(TextureView);
crate::impl_storage_item!(TextureView);
crate::impl_trackable!(TextureView);

/// Describes a [`Sampler`]
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SamplerDescriptor<'a> {
    /// Debug label of the sampler.
    ///
    /// This will show up in graphics debuggers for easy identification.
    pub label: Label<'a>,
    /// How to deal with out of bounds accesses in the u (i.e. x), v (i.e. y), and w (i.e. z) directions
    pub address_modes: [wgt::AddressMode; 3],
    /// How to filter the texture when it needs to be magnified (made larger)
    pub mag_filter: wgt::FilterMode,
    /// How to filter the texture when it needs to be minified (made smaller)
    pub min_filter: wgt::FilterMode,
    /// How to filter between mip map levels
    pub mipmap_filter: wgt::FilterMode,
    /// Minimum level of detail (i.e. mip level) to use
    pub lod_min_clamp: f32,
    /// Maximum level of detail (i.e. mip level) to use
    pub lod_max_clamp: f32,
    /// If this is enabled, this is a comparison sampler using the given comparison function.
    pub compare: Option<wgt::CompareFunction>,
    /// Must be at least 1. If this is not 1, all filter modes must be linear.
    pub anisotropy_clamp: u16,
    /// Border color to use when address_mode is
    /// [`AddressMode::ClampToBorder`](wgt::AddressMode::ClampToBorder)
    pub border_color: Option<wgt::SamplerBorderColor>,
}
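
// Illustrative sketch (editorial addition; the label and parameter choices are
// assumptions, not taken from this file): a plain trilinear sampler with all
// address modes clamped to the edge. `SamplerDescriptor` has no `Default`
// derive here, so every field is spelled out.
//
// let sampler_desc = SamplerDescriptor {
//     label: Some(alloc::borrow::Cow::Borrowed("linear-clamp")),
//     address_modes: [wgt::AddressMode::ClampToEdge; 3],
//     mag_filter: wgt::FilterMode::Linear,
//     min_filter: wgt::FilterMode::Linear,
//     mipmap_filter: wgt::FilterMode::Linear,
//     lod_min_clamp: 0.0,
//     lod_max_clamp: 32.0,
//     compare: None,
//     anisotropy_clamp: 1,
//     border_color: None,
// };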

#[derive(Debug)]
pub struct Sampler {
    pub(crate) raw: ManuallyDrop<Box<dyn hal::DynSampler>>,
    pub(crate) device: Arc<Device>,
    /// The `label` from the descriptor used to create the resource.
    pub(crate) label: String,
    pub(crate) tracking_data: TrackingData,
    /// `true` if this is a comparison sampler
    pub(crate) comparison: bool,
    /// `true` if this is a filtering sampler
    pub(crate) filtering: bool,
}

impl Drop for Sampler {
    fn drop(&mut self) {
        resource_log!("Destroy raw {}", self.error_ident());
        // SAFETY: We are in the Drop impl and we don't use self.raw anymore after this point.
        let raw = unsafe { ManuallyDrop::take(&mut self.raw) };
        unsafe {
            self.device.raw().destroy_sampler(raw);
        }
    }
}

impl Sampler {
    pub(crate) fn raw(&self) -> &dyn hal::DynSampler {
        self.raw.as_ref()
    }
}

#[derive(Copy, Clone)]
pub enum SamplerFilterErrorType {
    MagFilter,
    MinFilter,
    MipmapFilter,
}

impl fmt::Debug for SamplerFilterErrorType {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            SamplerFilterErrorType::MagFilter => write!(f, "magFilter"),
            SamplerFilterErrorType::MinFilter => write!(f, "minFilter"),
            SamplerFilterErrorType::MipmapFilter => write!(f, "mipmapFilter"),
        }
    }
}

#[derive(Clone, Debug, Error)]
#[non_exhaustive]
pub enum CreateSamplerError {
    #[error(transparent)]
    Device(#[from] DeviceError),
    #[error("Invalid lodMinClamp: {0}. Must be greater than or equal to 0.0")]
    InvalidLodMinClamp(f32),
    #[error("Invalid lodMaxClamp: {lod_max_clamp}. Must be greater than or equal to lodMinClamp (which is {lod_min_clamp}).")]
    InvalidLodMaxClamp {
        lod_min_clamp: f32,
        lod_max_clamp: f32,
    },
    #[error("Invalid anisotropic clamp: {0}. Must be at least 1.")]
    InvalidAnisotropy(u16),
    #[error("Invalid filter mode for {filter_type:?}: {filter_mode:?}. When the anisotropy clamp is not 1 (it is {anisotropic_clamp}), all filter modes must be linear.")]
    InvalidFilterModeWithAnisotropy {
        filter_type: SamplerFilterErrorType,
        filter_mode: wgt::FilterMode,
        anisotropic_clamp: u16,
    },
    #[error(transparent)]
    MissingFeatures(#[from] MissingFeatures),
}

crate::impl_resource_type!(Sampler);
crate::impl_labeled!(Sampler);
crate::impl_parent_device!(Sampler);
crate::impl_storage_item!(Sampler);
crate::impl_trackable!(Sampler);

#[derive(Clone, Debug, Error)]
#[non_exhaustive]
pub enum CreateQuerySetError {
    #[error(transparent)]
    Device(#[from] DeviceError),
    #[error("QuerySets cannot be made with zero queries")]
    ZeroCount,
    #[error("{count} is too many queries for a single QuerySet. QuerySets cannot be made with more than {maximum} queries.")]
    TooManyQueries { count: u32, maximum: u32 },
    #[error(transparent)]
    MissingFeatures(#[from] MissingFeatures),
}

pub type QuerySetDescriptor<'a> = wgt::QuerySetDescriptor<Label<'a>>;
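
// Editorial example (hedged; the label and count are assumptions): a query
// set holding 64 timestamp queries, using the `wgt::QuerySetDescriptor`
// fields behind the alias above.
//
// let qs_desc: QuerySetDescriptor = wgt::QuerySetDescriptor {
//     label: Some(alloc::borrow::Cow::Borrowed("frame-timestamps")),
//     ty: wgt::QueryType::Timestamp,
//     count: 64,
// };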

#[derive(Debug)]
pub struct QuerySet {
    pub(crate) raw: ManuallyDrop<Box<dyn hal::DynQuerySet>>,
    pub(crate) device: Arc<Device>,
    /// The `label` from the descriptor used to create the resource.
    pub(crate) label: String,
    pub(crate) tracking_data: TrackingData,
    pub(crate) desc: wgt::QuerySetDescriptor<()>,
}

impl Drop for QuerySet {
    fn drop(&mut self) {
        resource_log!("Destroy raw {}", self.error_ident());
        // SAFETY: We are in the Drop impl and we don't use self.raw anymore after this point.
        let raw = unsafe { ManuallyDrop::take(&mut self.raw) };
        unsafe {
            self.device.raw().destroy_query_set(raw);
        }
    }
}

crate::impl_resource_type!(QuerySet);
crate::impl_labeled!(QuerySet);
crate::impl_parent_device!(QuerySet);
crate::impl_storage_item!(QuerySet);
crate::impl_trackable!(QuerySet);

impl QuerySet {
    pub(crate) fn raw(&self) -> &dyn hal::DynQuerySet {
        self.raw.as_ref()
    }
}

#[derive(Clone, Debug, Error)]
#[non_exhaustive]
pub enum DestroyError {
    #[error("Resource is already destroyed")]
    AlreadyDestroyed,
    #[error(transparent)]
    InvalidResource(#[from] InvalidResourceError),
}

pub type BlasDescriptor<'a> = wgt::CreateBlasDescriptor<Label<'a>>;
pub type TlasDescriptor<'a> = wgt::CreateTlasDescriptor<Label<'a>>;

pub(crate) trait AccelerationStructure: Trackable {
    fn try_raw<'a>(
        &'a self,
        guard: &'a SnatchGuard,
    ) -> Result<&'a dyn hal::DynAccelerationStructure, DestroyedResourceError>;
}
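
// Editorial sketch (hedged; `blas` and `snatch_guard` are assumed bindings):
// both `Blas` and `Tlas` below implement this trait, so build and validation
// code can resolve either kind of acceleration structure uniformly.
//
// let raw: &dyn hal::DynAccelerationStructure = blas.try_raw(&snatch_guard)?;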

#[derive(Debug)]
pub struct Blas {
    pub(crate) raw: Snatchable<Box<dyn hal::DynAccelerationStructure>>,
    pub(crate) device: Arc<Device>,
    pub(crate) size_info: hal::AccelerationStructureBuildSizes,
    pub(crate) sizes: wgt::BlasGeometrySizeDescriptors,
    pub(crate) flags: wgt::AccelerationStructureFlags,
    pub(crate) update_mode: wgt::AccelerationStructureUpdateMode,
    pub(crate) built_index: RwLock<Option<NonZeroU64>>,
    pub(crate) handle: u64,
    /// The `label` from the descriptor used to create the resource.
    pub(crate) label: String,
    pub(crate) tracking_data: TrackingData,
}

impl Drop for Blas {
    fn drop(&mut self) {
        resource_log!("Destroy raw {}", self.error_ident());
        // SAFETY: We are in the Drop impl, and we don't use self.raw anymore after this point.
        if let Some(raw) = self.raw.take() {
            unsafe {
                self.device.raw().destroy_acceleration_structure(raw);
            }
        }
    }
}

impl AccelerationStructure for Blas {
    fn try_raw<'a>(
        &'a self,
        guard: &'a SnatchGuard,
    ) -> Result<&'a dyn hal::DynAccelerationStructure, DestroyedResourceError> {
        self.raw
            .get(guard)
            .map(|raw| raw.as_ref())
            .ok_or_else(|| DestroyedResourceError(self.error_ident()))
    }
}

crate::impl_resource_type!(Blas);
crate::impl_labeled!(Blas);
crate::impl_parent_device!(Blas);
crate::impl_storage_item!(Blas);
crate::impl_trackable!(Blas);

#[derive(Debug)]
pub struct Tlas {
    pub(crate) raw: Snatchable<Box<dyn hal::DynAccelerationStructure>>,
    pub(crate) device: Arc<Device>,
    pub(crate) size_info: hal::AccelerationStructureBuildSizes,
    pub(crate) max_instance_count: u32,
    pub(crate) flags: wgt::AccelerationStructureFlags,
    pub(crate) update_mode: wgt::AccelerationStructureUpdateMode,
    pub(crate) built_index: RwLock<Option<NonZeroU64>>,
    pub(crate) dependencies: RwLock<Vec<Arc<Blas>>>,
    pub(crate) instance_buffer: ManuallyDrop<Box<dyn hal::DynBuffer>>,
    /// The `label` from the descriptor used to create the resource.
    pub(crate) label: String,
    pub(crate) tracking_data: TrackingData,
}

impl Drop for Tlas {
    fn drop(&mut self) {
        unsafe {
            resource_log!("Destroy raw {}", self.error_ident());
            if let Some(structure) = self.raw.take() {
                self.device.raw().destroy_acceleration_structure(structure);
            }
            let buffer = ManuallyDrop::take(&mut self.instance_buffer);
            self.device.raw().destroy_buffer(buffer);
        }
    }
}

impl AccelerationStructure for Tlas {
    fn try_raw<'a>(
        &'a self,
        guard: &'a SnatchGuard,
    ) -> Result<&'a dyn hal::DynAccelerationStructure, DestroyedResourceError> {
        self.raw
            .get(guard)
            .map(|raw| raw.as_ref())
            .ok_or_else(|| DestroyedResourceError(self.error_ident()))
    }
}

crate::impl_resource_type!(Tlas);
crate::impl_labeled!(Tlas);
crate::impl_parent_device!(Tlas);
crate::impl_storage_item!(Tlas);
crate::impl_trackable!(Tlas);