use std::{
error, fmt,
ops::{Bound, Deref, DerefMut, Range, RangeBounds},
sync::Arc,
};
use parking_lot::Mutex;
use crate::*;
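/// Handle to a GPU-accessible buffer.
///
/// Created with [`Device::create_buffer`]. To access a buffer's contents from the
/// CPU, map a [`BufferSlice`] of it with [`BufferSlice::map_async`] and then read or
/// write the mapped memory through a [`BufferView`] or [`BufferViewMut`].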
#[derive(Debug, Clone)]
pub struct Buffer {
pub(crate) inner: dispatch::DispatchBuffer,
pub(crate) map_context: Arc<Mutex<MapContext>>,
pub(crate) size: wgt::BufferAddress,
pub(crate) usage: BufferUsages,
}
#[cfg(send_sync)]
static_assertions::assert_impl_all!(Buffer: Send, Sync);
crate::cmp::impl_eq_ord_hash_proxy!(Buffer => .inner);
impl Buffer {
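/// Returns a [`BindingResource`] referring to this buffer's entire contents,
/// for use when creating a [`BindGroup`].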
pub fn as_entire_binding(&self) -> BindingResource<'_> {
BindingResource::Buffer(self.as_entire_buffer_binding())
}
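/// Returns a [`BufferBinding`] covering the entire buffer (offset 0, no explicit size).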
pub fn as_entire_buffer_binding(&self) -> BufferBinding<'_> {
BufferBinding {
buffer: self,
offset: 0,
size: None,
}
}
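/// Passes the underlying `wgpu-hal` buffer for backend `A` to `hal_buffer_callback`.
/// The callback receives `None` if this buffer is not backed by `wgpu_core` or does
/// not belong to backend `A`.
///
/// # Safety
///
/// - The hal buffer handle passed to the callback must not be used outside of the
///   callback, and the callback must not destroy it.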
#[cfg(wgpu_core)]
pub unsafe fn as_hal<A: wgc::hal_api::HalApi, F: FnOnce(Option<&A::Buffer>) -> R, R>(
&self,
hal_buffer_callback: F,
) -> R {
if let Some(buffer) = self.inner.as_core_opt() {
unsafe {
buffer
.context
.buffer_as_hal::<A, F, R>(buffer, hal_buffer_callback)
}
} else {
hal_buffer_callback(None)
}
}
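/// Returns a [`BufferSlice`] covering the byte range of this buffer selected by
/// `bounds`; an unbounded end means "up to the end of the buffer".
///
/// Panics if the range does not fit within the buffer or would be empty.
///
/// A minimal sketch (assuming `buffer` is an existing [`Buffer`] of at least 256 bytes):
///
/// ```ignore
/// let whole = buffer.slice(..);   // the entire buffer
/// let head = buffer.slice(..256); // bytes 0..256
/// let tail = buffer.slice(64..);  // byte 64 to the end
/// ```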
pub fn slice<S: RangeBounds<BufferAddress>>(&self, bounds: S) -> BufferSlice<'_> {
let (offset, size) = range_to_offset_size(bounds);
check_buffer_bounds(self.size, offset, size);
BufferSlice {
buffer: self,
offset,
size,
}
}
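/// Unmaps the buffer from host memory, making it usable by the GPU again.
///
/// Panics if any [`BufferView`] or [`BufferViewMut`] obtained from this buffer has
/// not yet been dropped.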
pub fn unmap(&self) {
self.map_context.lock().reset();
self.inner.unmap();
}
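/// Destroys the underlying GPU buffer without waiting for this handle to be dropped.
/// Most subsequent uses of the buffer are errors.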
pub fn destroy(&self) {
self.inner.destroy();
}
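/// Returns the size of the buffer allocation in bytes.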
pub fn size(&self) -> BufferAddress {
self.size
}
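/// Returns the allowed usages of this buffer, as specified at creation.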
pub fn usage(&self) -> BufferUsages {
self.usage
}
}
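/// A byte range of a [`Buffer`], described by an offset and an optional size
/// (`None` meaning "to the end of the buffer").
///
/// Created with [`Buffer::slice`]; mapping operations such as
/// [`map_async`](BufferSlice::map_async) and
/// [`get_mapped_range`](BufferSlice::get_mapped_range) operate on slices.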
#[derive(Copy, Clone, Debug)]
pub struct BufferSlice<'a> {
pub(crate) buffer: &'a Buffer,
pub(crate) offset: BufferAddress,
pub(crate) size: Option<BufferSize>,
}
#[cfg(send_sync)]
static_assertions::assert_impl_all!(BufferSlice<'_>: Send, Sync);
impl<'a> BufferSlice<'a> {
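/// Asynchronously maps this slice of the buffer for host access.
///
/// `callback` is invoked with the result once the mapping attempt completes; the
/// mapped memory can then be accessed with [`get_mapped_range`](Self::get_mapped_range)
/// or [`get_mapped_range_mut`](Self::get_mapped_range_mut). Panics if the buffer is
/// already mapped. `mode` must be compatible with the buffer's `MAP_READ`/`MAP_WRITE`
/// usage, otherwise the callback receives an error.
///
/// When exactly the callback runs (for example, after polling the device) depends on
/// the backend and wgpu version, so the sketch below is illustrative only:
///
/// ```ignore
/// // Minimal sketch: request a read mapping of the whole buffer.
/// buffer.slice(..).map_async(MapMode::Read, |result| {
///     result.expect("failed to map buffer");
///     // Once this runs, get_mapped_range() on the same slice is valid.
/// });
/// ```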
pub fn map_async(
&self,
mode: MapMode,
callback: impl FnOnce(Result<(), BufferAsyncError>) + WasmNotSend + 'static,
) {
let mut mc = self.buffer.map_context.lock();
assert_eq!(mc.initial_range, 0..0, "Buffer is already mapped");
let end = match self.size {
Some(s) => self.offset + s.get(),
None => mc.total_size,
};
mc.initial_range = self.offset..end;
self.buffer
.inner
.map_async(mode, self.offset..end, Box::new(callback));
}
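/// Returns a [`BufferView`] giving read access to this slice's portion of the
/// currently mapped memory.
///
/// Panics if the slice is not fully contained in the mapped range, or if it overlaps
/// a range covered by another outstanding view.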
pub fn get_mapped_range(&self) -> BufferView<'a> {
let end = self.buffer.map_context.lock().add(self.offset, self.size);
let range = self.buffer.inner.get_mapped_range(self.offset..end);
BufferView {
slice: *self,
inner: range,
}
}
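/// Returns the mapped memory for this slice as a JavaScript `ArrayBuffer` when running
/// on the WebGPU backend; may return `None` if the buffer is not backed by the
/// browser's WebGPU implementation.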
#[cfg(webgpu)]
pub fn get_mapped_range_as_array_buffer(&self) -> Option<js_sys::ArrayBuffer> {
let end = self.buffer.map_context.lock().add(self.offset, self.size);
self.buffer
.inner
.get_mapped_range_as_array_buffer(self.offset..end)
}
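/// Returns a [`BufferViewMut`] giving write access to this slice's portion of the
/// currently mapped memory.
///
/// The same containment and non-overlap rules as
/// [`get_mapped_range`](Self::get_mapped_range) apply. Reading back through the view
/// is discouraged unless the buffer was created with `MAP_READ` (see [`BufferViewMut`]).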
pub fn get_mapped_range_mut(&self) -> BufferViewMut<'a> {
let end = self.buffer.map_context.lock().add(self.offset, self.size);
let range = self.buffer.inner.get_mapped_range(self.offset..end);
BufferViewMut {
slice: *self,
inner: range,
readable: self.buffer.usage.contains(BufferUsages::MAP_READ),
}
}
}
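/// CPU-side bookkeeping for a buffer's mapped state: `initial_range` is the currently
/// mapped range, and `sub_ranges` are the portions of it exposed through live
/// [`BufferView`]/[`BufferViewMut`] values.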
#[derive(Debug)]
pub(crate) struct MapContext {
pub(crate) total_size: BufferAddress,
pub(crate) initial_range: Range<BufferAddress>,
sub_ranges: Vec<Range<BufferAddress>>,
}
impl MapContext {
pub(crate) fn new(total_size: BufferAddress) -> Self {
Self {
total_size,
initial_range: 0..0,
sub_ranges: Vec::new(),
}
}
fn reset(&mut self) {
self.initial_range = 0..0;
assert!(
self.sub_ranges.is_empty(),
"You cannot unmap a buffer that still has accessible mapped views"
);
}
fn add(&mut self, offset: BufferAddress, size: Option<BufferSize>) -> BufferAddress {
let end = match size {
Some(s) => offset + s.get(),
None => self.initial_range.end,
};
assert!(self.initial_range.start <= offset && end <= self.initial_range.end);
for sub in self.sub_ranges.iter() {
assert!(
end <= sub.start || offset >= sub.end,
"Intersecting map range with {sub:?}"
);
}
self.sub_ranges.push(offset..end);
end
}
fn remove(&mut self, offset: BufferAddress, size: Option<BufferSize>) {
let end = match size {
Some(s) => offset + s.get(),
None => self.initial_range.end,
};
let index = self
.sub_ranges
.iter()
.position(|r| *r == (offset..end))
.expect("unable to remove range from map context");
self.sub_ranges.swap_remove(index);
}
}
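/// Describes a [`Buffer`], for use with [`Device::create_buffer`].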
pub type BufferDescriptor<'a> = wgt::BufferDescriptor<Label<'a>>;
static_assertions::assert_impl_all!(BufferDescriptor<'_>: Send, Sync);
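/// Error delivered to a [`BufferSlice::map_async`] callback when an asynchronous
/// buffer mapping fails.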
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BufferAsyncError;
static_assertions::assert_impl_all!(BufferAsyncError: Send, Sync);
impl fmt::Display for BufferAsyncError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Error occurred when trying to async map a buffer")
}
}
impl error::Error for BufferAsyncError {}
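/// Whether a buffer is mapped for reading or for writing.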
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum MapMode {
Read,
Write,
}
static_assertions::assert_impl_all!(MapMode: Send, Sync);
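/// A read-only view into a mapped region of a [`Buffer`], created with
/// [`BufferSlice::get_mapped_range`].
///
/// Dereferences to `&[u8]`. Dropping the view releases its claim on the mapped range;
/// every view must be dropped before the buffer can be unmapped.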
#[derive(Debug)]
pub struct BufferView<'a> {
slice: BufferSlice<'a>,
inner: dispatch::DispatchBufferMappedRange,
}
impl Deref for BufferView<'_> {
type Target = [u8];
#[inline]
fn deref(&self) -> &[u8] {
self.inner.slice()
}
}
impl AsRef<[u8]> for BufferView<'_> {
#[inline]
fn as_ref(&self) -> &[u8] {
self.inner.slice()
}
}
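/// A writable view into a mapped region of a [`Buffer`], created with
/// [`BufferSlice::get_mapped_range_mut`].
///
/// Dereferences to `&mut [u8]`. Reading through the view also works, but logs a warning
/// unless the buffer was created with `MAP_READ`, since such reads can be slow.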
#[derive(Debug)]
pub struct BufferViewMut<'a> {
slice: BufferSlice<'a>,
inner: dispatch::DispatchBufferMappedRange,
readable: bool,
}
impl AsMut<[u8]> for BufferViewMut<'_> {
#[inline]
fn as_mut(&mut self) -> &mut [u8] {
self.inner.slice_mut()
}
}
impl Deref for BufferViewMut<'_> {
type Target = [u8];
fn deref(&self) -> &Self::Target {
if !self.readable {
log::warn!("Reading from a BufferViewMut is slow and not recommended.");
}
self.inner.slice()
}
}
impl DerefMut for BufferViewMut<'_> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.inner.slice_mut()
}
}
impl Drop for BufferView<'_> {
fn drop(&mut self) {
self.slice
.buffer
.map_context
.lock()
.remove(self.slice.offset, self.slice.size);
}
}
impl Drop for BufferViewMut<'_> {
fn drop(&mut self) {
self.slice
.buffer
.map_context
.lock()
.remove(self.slice.offset, self.slice.size);
}
}
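/// Panics if the slice described by `offset` and `size` does not fit within a buffer of
/// `buffer_size` bytes: the offset must be strictly less than the buffer size, and the
/// end must neither overflow nor exceed it.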
fn check_buffer_bounds(
buffer_size: BufferAddress,
offset: BufferAddress,
size: Option<BufferSize>,
) {
if offset >= buffer_size {
panic!(
"slice offset {} is out of range for buffer of size {}",
offset, buffer_size
);
}
if let Some(size) = size {
let end = offset.checked_add(size.get());
if end.map_or(true, |end| end > buffer_size) {
panic!(
"slice offset {} size {} is out of range for buffer of size {}",
offset, size, buffer_size
);
}
}
}
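/// Converts a generic byte range into an `(offset, size)` pair, where `size` is `None`
/// for ranges extending to the end of the buffer. Panics if the range has an explicit
/// end bound and would be empty.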
fn range_to_offset_size<S: RangeBounds<BufferAddress>>(
bounds: S,
) -> (BufferAddress, Option<BufferSize>) {
let offset = match bounds.start_bound() {
Bound::Included(&bound) => bound,
Bound::Excluded(&bound) => bound + 1,
Bound::Unbounded => 0,
};
let size = match bounds.end_bound() {
Bound::Included(&bound) => Some(bound + 1 - offset),
Bound::Excluded(&bound) => Some(bound - offset),
Bound::Unbounded => None,
}
.map(|size| BufferSize::new(size).expect("Buffer slices cannot be empty"));
(offset, size)
}
#[cfg(test)]
mod tests {
use super::{check_buffer_bounds, range_to_offset_size, BufferSize};
#[test]
fn range_to_offset_size_works() {
assert_eq!(range_to_offset_size(0..2), (0, BufferSize::new(2)));
assert_eq!(range_to_offset_size(2..5), (2, BufferSize::new(3)));
assert_eq!(range_to_offset_size(..), (0, None));
assert_eq!(range_to_offset_size(21..), (21, None));
assert_eq!(range_to_offset_size(0..), (0, None));
assert_eq!(range_to_offset_size(..21), (0, BufferSize::new(21)));
}
#[test]
#[should_panic]
fn range_to_offset_size_panics_for_empty_range() {
range_to_offset_size(123..123);
}
#[test]
#[should_panic]
fn range_to_offset_size_panics_for_unbounded_empty_range() {
range_to_offset_size(..0);
}
#[test]
#[should_panic]
fn check_buffer_bounds_panics_for_offset_at_size() {
check_buffer_bounds(100, 100, None);
}
#[test]
fn check_buffer_bounds_works_for_end_in_range() {
check_buffer_bounds(200, 100, BufferSize::new(50));
check_buffer_bounds(200, 100, BufferSize::new(100));
check_buffer_bounds(u64::MAX, u64::MAX - 100, BufferSize::new(100));
check_buffer_bounds(u64::MAX, 0, BufferSize::new(u64::MAX));
check_buffer_bounds(u64::MAX, 1, BufferSize::new(u64::MAX - 1));
}
#[test]
#[should_panic]
fn check_buffer_bounds_panics_for_end_over_size() {
check_buffer_bounds(200, 100, BufferSize::new(101));
}
#[test]
#[should_panic]
fn check_buffer_bounds_panics_for_end_wraparound() {
check_buffer_bounds(u64::MAX, 1, BufferSize::new(u64::MAX));
}
}