1use crate::{device::Device, AllocatorAccess};
2use ash::vk;
3use bort_vma::AllocationCreateInfo;
4#[cfg(feature = "bytemuck")]
5use bytemuck::{NoUninit, Pod, PodCastError};
6use std::{error, fmt, mem, ptr, sync::Arc};
7
/// A device memory allocation made through VMA, together with cached
/// metadata (size and memory type) queried at creation time.
pub struct MemoryAllocation {
    inner: bort_vma::Allocation,
    // The `vk::MemoryType` entry this allocation was made from, looked up in
    // the physical device's memory properties in `from_vma_allocation`.
    memory_type: vk::MemoryType,
    // Allocation size in bytes, as reported by `get_allocation_info`.
    size: vk::DeviceSize,

    allocator_access: Arc<dyn AllocatorAccess>,
}
19
20impl MemoryAllocation {
21 pub(crate) fn from_vma_allocation(
22 inner: bort_vma::Allocation,
23 allocator_access: Arc<dyn AllocatorAccess>,
24 ) -> Self {
25 let memory_info = allocator_access.vma_allocator().get_allocation_info(&inner);
26
27 let size = memory_info.size;
28
29 let physical_device_mem_props = allocator_access
30 .device()
31 .physical_device()
32 .memory_properties();
33
34 debug_assert!(memory_info.memory_type < physical_device_mem_props.memory_type_count);
35 let memory_type = physical_device_mem_props.memory_types[memory_info.memory_type as usize];
36
37 Self {
38 inner,
39 memory_type,
40 size,
41 allocator_access,
42 }
43 }
44
45 #[cfg(feature = "bytemuck")]
46 pub fn write_into_bytes<T>(
47 &mut self,
48 write_data: T,
49 allocation_offset: usize,
50 ) -> Result<(), MemoryError>
51 where
52 T: NoUninit,
53 {
54 let write_bytes = bytemuck::bytes_of(&write_data);
55 self.write_bytes(write_bytes, allocation_offset)
56 }
57
58 #[cfg(feature = "bytemuck")]
59 pub fn write_slice<T>(
60 &mut self,
61 write_data: &[T],
62 allocation_offset: usize,
63 ) -> Result<(), MemoryError>
64 where
65 T: NoUninit,
66 {
67 let write_bytes: &[u8] = bytemuck::try_cast_slice(write_data)?;
68 self.write_bytes(write_bytes, allocation_offset)
69 }
70
71 pub fn write_bytes(
72 &mut self,
73 write_bytes: &[u8],
74 allocation_offset: usize,
75 ) -> Result<(), MemoryError> {
76 let data_size = write_bytes.len();
77 self.check_memory_access_parameters(data_size, allocation_offset)?;
78
79 let offset_mapped_memory: *mut u8 =
80 unsafe { self.map_memory_with_offset_unchecked(allocation_offset)? };
81
82 let write_bytes_ptr = write_bytes.as_ptr();
83 unsafe {
84 ptr::copy_nonoverlapping(write_bytes_ptr, offset_mapped_memory, data_size);
85 }
86
87 let flush_res = self.flush_allocation(allocation_offset, data_size);
88 unsafe { self.unmap_memory() };
89 flush_res
90 }
91
92 pub fn write_struct<T>(
96 &mut self,
97 write_data: T,
98 allocation_offset: usize,
99 ) -> Result<(), MemoryError> {
100 let data_size = mem::size_of_val(&write_data);
101 self.check_memory_access_parameters(data_size, allocation_offset)?;
102
103 let offset_mapped_memory: *mut T =
104 unsafe { self.map_memory_with_offset_unchecked(allocation_offset)? };
105
106 unsafe { ptr::write::<T>(offset_mapped_memory, write_data) };
107
108 let flush_res = self.flush_allocation(allocation_offset, data_size);
109 unsafe { self.unmap_memory() };
110 flush_res
111 }
112
113 pub fn write_iter<I, T>(
118 &mut self,
119 write_data: I,
120 allocation_offset: usize,
121 ) -> Result<(), MemoryError>
122 where
123 I: IntoIterator<Item = T>,
124 I::IntoIter: ExactSizeIterator,
125 {
126 let write_data_iter = write_data.into_iter();
127 let item_size = mem::size_of::<T>();
128 let data_size = write_data_iter.len() * item_size;
129 self.check_memory_access_parameters(data_size, allocation_offset)?;
130
131 let mut offset_mapped_memory: *mut T =
132 unsafe { self.map_memory_with_offset_unchecked(allocation_offset)? };
133
134 for element in write_data_iter {
135 unsafe {
136 ptr::write::<T>(offset_mapped_memory, element);
137 offset_mapped_memory = offset_mapped_memory.offset(1);
138 }
139 }
140
141 let flush_res = self.flush_allocation(allocation_offset, data_size);
142 unsafe { self.unmap_memory() };
143 flush_res
144 }
145
146 #[cfg(feature = "bytemuck")]
147 pub fn read_vec<T>(
148 &mut self,
149 element_count: usize,
150 allocation_offset: usize,
151 ) -> Result<Vec<T>, MemoryError>
152 where
153 T: Pod,
154 {
155 let type_size = mem::size_of::<T>();
156 let data_size = type_size * element_count;
157 self.check_memory_access_parameters(data_size, allocation_offset)?;
158
159 let offset_mapped_memory: *mut T =
160 unsafe { self.map_memory_with_offset_unchecked(allocation_offset)? };
161
162 let mut output_vec = Vec::<T>::new();
163 output_vec.resize_with(element_count, T::zeroed);
164 let output_vec_ptr = output_vec.as_mut_ptr();
165
166 unsafe { ptr::copy_nonoverlapping(offset_mapped_memory, output_vec_ptr, element_count) };
167
168 unsafe { self.unmap_memory() };
169 Ok(output_vec)
170 }
171
172 pub fn read_struct<T>(&mut self, allocation_offset: usize) -> Result<T, MemoryError> {
176 let data_size = mem::size_of::<T>();
177 self.check_memory_access_parameters(data_size, allocation_offset)?;
178
179 let offset_mapped_memory: *mut T =
180 unsafe { self.map_memory_with_offset_unchecked(allocation_offset)? };
181
182 let read_data = unsafe { ptr::read::<T>(offset_mapped_memory) };
183
184 unsafe { self.unmap_memory() };
185 Ok(read_data)
186 }
187
188 fn check_memory_access_parameters(
189 &self,
190 data_size: usize,
191 allocation_offset: usize,
192 ) -> Result<(), MemoryError> {
193 let allocation_size = self.size as usize; let allocation_write_size = allocation_size.checked_sub(allocation_offset).ok_or(
195 MemoryError::AllocationOffsetTooBig {
196 allocation_size: self.size,
197 allocation_offset,
198 },
199 )?;
200
201 if data_size > allocation_write_size {
202 return Err(MemoryError::DataSizeTooBig {
203 data_size,
204 allocation_size: self.size,
205 allocation_offset,
206 });
207 }
208
209 Ok(())
210 }
211
212 pub unsafe fn map_memory(&mut self) -> Result<*mut u8, MemoryError> {
213 self.allocator_access
214 .vma_allocator()
215 .map_memory(&mut self.inner)
216 .map_err(|e| MemoryError::Mapping(e))
217 }
218
219 pub unsafe fn unmap_memory(&mut self) {
220 self.allocator_access
221 .vma_allocator()
222 .unmap_memory(&mut self.inner)
223 }
224
225 unsafe fn map_memory_with_offset_unchecked<T>(
226 &mut self,
227 allocation_offset: usize,
228 ) -> Result<*mut T, MemoryError> {
229 let mapped_memory = unsafe { self.map_memory() }?;
230 let offset_mapped_memory =
231 unsafe { mapped_memory.offset(allocation_offset as isize) } as *mut T;
232 Ok(offset_mapped_memory)
233 }
234
235 #[inline]
238 pub fn flush_allocation(
239 &mut self,
240 allocation_offset: usize,
241 data_size: usize,
242 ) -> Result<(), MemoryError> {
243 self.allocator_access
244 .vma_allocator()
245 .flush_allocation(&self.inner, allocation_offset, data_size)
246 .map_err(|e| MemoryError::Flushing(e))
247 }
248
249 #[inline]
253 pub fn inner(&self) -> &bort_vma::Allocation {
254 &self.inner
255 }
256
257 #[inline]
259 pub fn inner_mut(&mut self) -> &mut bort_vma::Allocation {
260 &mut self.inner
261 }
262
263 #[inline]
264 pub fn memory_type(&self) -> vk::MemoryType {
265 self.memory_type
266 }
267
268 #[inline]
270 pub fn allocator_access(&self) -> &Arc<dyn AllocatorAccess> {
271 &self.allocator_access
272 }
273
274 #[inline]
275 pub fn memory_property_flags(&self) -> vk::MemoryPropertyFlags {
276 self.memory_type.property_flags
277 }
278
279 #[inline]
280 pub fn device(&self) -> &Arc<Device> {
281 &self.allocator_access.device()
282 }
283}
284
285pub fn allocation_info_from_flags(
289 required_flags: vk::MemoryPropertyFlags,
290 preferred_flags: vk::MemoryPropertyFlags,
291) -> AllocationCreateInfo {
292 AllocationCreateInfo {
293 required_flags,
294 preferred_flags,
295 ..Default::default()
296 }
297}
298
299pub fn allocation_info_cpu_accessible() -> AllocationCreateInfo {
302 allocation_info_from_flags(
304 vk::MemoryPropertyFlags::HOST_VISIBLE,
305 vk::MemoryPropertyFlags::HOST_COHERENT,
306 )
307}
308
/// Errors that can occur while mapping or accessing a [`MemoryAllocation`].
#[derive(Debug, Clone)]
pub enum MemoryError {
    /// Mapping the allocation into host address space failed.
    Mapping(vk::Result),
    /// The requested access extends past the end of the allocation.
    DataSizeTooBig {
        data_size: usize,
        allocation_size: vk::DeviceSize,
        allocation_offset: usize,
    },
    /// The requested offset is larger than the allocation itself.
    AllocationOffsetTooBig {
        allocation_size: vk::DeviceSize,
        allocation_offset: usize,
    },
    /// Flushing the mapped memory range failed.
    Flushing(vk::Result),
    /// A bytemuck slice cast failed (size/alignment mismatch).
    #[cfg(feature = "bytemuck")]
    PodCastError(PodCastError),
}
327
328impl fmt::Display for MemoryError {
329 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
330 match self {
331 Self::Mapping(e) => write!(f, "failed map allocation memory: {}", e),
332 Self::DataSizeTooBig {
333 data_size,
334 allocation_size,
335 allocation_offset,
336 } => write!(
337 f,
338 "invalid data size access parameters: data size = {}, allocation size = {}, write offset = {}",
339 data_size, allocation_size, allocation_offset
340 ),
341 Self::AllocationOffsetTooBig {
342 allocation_size,
343 allocation_offset,
344 } => write!(f,
345 "allocation offset {} is larger than the allocated memory {}",
346 allocation_offset, allocation_size
347 ),
348 Self::Flushing(e) => write!(f, "failed to flush memory: {}", e),
349 #[cfg(feature = "bytemuck")]
350 Self::PodCastError(e) => write!(f, "slice cast failed: {}", e),
351 }
352 }
353}
354
355impl error::Error for MemoryError {
356 fn source(&self) -> Option<&(dyn error::Error + 'static)> {
357 match self {
358 Self::Mapping(e) => Some(e),
359 Self::DataSizeTooBig { .. } => None,
360 Self::AllocationOffsetTooBig { .. } => None,
361 Self::Flushing(e) => Some(e),
362 #[cfg(feature = "bytemuck")]
363 Self::PodCastError(e) => Some(e),
364 }
365 }
366}
367
/// Lets `?` convert bytemuck cast failures into [`MemoryError`].
#[cfg(feature = "bytemuck")]
impl From<PodCastError> for MemoryError {
    fn from(err: PodCastError) -> Self {
        MemoryError::PodCastError(err)
    }
}