use crate::prelude::*;
use std::{
    any::Any,
    ffi::c_void,
    fmt::{self, Debug, Formatter},
    mem::size_of,
    ops::{Index, IndexMut, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive},
    slice::from_raw_parts_mut,
    sync::{Arc, RwLock, RwLockWriteGuard},
    vec::IntoIter,
};

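/// A device-local buffer with a lazily created staging buffer for host access.
///
/// A hypothetical usage sketch, assuming `device` is an existing
/// `Arc<VulkanDevice>`:
///
/// ```ignore
/// let buffer = Buffer::new(
///     device.clone(),
///     1024,
///     None,
///     VkBufferUsageFlagBits::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT as VkBufferUsageFlags,
/// )?;
/// ```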
pub struct Buffer {
    /// The device this buffer was created from.
    pub device: Arc<VulkanDevice>,

    /// The underlying Vulkan buffer object.
    pub buffer: Arc<VulkanBuffer>,

    /// The device memory bound to the buffer.
    pub memory: Arc<VulkanMemory>,

    /// The usage flags the buffer was created with.
    pub(crate) usage: VkBufferUsageFlags,

    /// The staging buffer, created on demand for uploads and downloads.
    pub staging_buffer: RwLock<Option<StagingBuffer>>,
}

impl Buffer {
    /// Creates a device-local buffer of `size` bytes.
    /// `VK_BUFFER_USAGE_TRANSFER_DST_BIT` is always added to `usage` so the
    /// staging buffer can be uploaded into it. If `data` is given, it is
    /// copied into the staging buffer, ready to upload.
    pub fn new(device: Arc<VulkanDevice>, size: VkDeviceSize, data: Option<*const c_void>, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
        let buffer = Arc::new(VulkanBuffer::new(device.clone(), size, usage | VkBufferUsageFlagBits::VK_BUFFER_USAGE_TRANSFER_DST_BIT as VkBufferUsageFlags)?);
        let memory = Arc::new(VulkanMemory::new(device.clone(), &buffer.get_memory_requirements()?,
            VkMemoryPropertyFlagBits::VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT as VkMemoryPropertyFlags)?);
        memory.bind_vk_buffer(buffer.get_vk_buffer())?;
        let ret = Self {
            device,
            memory,
            buffer,
            usage,
            staging_buffer: RwLock::new(None),
        };
        if let Some(data) = data {
            unsafe { ret.set_staging_data(data, 0, size as usize)? };
        }
        Ok(ret)
    }

    /// Returns the raw Vulkan buffer handle.
    pub(crate) fn get_vk_buffer(&self) -> VkBuffer {
        self.buffer.get_vk_buffer()
    }

    /// Returns the staging buffer behind its write lock, creating it first if
    /// it does not exist yet.
    pub fn ensure_staging_buffer<'a>(&'a self) -> Result<RwLockWriteGuard<'a, Option<StagingBuffer>>, VulkanError> {
        let mut lock = self.staging_buffer.write().unwrap();
        if lock.is_none() {
            *lock = Some(StagingBuffer::new(self.device.clone(), self.buffer.get_size())?);
        }
        Ok(lock)
    }

    /// Drops the staging buffer to free its host-visible memory.
    pub fn discard_staging_buffer(&self) {
        let mut lock = self.staging_buffer.write().unwrap();
        *lock = None;
    }

    /// Returns the usage flags the buffer was created with.
    pub fn get_usage(&self) -> VkBufferUsageFlags {
        self.usage
    }

    /// Returns the size of the buffer in bytes.
    pub fn get_size(&self) -> VkDeviceSize {
        self.buffer.get_size()
    }

    /// Maps the staging buffer as a typed slice, holding the staging buffer's
    /// write lock for the lifetime of the returned guard.
    pub fn map_staging_buffer_as_slice_locked<'a, T>(&'a self) -> Result<BufferMapGuard<'a, T>, VulkanError>
    where
        T: Sized + Clone + Copy {
        BufferMapGuard::new(self.ensure_staging_buffer()?, self.get_size() as usize)
    }

    /// Returns the mapped address of the staging buffer, creating the staging
    /// buffer first if needed.
    pub fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError> {
        let lock = self.ensure_staging_buffer()?;
        Ok(lock.as_ref().unwrap().get_address())
    }

    /// Copies `size` bytes from `data` into the staging buffer at `offset`,
    /// creating the staging buffer first if needed.
    ///
    /// # Safety
    ///
    /// `data` must be valid for reads of `size` bytes, and `offset + size`
    /// must not exceed the buffer size.
    pub unsafe fn set_staging_data(&self, data: *const c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
        let lock = self.ensure_staging_buffer()?;
        lock.as_ref().unwrap().set_data(data, offset, size)?;
        Ok(())
    }

    /// Copies `size` bytes at `offset` from the staging buffer into `data`.
    ///
    /// # Safety
    ///
    /// `data` must be valid for writes of `size` bytes, and `offset + size`
    /// must not exceed the buffer size.
    pub unsafe fn get_staging_data(&self, data: *mut c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
        let lock = self.staging_buffer.read().unwrap();
        if let Some(ref staging_buffer) = *lock {
            staging_buffer.get_data(data, offset, size)
        } else {
            Err(VulkanError::NoStagingBuffer)
        }
    }

    /// Records a copy of `size` bytes at `offset` from the staging buffer to
    /// the device-local buffer into `cmdbuf`.
    pub fn upload_staging_buffer(&self, cmdbuf: VkCommandBuffer, offset: VkDeviceSize, size: VkDeviceSize) -> Result<(), VulkanError> {
        let lock = self.staging_buffer.read().unwrap();
        if let Some(ref staging_buffer) = *lock {
            let copy_region = VkBufferCopy {
                srcOffset: offset,
                dstOffset: offset,
                size,
            };
            self.device.vkcore.vkCmdCopyBuffer(cmdbuf, staging_buffer.get_vk_buffer(), self.buffer.get_vk_buffer(), 1, &copy_region)?;
            Ok(())
        } else {
            Err(VulkanError::NoStagingBuffer)
        }
    }

    /// Records copies of multiple `regions` from the staging buffer to the
    /// device-local buffer into `cmdbuf`.
    pub fn upload_staging_buffer_multi(&self, cmdbuf: VkCommandBuffer, regions: &[BufferRegion]) -> Result<(), VulkanError> {
        let lock = self.staging_buffer.read().unwrap();
        if let Some(ref staging_buffer) = *lock {
            let copy_regions: Vec<VkBufferCopy> = regions.iter().map(|r| VkBufferCopy {
                srcOffset: r.offset,
                dstOffset: r.offset,
                size: r.size as VkDeviceSize,
            }).collect();
            self.device.vkcore.vkCmdCopyBuffer(cmdbuf, staging_buffer.get_vk_buffer(), self.buffer.get_vk_buffer(), copy_regions.len() as u32, copy_regions.as_ptr())?;
            Ok(())
        } else {
            Err(VulkanError::NoStagingBuffer)
        }
    }

    /// Records a copy of `size` bytes at `offset` from the device-local buffer
    /// back to the staging buffer into `cmdbuf`. Note that the buffer must
    /// have been created with `VK_BUFFER_USAGE_TRANSFER_SRC_BIT` in its usage
    /// for this copy to be valid.
    pub fn download_staging_buffer(&self, cmdbuf: VkCommandBuffer, offset: VkDeviceSize, size: VkDeviceSize) -> Result<(), VulkanError> {
        let lock = self.ensure_staging_buffer()?;
        let copy_region = VkBufferCopy {
            srcOffset: offset,
            dstOffset: offset,
            size,
        };
        self.device.vkcore.vkCmdCopyBuffer(cmdbuf, self.buffer.get_vk_buffer(), lock.as_ref().unwrap().get_vk_buffer(), 1, &copy_region)?;
        Ok(())
    }

    /// Records copies of multiple `regions` from the device-local buffer back
    /// to the staging buffer into `cmdbuf`.
    pub fn download_staging_buffer_multi(&self, cmdbuf: VkCommandBuffer, regions: &[BufferRegion]) -> Result<(), VulkanError> {
        let lock = self.ensure_staging_buffer()?;
        let copy_regions: Vec<VkBufferCopy> = regions.iter().map(|r| VkBufferCopy {
            srcOffset: r.offset,
            dstOffset: r.offset,
            size: r.size as VkDeviceSize,
        }).collect();
        self.device.vkcore.vkCmdCopyBuffer(cmdbuf, self.buffer.get_vk_buffer(), lock.as_ref().unwrap().get_vk_buffer(), copy_regions.len() as u32, copy_regions.as_ptr())?;
        Ok(())
    }

    /// Creates a buffer view covering the whole buffer.
    pub fn create_buffer_view(&self, format: VkFormat) -> Result<VulkanBufferView, VulkanError> {
        VulkanBufferView::new(self.buffer.clone(), format)
    }

    /// Creates a buffer view covering only the given `range`.
    pub fn create_buffer_view_partial(&self, range: &BufferViewRange) -> Result<VulkanBufferView, VulkanError> {
        VulkanBufferView::new_partial(self.buffer.clone(), range)
    }
}

impl Clone for Buffer {
    /// Cloning creates a new buffer with the same size and usage; only the
    /// staging contents (if any) are carried over, not the device-local data.
    fn clone(&self) -> Self {
        Self::new(self.device.clone(), self.get_size(), self.staging_buffer.read().unwrap().as_ref().map(|b| b.get_address() as *const _), self.usage).unwrap()
    }
}

impl Debug for Buffer {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("Buffer")
            .field("memory", &self.memory)
            .field("buffer", &self.buffer)
            .field("usage", &self.usage)
            .field("staging_buffer", &self.staging_buffer)
            .finish()
    }
}

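/// A guard that exposes the staging buffer as a typed slice while holding the
/// staging buffer's write lock.
///
/// A hypothetical usage sketch, assuming a `Buffer` named `buffer` exists:
///
/// ```ignore
/// let mut guard = buffer.map_staging_buffer_as_slice_locked::<f32>()?;
/// guard[0] = 1.0;
/// guard[1..4].copy_from_slice(&[2.0, 3.0, 4.0]);
/// ```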
#[derive(Debug)]
pub struct BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    /// Held only to keep the staging buffer locked while the slice is alive.
    lock_guard: RwLockWriteGuard<'a, Option<StagingBuffer>>,

    /// The mapped staging memory viewed as a slice of `T`.
    slice: &'a mut [T],
}

impl<'a, T> BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    /// Wraps the locked staging buffer as a slice of `size / size_of::<T>()`
    /// elements. The staging buffer inside the guard must already exist.
    pub fn new(lock_guard: RwLockWriteGuard<'a, Option<StagingBuffer>>, size: usize) -> Result<Self, VulkanError> {
        let address = lock_guard.as_ref().unwrap().get_address();
        let len = size / size_of::<T>();
        let slice = unsafe { from_raw_parts_mut(address as *mut T, len) };
        Ok(Self {
            lock_guard,
            slice,
        })
    }

    /// Views the mapped staging memory as an immutable slice.
    pub fn as_slice(&self) -> &[T] {
        self.slice
    }

    /// Views the mapped staging memory as a mutable slice.
    pub fn as_slice_mut(&mut self) -> &mut [T] {
        self.slice
    }
}

impl<'a, T> Index<usize> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = T;
    fn index(&self, index: usize) -> &T {
        &self.slice[index]
    }
}

impl<'a, T> IndexMut<usize> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, index: usize) -> &mut T {
        &mut self.slice[index]
    }
}

impl<'a, T> Index<Range<usize>> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = [T];
    fn index(&self, range: Range<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T> IndexMut<Range<usize>> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, range: Range<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

impl<'a, T> Index<RangeFrom<usize>> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = [T];
    fn index(&self, range: RangeFrom<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T> IndexMut<RangeFrom<usize>> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, range: RangeFrom<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

impl<'a, T> Index<RangeTo<usize>> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = [T];
    fn index(&self, range: RangeTo<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T> IndexMut<RangeTo<usize>> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, range: RangeTo<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

impl<'a, T> Index<RangeFull> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = [T];
    fn index(&self, _: RangeFull) -> &[T] {
        &self.slice[..]
    }
}

impl<'a, T> IndexMut<RangeFull> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, _: RangeFull) -> &mut [T] {
        &mut self.slice[..]
    }
}

impl<'a, T> Index<RangeInclusive<usize>> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = [T];
    fn index(&self, range: RangeInclusive<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T> IndexMut<RangeInclusive<usize>> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, range: RangeInclusive<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

impl<'a, T> Index<RangeToInclusive<usize>> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = [T];
    fn index(&self, range: RangeToInclusive<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T> IndexMut<RangeToInclusive<usize>> for BufferMapGuard<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, range: RangeToInclusive<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

/// Marker trait for plain-old-data struct types usable as uniform buffer contents.
pub trait UniformStructType: Copy + Clone + Sized + Default + Send + Sync + Debug + FFIStruct + Any {}
impl<T> UniformStructType for T where T: Copy + Clone + Sized + Default + Send + Sync + Debug + FFIStruct + Any {}

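/// Wraps a struct item with `#[ffi_struct]` plus std140-style size and
/// alignment attributes so it can be used as a uniform buffer type.
///
/// A minimal sketch; `CameraUniform` is hypothetical and `Mat4` is assumed to
/// come from this crate's prelude:
///
/// ```ignore
/// derive_uniform_buffer_type! {
///     pub struct CameraUniform {
///         pub view: Mat4,
///         pub proj: Mat4,
///     }
/// }
/// ```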
#[macro_export]
macro_rules! derive_uniform_buffer_type {
    ($item: item) => {
        #[ffi_struct]
        #[derive(Default, Debug, Clone, Copy)]
        #[size_of_type (Vec1 = 4, Vec2 = 8, Vec3 = 12, Vec4 = 16)]
        #[align_of_type(Vec1 = 4, Vec2 = 8, Vec3 = 16, Vec4 = 16)]
        #[size_of_type (Mat1 = 4, Mat2 = 16, Mat3 = 48, Mat4 = 64)]
        #[align_of_type(Mat1 = 4, Mat2 = 8, Mat3 = 16, Mat4 = 16)]
        #[size_of_type (Mat1x1 = 4, Mat2x2 = 16, Mat3x3 = 48, Mat4x4 = 64)]
        #[align_of_type(Mat1x1 = 4, Mat2x2 = 8, Mat3x3 = 16, Mat4x4 = 16)]
        #[size_of_type (Mat1x2 = 8, Mat1x3 = 12, Mat1x4 = 16)]
        #[align_of_type(Mat1x2 = 4, Mat1x3 = 4, Mat1x4 = 4)]
        #[size_of_type (Mat2x1 = 8, Mat2x3 = 24, Mat2x4 = 32)]
        #[align_of_type(Mat2x1 = 8, Mat2x3 = 8, Mat2x4 = 8)]
        #[size_of_type (Mat2x3 = 24, Mat2x4 = 32)]
        #[align_of_type(Mat2x3 = 8, Mat2x4 = 8)]
        #[size_of_type (Mat3x2 = 32, Mat3x4 = 64)]
        #[align_of_type(Mat3x2 = 16, Mat3x4 = 16)]
        #[size_of_type (Mat4x2 = 32, Mat4x3 = 48)]
        #[align_of_type(Mat4x2 = 16, Mat4x3 = 16)]
        #[size_of_type (DVec1 = 8, DVec2 = 16, DVec3 = 24, DVec4 = 32)]
        #[align_of_type(DVec1 = 8, DVec2 = 16, DVec3 = 32, DVec4 = 32)]
        #[size_of_type (DMat1 = 8, DMat2 = 32, DMat3 = 96, DMat4 = 128)]
        #[align_of_type(DMat1 = 8, DMat2 = 16, DMat3 = 32, DMat4 = 32)]
        #[size_of_type (DMat1x1 = 8, DMat2x2 = 32, DMat3x3 = 96, DMat4x4 = 128)]
        #[align_of_type(DMat1x1 = 8, DMat2x2 = 16, DMat3x3 = 32, DMat4x4 = 32)]
        #[size_of_type (DMat1x2 = 16, DMat1x3 = 24, DMat1x4 = 32)]
        #[align_of_type(DMat1x2 = 8, DMat1x3 = 8, DMat1x4 = 8)]
        #[size_of_type (DMat2x1 = 16, DMat2x3 = 48, DMat2x4 = 64)]
        #[align_of_type(DMat2x1 = 16, DMat2x3 = 16, DMat2x4 = 16)]
        #[size_of_type (DMat2x3 = 48, DMat2x4 = 64)]
        #[align_of_type(DMat2x3 = 16, DMat2x4 = 32)]
        #[size_of_type (DMat3x2 = 64, DMat3x4 = 128)]
        #[align_of_type(DMat3x2 = 32, DMat3x4 = 32)]
        #[size_of_type (DMat4x2 = 64, DMat4x3 = 96)]
        #[align_of_type(DMat4x2 = 32, DMat4x3 = 32)]
        $item
    };
}

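/// A typed uniform buffer. A hypothetical usage sketch, reusing the
/// `CameraUniform` type sketched above:
///
/// ```ignore
/// let ubo = UniformBuffer::<CameraUniform>::new(device.clone(), None)?;
/// ubo.flush(cmdbuf)?; // records the staging upload into `cmdbuf`
/// ```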
#[derive(Debug, Clone)]
pub struct UniformBuffer<U>
where
    U: UniformStructType {
    /// The underlying buffer.
    pub buffer: Buffer,

    /// A default instance of `U`, kept only so member metadata can be iterated.
    iterable: U,
}

impl<U> UniformBuffer<U>
where
    U: UniformStructType {
    /// Creates the uniform buffer, staging `initial_value` (or `U::default()`)
    /// for upload.
    pub fn new(device: Arc<VulkanDevice>, initial_value: Option<U>) -> Result<Self, VulkanError> {
        let def = initial_value.unwrap_or_default();
        let buffer = Buffer::new(device, size_of::<U>() as VkDeviceSize, Some(&def as *const U as *const c_void), VkBufferUsageFlagBits::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT as VkBufferUsageFlags)?;
        Ok(Self {
            buffer,
            iterable: U::default(),
        })
    }

    /// Returns the staging buffer, creating it first if needed.
    pub fn ensure_staging_buffer<'a>(&'a self) -> Result<RwLockWriteGuard<'a, Option<StagingBuffer>>, VulkanError> {
        self.buffer.ensure_staging_buffer()
    }

    /// Drops the staging buffer to free its host-visible memory.
    pub fn discard_staging_buffer(&self) {
        self.buffer.discard_staging_buffer()
    }

    /// Returns the mapped address of the staging buffer.
    pub fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError> {
        self.buffer.get_staging_buffer_address()
    }

    /// Records an upload of the whole staging buffer into `cmdbuf`.
    pub fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
        self.buffer.upload_staging_buffer(cmdbuf, 0, self.buffer.get_size())
    }
}

impl<U> AsRef<U> for UniformBuffer<U>
where
    U: UniformStructType {
    fn as_ref(&self) -> &U {
        // Panics if the staging buffer cannot be created.
        unsafe { &*(self.get_staging_buffer_address().unwrap() as *const U) }
    }
}

impl<U> AsMut<U> for UniformBuffer<U>
where
    U: UniformStructType {
    fn as_mut(&mut self) -> &mut U {
        // Panics if the staging buffer cannot be created.
        unsafe { &mut *(self.get_staging_buffer_address().unwrap() as *mut U) }
    }
}

unsafe impl<U> Send for UniformBuffer<U> where U: UniformStructType {}
unsafe impl<U> Sync for UniformBuffer<U> where U: UniformStructType {}

/// Object-safe access to a uniform buffer, independent of its struct type.
pub trait GenericUniformBuffer: Debug + Any + Send + Sync {
    /// Returns the raw Vulkan buffer handle.
    fn get_vk_buffer(&self) -> VkBuffer;

    /// Iterates over the member metadata of the underlying struct type.
    fn iter_members(&self) -> IntoIter<(&'static str, MemberInfo)>;

    /// Returns the size of the buffer in bytes.
    fn get_size(&self) -> VkDeviceSize;

    /// Returns the mapped address of the staging buffer.
    fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError>;

    /// Records an upload of the whole staging buffer into `cmdbuf`.
    fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError>;
}

impl<U> GenericUniformBuffer for UniformBuffer<U>
where
    U: UniformStructType {
    fn get_vk_buffer(&self) -> VkBuffer {
        self.buffer.get_vk_buffer()
    }

    fn iter_members(&self) -> IntoIter<(&'static str, MemberInfo)> {
        self.iterable.iter_members()
    }

    fn get_size(&self) -> VkDeviceSize {
        self.buffer.get_size()
    }

    fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError> {
        // Delegate explicitly to the underlying buffer to avoid any appearance
        // of calling this trait method recursively.
        self.buffer.get_staging_buffer_address()
    }

    fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
        self.buffer.upload_staging_buffer(cmdbuf, 0, self.buffer.get_size())
    }
}

/// Object-safe access to a storage buffer, independent of its struct type.
pub trait GenericStorageBuffer: Debug + Any + Send + Sync {
    /// Returns the raw Vulkan buffer handle.
    fn get_vk_buffer(&self) -> VkBuffer;

    /// Iterates over the member metadata of the underlying struct type.
    fn iter_members(&self) -> IntoIter<(&'static str, MemberInfo)>;

    /// Returns the size of the buffer in bytes.
    fn get_size(&self) -> VkDeviceSize;

    /// Returns the mapped address of the staging buffer.
    fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError>;

    /// Records an upload of the whole staging buffer into `cmdbuf`.
    fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError>;
}

/// Marker trait for plain-old-data struct types usable as storage buffer contents.
pub trait StorageBufferStructType: Copy + Clone + Sized + Default + Send + Sync + Debug + FFIStruct + Any {}
impl<T> StorageBufferStructType for T where T: Copy + Clone + Sized + Default + Send + Sync + Debug + FFIStruct + Any {}

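/// Wraps a struct item with `#[repr(C)]` plus tightly packed, scalar-style
/// size and alignment attributes so it can be used as a storage buffer type.
///
/// A minimal sketch; `ParticleData` is hypothetical and `Vec4` is assumed to
/// come from this crate's prelude:
///
/// ```ignore
/// derive_storage_buffer_type! {
///     pub struct ParticleData {
///         pub position: Vec4,
///         pub velocity: Vec4,
///     }
/// }
/// ```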
#[macro_export]
macro_rules! derive_storage_buffer_type {
    ($item: item) => {
        #[repr(C)]
        #[derive(Default, Debug, Clone, Copy)]
        #[size_of_type (Vec1 = 4, Vec2 = 8, Vec3 = 12, Vec4 = 16)]
        #[align_of_type(Vec1 = 4, Vec2 = 4, Vec3 = 4, Vec4 = 4)]
        #[size_of_type (Mat1 = 4, Mat2 = 16, Mat3 = 36, Mat4 = 64)]
        #[align_of_type(Mat1 = 4, Mat2 = 4, Mat3 = 4, Mat4 = 4)]
        #[size_of_type (Mat1x1 = 4, Mat2x2 = 16, Mat3x3 = 36, Mat4x4 = 64)]
        #[align_of_type(Mat1x1 = 4, Mat2x2 = 4, Mat3x3 = 4, Mat4x4 = 4)]
        #[size_of_type (Mat1x2 = 8, Mat1x3 = 12, Mat1x4 = 16)]
        #[align_of_type(Mat1x2 = 4, Mat1x3 = 4, Mat1x4 = 4)]
        #[size_of_type (Mat2x1 = 8, Mat2x3 = 24, Mat2x4 = 32)]
        #[align_of_type(Mat2x1 = 4, Mat2x3 = 4, Mat2x4 = 4)]
        #[size_of_type (Mat2x3 = 24, Mat2x4 = 32)]
        #[align_of_type(Mat2x3 = 4, Mat2x4 = 4)]
        #[size_of_type (Mat3x2 = 24, Mat3x4 = 48)]
        #[align_of_type(Mat3x2 = 4, Mat3x4 = 4)]
        #[size_of_type (Mat4x2 = 32, Mat4x3 = 48)]
        #[align_of_type(Mat4x2 = 4, Mat4x3 = 4)]
        #[size_of_type (DVec1 = 8, DVec2 = 16, DVec3 = 24, DVec4 = 32)]
        #[align_of_type(DVec1 = 4, DVec2 = 4, DVec3 = 4, DVec4 = 4)]
        #[size_of_type (DMat1 = 8, DMat2 = 32, DMat3 = 72, DMat4 = 128)]
        #[align_of_type(DMat1 = 4, DMat2 = 4, DMat3 = 4, DMat4 = 4)]
        #[size_of_type (DMat1x1 = 8, DMat2x2 = 32, DMat3x3 = 72, DMat4x4 = 128)]
        #[align_of_type(DMat1x1 = 4, DMat2x2 = 4, DMat3x3 = 4, DMat4x4 = 4)]
        #[size_of_type (DMat1x2 = 16, DMat1x3 = 24, DMat1x4 = 32)]
        #[align_of_type(DMat1x2 = 4, DMat1x3 = 4, DMat1x4 = 4)]
        #[size_of_type (DMat2x1 = 16, DMat2x3 = 48, DMat2x4 = 64)]
        #[align_of_type(DMat2x1 = 4, DMat2x3 = 4, DMat2x4 = 4)]
        #[size_of_type (DMat2x3 = 48, DMat2x4 = 64)]
        #[align_of_type(DMat2x3 = 4, DMat2x4 = 4)]
        #[size_of_type (DMat3x2 = 48, DMat3x4 = 96)]
        #[align_of_type(DMat3x2 = 4, DMat3x4 = 4)]
        #[size_of_type (DMat4x2 = 64, DMat4x3 = 96)]
        #[align_of_type(DMat4x2 = 4, DMat4x3 = 4)]
        #[size_of_type (bool = 4)]
        #[align_of_type(bool = 4)]
        $item
    };
}

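/// A typed storage buffer. A hypothetical usage sketch, reusing the
/// `ParticleData` type sketched above:
///
/// ```ignore
/// let ssbo = StorageBuffer::<ParticleData>::new(device.clone(), None)?;
/// ssbo.flush(cmdbuf)?; // records the staging upload into `cmdbuf`
/// ```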
#[derive(Debug, Clone)]
pub struct StorageBuffer<S>
where
    S: StorageBufferStructType {
    /// The underlying buffer.
    pub buffer: Buffer,

    /// A default instance of `S`, kept only so member metadata can be iterated.
    iterable: S,
}

impl<S> StorageBuffer<S>
where
    S: StorageBufferStructType {
    /// Creates the storage buffer, staging `initial_value` (or `S::default()`)
    /// for upload.
    pub fn new(device: Arc<VulkanDevice>, initial_value: Option<S>) -> Result<Self, VulkanError> {
        let def = initial_value.unwrap_or_default();
        let buffer = Buffer::new(device, size_of::<S>() as VkDeviceSize, Some(&def as *const S as *const c_void), VkBufferUsageFlagBits::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT as VkBufferUsageFlags)?;
        Ok(Self {
            buffer,
            iterable: S::default(),
        })
    }

    /// Returns the staging buffer, creating it first if needed.
    pub fn ensure_staging_buffer<'a>(&'a self) -> Result<RwLockWriteGuard<'a, Option<StagingBuffer>>, VulkanError> {
        self.buffer.ensure_staging_buffer()
    }

    /// Drops the staging buffer to free its host-visible memory.
    pub fn discard_staging_buffer(&self) {
        self.buffer.discard_staging_buffer()
    }

    /// Returns the mapped address of the staging buffer.
    pub fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError> {
        self.buffer.get_staging_buffer_address()
    }

    /// Records an upload of the whole staging buffer into `cmdbuf`.
    pub fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
        self.buffer.upload_staging_buffer(cmdbuf, 0, self.buffer.get_size())
    }
}

impl<S> AsRef<S> for StorageBuffer<S>
where
    S: StorageBufferStructType {
    fn as_ref(&self) -> &S {
        // Panics if the staging buffer cannot be created.
        unsafe { &*(self.get_staging_buffer_address().unwrap() as *const S) }
    }
}

impl<S> AsMut<S> for StorageBuffer<S>
where
    S: StorageBufferStructType {
    fn as_mut(&mut self) -> &mut S {
        // Panics if the staging buffer cannot be created.
        unsafe { &mut *(self.get_staging_buffer_address().unwrap() as *mut S) }
    }
}

unsafe impl<S> Send for StorageBuffer<S> where S: StorageBufferStructType {}
unsafe impl<S> Sync for StorageBuffer<S> where S: StorageBufferStructType {}

impl<S> GenericStorageBuffer for StorageBuffer<S>
where
    S: StorageBufferStructType {
    fn get_vk_buffer(&self) -> VkBuffer {
        self.buffer.get_vk_buffer()
    }

    fn iter_members(&self) -> IntoIter<(&'static str, MemberInfo)> {
        self.iterable.iter_members()
    }

    fn get_size(&self) -> VkDeviceSize {
        self.buffer.get_size()
    }

    fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError> {
        // Delegate explicitly to the underlying buffer to avoid any appearance
        // of calling this trait method recursively.
        self.buffer.get_staging_buffer_address()
    }

    fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
        self.buffer.upload_staging_buffer(cmdbuf, 0, self.buffer.get_size())
    }
}

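/// Casts the staging buffer of a `GenericUniformBuffer` to a mutable reference
/// of a concrete struct type. The expansion dereferences a raw pointer and
/// uses `?`, so it must be invoked inside an `unsafe` block, in a function
/// returning a compatible `Result`, and `$t` must be the type the buffer was
/// created with.
///
/// A hypothetical sketch, where `gub` is a `&dyn GenericUniformBuffer` backed
/// by a `UniformBuffer<CameraUniform>`:
///
/// ```ignore
/// let cache = unsafe { get_generic_uniform_buffer_cache!(gub, CameraUniform) };
/// cache.view = Mat4::default();
/// ```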
#[macro_export]
macro_rules! get_generic_uniform_buffer_cache {
    ($gub:expr, $t:ty) => (&mut *($gub.get_staging_buffer_address()? as *mut $t))
}

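/// The storage buffer counterpart of `get_generic_uniform_buffer_cache!`; the
/// same caveats apply: invoke inside an `unsafe` block, in a function
/// returning a compatible `Result`, with `$t` matching the buffer's type.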
#[macro_export]
macro_rules! get_generic_storage_buffer_cache {
    ($gsb:expr, $t:ty) => (&mut *($gsb.get_staging_buffer_address()? as *mut $t))
}