1
2#![allow(clippy::uninit_vec)]
3#![allow(clippy::too_many_arguments)]
4use crate::prelude::*;
5use std::{
6 ffi::c_void,
7 fmt::{self, Debug, Formatter},
8 io::{self, ErrorKind},
9 marker::PhantomData,
10 mem::{MaybeUninit, size_of},
11 ops::{Index, IndexMut, Range, RangeFrom, RangeTo, RangeFull, RangeInclusive, RangeToInclusive},
12 ptr::{copy, null, null_mut},
13 slice::from_raw_parts_mut,
14 sync::{
15 Arc,
16 Mutex,
17 },
18};
19
/// Unified error type for this Vulkan wrapper layer.
///
/// Aggregates raw Vulkan result codes, IO failures, and the crate's own
/// device-selection, resource and shader-reflection failures so every
/// fallible API here can return a single `Result<_, VulkanError>`.
#[derive(Debug, Clone)]
pub enum VulkanError {
	/// A raw error code returned by a Vulkan entry point.
	VkError(VkError),
	/// An `std::io::Error`, captured as its debug string plus its
	/// `ErrorKind` (stored this way so the enum can stay `Clone`).
	IOError((String, ErrorKind)),
	/// No physical device satisfied the selection criteria.
	ChooseGpuFailed,
	/// No queue family was suitable for presenting to the given surface.
	NoGoodQueueForSurface(&'static str),
	/// None of the candidate depth/stencil formats is supported.
	NoGoodDepthStencilFormat,
	/// The command pool is still recording/executing and cannot be reused.
	CommandPoolIsInUse,
	/// Every command pool is currently busy.
	NoIdleCommandPools,
	/// Every device queue is currently busy.
	NoIdleDeviceQueues,
	/// No memory type matched the requested property flags.
	NoSuitableMemoryType,
	/// No staging buffer is available for the transfer.
	NoStagingBuffer,
	/// An image was interpreted with a mismatched element type/size.
	ImageTypeSizeNotMatch(String),
	/// The requested pixel format is not supported for images.
	ImagePixelFormatNotSupported,
	/// The shader compiler reported an error; payload is the compiler log.
	ShaderCompilationError(String),
	/// SPIR-V reflection met an ID it does not know.
	ShaderParseIdUnknown(String),
	/// SPIR-V reflection met a type it does not know.
	ShaderParseTypeUnknown(String),
	/// `rspirv` failed to parse the SPIR-V binary (wrapped in `Arc` to keep `Clone`).
	ShaderParseError(Arc<rspirv::binary::ParseState>),
	/// Required shader inputs were not provided.
	MissingShaderInputs(String),
	/// A provided shader input has the wrong type.
	ShaderInputTypeMismatch(String),
	/// A provided shader input has the wrong length.
	ShaderInputLengthMismatch(String),
	/// A provided shader input has an unsupported type.
	ShaderInputTypeUnsupported(String),
}
44
45impl From<VkError> for VulkanError {
46 fn from(e: VkError) -> Self {
47 Self::VkError(e)
48 }
49}
50
51impl From<io::Error> for VulkanError {
52 fn from(e: io::Error) -> Self {
53 Self::IOError((format!("{e:?}"), e.kind()))
54 }
55}
56
57impl From<rspirv::binary::ParseState> for VulkanError {
58 fn from(s: rspirv::binary::ParseState) -> Self {
59 Self::ShaderParseError(Arc::new(s))
60 }
61}
62
#[cfg(feature = "shaderc")]
impl From<shaderc::Error> for VulkanError {
	/// Uses the compiler's own log for compilation errors; every other
	/// `shaderc` error is folded into its debug representation.
	fn from(error: shaderc::Error) -> Self {
		if let shaderc::Error::CompilationError(_, description) = error {
			VulkanError::ShaderCompilationError(description)
		} else {
			VulkanError::ShaderCompilationError(format!("{error:?}"))
		}
	}
}
72
73impl VulkanError {
74 pub fn is_vkerror(&self) -> Option<&VkError> {
75 if let Self::VkError(ve) = self {
76 Some(ve)
77 } else {
78 None
79 }
80 }
81
82 pub fn is_shader_error(&self) -> Option<&String> {
83 if let Self::ShaderCompilationError(se) = self {
84 Some(se)
85 } else {
86 None
87 }
88 }
89}
90
91pub fn filter_no_staging_buffer(result: Result<(), VulkanError>) -> Result<(), VulkanError> {
93 match result {
94 Ok(_) => result,
95 Err(ref vke) => match vke {
96 VulkanError::NoStagingBuffer => Ok(()),
97 _ => result,
98 }
99 }
100}
101
/// Logs an `Err` result to stderr and keeps going instead of propagating it.
///
/// Used by the `Drop` implementations in this file, where an error cannot be
/// returned and panicking would be dangerous during unwinding.
/// (Fixes the "occured" typo in the logged message.)
pub fn proceed_run<E: Debug>(result: Result<(), E>) {
	if let Err(e) = result {
		eprintln!("Error occurred: `{e:?}` proceed to run.");
	}
}
111
/// An owned `VkPipelineCache`: speeds up pipeline creation and lets the
/// driver-compiled state be serialized across runs via `dump_cache`.
pub struct VulkanPipelineCache {
	/// The owning device; kept alive for the cache's whole lifetime.
	pub device: Arc<VulkanDevice>,

	/// The raw Vulkan handle; destroyed on `Drop`.
	pipeline_cache: VkPipelineCache,
}
120
impl VulkanPipelineCache {
	/// Creates a pipeline cache, optionally seeded with a blob previously
	/// produced by [`Self::dump_cache`].
	pub fn new(device: Arc<VulkanDevice>, initial_data: Option<&[u8]>) -> Result<Self, VulkanError> {
		// No initial data: pass a zero size and a null pointer.
		let (data_size, data_ptr) = if let Some(data) = initial_data {
			(data.len(), data.as_ptr())
		} else {
			(0, null())
		};
		let pipeline_cache_ci = VkPipelineCacheCreateInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
			pNext: null(),
			flags: 0,
			initialDataSize: data_size,
			pInitialData: data_ptr as *const c_void,
		};
		let mut pipeline_cache = null();
		device.vkcore.vkCreatePipelineCache(device.get_vk_device(), &pipeline_cache_ci, null(), &mut pipeline_cache)?;
		Ok(Self {
			device,
			pipeline_cache,
		})
	}

	/// Serializes the cache contents with the standard Vulkan two-call idiom:
	/// query the byte count first, then fill the buffer.
	pub fn dump_cache(&self) -> Result<Vec<u8>, VulkanError> {
		let vkdevice = self.device.get_vk_device();
		let mut size = 0;
		// First call: null data pointer -> only the required size is written.
		self.device.vkcore.vkGetPipelineCacheData(vkdevice, self.pipeline_cache, &mut size, null_mut())?;
		if size == 0 {
			Ok(Vec::new())
		} else {
			let mut data: Vec<u8> = Vec::with_capacity(size);
			// Second call: the driver fills the buffer and may shrink `size`.
			self.device.vkcore.vkGetPipelineCacheData(vkdevice, self.pipeline_cache, &mut size, data.as_mut_ptr() as *mut c_void)?;
			// SAFETY: the call above initialized the first `size` bytes, and
			// `size` cannot exceed the capacity we just reserved.
			unsafe {data.set_len(size)};
			Ok(data)
		}
	}

	/// Raw handle accessor for pipeline-creation call sites inside the crate.
	pub(crate) fn get_vk_pipeline_cache(&self) -> VkPipelineCache {
		self.pipeline_cache
	}
}
164
165impl Debug for VulkanPipelineCache {
166 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
167 f.debug_struct("VulkanPipelineCache")
168 .field("pipeline_cache", &self.pipeline_cache)
169 .finish()
170 }
171}
172
173impl Drop for VulkanPipelineCache {
174 fn drop(&mut self) {
175 proceed_run(self.device.vkcore.vkDestroyPipelineCache(self.device.get_vk_device(), self.pipeline_cache, null()))
176 }
177}
178
// SAFETY: the struct only holds an `Arc<VulkanDevice>` and a raw handle.
// NOTE(review): this assumes callers uphold Vulkan's external-synchronization
// rules for `VkPipelineCache` — confirm against the call sites.
unsafe impl Send for VulkanPipelineCache {}
unsafe impl Sync for VulkanPipelineCache {}
181
/// An owned `VkSemaphore`, either binary (`new`) or timeline (`new_timeline`).
pub struct VulkanSemaphore {
	/// The owning device; kept alive for the semaphore's lifetime.
	pub device: Arc<VulkanDevice>,

	/// The raw Vulkan handle; destroyed on `Drop`.
	semaphore: VkSemaphore,

	/// The timeline value used by `wait`/`wait_multi`.
	/// Stays 0 for binary semaphores created via `new`.
	pub(crate) timeline: u64,
}
193
// SAFETY: the struct only holds an `Arc<VulkanDevice>`, a raw handle and a
// plain integer. NOTE(review): assumes Vulkan's external-synchronization
// rules for `VkSemaphore` are upheld by callers — confirm at call sites.
unsafe impl Send for VulkanSemaphore {}
unsafe impl Sync for VulkanSemaphore {}
196
impl VulkanSemaphore {
	/// Creates a binary semaphore (the default Vulkan semaphore type).
	pub fn new(device: Arc<VulkanDevice>) -> Result<Self, VulkanError> {
		let ci = VkSemaphoreCreateInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
			pNext: null(),
			flags: 0,
		};
		let mut semaphore: VkSemaphore = null();
		device.vkcore.vkCreateSemaphore(device.get_vk_device(), &ci, null(), &mut semaphore)?;
		Ok(Self{
			device,
			semaphore,
			timeline: 0,
		})
	}

	/// Creates a timeline semaphore whose counter starts at `initial_value`.
	pub fn new_timeline(device: Arc<VulkanDevice>, initial_value: u64) -> Result<Self, VulkanError> {
		// The timeline type is requested through the `pNext` chain.
		let ci_next = VkSemaphoreTypeCreateInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
			pNext: null(),
			semaphoreType: VkSemaphoreType::VK_SEMAPHORE_TYPE_TIMELINE,
			initialValue: initial_value,
		};
		let ci = VkSemaphoreCreateInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
			pNext: &ci_next as *const VkSemaphoreTypeCreateInfo as *const c_void,
			flags: 0,
		};
		let mut semaphore: VkSemaphore = null();
		device.vkcore.vkCreateSemaphore(device.get_vk_device(), &ci, null(), &mut semaphore)?;
		Ok(Self{
			device,
			semaphore,
			timeline: initial_value,
		})
	}

	/// Sets the timeline counter to `value` from the host side.
	/// Note: does not update `self.timeline`.
	pub fn signal(&self, value: u64) -> Result<(), VulkanError> {
		let vkcore = self.device.vkcore.clone();
		let signal_i = VkSemaphoreSignalInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
			pNext: null(),
			semaphore: self.semaphore,
			value,
		};
		vkcore.vkSignalSemaphore(self.device.get_vk_device(), &signal_i)?;
		Ok(())
	}

	/// Raw handle accessor for submit/present call sites inside the crate.
	pub(crate) fn get_vk_semaphore(&self) -> VkSemaphore {
		self.semaphore
	}

	/// Blocks until the semaphore reaches the stored `timeline` value or
	/// `timeout` (nanoseconds) elapses.
	pub fn wait(&self, timeout: u64) -> Result<(), VulkanError> {
		let vkcore = self.device.vkcore.clone();
		let vk_device = self.device.get_vk_device();
		// Single-element arrays back the pointer/count pair the API expects.
		let semaphores = [self.semaphore];
		let timelines = [self.timeline];
		let wait_i = VkSemaphoreWaitInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
			pNext: null(),
			flags: 0,
			semaphoreCount: 1,
			pSemaphores: semaphores.as_ptr(),
			pValues: timelines.as_ptr(),
		};
		vkcore.vkWaitSemaphores(vk_device, &wait_i, timeout)?;
		Ok(())
	}

	/// Like [`Self::wait`], but for a raw handle/value pair not owned by a
	/// `VulkanSemaphore` instance.
	pub fn wait_vk(device: &VulkanDevice, semaphore: VkSemaphore, timeline: u64, timeout: u64) -> Result<(), VulkanError> {
		let vkcore = device.vkcore.clone();
		let vk_device = device.get_vk_device();
		let semaphores = [semaphore];
		let timelines = [timeline];
		let wait_i = VkSemaphoreWaitInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
			pNext: null(),
			flags: 0,
			semaphoreCount: 1,
			pSemaphores: semaphores.as_ptr(),
			pValues: timelines.as_ptr(),
		};
		vkcore.vkWaitSemaphores(vk_device, &wait_i, timeout)?;
		Ok(())
	}

	/// Waits on several semaphores at once; with `any` set, returns as soon
	/// as one of them is signaled. Empty input is a successful no-op.
	/// All semaphores are assumed to belong to the first one's device.
	pub fn wait_multi(semaphores: &[Self], timeout: u64, any: bool) -> Result<(), VulkanError> {
		if semaphores.is_empty() {
			Ok(())
		} else {
			let vkcore = semaphores[0].device.vkcore.clone();
			let vk_device = semaphores[0].device.get_vk_device();
			// Collect the timeline values before shadowing `semaphores`
			// with the raw-handle vector below.
			let timelines: Vec<u64> = semaphores.iter().map(|s|s.timeline).collect();
			let semaphores: Vec<VkSemaphore> = semaphores.iter().map(|s|s.get_vk_semaphore()).collect();
			let wait_i = VkSemaphoreWaitInfo {
				sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
				pNext: null(),
				flags: if any {VkSemaphoreWaitFlagBits::VK_SEMAPHORE_WAIT_ANY_BIT as VkSemaphoreWaitFlags} else {0},
				semaphoreCount: semaphores.len() as u32,
				pSemaphores: semaphores.as_ptr(),
				pValues: timelines.as_ptr(),
			};
			vkcore.vkWaitSemaphores(vk_device, &wait_i, timeout)?;
			Ok(())
		}
	}

	/// Like [`Self::wait_multi`], but for raw handles and explicit values.
	/// `semaphores` and `timelines` are parallel arrays; `timelines` must be
	/// at least as long as `semaphores`.
	pub fn wait_multi_vk(device: &VulkanDevice, semaphores: &[VkSemaphore], timelines: &[u64], timeout: u64, any: bool) -> Result<(), VulkanError> {
		if semaphores.is_empty() {
			Ok(())
		} else {
			let vkcore = device.vkcore.clone();
			let vk_device = device.get_vk_device();
			let wait_i = VkSemaphoreWaitInfo {
				sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
				pNext: null(),
				flags: if any {VkSemaphoreWaitFlagBits::VK_SEMAPHORE_WAIT_ANY_BIT as VkSemaphoreWaitFlags} else {0},
				semaphoreCount: semaphores.len() as u32,
				pSemaphores: semaphores.as_ptr(),
				pValues: timelines.as_ptr(),
			};
			vkcore.vkWaitSemaphores(vk_device, &wait_i, timeout)?;
			Ok(())
		}
	}
}
332
333impl Debug for VulkanSemaphore {
334 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
335 f.debug_struct("VulkanSemaphore")
336 .field("semaphore", &self.semaphore)
337 .field("timeline", &self.timeline)
338 .finish()
339 }
340}
341
342impl Drop for VulkanSemaphore {
343 fn drop(&mut self) {
344 proceed_run(self.device.vkcore.vkDestroySemaphore(self.device.get_vk_device(), self.semaphore, null()))
345 }
346}
347
/// An owned `VkFence` for CPU-side waits on queue submissions.
/// Created unsignaled.
pub struct VulkanFence {
	/// The owning device; kept alive for the fence's lifetime.
	pub device: Arc<VulkanDevice>,

	/// The raw Vulkan handle; destroyed on `Drop`.
	fence: VkFence,
}
356
// SAFETY: the struct only holds an `Arc<VulkanDevice>` and a raw handle.
// NOTE(review): assumes Vulkan's external-synchronization rules for
// `VkFence` are upheld by callers — confirm at call sites.
unsafe impl Send for VulkanFence {}
unsafe impl Sync for VulkanFence {}
359
impl VulkanFence {
	/// Creates an unsignaled fence.
	pub fn new(device: Arc<VulkanDevice>) -> Result<Self, VulkanError> {
		let vkcore = device.vkcore.clone();
		let ci = VkFenceCreateInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
			pNext: null(),
			flags: 0,
		};
		let mut fence: VkFence = null();
		vkcore.vkCreateFence(device.get_vk_device(), &ci, null(), &mut fence)?;
		Ok(Self{
			device,
			fence,
		})
	}

	/// Raw handle accessor for queue-submit call sites inside the crate.
	pub(crate) fn get_vk_fence(&self) -> VkFence {
		self.fence
	}

	/// Polls the fence: `Ok(true)` if signaled, `Ok(false)` if not yet.
	/// `VK_NOT_READY` is mapped to `Ok(false)`; other codes are errors.
	pub fn is_signaled(&self) -> Result<bool, VulkanError> {
		let vkcore = self.device.vkcore.clone();
		match vkcore.vkGetFenceStatus(self.device.get_vk_device(), self.fence) {
			Ok(_) => Ok(true),
			Err(e) => match e {
				VkError::VkNotReady(_) => Ok(false),
				others => Err(VulkanError::VkError(others)),
			}
		}
	}

	/// Like [`Self::is_signaled`], but for a raw handle.
	pub fn is_signaled_vk(device: &VulkanDevice, fence: VkFence) -> Result<bool, VulkanError> {
		let vkcore = device.vkcore.clone();
		match vkcore.vkGetFenceStatus(device.get_vk_device(), fence) {
			Ok(_) => Ok(true),
			Err(e) => match e {
				VkError::VkNotReady(_) => Ok(false),
				others => Err(VulkanError::VkError(others)),
			}
		}
	}

	/// Resets the fence back to the unsignaled state.
	pub fn unsignal(&self) -> Result<(), VulkanError> {
		let vkcore = self.device.vkcore.clone();
		let fences = [self.fence];
		Ok(vkcore.vkResetFences(self.device.get_vk_device(), 1, fences.as_ptr())?)
	}

	/// Resets several fences in one call. Empty input is a successful no-op.
	/// All fences are assumed to belong to the first one's device.
	pub fn unsignal_multi(fences: &[Self]) -> Result<(), VulkanError> {
		if fences.is_empty() {
			Ok(())
		} else {
			let vkcore = &fences[0].device.vkcore;
			let vkdevice = fences[0].device.get_vk_device();
			let fences: Vec<VkFence> = fences.iter().map(|f|f.get_vk_fence()).collect();
			Ok(vkcore.vkResetFences(vkdevice, fences.len() as u32, fences.as_ptr())?)
		}
	}

	/// Like [`Self::unsignal_multi`], but for raw handles.
	pub fn unsignal_multi_vk(device: &VulkanDevice, fences: &[VkFence]) -> Result<(), VulkanError> {
		if fences.is_empty() {
			Ok(())
		} else {
			let vkcore = device.vkcore.clone();
			Ok(vkcore.vkResetFences(device.get_vk_device(), fences.len() as u32, fences.as_ptr())?)
		}
	}

	/// Blocks until the fence is signaled or `timeout` (nanoseconds) elapses.
	pub fn wait(&self, timeout: u64) -> Result<(), VulkanError> {
		let vk_device = self.device.get_vk_device();
		let fences = [self.fence];
		let vkcore = self.device.vkcore.clone();
		// `waitAll` = 0 is irrelevant with a single fence.
		vkcore.vkWaitForFences(vk_device, 1, fences.as_ptr(), 0, timeout)?;
		Ok(())
	}

	/// Like [`Self::wait`], but for a raw handle.
	pub fn wait_vk(device: &VulkanDevice, fence: VkFence, timeout: u64) -> Result<(), VulkanError> {
		let vk_device = device.get_vk_device();
		let fences = [fence];
		let vkcore = device.vkcore.clone();
		vkcore.vkWaitForFences(vk_device, 1, fences.as_ptr(), 0, timeout)?;
		Ok(())
	}

	/// Waits on several fences; with `any` set, returns as soon as one is
	/// signaled (`waitAll` = 0), otherwise waits for all of them.
	/// Empty input is a successful no-op.
	pub fn wait_multi(fences: &[Self], timeout: u64, any: bool) -> Result<(), VulkanError> {
		if fences.is_empty() {
			Ok(())
		} else {
			let vkcore = fences[0].device.vkcore.clone();
			let vk_device = fences[0].device.get_vk_device();
			let fences: Vec<VkFence> = fences.iter().map(|f|f.get_vk_fence()).collect();
			vkcore.vkWaitForFences(vk_device, fences.len() as u32, fences.as_ptr(), if any {0} else {1}, timeout)?;
			Ok(())
		}
	}

	/// Like [`Self::wait_multi`], but for raw handles.
	pub fn wait_multi_vk(device: &VulkanDevice, fences: &[VkFence], timeout: u64, any: bool) -> Result<(), VulkanError> {
		if fences.is_empty() {
			Ok(())
		} else {
			let vkcore = device.vkcore.clone();
			let vk_device = device.get_vk_device();
			vkcore.vkWaitForFences(vk_device, fences.len() as u32, fences.as_ptr(), if any {0} else {1}, timeout)?;
			Ok(())
		}
	}
}
478
479impl Debug for VulkanFence {
480 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
481 f.debug_struct("VulkanFence")
482 .field("fence", &self.fence)
483 .finish()
484 }
485}
486
487impl Drop for VulkanFence {
488 fn drop(&mut self) {
489 proceed_run(self.device.vkcore.vkDestroyFence(self.device.get_vk_device(), self.fence, null()))
490 }
491}
492
/// Bookkeeping for the CPU mapping of one `VulkanMemory` allocation:
/// the shared mapped base address and a reference count of live views.
#[derive(Debug, Clone)]
pub struct MemoryMappingState {
	/// Base address returned by `vkMapMemory`; null while unmapped.
	pub(crate) mapped_address: *mut c_void,

	/// Number of outstanding `MappedMemory` views; the memory is unmapped
	/// when this drops back to zero.
	pub(crate) map_count: u32,
}

/// Manual impl: raw pointers only implement `Default` on very recent Rust,
/// so `#[derive(Default)]` would not compile on older stable toolchains.
/// The default is the unmapped state: null address, zero views.
impl Default for MemoryMappingState {
	fn default() -> Self {
		Self {
			mapped_address: null_mut(),
			map_count: 0,
		}
	}
}
502
/// An owned `VkDeviceMemory` allocation with reference-counted CPU mapping.
pub struct VulkanMemory {
	/// The owning device; kept alive for the allocation's lifetime.
	pub device: Arc<VulkanDevice>,

	/// The raw Vulkan handle; freed on `Drop`.
	memory: VkDeviceMemory,

	/// Allocation size in bytes, as requested at creation time.
	size: VkDeviceSize,

	/// Shared mapping state; `map`/`MappedMemory::drop` refcount through it.
	mapping_state: Mutex<MemoryMappingState>,
}
517
/// Direction of a host<->mapped-memory copy in `VulkanMemory::manipulate_data`.
#[derive(Debug)]
pub enum DataDirection {
	/// Copy from the caller's pointer into the mapped memory.
	SetData,
	/// Copy from the mapped memory into the caller's pointer.
	GetData
}
524
impl VulkanMemory {
	/// Allocates device memory matching `mem_reqs`, picking a memory type
	/// that satisfies both the requirement bits and the requested `flags`.
	pub fn new(device: Arc<VulkanDevice>, mem_reqs: &VkMemoryRequirements, flags: VkMemoryPropertyFlags) -> Result<Self, VulkanError> {
		let vkcore = device.vkcore.clone();
		let alloc_i = VkMemoryAllocateInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
			pNext: null(),
			allocationSize: mem_reqs.size,
			memoryTypeIndex: device.get_gpu().get_memory_type_index(mem_reqs.memoryTypeBits, flags)?,
		};
		let mut memory: VkDeviceMemory = null();
		vkcore.vkAllocateMemory(device.get_vk_device(), &alloc_i, null(), &mut memory)?;
		let ret = Self {
			device,
			memory,
			size: mem_reqs.size,
			mapping_state: Mutex::new(MemoryMappingState::default()),
		};
		Ok(ret)
	}

	/// Raw handle accessor for binding call sites inside the crate.
	pub(crate) fn get_vk_memory(&self) -> VkDeviceMemory {
		self.memory
	}

	/// Allocation size in bytes.
	pub fn get_size(&self) -> VkDeviceSize {
		self.size
	}

	/// Maps a `size`-byte window starting at `offset` and returns a guard
	/// that unmaps on drop (refcounted: the whole allocation is mapped once
	/// on the first call and reused by later overlapping calls).
	///
	/// NOTE(review): `offset`/`size` are not validated against `self.size`
	/// here — callers are trusted to stay in bounds.
	pub fn map<'a>(&'a self, offset: VkDeviceSize, size: usize) -> Result<MappedMemory<'a>, VulkanError> {
		let mut mapping_state_lock = self.mapping_state.lock().unwrap();
		if mapping_state_lock.map_count == 0 {
			// First view: map the whole allocation; `offset` is applied to
			// the returned pointer, not to the Vulkan mapping itself.
			self.device.vkcore.vkMapMemory(self.device.get_vk_device(), self.memory, 0, self.size, 0, &mut mapping_state_lock.mapped_address)?;
		}
		mapping_state_lock.map_count += 1;
		Ok(MappedMemory::new(self, (mapping_state_lock.mapped_address as *mut u8).wrapping_add(offset as usize) as *mut c_void, size))
	}

	/// Like [`Self::map`], but exposes the window as a typed `&[T]` view.
	pub fn map_as_slice<'a, T>(&'a self, offset: VkDeviceSize, size: usize) -> Result<TypedMappedMemory<'a, T>, VulkanError>
	where
		T: Sized + Clone + Copy {
		Ok(TypedMappedMemory::new(self.map(offset, size)?))
	}

	/// Copies `size` bytes between `data` and the mapped window at `offset`,
	/// in the direction selected by `direction`. The mapping is temporary
	/// and released before returning.
	pub fn manipulate_data(&self, data: *mut c_void, offset: VkDeviceSize, size: usize, direction: DataDirection) -> Result<(), VulkanError> {
		let map_guard = self.map(offset, size)?;
		match direction {
			// SAFETY: `map_guard.address` covers `size` mapped bytes, and the
			// caller guarantees `data` points to at least `size` valid bytes.
			DataDirection::SetData => unsafe {copy(data as *const u8, map_guard.address as *mut u8, size)},
			DataDirection::GetData => unsafe {copy(map_guard.address as *const u8, data as *mut u8, size)},
		}
		Ok(())
	}

	/// Uploads `size` bytes from `data` into the allocation at `offset`.
	pub fn set_data(&self, data: *const c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
		self.manipulate_data(data as *mut c_void, offset, size, DataDirection::SetData)
	}

	/// Downloads `size` bytes from the allocation at `offset` into `data`.
	pub fn get_data(&self, data: *mut c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
		self.manipulate_data(data, offset, size, DataDirection::GetData)
	}

	/// Binds the whole allocation to `buffer` at offset 0.
	pub(crate) fn bind_vk_buffer(&self, buffer: VkBuffer) -> Result<(), VulkanError> {
		let vkcore = self.device.vkcore.clone();
		vkcore.vkBindBufferMemory(self.device.get_vk_device(), buffer, self.memory, 0)?;
		Ok(())
	}

	/// Binds the whole allocation to `image` at offset 0.
	pub(crate) fn bind_vk_image(&self, image: VkImage) -> Result<(), VulkanError> {
		let vkcore = self.device.vkcore.clone();
		vkcore.vkBindImageMemory(self.device.get_vk_device(), image, self.memory, 0)?;
		Ok(())
	}
}
607
608impl Debug for VulkanMemory {
609 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
610 f.debug_struct("VulkanMemory")
611 .field("memory", &self.memory)
612 .field("size", &self.size)
613 .field("mapping_state", &self.mapping_state)
614 .finish()
615 }
616}
617
618impl Drop for VulkanMemory {
619 fn drop(&mut self) {
620 proceed_run(self.device.vkcore.vkFreeMemory(self.device.get_vk_device(), self.memory, null()))
621 }
622}
623
// SAFETY: the raw handle and mapped pointer are only touched through the
// `mapping_state` mutex or `&self` FFI calls. NOTE(review): assumes Vulkan's
// external-synchronization rules for `VkDeviceMemory` hold — confirm.
unsafe impl Send for VulkanMemory {}
unsafe impl Sync for VulkanMemory {}
626
/// RAII guard for a mapped window of a `VulkanMemory`; decrements the
/// owner's map refcount (and unmaps when it reaches zero) on drop.
#[derive(Debug)]
pub struct MappedMemory<'a> {
	/// The allocation this view belongs to.
	pub memory: &'a VulkanMemory,

	/// CPU address of the window (base mapping + requested offset).
	pub(crate) address: *mut c_void,

	/// Window length in bytes.
	pub(crate) size: usize,
}
639
640impl<'a> MappedMemory<'a> {
641 pub(crate) fn new(memory: &'a VulkanMemory, address: *mut c_void, size: usize) -> Self {
643 Self {
644 memory,
645 address,
646 size,
647 }
648 }
649
650 pub fn get_address(&self) -> *const c_void {
652 self.address
653 }
654
655 pub fn get_size(&self) -> usize {
657 self.size
658 }
659}
660
impl Drop for MappedMemory<'_> {
	fn drop(&mut self) {
		// Decrement the shared refcount and unmap only when the last
		// outstanding view of this allocation goes away.
		let mut mapping_state_lock = self.memory.mapping_state.lock().unwrap();
		mapping_state_lock.map_count -= 1;
		if mapping_state_lock.map_count == 0 {
			proceed_run(self.memory.device.vkcore.vkUnmapMemory(self.memory.device.get_vk_device(), self.memory.memory))
		}
	}
}
670
/// A typed slice view over a `MappedMemory` window; keeps the underlying
/// guard alive so the slice stays valid, and supports direct indexing.
#[derive(Debug)]
pub struct TypedMappedMemory<'a, T>
where
	T: Sized + Clone + Copy {
	// Held only to keep the mapping alive for as long as `slice` is used.
	mapped_memory: MappedMemory<'a>,

	// Typed view over the mapped bytes; length = size / size_of::<T>().
	slice: &'a mut [T],

	// Marks the logical ownership of `T` elements.
	_phantom: PhantomData<T>,
}
685
impl<'a, T> TypedMappedMemory<'a, T>
where
	T: Sized + Clone + Copy {
	/// Wraps a raw mapped window as a typed slice.
	///
	/// The element count is `size / size_of::<T>()`; trailing bytes that do
	/// not fit a whole `T` are not exposed.
	pub fn new(mapped_memory: MappedMemory<'a>) -> Self {
		let len = mapped_memory.size / size_of::<T>();
		// SAFETY: `address` points to at least `size` mapped bytes that stay
		// valid while `mapped_memory` (stored in `Self`) is alive.
		// NOTE(review): assumes the mapped address is suitably aligned for
		// `T` — confirm for over-aligned element types.
		let slice = unsafe {from_raw_parts_mut(mapped_memory.address as *mut T, len)};
		Self {
			mapped_memory,
			slice,
			_phantom: PhantomData,
		}
	}

	/// Borrows the mapped window as an immutable slice.
	pub fn as_slice(&self) -> &[T] {
		self.slice
	}

	/// Borrows the mapped window as a mutable slice.
	pub fn as_slice_mut(&mut self) -> &mut [T] {
		self.slice
	}
}
709
710impl<'a, T> Index<usize> for TypedMappedMemory<'a, T>
711where
712 T: Sized + Clone + Copy {
713 type Output = T;
714 fn index(&self, index: usize) -> &T {
715 &self.slice[index]
716 }
717}
718
719impl<'a, T> IndexMut<usize> for TypedMappedMemory<'a, T>
720where
721 T: Sized + Clone + Copy {
722 fn index_mut(&mut self, index: usize) -> &mut T {
723 &mut self.slice[index]
724 }
725}
726
727impl<'a, T> Index<Range<usize>> for TypedMappedMemory<'a, T>
728where
729 T: Sized + Clone + Copy {
730 type Output = [T];
731 fn index(&self, range: Range<usize>) -> &[T] {
732 &self.slice[range.start..range.end]
733 }
734}
735
736impl<'a, T> IndexMut<Range<usize>> for TypedMappedMemory<'a, T>
737where
738 T: Sized + Clone + Copy {
739 fn index_mut(&mut self, range: Range<usize>) -> &mut [T] {
740 &mut self.slice[range.start..range.end]
741 }
742}
743
744impl<'a, T> Index<RangeFrom<usize>> for TypedMappedMemory<'a, T>
745where
746 T: Sized + Clone + Copy {
747 type Output = [T];
748 fn index(&self, range: RangeFrom<usize>) -> &[T] {
749 &self.slice[range.start..]
750 }
751}
752
753impl<'a, T> IndexMut<RangeFrom<usize>> for TypedMappedMemory<'a, T>
754where
755 T: Sized + Clone + Copy {
756 fn index_mut(&mut self, range: RangeFrom<usize>) -> &mut [T] {
757 &mut self.slice[range.start..]
758 }
759}
760
761impl<'a, T> Index<RangeTo<usize>> for TypedMappedMemory<'a, T>
762where
763 T: Sized + Clone + Copy {
764 type Output = [T];
765 fn index(&self, range: RangeTo<usize>) -> &[T] {
766 &self.slice[..range.end]
767 }
768}
769
770impl<'a, T> IndexMut<RangeTo<usize>> for TypedMappedMemory<'a, T>
771where
772 T: Sized + Clone + Copy {
773 fn index_mut(&mut self, range: RangeTo<usize>) -> &mut [T] {
774 &mut self.slice[..range.end]
775 }
776}
777
778impl<'a, T> Index<RangeFull> for TypedMappedMemory<'a, T>
779where
780 T: Sized + Clone + Copy {
781 type Output = [T];
782 fn index(&self, _: RangeFull) -> &[T] {
783 &self.slice[..]
784 }
785}
786
787impl<'a, T> IndexMut<RangeFull> for TypedMappedMemory<'a, T>
788where
789 T: Sized + Clone + Copy {
790 fn index_mut(&mut self, _: RangeFull) -> &mut [T] {
791 &mut self.slice[..]
792 }
793}
794
795impl<'a, T> Index<RangeInclusive<usize>> for TypedMappedMemory<'a, T>
796where
797 T: Sized + Clone + Copy {
798 type Output = [T];
799 fn index(&self, range: RangeInclusive<usize>) -> &[T] {
800 &self.slice[*range.start()..=*range.end()]
801 }
802}
803
804impl<'a, T> IndexMut<RangeInclusive<usize>> for TypedMappedMemory<'a, T>
805where
806 T: Sized + Clone + Copy {
807 fn index_mut(&mut self, range: RangeInclusive<usize>) -> &mut [T] {
808 &mut self.slice[*range.start()..=*range.end()]
809 }
810}
811
812impl<'a, T> Index<RangeToInclusive<usize>> for TypedMappedMemory<'a, T>
813where
814 T: Sized + Clone + Copy {
815 type Output = [T];
816 fn index(&self, range: RangeToInclusive<usize>) -> &[T] {
817 &self.slice[..=range.end]
818 }
819}
820
821impl<'a, T> IndexMut<RangeToInclusive<usize>> for TypedMappedMemory<'a, T>
822where
823 T: Sized + Clone + Copy {
824 fn index_mut(&mut self, range: RangeToInclusive<usize>) -> &mut [T] {
825 &mut self.slice[..=range.end]
826 }
827}
828
/// The format and byte range a `VulkanBufferView` exposes over its buffer.
#[derive(Debug, Clone, Copy)]
pub struct BufferViewRange {
	/// Texel format the view interprets the bytes as.
	pub format: VkFormat,

	/// Start of the viewed range, in bytes from the buffer's beginning.
	pub offset: VkDeviceSize,

	/// Length of the viewed range in bytes.
	pub range: VkDeviceSize,
}
841
/// An owned `VkBufferView`: a formatted (texel) view over a `VulkanBuffer`.
pub struct VulkanBufferView {
	/// The owning device; kept alive for the view's lifetime.
	pub device: Arc<VulkanDevice>,

	/// The viewed buffer; the `Arc` keeps it alive while the view exists.
	buffer: Arc<VulkanBuffer>,

	/// The raw Vulkan handle; destroyed on `Drop`.
	buffer_view: VkBufferView,

	/// The format/offset/length this view was created with.
	range: BufferViewRange,
}
856
impl VulkanBufferView {
	/// Creates a view over the sub-range of `buffer` described by `range`.
	pub fn new_partial(buffer: Arc<VulkanBuffer>, range: &BufferViewRange) -> Result<Self, VulkanError> {
		let buffer_view_ci = VkBufferViewCreateInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
			pNext: null(),
			flags: 0,
			buffer: buffer.get_vk_buffer(),
			format: range.format,
			offset: range.offset,
			range: range.range,
		};
		let mut buffer_view = null();
		buffer.device.vkcore.vkCreateBufferView(buffer.device.get_vk_device(), &buffer_view_ci, null(), &mut buffer_view)?;
		Ok(Self {
			device: buffer.device.clone(),
			buffer,
			buffer_view,
			range: *range,
		})
	}

	/// Creates a view covering the whole buffer with the given texel `format`.
	pub fn new(buffer: Arc<VulkanBuffer>, format: VkFormat) -> Result<Self, VulkanError> {
		let range = BufferViewRange {
			format,
			offset: 0,
			range: buffer.get_size(),
		};
		Self::new_partial(buffer, &range)
	}

	/// Raw handle accessor for descriptor-write call sites inside the crate.
	pub(crate) fn get_vk_buffer_view(&self) -> VkBufferView {
		self.buffer_view
	}

	/// The format/offset/length this view was created with.
	pub fn get_range(&self) -> &BufferViewRange {
		&self.range
	}
}
899
900impl Debug for VulkanBufferView {
901 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
902 f.debug_struct("VulkanBufferView")
903 .field("buffer", &self.buffer)
904 .field("buffer_view", &self.buffer_view)
905 .field("range", &self.range)
906 .finish()
907 }
908}
909
910impl Drop for VulkanBufferView {
911 fn drop(&mut self) {
912 proceed_run(self.device.vkcore.vkDestroyBufferView(self.device.get_vk_device(), self.buffer_view, null()))
913 }
914}
915
/// An owned `VkBuffer` (no memory bound yet; see `VulkanMemory::bind_vk_buffer`).
pub struct VulkanBuffer {
	/// The owning device; kept alive for the buffer's lifetime.
	pub device: Arc<VulkanDevice>,

	/// Buffer size in bytes, as requested at creation time.
	size: VkDeviceSize,

	/// The raw Vulkan handle; destroyed on `Drop`.
	buffer: VkBuffer,
}
927
impl VulkanBuffer {
	/// Creates an exclusive-sharing-mode buffer of `size` bytes with the
	/// given `usage` flags. Memory must be bound separately.
	pub fn new(device: Arc<VulkanDevice>, size: VkDeviceSize, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
		let vkcore = device.vkcore.clone();
		let vkdevice = device.get_vk_device();
		let buffer_ci = VkBufferCreateInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
			pNext: null(),
			flags: 0,
			size,
			usage,
			// Exclusive mode: no queue-family index list is needed.
			sharingMode: VkSharingMode::VK_SHARING_MODE_EXCLUSIVE,
			queueFamilyIndexCount: 0,
			pQueueFamilyIndices: null(),
		};
		let mut buffer: VkBuffer = null();
		vkcore.vkCreateBuffer(vkdevice, &buffer_ci, null(), &mut buffer)?;
		Ok(Self {
			device,
			size,
			buffer,
		})
	}

	/// Queries size/alignment/memory-type requirements for this buffer.
	pub fn get_memory_requirements(&self) -> Result<VkMemoryRequirements, VulkanError> {
		let vkcore = self.device.vkcore.clone();
		// SAFETY: `VkMemoryRequirements` is a plain C output struct; it is
		// fully overwritten by the call below, the zeroed value is only a
		// placeholder. NOTE(review): assumes all-zero is a valid bit pattern
		// for this struct — true for plain integer fields.
		let mut ret: VkMemoryRequirements = unsafe {MaybeUninit::zeroed().assume_init()};
		vkcore.vkGetBufferMemoryRequirements(self.device.get_vk_device(), self.buffer, &mut ret)?;
		Ok(ret)
	}

	/// Raw handle accessor for bind/copy call sites inside the crate.
	pub(crate) fn get_vk_buffer(&self) -> VkBuffer {
		self.buffer
	}

	/// Buffer size in bytes.
	pub fn get_size(&self) -> VkDeviceSize {
		self.size
	}
}
970
971impl Debug for VulkanBuffer {
972 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
973 f.debug_struct("VulkanBuffer")
974 .field("size", &self.size)
975 .field("buffer", &self.buffer)
976 .finish()
977 }
978}
979
980impl Drop for VulkanBuffer {
981 fn drop(&mut self) {
982 proceed_run(self.device.vkcore.vkDestroyBuffer(self.device.get_vk_device(), self.buffer, null()))
983 }
984}
985
// SAFETY: the struct only holds an `Arc<VulkanDevice>`, a size and a raw
// handle. NOTE(review): assumes Vulkan's external-synchronization rules for
// `VkBuffer` are upheld by callers — confirm at call sites.
unsafe impl Send for VulkanBuffer {}
unsafe impl Sync for VulkanBuffer {}
988
/// A byte range (offset + length) inside a buffer.
#[derive(Debug)]
pub struct BufferRegion {
	/// Start of the region, in bytes from the buffer's beginning.
	pub offset: VkDeviceSize,
	/// Length of the region in bytes.
	pub size: VkDeviceSize,
}
995
/// A persistently-mapped, host-visible transfer-source buffer used to upload
/// data to the GPU. The mapping is created once in `new` and released in `Drop`.
pub struct StagingBuffer {
	/// The owning device; kept alive for the staging buffer's lifetime.
	pub device: Arc<VulkanDevice>,

	/// The TRANSFER_SRC buffer object.
	pub buffer: Arc<VulkanBuffer>,

	/// The HOST_VISIBLE | HOST_COHERENT backing memory.
	pub memory: Arc<VulkanMemory>,

	/// CPU address of the persistent whole-buffer mapping.
	pub(crate) address: *mut c_void,
}
1010
impl StagingBuffer {
	/// Creates a `size`-byte staging buffer: allocates host-visible/coherent
	/// memory, binds it, and maps the whole range persistently.
	pub fn new(device: Arc<VulkanDevice>, size: VkDeviceSize) -> Result<Self, VulkanError> {
		let buffer = Arc::new(VulkanBuffer::new(device.clone(), size, VkBufferUsageFlagBits::VK_BUFFER_USAGE_TRANSFER_SRC_BIT as VkBufferUsageFlags)?);
		let memory = Arc::new(VulkanMemory::new(device.clone(), &buffer.get_memory_requirements()?,
			VkMemoryPropertyFlagBits::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT as VkMemoryPropertyFlags |
			VkMemoryPropertyFlagBits::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT as VkMemoryPropertyFlags)?);
		memory.bind_vk_buffer(buffer.get_vk_buffer())?;
		let mut address: *mut c_void = null_mut();
		// Seed the memory's mapping refcount with 1 so the mapping created
		// here stays alive until this `StagingBuffer` is dropped, while
		// later `map()` calls just reuse the shared address.
		let mut mapping_state_lock = memory.mapping_state.lock().unwrap();
		assert_eq!(mapping_state_lock.map_count, 0, "The newly created `VulkanMemory` must be unmapped.");
		mapping_state_lock.map_count += 1;
		device.vkcore.vkMapMemory(device.get_vk_device(), memory.get_vk_memory(), 0, size, 0, &mut address)?;
		mapping_state_lock.mapped_address = address;
		drop(mapping_state_lock);
		Ok(Self {
			device,
			buffer,
			memory,
			address,
		})
	}

	/// Raw buffer handle for copy-command call sites inside the crate.
	pub(crate) fn get_vk_buffer(&self) -> VkBuffer {
		self.buffer.get_vk_buffer()
	}

	/// Raw memory handle for mapping/cleanup call sites inside the crate.
	pub(crate) fn get_vk_memory(&self) -> VkDeviceMemory {
		self.memory.get_vk_memory()
	}

	/// Size of the backing allocation in bytes.
	pub fn get_size(&self) -> VkDeviceSize {
		self.memory.get_size()
	}

	/// CPU address of the persistent mapping.
	pub fn get_address(&self) -> *mut c_void {
		self.address
	}

	/// Uploads `size` bytes from `data` into the buffer at `offset`.
	pub fn set_data(&mut self, data: *const c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
		self.memory.set_data(data, offset, size)?;
		Ok(())
	}

	/// Downloads `size` bytes from the buffer at `offset` into `data`.
	pub fn get_data(&mut self, data: *mut c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
		self.memory.get_data(data, offset, size)?;
		Ok(())
	}

	/// Returns a guard over a window of the mapping; since the buffer is
	/// already persistently mapped, this only bumps the refcount.
	pub fn map<'a>(&'a mut self, offset: VkDeviceSize, size: usize) -> Result<MappedMemory<'a>, VulkanError> {
		self.memory.map(offset, size)
	}

	/// Like [`Self::map`], but exposes the window as a typed `&[T]` view.
	pub fn map_as_slice<'a, T>(&'a mut self, offset: VkDeviceSize, size: usize) -> Result<TypedMappedMemory<'a, T>, VulkanError>
	where
		T: Sized + Clone + Copy {
		self.memory.map_as_slice(offset, size)
	}
}
1078
1079impl Debug for StagingBuffer {
1080 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
1081 f.debug_struct("StagingBuffer")
1082 .field("buffer", &self.buffer)
1083 .field("memory", &self.memory)
1084 .field("address", &self.address)
1085 .finish()
1086 }
1087}
1088
impl Drop for StagingBuffer {
	fn drop(&mut self) {
		// Release the persistent-mapping reference taken in `new`; unmap
		// only if no other `MappedMemory` views are still alive.
		let mut mapping_state_lock = self.memory.mapping_state.lock().unwrap();
		mapping_state_lock.map_count -= 1;
		if mapping_state_lock.map_count == 0 {
			proceed_run(self.device.vkcore.vkUnmapMemory(self.device.get_vk_device(), self.get_vk_memory()))
		}
	}
}
1098
/// An owned `VkSampler`; see `new_linear`/`new_nearest` for common presets.
pub struct VulkanSampler {
	/// The owning device; kept alive for the sampler's lifetime.
	pub device: Arc<VulkanDevice>,

	/// The raw Vulkan handle; destroyed on `Drop`.
	sampler: VkSampler,
}
1106
impl VulkanSampler {
	/// Creates a sampler from a caller-supplied create-info.
	pub fn new(device: Arc<VulkanDevice>, sampler_ci: &VkSamplerCreateInfo) -> Result<Self, VulkanError> {
		let mut sampler = null();
		device.vkcore.vkCreateSampler(device.get_vk_device(), sampler_ci, null(), &mut sampler)?;
		Ok(Self {
			device,
			sampler,
		})
	}

	/// Preset: bilinear filtering, repeat addressing, optional mipmapping
	/// and optional anisotropy (clamped to the device's reported maximum).
	pub fn new_linear(device: Arc<VulkanDevice>, with_mipmaps: bool, anisotropy: bool) -> Result<Self, VulkanError> {
		let max_anisotropy = device.get_gpu().properties.limits.maxSamplerAnisotropy;
		let sampler_ci = VkSamplerCreateInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
			pNext: null(),
			flags: 0,
			magFilter: VkFilter::VK_FILTER_LINEAR,
			minFilter: VkFilter::VK_FILTER_LINEAR,
			mipmapMode: VkSamplerMipmapMode::VK_SAMPLER_MIPMAP_MODE_NEAREST,
			addressModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT,
			addressModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT,
			addressModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT,
			mipLodBias: 0.0,
			anisotropyEnable: if anisotropy {1} else {0},
			maxAnisotropy: max_anisotropy,
			compareEnable: 0,
			compareOp: VkCompareOp::VK_COMPARE_OP_NEVER,
			minLod: 0.0,
			// `maxLod` = 0 disables mip selection; `VK_LOD_CLAMP_NONE` allows all levels.
			maxLod: if with_mipmaps {VK_LOD_CLAMP_NONE} else {0.0},
			borderColor: VkBorderColor::VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
			unnormalizedCoordinates: 0,
		};
		Self::new(device, &sampler_ci)
	}

	/// Preset: nearest-neighbor filtering, repeat addressing, optional mipmapping.
	pub fn new_nearest(device: Arc<VulkanDevice>, with_mipmaps: bool) -> Result<Self, VulkanError> {
		let sampler_ci = VkSamplerCreateInfo {
			sType: VkStructureType::VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
			pNext: null(),
			flags: 0,
			magFilter: VkFilter::VK_FILTER_NEAREST,
			minFilter: VkFilter::VK_FILTER_NEAREST,
			mipmapMode: VkSamplerMipmapMode::VK_SAMPLER_MIPMAP_MODE_NEAREST,
			addressModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT,
			addressModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT,
			addressModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT,
			mipLodBias: 0.0,
			anisotropyEnable: 0,
			maxAnisotropy: 1.0,
			compareEnable: 0,
			compareOp: VkCompareOp::VK_COMPARE_OP_NEVER,
			minLod: 0.0,
			maxLod: if with_mipmaps {VK_LOD_CLAMP_NONE} else {0.0},
			borderColor: VkBorderColor::VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
			unnormalizedCoordinates: 0,
		};
		Self::new(device, &sampler_ci)
	}

	/// Raw handle accessor.
	pub fn get_vk_sampler(&self) -> VkSampler {
		self.sampler
	}
}
1174
1175impl Debug for VulkanSampler {
1176 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
1177 f.debug_struct("VulkanSampler")
1178 .field("sampler", &self.sampler)
1179 .finish()
1180 }
1181}
1182
1183impl Drop for VulkanSampler {
1184 fn drop(&mut self) {
1185 proceed_run(self.device.vkcore.vkDestroySampler(self.device.get_vk_device(), self.sampler, null()))
1186 }
1187}