#![allow(clippy::uninit_vec)]
#![allow(clippy::too_many_arguments)]
use crate::prelude::*;
use std::{
    ffi::c_void,
    fmt::{self, Debug, Formatter},
    io::{self, ErrorKind},
    mem::{MaybeUninit, size_of},
    ops::{Index, IndexMut, Range, RangeFrom, RangeTo, RangeFull, RangeInclusive, RangeToInclusive},
    ptr::{copy, null, null_mut},
    slice::from_raw_parts_mut,
    sync::{
        Arc,
        Mutex,
        MutexGuard,
        atomic::{
            AtomicBool,
            Ordering,
        },
    },
};

/// The unified error type of this module, aggregating Vulkan call failures,
/// IO errors, shader tooling errors, and asset-loading errors.
#[derive(Debug, Clone)]
pub enum VulkanError {
    VkError(VkError),
    IOError { kind: ErrorKind, what: String },
    ChooseGpuFailed,
    NoGoodQueueForSurface(&'static str),
    NoGoodDepthStencilFormat,
    CommandPoolIsInUse,
    NoIdleCommandPools,
    NoIdleDeviceQueues,
    NoSuitableMemoryType,
    NoStagingBuffer,
    ImageTypeSizeNotMatch(String),
    ImagePixelFormatNotSupported,
    LoadImageFailed(String),
    ShaderCompilationError(String),
    ShaderParseIdUnknown(String),
    ShaderParseTypeUnknown(String),
    ShaderParseError(Arc<rspirv::binary::ParseState>),
    MissingShaderInputs(String),
    ShaderInputTypeMismatch(String),
    ShaderInputLengthMismatch(String),
    ShaderInputTypeUnsupported(String),
    ObjError(ObjError),
}

impl From<VkError> for VulkanError {
    fn from(e: VkError) -> Self {
        Self::VkError(e)
    }
}

impl From<io::Error> for VulkanError {
    fn from(e: io::Error) -> Self {
        Self::IOError {
            kind: e.kind(),
            what: format!("{e:?}"),
        }
    }
}

impl From<rspirv::binary::ParseState> for VulkanError {
    fn from(s: rspirv::binary::ParseState) -> Self {
        Self::ShaderParseError(Arc::new(s))
    }
}

impl From<ObjError> for VulkanError {
    fn from(s: ObjError) -> Self {
        match s {
            ObjError::IOError { kind, what } => Self::IOError {
                kind,
                what,
            },
            _ => Self::ObjError(s),
        }
    }
}

#[cfg(feature = "shaderc")]
impl From<shaderc::Error> for VulkanError {
    fn from(e: shaderc::Error) -> Self {
        match e {
            shaderc::Error::CompilationError(_, desc) => Self::ShaderCompilationError(desc),
            _ => Self::ShaderCompilationError(format!("{e:?}")),
        }
    }
}

impl From<image::error::ImageError> for VulkanError {
    fn from(e: image::error::ImageError) -> Self {
        use image::error::ImageError::*;
        match e {
            IoError(ioe) => Self::IOError {
                kind: ioe.kind(),
                what: format!("{ioe:?}"),
            },
            _ => Self::LoadImageFailed(format!("{e:?}")),
        }
    }
}

impl VulkanError {
    /// Returns the inner `VkError` if this error wraps a raw Vulkan failure.
    pub fn is_vkerror(&self) -> Option<&VkError> {
        if let Self::VkError(ve) = self {
            Some(ve)
        } else {
            None
        }
    }

    /// Returns the compiler message if this is a `ShaderCompilationError`.
    pub fn is_shader_error(&self) -> Option<&String> {
        if let Self::ShaderCompilationError(se) = self {
            Some(se)
        } else {
            None
        }
    }
}
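
// A minimal sketch (editor addition, not original API): inspecting a failure
// through the accessors above without consuming it.
#[allow(dead_code)]
fn log_if_shader_error(err: &VulkanError) {
    if let Some(desc) = err.is_shader_error() {
        eprintln!("Shader compilation failed: {desc}");
    }
}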

/// Turns the recoverable `NoStagingBuffer` error into a success, leaving every
/// other outcome untouched.
pub fn filter_no_staging_buffer(result: Result<(), VulkanError>) -> Result<(), VulkanError> {
    match result {
        Ok(_) => result,
        Err(ref vke) => match vke {
            VulkanError::NoStagingBuffer => Ok(()),
            _ => result,
        }
    }
}

/// Logs an error instead of propagating it, for call sites that must keep
/// running (e.g. `Drop` implementations).
pub fn proceed_run<E: Debug>(result: Result<(), E>) {
    match result {
        Ok(_) => {}
        Err(e) => {
            eprintln!("Error occurred: `{e:?}`, proceeding to run.");
        }
    }
}
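
// A minimal sketch (editor addition): how the two helpers above compose.
// `upload` stands in for any operation that may fail with `NoStagingBuffer`;
// the filter downgrades that specific error to success, and `proceed_run`
// logs anything else without propagating it.
#[allow(dead_code)]
fn run_upload_tolerantly(upload: impl FnOnce() -> Result<(), VulkanError>) {
    proceed_run(filter_no_staging_buffer(upload()));
}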

/// A wrapper for `VkPipelineCache`, which lets the driver reuse previously
/// compiled pipeline state.
pub struct VulkanPipelineCache {
    /// The device that owns the cache.
    pub device: Arc<VulkanDevice>,

    /// The underlying Vulkan handle.
    pipeline_cache: VkPipelineCache,
}

impl VulkanPipelineCache {
    /// Creates a pipeline cache, optionally seeded with data from a previous
    /// `dump_cache()`.
    pub fn new(device: Arc<VulkanDevice>, initial_data: Option<&[u8]>) -> Result<Self, VulkanError> {
        let (data_size, data_ptr) = if let Some(data) = initial_data {
            (data.len(), data.as_ptr())
        } else {
            (0, null())
        };
        let pipeline_cache_ci = VkPipelineCacheCreateInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
            pNext: null(),
            flags: 0,
            initialDataSize: data_size,
            pInitialData: data_ptr as *const c_void,
        };
        let mut pipeline_cache = null();
        device.vkcore.vkCreatePipelineCache(device.get_vk_device(), &pipeline_cache_ci, null(), &mut pipeline_cache)?;
        Ok(Self {
            device,
            pipeline_cache,
        })
    }

    /// Serializes the cache contents so they can be persisted and fed back to
    /// `new()` in a later run.
    pub fn dump_cache(&self) -> Result<Vec<u8>, VulkanError> {
        let vkdevice = self.device.get_vk_device();
        let mut size = 0;
        // Standard two-call pattern: query the size, then fetch the data.
        self.device.vkcore.vkGetPipelineCacheData(vkdevice, self.pipeline_cache, &mut size, null_mut())?;
        if size == 0 {
            Ok(Vec::new())
        } else {
            let mut data: Vec<u8> = Vec::with_capacity(size);
            self.device.vkcore.vkGetPipelineCacheData(vkdevice, self.pipeline_cache, &mut size, data.as_mut_ptr() as *mut c_void)?;
            unsafe { data.set_len(size) };
            Ok(data)
        }
    }

    /// Returns the raw `VkPipelineCache` handle.
    pub(crate) fn get_vk_pipeline_cache(&self) -> VkPipelineCache {
        self.pipeline_cache
    }
}

impl Debug for VulkanPipelineCache {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("VulkanPipelineCache")
            .field("pipeline_cache", &self.pipeline_cache)
            .finish()
    }
}

impl Drop for VulkanPipelineCache {
    fn drop(&mut self) {
        proceed_run(self.device.vkcore.vkDestroyPipelineCache(self.device.get_vk_device(), self.pipeline_cache, null()))
    }
}

unsafe impl Send for VulkanPipelineCache {}
unsafe impl Sync for VulkanPipelineCache {}
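
// A minimal sketch (editor addition): persisting the pipeline cache across
// runs. Assumes `device` is an initialized `Arc<VulkanDevice>`; the file name
// is hypothetical.
#[allow(dead_code)]
fn pipeline_cache_roundtrip(device: Arc<VulkanDevice>) -> Result<(), VulkanError> {
    // Seed the cache from a previous run's blob if one exists, else start empty.
    let initial = std::fs::read("pipeline_cache.bin").ok();
    let cache = VulkanPipelineCache::new(device, initial.as_deref())?;
    // ... create pipelines against `cache.get_vk_pipeline_cache()` here ...
    std::fs::write("pipeline_cache.bin", cache.dump_cache()?)?;
    Ok(())
}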

/// A wrapper for `VkSemaphore`, supporting both binary and timeline semaphores.
pub struct VulkanSemaphore {
    /// The device that owns the semaphore.
    pub device: Arc<VulkanDevice>,

    /// The underlying Vulkan handle.
    semaphore: VkSemaphore,

    /// The timeline value that `wait()` blocks on; `0` for binary semaphores.
    pub(crate) timeline: u64,
}

unsafe impl Send for VulkanSemaphore {}
unsafe impl Sync for VulkanSemaphore {}

impl VulkanSemaphore {
    /// Creates a binary semaphore.
    pub fn new(device: Arc<VulkanDevice>) -> Result<Self, VulkanError> {
        let ci = VkSemaphoreCreateInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
            pNext: null(),
            flags: 0,
        };
        let mut semaphore: VkSemaphore = null();
        device.vkcore.vkCreateSemaphore(device.get_vk_device(), &ci, null(), &mut semaphore)?;
        Ok(Self {
            device,
            semaphore,
            timeline: 0,
        })
    }

    /// Creates a timeline semaphore starting at `initial_value`.
    pub fn new_timeline(device: Arc<VulkanDevice>, initial_value: u64) -> Result<Self, VulkanError> {
        let ci_next = VkSemaphoreTypeCreateInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
            pNext: null(),
            semaphoreType: VkSemaphoreType::VK_SEMAPHORE_TYPE_TIMELINE,
            initialValue: initial_value,
        };
        let ci = VkSemaphoreCreateInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
            pNext: &ci_next as *const VkSemaphoreTypeCreateInfo as *const c_void,
            flags: 0,
        };
        let mut semaphore: VkSemaphore = null();
        device.vkcore.vkCreateSemaphore(device.get_vk_device(), &ci, null(), &mut semaphore)?;
        Ok(Self {
            device,
            semaphore,
            timeline: initial_value,
        })
    }

    /// Signals a timeline semaphore to `value` from the host.
    pub fn signal(&self, value: u64) -> Result<(), VulkanError> {
        let vkcore = self.device.vkcore.clone();
        let signal_i = VkSemaphoreSignalInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
            pNext: null(),
            semaphore: self.semaphore,
            value,
        };
        vkcore.vkSignalSemaphore(self.device.get_vk_device(), &signal_i)?;
        Ok(())
    }

    /// Returns the raw `VkSemaphore` handle.
    pub(crate) fn get_vk_semaphore(&self) -> VkSemaphore {
        self.semaphore
    }

    /// Blocks until the (timeline) semaphore reaches its stored timeline value
    /// or `timeout` nanoseconds elapse.
    pub fn wait(&self, timeout: u64) -> Result<(), VulkanError> {
        let vkcore = self.device.vkcore.clone();
        let vk_device = self.device.get_vk_device();
        let semaphores = [self.semaphore];
        let timelines = [self.timeline];
        let wait_i = VkSemaphoreWaitInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
            pNext: null(),
            flags: 0,
            semaphoreCount: 1,
            pSemaphores: semaphores.as_ptr(),
            pValues: timelines.as_ptr(),
        };
        vkcore.vkWaitSemaphores(vk_device, &wait_i, timeout)?;
        Ok(())
    }

    /// Like `wait()`, but for a raw `VkSemaphore` handle and an explicit
    /// timeline value.
    pub fn wait_vk(device: &VulkanDevice, semaphore: VkSemaphore, timeline: u64, timeout: u64) -> Result<(), VulkanError> {
        let vkcore = device.vkcore.clone();
        let vk_device = device.get_vk_device();
        let semaphores = [semaphore];
        let timelines = [timeline];
        let wait_i = VkSemaphoreWaitInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
            pNext: null(),
            flags: 0,
            semaphoreCount: 1,
            pSemaphores: semaphores.as_ptr(),
            pValues: timelines.as_ptr(),
        };
        vkcore.vkWaitSemaphores(vk_device, &wait_i, timeout)?;
        Ok(())
    }

    /// Waits on several semaphores at once; with `any` set, returns as soon as
    /// any one of them reaches its stored timeline value.
    pub fn wait_multi(semaphores: &[Self], timeout: u64, any: bool) -> Result<(), VulkanError> {
        if semaphores.is_empty() {
            Ok(())
        } else {
            let vkcore = semaphores[0].device.vkcore.clone();
            let vk_device = semaphores[0].device.get_vk_device();
            let timelines: Vec<u64> = semaphores.iter().map(|s| s.timeline).collect();
            let semaphores: Vec<VkSemaphore> = semaphores.iter().map(|s| s.get_vk_semaphore()).collect();
            let wait_i = VkSemaphoreWaitInfo {
                sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
                pNext: null(),
                flags: if any { VkSemaphoreWaitFlagBits::VK_SEMAPHORE_WAIT_ANY_BIT as VkSemaphoreWaitFlags } else { 0 },
                semaphoreCount: semaphores.len() as u32,
                pSemaphores: semaphores.as_ptr(),
                pValues: timelines.as_ptr(),
            };
            vkcore.vkWaitSemaphores(vk_device, &wait_i, timeout)?;
            Ok(())
        }
    }

    /// Like `wait_multi()`, but for raw handles paired with explicit timeline
    /// values.
    pub fn wait_multi_vk(device: &VulkanDevice, semaphores: &[VkSemaphore], timelines: &[u64], timeout: u64, any: bool) -> Result<(), VulkanError> {
        if semaphores.is_empty() {
            Ok(())
        } else {
            let vkcore = device.vkcore.clone();
            let vk_device = device.get_vk_device();
            let wait_i = VkSemaphoreWaitInfo {
                sType: VkStructureType::VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
                pNext: null(),
                flags: if any { VkSemaphoreWaitFlagBits::VK_SEMAPHORE_WAIT_ANY_BIT as VkSemaphoreWaitFlags } else { 0 },
                semaphoreCount: semaphores.len() as u32,
                pSemaphores: semaphores.as_ptr(),
                pValues: timelines.as_ptr(),
            };
            vkcore.vkWaitSemaphores(vk_device, &wait_i, timeout)?;
            Ok(())
        }
    }
}

impl Debug for VulkanSemaphore {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("VulkanSemaphore")
            .field("semaphore", &self.semaphore)
            .field("timeline", &self.timeline)
            .finish()
    }
}

impl Drop for VulkanSemaphore {
    fn drop(&mut self) {
        proceed_run(self.device.vkcore.vkDestroySemaphore(self.device.get_vk_device(), self.semaphore, null()))
    }
}
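
// A minimal sketch (editor addition): a timeline semaphore driven entirely
// from the host, as a CPU-visible progress counter. Assumes `device` is an
// initialized `Arc<VulkanDevice>`; in real use the GPU side would normally
// signal and the host would wait.
#[allow(dead_code)]
fn timeline_roundtrip(device: Arc<VulkanDevice>) -> Result<(), VulkanError> {
    let mut sem = VulkanSemaphore::new_timeline(device, 0)?;
    sem.signal(1)?;   // advance the counter from the host
    sem.timeline = 1; // `wait()` blocks until this stored value is reached
    sem.wait(u64::MAX)
}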

/// A wrapper for `VkFence`, tracking whether a GPU signal is pending.
pub struct VulkanFence {
    /// The device that owns the fence.
    pub device: Arc<VulkanDevice>,

    /// The underlying Vulkan handle.
    fence: VkFence,

    /// Set once the fence has been handed to a queue submission; lets `wait()`
    /// return immediately for fences that were never submitted.
    is_being_signaled: AtomicBool,
}

unsafe impl Send for VulkanFence {}
unsafe impl Sync for VulkanFence {}

impl VulkanFence {
    /// Creates a fence in the unsignaled state.
    pub fn new(device: Arc<VulkanDevice>) -> Result<Self, VulkanError> {
        let vkcore = device.vkcore.clone();
        let ci = VkFenceCreateInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
            pNext: null(),
            flags: 0,
        };
        let mut fence: VkFence = null();
        vkcore.vkCreateFence(device.get_vk_device(), &ci, null(), &mut fence)?;
        Ok(Self {
            device,
            fence,
            is_being_signaled: AtomicBool::new(false),
        })
    }

    /// Returns the raw `VkFence` handle.
    pub(crate) fn get_vk_fence(&self) -> VkFence {
        self.fence
    }

    /// Marks the fence as pending; call this right after submitting work that
    /// will signal it.
    pub fn set_is_being_signaled(&self) {
        self.is_being_signaled.store(true, Ordering::Release)
    }

    /// Queries the fence status without blocking.
    pub fn is_signaled(&self) -> Result<bool, VulkanError> {
        let vkcore = self.device.vkcore.clone();
        match vkcore.vkGetFenceStatus(self.device.get_vk_device(), self.fence) {
            Ok(_) => Ok(true),
            Err(e) => match e {
                VkError::VkNotReady(_) => Ok(false),
                others => Err(VulkanError::VkError(others)),
            }
        }
    }

    /// Like `is_signaled()`, but for a raw `VkFence` handle.
    pub fn is_signaled_vk(device: &VulkanDevice, fence: VkFence) -> Result<bool, VulkanError> {
        let vkcore = device.vkcore.clone();
        match vkcore.vkGetFenceStatus(device.get_vk_device(), fence) {
            Ok(_) => Ok(true),
            Err(e) => match e {
                VkError::VkNotReady(_) => Ok(false),
                others => Err(VulkanError::VkError(others)),
            }
        }
    }

    /// Resets the fence to the unsignaled state.
    pub fn unsignal(&self) -> Result<(), VulkanError> {
        let vkcore = self.device.vkcore.clone();
        let fences = [self.fence];
        Ok(vkcore.vkResetFences(self.device.get_vk_device(), 1, fences.as_ptr())?)
    }

    /// Resets several fences with a single `vkResetFences` call.
    pub fn unsignal_multi(fences: &[Self]) -> Result<(), VulkanError> {
        if fences.is_empty() {
            Ok(())
        } else {
            let vkcore = &fences[0].device.vkcore;
            let vkdevice = fences[0].device.get_vk_device();
            let fences: Vec<VkFence> = fences.iter().map(|f| f.get_vk_fence()).collect();
            Ok(vkcore.vkResetFences(vkdevice, fences.len() as u32, fences.as_ptr())?)
        }
    }

    /// Like `unsignal_multi()`, but for raw `VkFence` handles.
    pub fn unsignal_multi_vk(device: &VulkanDevice, fences: &[VkFence]) -> Result<(), VulkanError> {
        if fences.is_empty() {
            Ok(())
        } else {
            let vkcore = device.vkcore.clone();
            Ok(vkcore.vkResetFences(device.get_vk_device(), fences.len() as u32, fences.as_ptr())?)
        }
    }

    /// Blocks until the fence is signaled or `timeout` nanoseconds elapse.
    /// Returns immediately if the fence was never marked as pending.
    pub fn wait(&self, timeout: u64) -> Result<(), VulkanError> {
        if !self.is_being_signaled.load(Ordering::Acquire) {
            return Ok(())
        }
        let vk_device = self.device.get_vk_device();
        let fences = [self.fence];
        let vkcore = self.device.vkcore.clone();
        vkcore.vkWaitForFences(vk_device, 1, fences.as_ptr(), 0, timeout)?;
        self.is_being_signaled.store(false, Ordering::Release);
        Ok(())
    }

    /// Like `wait()`, but for a raw `VkFence` handle.
    pub fn wait_vk(device: &VulkanDevice, fence: VkFence, timeout: u64) -> Result<(), VulkanError> {
        let vk_device = device.get_vk_device();
        let fences = [fence];
        let vkcore = device.vkcore.clone();
        vkcore.vkWaitForFences(vk_device, 1, fences.as_ptr(), 0, timeout)?;
        Ok(())
    }

    /// Waits on several fences; with `any` set, returns when any one of them
    /// signals. Returns immediately if any fence was never marked as pending.
    pub fn wait_multi(fences: &[Self], timeout: u64, any: bool) -> Result<(), VulkanError> {
        if fences.is_empty() {
            Ok(())
        } else {
            for fence in fences.iter() {
                if !fence.is_being_signaled.load(Ordering::Acquire) {
                    return Ok(());
                }
            }
            let vkcore = fences[0].device.vkcore.clone();
            let vk_device = fences[0].device.get_vk_device();
            let vk_fences: Vec<VkFence> = fences.iter().map(|f| f.get_vk_fence()).collect();
            vkcore.vkWaitForFences(vk_device, vk_fences.len() as u32, vk_fences.as_ptr(), if any { 0 } else { 1 }, timeout)?;
            for fence in fences.iter() {
                fence.is_being_signaled.store(false, Ordering::Release);
            }
            Ok(())
        }
    }

    /// Like `wait_multi()`, but for raw `VkFence` handles.
    pub fn wait_multi_vk(device: &VulkanDevice, fences: &[VkFence], timeout: u64, any: bool) -> Result<(), VulkanError> {
        if fences.is_empty() {
            Ok(())
        } else {
            let vkcore = device.vkcore.clone();
            let vk_device = device.get_vk_device();
            vkcore.vkWaitForFences(vk_device, fences.len() as u32, fences.as_ptr(), if any { 0 } else { 1 }, timeout)?;
            Ok(())
        }
    }
}

impl Debug for VulkanFence {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("VulkanFence")
            .field("fence", &self.fence)
            .finish()
    }
}

impl Drop for VulkanFence {
    fn drop(&mut self) {
        proceed_run(self.device.vkcore.vkDestroyFence(self.device.get_vk_device(), self.fence, null()))
    }
}
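
// A minimal sketch (editor addition): the intended fence lifecycle. The queue
// submission that would signal the fence is elided; without the
// `set_is_being_signaled()` call, `wait()` returns immediately.
#[allow(dead_code)]
fn fence_lifecycle(device: Arc<VulkanDevice>) -> Result<(), VulkanError> {
    let fence = VulkanFence::new(device)?;
    // ... submit work that signals `fence.get_vk_fence()` here ...
    fence.set_is_being_signaled();
    fence.wait(u64::MAX)?; // blocks until the GPU signals the fence
    fence.unsignal()       // reset so the fence can be reused
}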

/// Bookkeeping for the shared host mapping of a `VulkanMemory` allocation.
#[derive(Debug, Clone)]
pub struct MemoryMappingState {
    /// The base address of the current mapping, or null while unmapped.
    pub(crate) mapped_address: *mut c_void,

    /// How many mapping guards currently share the mapping.
    pub(crate) map_count: u32,
}

// Raw pointers do not implement `Default`, so it is implemented by hand.
impl Default for MemoryMappingState {
    fn default() -> Self {
        Self {
            mapped_address: null_mut(),
            map_count: 0,
        }
    }
}

/// A wrapper for `VkDeviceMemory` with reference-counted host mapping.
pub struct VulkanMemory {
    /// The device that owns the allocation.
    pub device: Arc<VulkanDevice>,

    /// The underlying Vulkan handle.
    memory: VkDeviceMemory,

    /// The size of the allocation in bytes.
    size: VkDeviceSize,

    /// Shared mapping bookkeeping: `vkMapMemory` runs only for the first
    /// guard, `vkUnmapMemory` only when the last guard drops.
    mapping_state: Mutex<MemoryMappingState>,

    /// Serializes the `*_locked` accessors against each other.
    mapping_lock: Mutex<()>,
}

/// The copy direction used by `manipulate_data()`.
#[derive(Debug)]
pub enum DataDirection {
    SetData,
    GetData,
}

impl VulkanMemory {
    /// Allocates device memory that satisfies `mem_reqs` and the given
    /// property flags.
    pub fn new(device: Arc<VulkanDevice>, mem_reqs: &VkMemoryRequirements, flags: VkMemoryPropertyFlags) -> Result<Self, VulkanError> {
        let vkcore = device.vkcore.clone();
        let alloc_i = VkMemoryAllocateInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
            pNext: null(),
            allocationSize: mem_reqs.size,
            memoryTypeIndex: device.get_gpu().get_memory_type_index(mem_reqs.memoryTypeBits, flags)?,
        };
        let mut memory: VkDeviceMemory = null();
        vkcore.vkAllocateMemory(device.get_vk_device(), &alloc_i, null(), &mut memory)?;
        let ret = Self {
            device,
            memory,
            size: mem_reqs.size,
            mapping_state: Mutex::new(MemoryMappingState::default()),
            mapping_lock: Mutex::new(()),
        };
        Ok(ret)
    }

    /// Returns the raw `VkDeviceMemory` handle.
    pub(crate) fn get_vk_memory(&self) -> VkDeviceMemory {
        self.memory
    }

    /// Returns the size of the allocation in bytes.
    pub fn get_size(&self) -> VkDeviceSize {
        self.size
    }

    /// Maps `size` bytes at `offset`, returning a guard that releases the
    /// shared mapping on drop.
    ///
    /// # Safety
    ///
    /// The caller must ensure overlapping mappings are not accessed
    /// concurrently; prefer `map()` or `map_locked()` for checked access.
    pub(crate) unsafe fn mapped<'a>(&'a self, offset: VkDeviceSize, size: usize) -> Result<MappedMemory<'a>, VulkanError> {
        let mut mapping_state_lock = self.mapping_state.lock().unwrap();
        if mapping_state_lock.map_count == 0 {
            self.device.vkcore.vkMapMemory(self.device.get_vk_device(), self.memory, 0, self.size, 0, &mut mapping_state_lock.mapped_address)?;
        }
        mapping_state_lock.map_count += 1;
        Ok(MappedMemory::new(self, (mapping_state_lock.mapped_address as *mut u8).wrapping_add(offset as usize) as *mut c_void, size))
    }

    /// Maps a sub-range through an exclusive borrow, which statically rules
    /// out concurrent access.
    pub fn map<'a>(&'a mut self, offset: VkDeviceSize, size: usize) -> Result<MappedMemory<'a>, VulkanError> {
        unsafe { self.mapped(offset, size) }
    }

    /// Maps a sub-range through the internal lock, allowing use from shared
    /// borrows across threads.
    pub fn map_locked<'a>(&'a self, offset: VkDeviceSize, size: usize) -> Result<LockedMappedMemoryGuard<'a>, VulkanError> {
        Ok(LockedMappedMemoryGuard::new(unsafe { self.mapped(offset, size)? }, self.mapping_lock.lock().unwrap()))
    }

    /// Like `map()`, but exposes the region as a slice of `T`.
    pub fn map_as_slice<'a, T>(&'a mut self, offset: VkDeviceSize, size: usize) -> Result<TypedMappedMemory<'a, T>, VulkanError>
    where
        T: Sized + Clone + Copy {
        Ok(TypedMappedMemory::new(self.map(offset, size)?))
    }

    /// Like `map_locked()`, but exposes the region as a slice of `T`.
    pub fn map_as_slice_locked<'a, T>(&'a self, offset: VkDeviceSize, size: usize) -> Result<LockedTypedMappedMemoryGuard<'a, T, ()>, VulkanError>
    where
        T: Sized + Clone + Copy {
        Ok(LockedTypedMappedMemoryGuard::new(unsafe { self.mapped(offset, size)? }, self.mapping_lock.lock().unwrap()))
    }

    /// Copies `size` bytes between `data` and the memory at `offset`, in the
    /// given direction, under the mapping lock.
    pub fn manipulate_data(&self, data: *mut c_void, offset: VkDeviceSize, size: usize, direction: DataDirection) -> Result<(), VulkanError> {
        let map_guard = self.map_locked(offset, size)?;
        match direction {
            DataDirection::SetData => unsafe { copy(data as *const u8, map_guard.get_address() as *mut u8, size) },
            DataDirection::GetData => unsafe { copy(map_guard.get_address() as *const u8, data as *mut u8, size) },
        }
        Ok(())
    }

    /// Copies host data into the memory at `offset`.
    pub fn set_data(&self, data: *const c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
        self.manipulate_data(data as *mut c_void, offset, size, DataDirection::SetData)
    }

    /// Copies memory contents at `offset` out to host data.
    pub fn get_data(&self, data: *mut c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
        self.manipulate_data(data, offset, size, DataDirection::GetData)
    }

    /// Binds the whole allocation to a buffer.
    pub(crate) fn bind_vk_buffer(&self, buffer: VkBuffer) -> Result<(), VulkanError> {
        let vkcore = self.device.vkcore.clone();
        vkcore.vkBindBufferMemory(self.device.get_vk_device(), buffer, self.memory, 0)?;
        Ok(())
    }

    /// Binds the whole allocation to an image.
    pub(crate) fn bind_vk_image(&self, image: VkImage) -> Result<(), VulkanError> {
        let vkcore = self.device.vkcore.clone();
        vkcore.vkBindImageMemory(self.device.get_vk_device(), image, self.memory, 0)?;
        Ok(())
    }
}

impl Debug for VulkanMemory {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("VulkanMemory")
            .field("memory", &self.memory)
            .field("size", &self.size)
            .field("mapping_state", &self.mapping_state)
            .finish()
    }
}

impl Drop for VulkanMemory {
    fn drop(&mut self) {
        proceed_run(self.device.vkcore.vkFreeMemory(self.device.get_vk_device(), self.memory, null()))
    }
}

unsafe impl Send for VulkanMemory {}
unsafe impl Sync for VulkanMemory {}
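
// A minimal sketch (editor addition): thread-safe access through
// `map_locked()`, which holds `mapping_lock` for the guard's lifetime so
// concurrent callers cannot interleave on the same region. Assumes `memory`
// is host-visible.
#[allow(dead_code)]
fn read_first_byte(memory: &VulkanMemory) -> Result<u8, VulkanError> {
    let guard = memory.map_locked(0, 1)?;
    // The address stays valid until `guard` drops, which also unmaps the
    // memory once the outstanding map count returns to zero.
    Ok(unsafe { *(guard.get_address() as *const u8) })
}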

/// A guard over a mapped region of `VulkanMemory`; dropping it releases the
/// shared mapping.
#[derive(Debug)]
pub struct MappedMemory<'a> {
    /// The memory this mapping points into.
    pub memory: &'a VulkanMemory,

    /// The host address of the mapped region.
    pub(crate) address: *mut c_void,

    /// The size of the mapped region in bytes.
    pub(crate) size: usize,
}

impl<'a> MappedMemory<'a> {
    pub(crate) fn new(memory: &'a VulkanMemory, address: *mut c_void, size: usize) -> Self {
        Self {
            memory,
            address,
            size,
        }
    }

    /// Returns the host address of the mapped region.
    pub fn get_address(&self) -> *const c_void {
        self.address
    }

    /// Returns the size of the mapped region in bytes.
    pub fn get_size(&self) -> usize {
        self.size
    }
}

impl Drop for MappedMemory<'_> {
    fn drop(&mut self) {
        let mut mapping_state_lock = self.memory.mapping_state.lock().unwrap();
        mapping_state_lock.map_count -= 1;
        if mapping_state_lock.map_count == 0 {
            proceed_run(self.memory.device.vkcore.vkUnmapMemory(self.memory.device.get_vk_device(), self.memory.memory))
        }
    }
}

/// A `MappedMemory` that additionally holds the memory's `mapping_lock`,
/// serializing access across threads.
#[derive(Debug)]
pub struct LockedMappedMemoryGuard<'a> {
    map_guard: MappedMemory<'a>,

    /// Held purely for its RAII effect; released when the guard drops.
    lock_guard: MutexGuard<'a, ()>,
}

impl<'a> LockedMappedMemoryGuard<'a> {
    pub(crate) fn new(map_guard: MappedMemory<'a>, lock_guard: MutexGuard<'a, ()>) -> Self {
        Self {
            map_guard,
            lock_guard,
        }
    }

    /// Returns the host address of the mapped region.
    pub fn get_address(&self) -> *const c_void {
        self.map_guard.address
    }

    /// Returns the size of the mapped region in bytes.
    pub fn get_size(&self) -> usize {
        self.map_guard.size
    }
}

/// A `MappedMemory` viewed as a typed slice of `T`; the element count is the
/// mapped size divided by `size_of::<T>()`.
#[derive(Debug)]
pub struct TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    /// Held for its RAII effect: keeps the mapping alive while the slice is in use.
    mapped_memory: MappedMemory<'a>,

    slice: &'a mut [T],
}

impl<'a, T> TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    pub fn new(mapped_memory: MappedMemory<'a>) -> Self {
        let len = mapped_memory.size / size_of::<T>();
        let slice = unsafe { from_raw_parts_mut(mapped_memory.address as *mut T, len) };
        Self {
            mapped_memory,
            slice,
        }
    }

    /// Borrows the mapped region as a shared slice.
    pub fn as_slice(&self) -> &[T] {
        self.slice
    }

    /// Borrows the mapped region as a mutable slice.
    pub fn as_slice_mut(&mut self) -> &mut [T] {
        self.slice
    }
}

// Forward the usual indexing forms to the underlying slice.
impl<'a, T> Index<usize> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = T;
    fn index(&self, index: usize) -> &T {
        &self.slice[index]
    }
}

impl<'a, T> IndexMut<usize> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, index: usize) -> &mut T {
        &mut self.slice[index]
    }
}

impl<'a, T> Index<Range<usize>> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = [T];
    fn index(&self, range: Range<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T> IndexMut<Range<usize>> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, range: Range<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

impl<'a, T> Index<RangeFrom<usize>> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = [T];
    fn index(&self, range: RangeFrom<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T> IndexMut<RangeFrom<usize>> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, range: RangeFrom<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

impl<'a, T> Index<RangeTo<usize>> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = [T];
    fn index(&self, range: RangeTo<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T> IndexMut<RangeTo<usize>> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, range: RangeTo<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

impl<'a, T> Index<RangeFull> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = [T];
    fn index(&self, _: RangeFull) -> &[T] {
        &self.slice[..]
    }
}

impl<'a, T> IndexMut<RangeFull> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, _: RangeFull) -> &mut [T] {
        &mut self.slice[..]
    }
}

impl<'a, T> Index<RangeInclusive<usize>> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = [T];
    fn index(&self, range: RangeInclusive<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T> IndexMut<RangeInclusive<usize>> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, range: RangeInclusive<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

impl<'a, T> Index<RangeToInclusive<usize>> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    type Output = [T];
    fn index(&self, range: RangeToInclusive<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T> IndexMut<RangeToInclusive<usize>> for TypedMappedMemory<'a, T>
where
    T: Sized + Clone + Copy {
    fn index_mut(&mut self, range: RangeToInclusive<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}
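
// A minimal sketch (editor addition): viewing host-visible memory as a typed
// slice. Assumes `memory` is host-visible and large enough for three `f32`s.
#[allow(dead_code)]
fn write_floats(memory: &mut VulkanMemory) -> Result<(), VulkanError> {
    let size = memory.get_size() as usize;
    let mut view = memory.map_as_slice::<f32>(0, size)?;
    view[0] = 1.0;         // single-element indexing
    view[1..=2].fill(0.5); // range indexing through the impls above
    Ok(())
}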

/// A `TypedMappedMemory` counterpart that also holds a lock guard, serializing
/// typed access across threads.
#[derive(Debug)]
pub struct LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    /// Held for its RAII effect: keeps the mapping alive while the slice is in use.
    mapped_memory: MappedMemory<'a>,

    /// Held purely for its RAII effect; released when the guard drops.
    lock_guard: MutexGuard<'a, L>,

    slice: &'a mut [T],
}

impl<'a, T, L> LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    pub fn new(mapped_memory: MappedMemory<'a>, lock_guard: MutexGuard<'a, L>) -> Self {
        let len = mapped_memory.size / size_of::<T>();
        let slice = unsafe { from_raw_parts_mut(mapped_memory.address as *mut T, len) };
        Self {
            mapped_memory,
            lock_guard,
            slice,
        }
    }

    /// Borrows the mapped region as a shared slice.
    pub fn as_slice(&self) -> &[T] {
        self.slice
    }

    /// Borrows the mapped region as a mutable slice.
    pub fn as_slice_mut(&mut self) -> &mut [T] {
        self.slice
    }
}

// Forward the usual indexing forms to the underlying slice.
impl<'a, T, L> Index<usize> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    type Output = T;
    fn index(&self, index: usize) -> &T {
        &self.slice[index]
    }
}

impl<'a, T, L> IndexMut<usize> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    fn index_mut(&mut self, index: usize) -> &mut T {
        &mut self.slice[index]
    }
}

impl<'a, T, L> Index<Range<usize>> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    type Output = [T];
    fn index(&self, range: Range<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T, L> IndexMut<Range<usize>> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    fn index_mut(&mut self, range: Range<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

impl<'a, T, L> Index<RangeFrom<usize>> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    type Output = [T];
    fn index(&self, range: RangeFrom<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T, L> IndexMut<RangeFrom<usize>> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    fn index_mut(&mut self, range: RangeFrom<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

impl<'a, T, L> Index<RangeTo<usize>> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    type Output = [T];
    fn index(&self, range: RangeTo<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T, L> IndexMut<RangeTo<usize>> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    fn index_mut(&mut self, range: RangeTo<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

impl<'a, T, L> Index<RangeFull> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    type Output = [T];
    fn index(&self, _: RangeFull) -> &[T] {
        &self.slice[..]
    }
}

impl<'a, T, L> IndexMut<RangeFull> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    fn index_mut(&mut self, _: RangeFull) -> &mut [T] {
        &mut self.slice[..]
    }
}

impl<'a, T, L> Index<RangeInclusive<usize>> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    type Output = [T];
    fn index(&self, range: RangeInclusive<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T, L> IndexMut<RangeInclusive<usize>> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    fn index_mut(&mut self, range: RangeInclusive<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

impl<'a, T, L> Index<RangeToInclusive<usize>> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    type Output = [T];
    fn index(&self, range: RangeToInclusive<usize>) -> &[T] {
        &self.slice[range]
    }
}

impl<'a, T, L> IndexMut<RangeToInclusive<usize>> for LockedTypedMappedMemoryGuard<'a, T, L>
where
    T: Sized + Clone + Copy,
    L: ?Sized {
    fn index_mut(&mut self, range: RangeToInclusive<usize>) -> &mut [T] {
        &mut self.slice[range]
    }
}

/// The format/offset/range triple describing a `VulkanBufferView`.
#[derive(Debug, Clone, Copy)]
pub struct BufferViewRange {
    /// The texel format the buffer data is interpreted as.
    pub format: VkFormat,

    /// The byte offset of the view into the buffer.
    pub offset: VkDeviceSize,

    /// The byte length of the view.
    pub range: VkDeviceSize,
}

/// A wrapper for `VkBufferView`, keeping its source buffer alive.
pub struct VulkanBufferView {
    /// The device that owns the view.
    pub device: Arc<VulkanDevice>,

    /// The buffer the view refers to.
    buffer: Arc<VulkanBuffer>,

    /// The underlying Vulkan handle.
    buffer_view: VkBufferView,

    /// The format and byte range the view covers.
    range: BufferViewRange,
}

impl VulkanBufferView {
    /// Creates a view over the part of `buffer` described by `range`.
    pub fn new_partial(buffer: Arc<VulkanBuffer>, range: &BufferViewRange) -> Result<Self, VulkanError> {
        let buffer_view_ci = VkBufferViewCreateInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
            pNext: null(),
            flags: 0,
            buffer: buffer.get_vk_buffer(),
            format: range.format,
            offset: range.offset,
            range: range.range,
        };
        let mut buffer_view = null();
        buffer.device.vkcore.vkCreateBufferView(buffer.device.get_vk_device(), &buffer_view_ci, null(), &mut buffer_view)?;
        Ok(Self {
            device: buffer.device.clone(),
            buffer,
            buffer_view,
            range: *range,
        })
    }

    /// Creates a view covering the whole buffer with the given format.
    pub fn new(buffer: Arc<VulkanBuffer>, format: VkFormat) -> Result<Self, VulkanError> {
        let range = BufferViewRange {
            format,
            offset: 0,
            range: buffer.get_size(),
        };
        Self::new_partial(buffer, &range)
    }

    /// Returns the raw `VkBufferView` handle.
    pub(crate) fn get_vk_buffer_view(&self) -> VkBufferView {
        self.buffer_view
    }

    /// Returns the format and byte range the view covers.
    pub fn get_range(&self) -> &BufferViewRange {
        &self.range
    }
}

impl Debug for VulkanBufferView {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("VulkanBufferView")
            .field("buffer", &self.buffer)
            .field("buffer_view", &self.buffer_view)
            .field("range", &self.range)
            .finish()
    }
}

impl Drop for VulkanBufferView {
    fn drop(&mut self) {
        proceed_run(self.device.vkcore.vkDestroyBufferView(self.device.get_vk_device(), self.buffer_view, null()))
    }
}
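
// A minimal sketch (editor addition): a full-range texel-buffer view. Assumes
// `buffer` was created with a usage flag that allows buffer views (e.g. a
// uniform or storage texel buffer) and already has memory bound.
#[allow(dead_code)]
fn full_rgba32f_view(buffer: Arc<VulkanBuffer>) -> Result<VulkanBufferView, VulkanError> {
    VulkanBufferView::new(buffer, VkFormat::VK_FORMAT_R32G32B32A32_SFLOAT)
}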

/// A wrapper for `VkBuffer`. Memory is allocated and bound separately through
/// `VulkanMemory`.
pub struct VulkanBuffer {
    /// The device that owns the buffer.
    pub device: Arc<VulkanDevice>,

    /// The size of the buffer in bytes.
    size: VkDeviceSize,

    /// The underlying Vulkan handle.
    buffer: VkBuffer,
}

impl VulkanBuffer {
    /// Creates an exclusive-sharing buffer of `size` bytes for the given usage.
    pub fn new(device: Arc<VulkanDevice>, size: VkDeviceSize, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
        let vkcore = device.vkcore.clone();
        let vkdevice = device.get_vk_device();
        let buffer_ci = VkBufferCreateInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
            pNext: null(),
            flags: 0,
            size,
            usage,
            sharingMode: VkSharingMode::VK_SHARING_MODE_EXCLUSIVE,
            queueFamilyIndexCount: 0,
            pQueueFamilyIndices: null(),
        };
        let mut buffer: VkBuffer = null();
        vkcore.vkCreateBuffer(vkdevice, &buffer_ci, null(), &mut buffer)?;
        Ok(Self {
            device,
            size,
            buffer,
        })
    }

    /// Queries the size, alignment, and memory-type requirements for backing
    /// this buffer.
    pub fn get_memory_requirements(&self) -> Result<VkMemoryRequirements, VulkanError> {
        let vkcore = self.device.vkcore.clone();
        let mut ret: VkMemoryRequirements = unsafe { MaybeUninit::zeroed().assume_init() };
        vkcore.vkGetBufferMemoryRequirements(self.device.get_vk_device(), self.buffer, &mut ret)?;
        Ok(ret)
    }

    /// Returns the raw `VkBuffer` handle.
    pub(crate) fn get_vk_buffer(&self) -> VkBuffer {
        self.buffer
    }

    /// Returns the size of the buffer in bytes.
    pub fn get_size(&self) -> VkDeviceSize {
        self.size
    }
}

impl Debug for VulkanBuffer {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("VulkanBuffer")
            .field("size", &self.size)
            .field("buffer", &self.buffer)
            .finish()
    }
}

impl Drop for VulkanBuffer {
    fn drop(&mut self) {
        proceed_run(self.device.vkcore.vkDestroyBuffer(self.device.get_vk_device(), self.buffer, null()))
    }
}

unsafe impl Send for VulkanBuffer {}
unsafe impl Sync for VulkanBuffer {}
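
// A minimal sketch (editor addition): the create → query-requirements →
// allocate → bind sequence, followed by a byte round trip through
// host-visible memory. Assumes `device` is an initialized `Arc<VulkanDevice>`.
#[allow(dead_code)]
fn buffer_roundtrip(device: Arc<VulkanDevice>) -> Result<(), VulkanError> {
    let buffer = VulkanBuffer::new(device.clone(), 64, VkBufferUsageFlagBits::VK_BUFFER_USAGE_TRANSFER_SRC_BIT as VkBufferUsageFlags)?;
    let memory = VulkanMemory::new(device, &buffer.get_memory_requirements()?,
        VkMemoryPropertyFlagBits::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT as VkMemoryPropertyFlags |
        VkMemoryPropertyFlagBits::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT as VkMemoryPropertyFlags)?;
    memory.bind_vk_buffer(buffer.get_vk_buffer())?;
    let src = [1u8, 2, 3, 4];
    let mut dst = [0u8; 4];
    memory.set_data(src.as_ptr() as *const c_void, 0, src.len())?;
    memory.get_data(dst.as_mut_ptr() as *mut c_void, 0, dst.len())?;
    debug_assert_eq!(src, dst);
    Ok(())
}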

/// A byte range within a buffer.
#[derive(Debug)]
pub struct BufferRegion {
    pub offset: VkDeviceSize,
    pub size: VkDeviceSize,
}

/// A host-visible, persistently mapped buffer used as the source side of
/// host-to-device transfers.
pub struct StagingBuffer {
    /// The device that owns the buffer.
    pub device: Arc<VulkanDevice>,

    /// The underlying transfer-source buffer.
    pub buffer: VulkanBuffer,

    /// The host-visible, host-coherent memory backing the buffer.
    pub memory: VulkanMemory,

    /// The persistent host address the memory is mapped at.
    pub(crate) address: *mut c_void,
}

impl StagingBuffer {
    /// Creates the buffer, binds host-visible coherent memory, and maps it
    /// persistently for the lifetime of the `StagingBuffer`.
    pub fn new(device: Arc<VulkanDevice>, size: VkDeviceSize) -> Result<Self, VulkanError> {
        let buffer = VulkanBuffer::new(device.clone(), size, VkBufferUsageFlagBits::VK_BUFFER_USAGE_TRANSFER_SRC_BIT as VkBufferUsageFlags)?;
        let memory = VulkanMemory::new(device.clone(), &buffer.get_memory_requirements()?,
            VkMemoryPropertyFlagBits::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT as VkMemoryPropertyFlags |
            VkMemoryPropertyFlagBits::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT as VkMemoryPropertyFlags)?;
        memory.bind_vk_buffer(buffer.get_vk_buffer())?;
        let mut address: *mut c_void = null_mut();
        // Take the first map-count reference by hand so the mapping stays
        // alive until `drop()` releases it.
        let mut mapping_state_lock = memory.mapping_state.lock().unwrap();
        assert_eq!(mapping_state_lock.map_count, 0, "The newly created `VulkanMemory` must be unmapped.");
        mapping_state_lock.map_count += 1;
        device.vkcore.vkMapMemory(device.get_vk_device(), memory.get_vk_memory(), 0, size, 0, &mut address)?;
        mapping_state_lock.mapped_address = address;
        drop(mapping_state_lock);
        Ok(Self {
            device,
            buffer,
            memory,
            address,
        })
    }

    /// Returns the raw `VkBuffer` handle.
    pub(crate) fn get_vk_buffer(&self) -> VkBuffer {
        self.buffer.get_vk_buffer()
    }

    /// Returns the raw `VkDeviceMemory` handle.
    pub(crate) fn get_vk_memory(&self) -> VkDeviceMemory {
        self.memory.get_vk_memory()
    }

    /// Returns the size of the buffer in bytes.
    pub fn get_size(&self) -> VkDeviceSize {
        self.buffer.get_size()
    }

    /// Returns the persistent host address of the mapped memory.
    pub fn get_address(&self) -> *mut c_void {
        self.address
    }

    /// Copies host data into the staging memory at `offset`.
    pub fn set_data(&self, data: *const c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
        self.memory.set_data(data, offset, size)?;
        Ok(())
    }

    /// Copies staging memory contents at `offset` out to host data.
    pub fn get_data(&self, data: *mut c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
        self.memory.get_data(data, offset, size)?;
        Ok(())
    }

    /// Maps a sub-range of the staging memory.
    ///
    /// # Safety
    ///
    /// Same contract as `VulkanMemory::mapped()`: the caller must ensure
    /// overlapping mappings are not accessed concurrently.
    pub(crate) unsafe fn mapped<'a>(&'a self, offset: VkDeviceSize, size: usize) -> Result<MappedMemory<'a>, VulkanError> {
        unsafe { self.memory.mapped(offset, size) }
    }

    /// See `VulkanMemory::map()`.
    pub fn map<'a>(&'a mut self, offset: VkDeviceSize, size: usize) -> Result<MappedMemory<'a>, VulkanError> {
        self.memory.map(offset, size)
    }

    /// Maps the whole staging buffer as a slice of `T`.
    pub fn map_as_slice<'a, T>(&'a mut self) -> Result<TypedMappedMemory<'a, T>, VulkanError>
    where
        T: Sized + Clone + Copy {
        self.memory.map_as_slice(0, self.get_size() as usize)
    }

    /// See `VulkanMemory::map_locked()`.
    pub fn map_locked<'a>(&'a self, offset: VkDeviceSize, size: usize) -> Result<LockedMappedMemoryGuard<'a>, VulkanError> {
        self.memory.map_locked(offset, size)
    }

    /// Maps the whole staging buffer as a locked slice of `T`.
    pub fn map_as_slice_locked<'a, T>(&'a self) -> Result<LockedTypedMappedMemoryGuard<'a, T, ()>, VulkanError>
    where
        T: Sized + Clone + Copy {
        self.memory.map_as_slice_locked(0, self.get_size() as usize)
    }
}

impl Debug for StagingBuffer {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("StagingBuffer")
            .field("buffer", &self.buffer)
            .field("memory", &self.memory)
            .field("address", &self.address)
            .finish()
    }
}

impl Drop for StagingBuffer {
    fn drop(&mut self) {
        // Release the persistent mapping reference taken in `new()`.
        let mut mapping_state_lock = self.memory.mapping_state.lock().unwrap();
        mapping_state_lock.map_count -= 1;
        if mapping_state_lock.map_count == 0 {
            proceed_run(self.device.vkcore.vkUnmapMemory(self.device.get_vk_device(), self.get_vk_memory()))
        }
    }
}
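
// A minimal sketch (editor addition): staging an upload. The staging buffer is
// persistently mapped at creation, so `set_data()` is a plain copy; recording
// the `vkCmdCopyBuffer` into a device-local buffer is elided here.
#[allow(dead_code)]
fn stage_bytes(device: Arc<VulkanDevice>, bytes: &[u8]) -> Result<StagingBuffer, VulkanError> {
    let staging = StagingBuffer::new(device, bytes.len() as VkDeviceSize)?;
    staging.set_data(bytes.as_ptr() as *const c_void, 0, bytes.len())?;
    // ... record a copy from `staging.get_vk_buffer()` here ...
    Ok(staging)
}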

/// A wrapper for `VkSampler`.
pub struct VulkanSampler {
    /// The device that owns the sampler.
    pub device: Arc<VulkanDevice>,

    /// The underlying Vulkan handle.
    sampler: VkSampler,
}

impl VulkanSampler {
    /// Creates a sampler from a caller-provided create-info.
    pub fn new(device: Arc<VulkanDevice>, sampler_ci: &VkSamplerCreateInfo) -> Result<Self, VulkanError> {
        let mut sampler = null();
        device.vkcore.vkCreateSampler(device.get_vk_device(), sampler_ci, null(), &mut sampler)?;
        Ok(Self {
            device,
            sampler,
        })
    }

    /// Creates a linear-filtering sampler with repeat addressing, optionally
    /// with mipmaps and anisotropic filtering.
    pub fn new_linear(device: Arc<VulkanDevice>, with_mipmaps: bool, anisotropy: bool) -> Result<Self, VulkanError> {
        let max_anisotropy = device.get_gpu().properties.limits.maxSamplerAnisotropy;
        let sampler_ci = VkSamplerCreateInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
            pNext: null(),
            flags: 0,
            magFilter: VkFilter::VK_FILTER_LINEAR,
            minFilter: VkFilter::VK_FILTER_LINEAR,
            mipmapMode: VkSamplerMipmapMode::VK_SAMPLER_MIPMAP_MODE_NEAREST,
            addressModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT,
            addressModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT,
            addressModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT,
            mipLodBias: 0.0,
            anisotropyEnable: if anisotropy { 1 } else { 0 },
            maxAnisotropy: max_anisotropy,
            compareEnable: 0,
            compareOp: VkCompareOp::VK_COMPARE_OP_NEVER,
            minLod: 0.0,
            maxLod: if with_mipmaps { VK_LOD_CLAMP_NONE } else { 0.0 },
            borderColor: VkBorderColor::VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
            unnormalizedCoordinates: 0,
        };
        Self::new(device, &sampler_ci)
    }

    /// Creates a nearest-filtering sampler with repeat addressing.
    pub fn new_nearest(device: Arc<VulkanDevice>, with_mipmaps: bool) -> Result<Self, VulkanError> {
        let sampler_ci = VkSamplerCreateInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
            pNext: null(),
            flags: 0,
            magFilter: VkFilter::VK_FILTER_NEAREST,
            minFilter: VkFilter::VK_FILTER_NEAREST,
            mipmapMode: VkSamplerMipmapMode::VK_SAMPLER_MIPMAP_MODE_NEAREST,
            addressModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT,
            addressModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT,
            addressModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT,
            mipLodBias: 0.0,
            anisotropyEnable: 0,
            maxAnisotropy: 1.0,
            compareEnable: 0,
            compareOp: VkCompareOp::VK_COMPARE_OP_NEVER,
            minLod: 0.0,
            maxLod: if with_mipmaps { VK_LOD_CLAMP_NONE } else { 0.0 },
            borderColor: VkBorderColor::VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
            unnormalizedCoordinates: 0,
        };
        Self::new(device, &sampler_ci)
    }

    /// Creates a linear-filtering sampler with clamp-to-edge addressing.
    pub fn new_linear_clamp(device: Arc<VulkanDevice>, with_mipmaps: bool, anisotropy: bool) -> Result<Self, VulkanError> {
        let max_anisotropy = device.get_gpu().properties.limits.maxSamplerAnisotropy;
        let sampler_ci = VkSamplerCreateInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
            pNext: null(),
            flags: 0,
            magFilter: VkFilter::VK_FILTER_LINEAR,
            minFilter: VkFilter::VK_FILTER_LINEAR,
            mipmapMode: VkSamplerMipmapMode::VK_SAMPLER_MIPMAP_MODE_NEAREST,
            addressModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
            addressModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
            addressModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
            mipLodBias: 0.0,
            anisotropyEnable: if anisotropy { 1 } else { 0 },
            maxAnisotropy: max_anisotropy,
            compareEnable: 0,
            compareOp: VkCompareOp::VK_COMPARE_OP_NEVER,
            minLod: 0.0,
            maxLod: if with_mipmaps { VK_LOD_CLAMP_NONE } else { 0.0 },
            borderColor: VkBorderColor::VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
            unnormalizedCoordinates: 0,
        };
        Self::new(device, &sampler_ci)
    }

    /// Creates a nearest-filtering sampler with clamp-to-edge addressing.
    pub fn new_nearest_clamp(device: Arc<VulkanDevice>, with_mipmaps: bool) -> Result<Self, VulkanError> {
        let sampler_ci = VkSamplerCreateInfo {
            sType: VkStructureType::VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
            pNext: null(),
            flags: 0,
            magFilter: VkFilter::VK_FILTER_NEAREST,
            minFilter: VkFilter::VK_FILTER_NEAREST,
            mipmapMode: VkSamplerMipmapMode::VK_SAMPLER_MIPMAP_MODE_NEAREST,
            addressModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
            addressModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
            addressModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
            mipLodBias: 0.0,
            anisotropyEnable: 0,
            maxAnisotropy: 1.0,
            compareEnable: 0,
            compareOp: VkCompareOp::VK_COMPARE_OP_NEVER,
            minLod: 0.0,
            maxLod: if with_mipmaps { VK_LOD_CLAMP_NONE } else { 0.0 },
            borderColor: VkBorderColor::VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
            unnormalizedCoordinates: 0,
        };
        Self::new(device, &sampler_ci)
    }

    /// Returns the raw `VkSampler` handle.
    pub fn get_vk_sampler(&self) -> VkSampler {
        self.sampler
    }
}

impl Debug for VulkanSampler {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("VulkanSampler")
            .field("sampler", &self.sampler)
            .finish()
    }
}

impl Drop for VulkanSampler {
    fn drop(&mut self) {
        proceed_run(self.device.vkcore.vkDestroySampler(self.device.get_vk_device(), self.sampler, null()))
    }
}
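
// A minimal sketch (editor addition): picking one of the preset constructors.
// Linear filtering with mipmaps and anisotropy is a sensible default for
// sampled textures; `new()` remains available for custom create-infos.
#[allow(dead_code)]
fn default_texture_sampler(device: Arc<VulkanDevice>) -> Result<VulkanSampler, VulkanError> {
    VulkanSampler::new_linear(device, true, true)
}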