1use crate::Instance;
2use ash::vk::AllocationCallbacks;
3use ash::{khr, vk};
4use std::borrow::Cow;
5use std::cell::RefCell;
6use std::cmp::Ordering;
7use std::collections::BTreeSet;
8use std::ffi::{CStr, CString};
9use std::fmt::Debug;
10use std::hash::Hash;
11use std::hint::unreachable_unchecked;
12use std::ops::Deref;
13use std::sync::Arc;
14
15fn supports_features(
16 supported: &vk::PhysicalDeviceFeatures,
17 requested: &vk::PhysicalDeviceFeatures,
18 features_supported: &GenericFeatureChain,
19 features_requested: &GenericFeatureChain,
20) -> bool {
21 macro_rules! check_feature {
22 ($feature: ident) => {
23 if requested.$feature == vk::TRUE && supported.$feature == vk::FALSE {
24 return false;
25 }
26 };
27 }
28
29 check_feature!(robust_buffer_access);
30 check_feature!(full_draw_index_uint32);
31 check_feature!(image_cube_array);
32 check_feature!(independent_blend);
33 check_feature!(geometry_shader);
34 check_feature!(tessellation_shader);
35 check_feature!(sample_rate_shading);
36 check_feature!(dual_src_blend);
37 check_feature!(logic_op);
38 check_feature!(multi_draw_indirect);
39 check_feature!(draw_indirect_first_instance);
40 check_feature!(depth_clamp);
41 check_feature!(depth_bias_clamp);
42 check_feature!(fill_mode_non_solid);
43 check_feature!(depth_bounds);
44 check_feature!(wide_lines);
45 check_feature!(large_points);
46 check_feature!(alpha_to_one);
47 check_feature!(multi_viewport);
48 check_feature!(sampler_anisotropy);
49 check_feature!(texture_compression_etc2);
50 check_feature!(texture_compression_astc_ldr);
51 check_feature!(texture_compression_bc);
52 check_feature!(occlusion_query_precise);
53 check_feature!(pipeline_statistics_query);
54 check_feature!(vertex_pipeline_stores_and_atomics);
55 check_feature!(fragment_stores_and_atomics);
56 check_feature!(shader_tessellation_and_geometry_point_size);
57 check_feature!(shader_image_gather_extended);
58 check_feature!(shader_storage_image_extended_formats);
59 check_feature!(shader_storage_image_multisample);
60 check_feature!(shader_storage_image_read_without_format);
61 check_feature!(shader_storage_image_write_without_format);
62 check_feature!(shader_uniform_buffer_array_dynamic_indexing);
63 check_feature!(shader_sampled_image_array_dynamic_indexing);
64 check_feature!(shader_storage_buffer_array_dynamic_indexing);
65 check_feature!(shader_storage_image_array_dynamic_indexing);
66 check_feature!(shader_clip_distance);
67 check_feature!(shader_cull_distance);
68 check_feature!(shader_float64);
69 check_feature!(shader_int64);
70 check_feature!(shader_int16);
71 check_feature!(shader_resource_residency);
72 check_feature!(shader_resource_min_lod);
73 check_feature!(sparse_binding);
74 check_feature!(sparse_residency_buffer);
75 check_feature!(sparse_residency_image2_d);
76 check_feature!(sparse_residency_image3_d);
77 check_feature!(sparse_residency2_samples);
78 check_feature!(sparse_residency4_samples);
79 check_feature!(sparse_residency8_samples);
80 check_feature!(sparse_residency16_samples);
81 check_feature!(sparse_residency_aliased);
82 check_feature!(variable_multisample_rate);
83 check_feature!(inherited_queries);
84
85 features_supported.match_all(features_requested)
86}
87
88#[inline]
89fn get_first_queue_index(
90 families: &[vk::QueueFamilyProperties],
91 desired_flags: vk::QueueFlags,
92) -> Option<usize> {
93 families
94 .iter()
95 .position(|f| f.queue_flags.contains(desired_flags))
96}
97
98fn get_separate_queue_index(
102 families: &[vk::QueueFamilyProperties],
103 desired_flags: vk::QueueFlags,
104 undesired_flags: vk::QueueFlags,
105) -> Option<usize> {
106 let mut index = None;
107 for (i, family) in families.iter().enumerate() {
108 if family.queue_flags.contains(desired_flags)
109 && !family.queue_flags.contains(vk::QueueFlags::GRAPHICS)
110 {
111 if !family.queue_flags.contains(undesired_flags) {
112 return Some(i);
113 } else {
114 index = Some(i);
115 }
116 }
117 }
118
119 index
120}
121
122fn get_dedicated_queue_index(
124 families: &[vk::QueueFamilyProperties],
125 desired_flags: vk::QueueFlags,
126 undesired_flags: vk::QueueFlags,
127) -> Option<usize> {
128 families.iter().position(|f| {
129 f.queue_flags.contains(desired_flags)
130 && !f.queue_flags.contains(vk::QueueFlags::GRAPHICS)
131 && !f.queue_flags.contains(undesired_flags)
132 })
133}
134
135fn get_present_queue_index(
136 instance: &Option<khr::surface::Instance>,
137 device: vk::PhysicalDevice,
138 surface: Option<vk::SurfaceKHR>,
139 families: &[vk::QueueFamilyProperties],
140) -> Option<usize> {
141 for (i, _) in families.iter().enumerate() {
142 if let Some((surface, instance)) = surface.zip(instance.as_ref()) {
143 let present_support =
144 unsafe { instance.get_physical_device_surface_support(device, i as u32, surface) };
145
146 if let Ok(present_support) = present_support {
147 if present_support {
148 return Some(i);
149 }
150 }
151 }
152 }
153
154 None
155}
156
/// Returns the subset of `required_extensions` that the device actually
/// advertises in `available_extensions`.
///
/// `Cow<'_, str>: Borrow<str>` lets us probe the available set directly, so
/// the original O(n·m) double loop becomes O(m·log n) set lookups.
fn check_device_extension_support(
    available_extensions: &BTreeSet<Cow<'_, str>>,
    required_extensions: &BTreeSet<String>,
) -> BTreeSet<String> {
    required_extensions
        .iter()
        .filter(|required| available_extensions.contains(required.as_str()))
        .cloned()
        .collect()
}
174
/// Physical-device category preference used when ranking candidates.
///
/// NOTE(review): the explicit discriminants (0-4) appear to mirror
/// `vk::PhysicalDeviceType`'s raw values — confirm before relying on casts.
#[repr(u8)]
#[derive(Default, Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone)]
pub enum PreferredDeviceType {
    Other = 0,
    Integrated = 1,
    /// Default: prefer dedicated GPUs.
    #[default]
    Discrete = 2,
    VirtualGpu = 3,
    Cpu = 4,
}
185
/// How well a physical device satisfies the selection criteria.
///
/// The derived `Ord` follows declaration order (`Yes < Partial < No`), so
/// sorting `PhysicalDevice`s ascending places fully suitable devices first
/// (see `impl Ord for PhysicalDevice`).
#[derive(Default, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub enum Suitable {
    #[default]
    Yes,
    Partial,
    No,
}
193
/// A candidate physical device together with everything queried about it
/// during selection.
#[derive(Default, Debug)]
pub struct PhysicalDevice {
    /// Driver-reported device name (copied out of `properties`).
    name: String,
    /// Raw Vulkan handle.
    physical_device: vk::PhysicalDevice,
    /// Surface this device was evaluated against, if any.
    surface: Option<vk::SurfaceKHR>,

    /// Core 1.0 features supported by the device.
    features: vk::PhysicalDeviceFeatures,
    properties: vk::PhysicalDeviceProperties,
    memory_properties: vk::PhysicalDeviceMemoryProperties,
    /// Extensions that will be enabled at device creation.
    extensions_to_enable: BTreeSet<Cow<'static, str>>,
    /// Every extension the device advertises.
    available_extensions: BTreeSet<Cow<'static, str>>,
    queue_families: Vec<vk::QueueFamilyProperties>,
    // Flags carried over from the selector's criteria; semantics are defined
    // by the (not fully visible here) selection code.
    defer_surface_initialization: bool,
    properties2_ext_enabled: bool,
    /// Verdict produced by the selector for this device.
    suitable: Suitable,
    /// Extended (1.1/1.2/1.3) features the device supports.
    supported_features_chain: GenericFeatureChain<'static>,
    /// Extended features the caller asked for.
    requested_features_chain: GenericFeatureChain<'static>,
}
213
/// Lets callers borrow the raw Vulkan handle directly.
impl AsRef<vk::PhysicalDevice> for PhysicalDevice {
    fn as_ref(&self) -> &vk::PhysicalDevice {
        &self.physical_device
    }
}

impl Eq for PhysicalDevice {}

/// Equality is identity-based: name, raw handle and suitability verdict —
/// not the full set of queried properties.
impl PartialEq<Self> for PhysicalDevice {
    fn eq(&self, other: &Self) -> bool {
        self.name.eq(&other.name)
            && self.physical_device.eq(&other.physical_device)
            && self.suitable.eq(&other.suitable)
    }
}

impl PartialOrd for PhysicalDevice {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

/// Orders devices solely by suitability (`Yes < Partial < No`), so an
/// ascending sort puts the best candidates first.
///
/// NOTE(review): `cmp` ignores `name`/`physical_device` while `eq` uses them,
/// so two devices can compare `Equal` without being `==` — technically at odds
/// with `Ord`'s consistency contract. Harmless for plain sorting, but confirm
/// no ordered-container usage depends on it.
impl Ord for PhysicalDevice {
    fn cmp(&self, other: &Self) -> Ordering {
        self.suitable.cmp(&other.suitable)
    }
}
241
242impl PhysicalDevice {
243 pub fn msaa_samples(&self) -> vk::SampleCountFlags {
244 let limits = &self.properties.limits;
245 let counts =
246 limits.framebuffer_color_sample_counts & limits.framebuffer_depth_sample_counts;
247
248 if counts.contains(vk::SampleCountFlags::TYPE_64) {
249 return vk::SampleCountFlags::TYPE_64;
250 }
251
252 if counts.contains(vk::SampleCountFlags::TYPE_32) {
253 return vk::SampleCountFlags::TYPE_32;
254 }
255
256 if counts.contains(vk::SampleCountFlags::TYPE_16) {
257 return vk::SampleCountFlags::TYPE_16;
258 }
259
260 if counts.contains(vk::SampleCountFlags::TYPE_8) {
261 return vk::SampleCountFlags::TYPE_8;
262 }
263
264 if counts.contains(vk::SampleCountFlags::TYPE_4) {
265 return vk::SampleCountFlags::TYPE_4;
266 }
267
268 if counts.contains(vk::SampleCountFlags::TYPE_2) {
269 return vk::SampleCountFlags::TYPE_2;
270 }
271
272 vk::SampleCountFlags::TYPE_1
273 }
274 pub fn enable_extension_if_present(&mut self, extension: impl Into<Cow<'static, str>>) -> bool {
275 let extension = extension.into();
276
277 if self.available_extensions.contains(&extension) {
278 self.extensions_to_enable.insert(extension)
279 } else {
280 false
281 }
282 }
283
284 pub fn enable_extensions_if_present<
285 T: Eq + Hash + Into<Cow<'static, str>>,
286 I: IntoIterator<Item = T>,
287 >(
288 &mut self,
289 extensions: I,
290 ) -> bool {
291 let extensions = extensions.into_iter().map(Into::into);
292 let extensions = BTreeSet::from_iter(extensions);
293 let intersection: BTreeSet<_> = self
294 .available_extensions
295 .intersection(&extensions)
296 .cloned()
297 .collect();
298
299 if intersection.len() == extensions.len() {
300 self.extensions_to_enable.extend(intersection);
301 true
302 } else {
303 false
304 }
305 }
306}
307
/// A tagged wrapper around the `vk::PhysicalDeviceVulkan1xFeatures` structs
/// that can appear in a `pNext` feature chain, so they can be stored
/// uniformly in `GenericFeatureChain`.
#[derive(Debug, Clone)]
pub enum VulkanPhysicalDeviceFeature2<'a> {
    PhysicalDeviceVulkan11(vk::PhysicalDeviceVulkan11Features<'a>),
    PhysicalDeviceVulkan12(vk::PhysicalDeviceVulkan12Features<'a>),
    PhysicalDeviceVulkan13(vk::PhysicalDeviceVulkan13Features<'a>),
}
316
317fn match_features(
318 requested: &VulkanPhysicalDeviceFeature2<'_>,
319 supported: &VulkanPhysicalDeviceFeature2<'_>,
320) -> bool {
321 assert_eq!(requested.s_type(), supported.s_type());
322
323 match (requested, supported) {
324 (
325 VulkanPhysicalDeviceFeature2::PhysicalDeviceVulkan11(r),
326 VulkanPhysicalDeviceFeature2::PhysicalDeviceVulkan11(s),
327 ) => {
328 if r.storage_buffer16_bit_access == vk::TRUE
329 && s.storage_buffer16_bit_access == vk::FALSE
330 {
331 return false;
332 }
333 if r.uniform_and_storage_buffer16_bit_access == vk::TRUE
334 && s.uniform_and_storage_buffer16_bit_access == vk::FALSE
335 {
336 return false;
337 }
338 if r.storage_push_constant16 == vk::TRUE && s.storage_push_constant16 == vk::FALSE {
339 return false;
340 }
341 if r.storage_input_output16 == vk::TRUE && s.storage_input_output16 == vk::FALSE {
342 return false;
343 }
344 if r.multiview == vk::TRUE && s.multiview == vk::FALSE {
345 return false;
346 }
347 if r.multiview_geometry_shader == vk::TRUE && s.multiview_geometry_shader == vk::FALSE {
348 return false;
349 }
350 if r.multiview_tessellation_shader == vk::TRUE
351 && s.multiview_tessellation_shader == vk::FALSE
352 {
353 return false;
354 }
355 if r.variable_pointers_storage_buffer == vk::TRUE
356 && s.variable_pointers_storage_buffer == vk::FALSE
357 {
358 return false;
359 }
360 if r.variable_pointers == vk::TRUE && s.variable_pointers == vk::FALSE {
361 return false;
362 }
363 if r.protected_memory == vk::TRUE && s.protected_memory == vk::FALSE {
364 return false;
365 }
366 if r.sampler_ycbcr_conversion == vk::TRUE && s.sampler_ycbcr_conversion == vk::FALSE {
367 return false;
368 }
369 if r.shader_draw_parameters == vk::TRUE && s.shader_draw_parameters == vk::FALSE {
370 return false;
371 }
372 true
373 }
374 (
375 VulkanPhysicalDeviceFeature2::PhysicalDeviceVulkan12(r),
376 VulkanPhysicalDeviceFeature2::PhysicalDeviceVulkan12(s),
377 ) => {
378 if r.sampler_mirror_clamp_to_edge == vk::TRUE
379 && s.sampler_mirror_clamp_to_edge == vk::FALSE
380 {
381 return false;
382 }
383 if r.draw_indirect_count == vk::TRUE && s.draw_indirect_count == vk::FALSE {
384 return false;
385 }
386 if r.storage_buffer8_bit_access == vk::TRUE && s.storage_buffer8_bit_access == vk::FALSE
387 {
388 return false;
389 }
390 if r.uniform_and_storage_buffer8_bit_access == vk::TRUE
391 && s.uniform_and_storage_buffer8_bit_access == vk::FALSE
392 {
393 return false;
394 }
395 if r.storage_push_constant8 == vk::TRUE && s.storage_push_constant8 == vk::FALSE {
396 return false;
397 }
398 if r.shader_buffer_int64_atomics == vk::TRUE
399 && s.shader_buffer_int64_atomics == vk::FALSE
400 {
401 return false;
402 }
403 if r.shader_shared_int64_atomics == vk::TRUE
404 && s.shader_shared_int64_atomics == vk::FALSE
405 {
406 return false;
407 }
408 if r.shader_float16 == vk::TRUE && s.shader_float16 == vk::FALSE {
409 return false;
410 }
411 if r.shader_int8 == vk::TRUE && s.shader_int8 == vk::FALSE {
412 return false;
413 }
414 if r.descriptor_indexing == vk::TRUE && s.descriptor_indexing == vk::FALSE {
415 return false;
416 }
417 if r.shader_input_attachment_array_dynamic_indexing == vk::TRUE
418 && s.shader_input_attachment_array_dynamic_indexing == vk::FALSE
419 {
420 return false;
421 }
422 if r.shader_uniform_texel_buffer_array_dynamic_indexing == vk::TRUE
423 && s.shader_uniform_texel_buffer_array_dynamic_indexing == vk::FALSE
424 {
425 return false;
426 }
427 if r.shader_storage_texel_buffer_array_dynamic_indexing == vk::TRUE
428 && s.shader_storage_texel_buffer_array_dynamic_indexing == vk::FALSE
429 {
430 return false;
431 }
432 if r.shader_uniform_buffer_array_non_uniform_indexing == vk::TRUE
433 && s.shader_uniform_buffer_array_non_uniform_indexing == vk::FALSE
434 {
435 return false;
436 }
437 if r.shader_sampled_image_array_non_uniform_indexing == vk::TRUE
438 && s.shader_sampled_image_array_non_uniform_indexing == vk::FALSE
439 {
440 return false;
441 }
442 if r.shader_storage_buffer_array_non_uniform_indexing == vk::TRUE
443 && s.shader_storage_buffer_array_non_uniform_indexing == vk::FALSE
444 {
445 return false;
446 }
447 if r.shader_storage_image_array_non_uniform_indexing == vk::TRUE
448 && s.shader_storage_image_array_non_uniform_indexing == vk::FALSE
449 {
450 return false;
451 }
452 if r.shader_input_attachment_array_non_uniform_indexing == vk::TRUE
453 && s.shader_input_attachment_array_non_uniform_indexing == vk::FALSE
454 {
455 return false;
456 }
457 if r.shader_uniform_texel_buffer_array_non_uniform_indexing == vk::TRUE
458 && s.shader_uniform_texel_buffer_array_non_uniform_indexing == vk::FALSE
459 {
460 return false;
461 }
462 if r.shader_storage_texel_buffer_array_non_uniform_indexing == vk::TRUE
463 && s.shader_storage_texel_buffer_array_non_uniform_indexing == vk::FALSE
464 {
465 return false;
466 }
467 if r.descriptor_binding_uniform_buffer_update_after_bind == vk::TRUE
468 && s.descriptor_binding_uniform_buffer_update_after_bind == vk::FALSE
469 {
470 return false;
471 }
472 if r.descriptor_binding_sampled_image_update_after_bind == vk::TRUE
473 && s.descriptor_binding_sampled_image_update_after_bind == vk::FALSE
474 {
475 return false;
476 }
477 if r.descriptor_binding_storage_image_update_after_bind == vk::TRUE
478 && s.descriptor_binding_storage_image_update_after_bind == vk::FALSE
479 {
480 return false;
481 }
482 if r.descriptor_binding_storage_buffer_update_after_bind == vk::TRUE
483 && s.descriptor_binding_storage_buffer_update_after_bind == vk::FALSE
484 {
485 return false;
486 }
487 if r.descriptor_binding_uniform_texel_buffer_update_after_bind == vk::TRUE
488 && s.descriptor_binding_uniform_texel_buffer_update_after_bind == vk::FALSE
489 {
490 return false;
491 }
492 if r.descriptor_binding_storage_texel_buffer_update_after_bind == vk::TRUE
493 && s.descriptor_binding_storage_texel_buffer_update_after_bind == vk::FALSE
494 {
495 return false;
496 }
497 if r.descriptor_binding_update_unused_while_pending == vk::TRUE
498 && s.descriptor_binding_update_unused_while_pending == vk::FALSE
499 {
500 return false;
501 }
502 if r.descriptor_binding_partially_bound == vk::TRUE
503 && s.descriptor_binding_partially_bound == vk::FALSE
504 {
505 return false;
506 }
507 if r.descriptor_binding_variable_descriptor_count == vk::TRUE
508 && s.descriptor_binding_variable_descriptor_count == vk::FALSE
509 {
510 return false;
511 }
512 if r.runtime_descriptor_array == vk::TRUE && s.runtime_descriptor_array == vk::FALSE {
513 return false;
514 }
515 if r.sampler_filter_minmax == vk::TRUE && s.sampler_filter_minmax == vk::FALSE {
516 return false;
517 }
518 if r.scalar_block_layout == vk::TRUE && s.scalar_block_layout == vk::FALSE {
519 return false;
520 }
521 if r.imageless_framebuffer == vk::TRUE && s.imageless_framebuffer == vk::FALSE {
522 return false;
523 }
524 if r.uniform_buffer_standard_layout == vk::TRUE
525 && s.uniform_buffer_standard_layout == vk::FALSE
526 {
527 return false;
528 }
529 if r.shader_subgroup_extended_types == vk::TRUE
530 && s.shader_subgroup_extended_types == vk::FALSE
531 {
532 return false;
533 }
534 if r.separate_depth_stencil_layouts == vk::TRUE
535 && s.separate_depth_stencil_layouts == vk::FALSE
536 {
537 return false;
538 }
539 if r.host_query_reset == vk::TRUE && s.host_query_reset == vk::FALSE {
540 return false;
541 }
542 if r.timeline_semaphore == vk::TRUE && s.timeline_semaphore == vk::FALSE {
543 return false;
544 }
545 if r.buffer_device_address == vk::TRUE && s.buffer_device_address == vk::FALSE {
546 return false;
547 }
548 if r.buffer_device_address_capture_replay == vk::TRUE
549 && s.buffer_device_address_capture_replay == vk::FALSE
550 {
551 return false;
552 }
553 if r.buffer_device_address_multi_device == vk::TRUE
554 && s.buffer_device_address_multi_device == vk::FALSE
555 {
556 return false;
557 }
558 if r.vulkan_memory_model == vk::TRUE && s.vulkan_memory_model == vk::FALSE {
559 return false;
560 }
561 if r.vulkan_memory_model_device_scope == vk::TRUE
562 && s.vulkan_memory_model_device_scope == vk::FALSE
563 {
564 return false;
565 }
566 if r.vulkan_memory_model_availability_visibility_chains == vk::TRUE
567 && s.vulkan_memory_model_availability_visibility_chains == vk::FALSE
568 {
569 return false;
570 }
571 if r.shader_output_viewport_index == vk::TRUE
572 && s.shader_output_viewport_index == vk::FALSE
573 {
574 return false;
575 }
576 if r.shader_output_layer == vk::TRUE && s.shader_output_layer == vk::FALSE {
577 return false;
578 }
579 if r.subgroup_broadcast_dynamic_id == vk::TRUE
580 && s.subgroup_broadcast_dynamic_id == vk::FALSE
581 {
582 return false;
583 }
584 true
585 }
586 (
587 VulkanPhysicalDeviceFeature2::PhysicalDeviceVulkan13(r),
588 VulkanPhysicalDeviceFeature2::PhysicalDeviceVulkan13(s),
589 ) => {
590 if r.robust_image_access == vk::TRUE && s.robust_image_access == vk::FALSE {
591 return false;
592 }
593 if r.inline_uniform_block == vk::TRUE && s.inline_uniform_block == vk::FALSE {
594 return false;
595 }
596 if r.descriptor_binding_inline_uniform_block_update_after_bind == vk::TRUE
597 && s.descriptor_binding_inline_uniform_block_update_after_bind == vk::FALSE
598 {
599 return false;
600 }
601 if r.pipeline_creation_cache_control == vk::TRUE
602 && s.pipeline_creation_cache_control == vk::FALSE
603 {
604 return false;
605 }
606 if r.private_data == vk::TRUE && s.private_data == vk::FALSE {
607 return false;
608 }
609 if r.shader_demote_to_helper_invocation == vk::TRUE
610 && s.shader_demote_to_helper_invocation == vk::FALSE
611 {
612 return false;
613 }
614 if r.shader_terminate_invocation == vk::TRUE
615 && s.shader_terminate_invocation == vk::FALSE
616 {
617 return false;
618 }
619 if r.subgroup_size_control == vk::TRUE && s.subgroup_size_control == vk::FALSE {
620 return false;
621 }
622 if r.compute_full_subgroups == vk::TRUE && s.compute_full_subgroups == vk::FALSE {
623 return false;
624 }
625 if r.synchronization2 == vk::TRUE && s.synchronization2 == vk::FALSE {
626 return false;
627 }
628 if r.texture_compression_astc_hdr == vk::TRUE
629 && s.texture_compression_astc_hdr == vk::FALSE
630 {
631 return false;
632 }
633 if r.shader_zero_initialize_workgroup_memory == vk::TRUE
634 && s.shader_zero_initialize_workgroup_memory == vk::FALSE
635 {
636 return false;
637 }
638 if r.dynamic_rendering == vk::TRUE && s.dynamic_rendering == vk::FALSE {
639 return false;
640 }
641 if r.shader_integer_dot_product == vk::TRUE && s.shader_integer_dot_product == vk::FALSE
642 {
643 return false;
644 }
645 if r.maintenance4 == vk::TRUE && s.maintenance4 == vk::FALSE {
646 return false;
647 }
648 true
649 }
650 _ => unsafe { unreachable_unchecked() },
651 }
652}
653impl<'a> VulkanPhysicalDeviceFeature2<'a> {
654 fn as_mut(&mut self) -> &mut dyn vk::ExtendsPhysicalDeviceFeatures2 {
655 match self {
656 Self::PhysicalDeviceVulkan11(f) => f,
657 Self::PhysicalDeviceVulkan12(f) => f,
658 Self::PhysicalDeviceVulkan13(f) => f,
659 }
660 }
661
662 fn combine(&mut self, other: &VulkanPhysicalDeviceFeature2<'a>) {
663 assert_eq!(self.s_type(), other.s_type());
664
665 match (self, other) {
666 (
667 Self::PhysicalDeviceVulkan11(f),
668 VulkanPhysicalDeviceFeature2::PhysicalDeviceVulkan11(other),
669 ) => {
670 f.storage_buffer16_bit_access |= other.storage_buffer16_bit_access;
671 f.uniform_and_storage_buffer16_bit_access |=
672 other.uniform_and_storage_buffer16_bit_access;
673 f.storage_push_constant16 |= other.storage_push_constant16;
674 f.storage_input_output16 |= other.storage_input_output16;
675 f.multiview |= other.multiview;
676 f.multiview_geometry_shader |= other.multiview_geometry_shader;
677 f.multiview_tessellation_shader |= other.multiview_tessellation_shader;
678 f.variable_pointers_storage_buffer |= other.variable_pointers_storage_buffer;
679 f.variable_pointers |= other.variable_pointers;
680 f.protected_memory |= other.protected_memory;
681 f.sampler_ycbcr_conversion |= other.sampler_ycbcr_conversion;
682 f.shader_draw_parameters |= other.shader_draw_parameters;
683 }
684 (
685 Self::PhysicalDeviceVulkan12(f),
686 VulkanPhysicalDeviceFeature2::PhysicalDeviceVulkan12(other),
687 ) => {
688 f.sampler_mirror_clamp_to_edge |= other.sampler_mirror_clamp_to_edge;
689 f.draw_indirect_count |= other.draw_indirect_count;
690 f.storage_buffer8_bit_access |= other.storage_buffer8_bit_access;
691 f.uniform_and_storage_buffer8_bit_access |=
692 other.uniform_and_storage_buffer8_bit_access;
693 f.storage_push_constant8 |= other.storage_push_constant8;
694 f.shader_buffer_int64_atomics |= other.shader_buffer_int64_atomics;
695 f.shader_shared_int64_atomics |= other.shader_shared_int64_atomics;
696 f.shader_float16 |= other.shader_float16;
697 f.shader_int8 |= other.shader_int8;
698 f.descriptor_indexing |= other.descriptor_indexing;
699 f.shader_input_attachment_array_dynamic_indexing |=
700 other.shader_input_attachment_array_dynamic_indexing;
701 f.shader_uniform_texel_buffer_array_dynamic_indexing |=
702 other.shader_uniform_texel_buffer_array_dynamic_indexing;
703 f.shader_storage_texel_buffer_array_dynamic_indexing |=
704 other.shader_storage_texel_buffer_array_dynamic_indexing;
705 f.shader_uniform_buffer_array_non_uniform_indexing |=
706 other.shader_uniform_buffer_array_non_uniform_indexing;
707 f.shader_sampled_image_array_non_uniform_indexing |=
708 other.shader_sampled_image_array_non_uniform_indexing;
709 f.shader_storage_buffer_array_non_uniform_indexing |=
710 other.shader_storage_buffer_array_non_uniform_indexing;
711 f.shader_storage_image_array_non_uniform_indexing |=
712 other.shader_storage_image_array_non_uniform_indexing;
713 f.shader_input_attachment_array_non_uniform_indexing |=
714 other.shader_input_attachment_array_non_uniform_indexing;
715 f.shader_uniform_texel_buffer_array_non_uniform_indexing |=
716 other.shader_uniform_texel_buffer_array_non_uniform_indexing;
717 f.shader_storage_texel_buffer_array_non_uniform_indexing |=
718 other.shader_storage_texel_buffer_array_non_uniform_indexing;
719 f.descriptor_binding_uniform_buffer_update_after_bind |=
720 other.descriptor_binding_uniform_buffer_update_after_bind;
721 f.descriptor_binding_sampled_image_update_after_bind |=
722 other.descriptor_binding_sampled_image_update_after_bind;
723 f.descriptor_binding_storage_image_update_after_bind |=
724 other.descriptor_binding_storage_image_update_after_bind;
725 f.descriptor_binding_storage_buffer_update_after_bind |=
726 other.descriptor_binding_storage_buffer_update_after_bind;
727 f.descriptor_binding_uniform_texel_buffer_update_after_bind |=
728 other.descriptor_binding_uniform_texel_buffer_update_after_bind;
729 f.descriptor_binding_storage_texel_buffer_update_after_bind |=
730 other.descriptor_binding_storage_texel_buffer_update_after_bind;
731 f.descriptor_binding_update_unused_while_pending |=
732 other.descriptor_binding_update_unused_while_pending;
733 f.descriptor_binding_partially_bound |= other.descriptor_binding_partially_bound;
734 f.descriptor_binding_variable_descriptor_count |=
735 other.descriptor_binding_variable_descriptor_count;
736 f.runtime_descriptor_array |= other.runtime_descriptor_array;
737 f.sampler_filter_minmax |= other.sampler_filter_minmax;
738 f.scalar_block_layout |= other.scalar_block_layout;
739 f.imageless_framebuffer |= other.imageless_framebuffer;
740 f.uniform_buffer_standard_layout |= other.uniform_buffer_standard_layout;
741 f.shader_subgroup_extended_types |= other.shader_subgroup_extended_types;
742 f.separate_depth_stencil_layouts |= other.separate_depth_stencil_layouts;
743 f.host_query_reset |= other.host_query_reset;
744 f.timeline_semaphore |= other.timeline_semaphore;
745 f.buffer_device_address |= other.buffer_device_address;
746 f.buffer_device_address_capture_replay |=
747 other.buffer_device_address_capture_replay;
748 f.buffer_device_address_multi_device |= other.buffer_device_address_multi_device;
749 f.vulkan_memory_model |= other.vulkan_memory_model;
750 f.vulkan_memory_model_device_scope |= other.vulkan_memory_model_device_scope;
751 f.vulkan_memory_model_availability_visibility_chains |=
752 other.vulkan_memory_model_availability_visibility_chains;
753 f.shader_output_viewport_index |= other.shader_output_viewport_index;
754 f.shader_output_layer |= other.shader_output_layer;
755 f.subgroup_broadcast_dynamic_id |= other.subgroup_broadcast_dynamic_id;
756 }
757 (
758 Self::PhysicalDeviceVulkan13(f),
759 VulkanPhysicalDeviceFeature2::PhysicalDeviceVulkan13(other),
760 ) => {
761 f.robust_image_access |= other.robust_image_access;
762 f.inline_uniform_block |= other.inline_uniform_block;
763 f.descriptor_binding_inline_uniform_block_update_after_bind |=
764 other.descriptor_binding_inline_uniform_block_update_after_bind;
765 f.pipeline_creation_cache_control |= other.pipeline_creation_cache_control;
766 f.private_data |= other.private_data;
767 f.shader_demote_to_helper_invocation |= other.shader_demote_to_helper_invocation;
768 f.shader_terminate_invocation |= other.shader_terminate_invocation;
769 f.subgroup_size_control |= other.subgroup_size_control;
770 f.compute_full_subgroups |= other.compute_full_subgroups;
771 f.synchronization2 |= other.synchronization2;
772 f.texture_compression_astc_hdr |= other.texture_compression_astc_hdr;
773 f.shader_zero_initialize_workgroup_memory |=
774 other.shader_zero_initialize_workgroup_memory;
775 f.dynamic_rendering |= other.dynamic_rendering;
776 f.shader_integer_dot_product |= other.shader_integer_dot_product;
777 f.maintenance4 |= other.maintenance4;
778 }
779 _ => unsafe { unreachable_unchecked() },
780 }
781 }
782
783 fn s_type(&self) -> vk::StructureType {
784 match self {
785 Self::PhysicalDeviceVulkan11(f) => f.s_type,
786 Self::PhysicalDeviceVulkan12(f) => f.s_type,
787 Self::PhysicalDeviceVulkan13(f) => f.s_type,
788 }
789 }
790}
791
/// Allow feature structs to be passed directly to `GenericFeatureChain::add`
/// and the selector's builder methods.
impl<'a> From<vk::PhysicalDeviceVulkan11Features<'a>> for VulkanPhysicalDeviceFeature2<'a> {
    fn from(value: vk::PhysicalDeviceVulkan11Features<'a>) -> Self {
        Self::PhysicalDeviceVulkan11(value)
    }
}

impl<'a> From<vk::PhysicalDeviceVulkan12Features<'a>> for VulkanPhysicalDeviceFeature2<'a> {
    fn from(value: vk::PhysicalDeviceVulkan12Features<'a>) -> Self {
        Self::PhysicalDeviceVulkan12(value)
    }
}

impl<'a> From<vk::PhysicalDeviceVulkan13Features<'a>> for VulkanPhysicalDeviceFeature2<'a> {
    fn from(value: vk::PhysicalDeviceVulkan13Features<'a>) -> Self {
        Self::PhysicalDeviceVulkan13(value)
    }
}
/// An ordered list of extended-feature nodes, at most one per `s_type`
/// (`add` merges duplicates).
#[derive(Debug, Clone, Default)]
struct GenericFeatureChain<'a> {
    nodes: Vec<VulkanPhysicalDeviceFeature2<'a>>,
}

/// Read-only access to the underlying nodes.
impl<'a> Deref for GenericFeatureChain<'a> {
    type Target = Vec<VulkanPhysicalDeviceFeature2<'a>>;

    fn deref(&self) -> &Self::Target {
        &self.nodes
    }
}
823
824impl<'a> GenericFeatureChain<'a> {
825 fn new() -> Self {
826 Self { nodes: vec![] }
827 }
828
829 fn add(&mut self, feature: impl Into<VulkanPhysicalDeviceFeature2<'a>> + 'a) {
830 let new_node = feature.into();
831
832 for node in &mut self.nodes {
833 if new_node.s_type() == node.s_type() {
834 node.combine(&new_node);
835 return;
836 }
837 }
838
839 self.nodes.push(new_node);
840 }
841
842 fn match_all(&self, features_requested: &GenericFeatureChain) -> bool {
843 if features_requested.nodes.len() != self.nodes.len() {
844 return false;
845 }
846
847 let features_requested = features_requested.nodes.as_slice();
848 let features = self.nodes.as_slice();
849
850 for (requested_node, node) in features_requested.iter().zip(features) {
851 if !match_features(requested_node, node) {
852 return false;
853 }
854 }
855
856 true
857 }
858}
859
/// Everything `PhysicalDeviceSelector` uses to rank or reject devices.
#[derive(Debug)]
struct SelectionCriteria<'a> {
    /// If non-empty, only a device with exactly this driver-reported name passes.
    name: String,
    preferred_device_type: PreferredDeviceType,
    /// Whether devices of a non-preferred type may still be considered.
    allow_any_type: bool,
    /// Require at least one queue family that can present to the surface.
    require_present: bool,
    require_dedicated_transfer_queue: bool,
    require_dedicated_compute_queue: bool,
    require_separate_transfer_queue: bool,
    require_separate_compute_queue: bool,
    /// Minimum device-local memory, in bytes (0 = no requirement).
    required_mem_size: vk::DeviceSize,
    required_extensions: BTreeSet<String>,
    /// Minimum `apiVersion` the device must report.
    required_version: u32,
    /// Core 1.0 features the device must support.
    required_features: vk::PhysicalDeviceFeatures,
    required_formats: Vec<vk::Format>,
    // RefCell: mutated through a shared borrow by the by-value builder method
    // `add_required_extension_feature`.
    requested_features_chain: RefCell<GenericFeatureChain<'a>>,
    defer_surface_initialization: bool,
    /// Skip all checks and take the first enumerated GPU.
    use_first_gpu_unconditionally: bool,
    enable_portability_subset: bool,
}
880
/// Defaults: prefer a discrete GPU, require presentation support, start from
/// the Vulkan 1.0 baseline, enable the portability subset, and impose no
/// queue/extension/memory/format requirements.
impl Default for SelectionCriteria<'_> {
    fn default() -> Self {
        Self {
            name: String::new(),
            preferred_device_type: PreferredDeviceType::Discrete,
            allow_any_type: true,
            require_present: true,
            require_dedicated_transfer_queue: false,
            require_dedicated_compute_queue: false,
            require_separate_transfer_queue: false,
            require_separate_compute_queue: false,
            required_mem_size: 0,
            required_extensions: BTreeSet::new(),
            required_version: vk::API_VERSION_1_0,
            required_features: vk::PhysicalDeviceFeatures::default(),
            defer_surface_initialization: false,
            use_first_gpu_unconditionally: false,
            enable_portability_subset: true,
            requested_features_chain: RefCell::new(GenericFeatureChain::new()),
            required_formats: vec![],
        }
    }
}
904
/// Builder that picks a `PhysicalDevice` matching a set of criteria.
pub struct PhysicalDeviceSelector {
    instance: Arc<Instance>,
    /// Surface used for presentation-support checks (seeded from the instance).
    surface: Option<vk::SurfaceKHR>,
    selection_criteria: SelectionCriteria<'static>,
}
910
911impl PhysicalDeviceSelector {
912 pub fn new(instance: Arc<Instance>) -> PhysicalDeviceSelector {
913 let enable_portability_subset = cfg!(feature = "portability");
914 let require_present = instance.surface_instance.is_some();
915 let required_version = instance.api_version;
916 Self {
917 surface: instance.surface,
918 instance,
919 selection_criteria: SelectionCriteria {
920 require_present,
921 required_version,
922 enable_portability_subset,
923 ..Default::default()
924 },
925 }
926 }
927
928 pub fn surface(mut self, surface: vk::SurfaceKHR) -> Self {
929 self.surface.replace(surface);
930 self
931 }
932
    /// Appends an extended-feature struct (e.g. `vk::PhysicalDeviceVulkan12Features`)
    /// that the selected device must support.
    ///
    /// Takes `self` by value; mutation goes through the criteria's `RefCell`
    /// so the builder chain can keep moving `self`.
    pub fn add_required_extension_feature<
        T: Into<VulkanPhysicalDeviceFeature2<'static>> + 'static,
    >(
        self,
        feature: T,
    ) -> Self {
        self.selection_criteria
            .requested_features_chain
            .borrow_mut()
            .add(feature);
        self
    }
945
    /// Requires the given core 1.0 features on the selected device.
    pub fn add_required_features(mut self, features: vk::PhysicalDeviceFeatures) -> Self {
        self.selection_criteria.required_features = features;
        self
    }

    /// Only accepts the device whose driver-reported name matches `name`.
    pub fn name(mut self, name: impl Into<String>) -> Self {
        self.selection_criteria.name = name.into();
        self
    }

    /// Sets the preferred device category (discrete, integrated, …).
    pub fn preferred_device_type(mut self, device_type: PreferredDeviceType) -> Self {
        self.selection_criteria.preferred_device_type = device_type;
        self
    }

    /// Allows (or forbids) falling back to a non-preferred device type.
    pub fn allow_any_gpu_device_type(mut self, allow: bool) -> Self {
        self.selection_criteria.allow_any_type = allow;
        self
    }

    /// Requires a transfer queue family with no graphics capability and no
    /// compute capability.
    pub fn require_dedicated_transfer_queue(mut self, require: bool) -> Self {
        self.selection_criteria.require_dedicated_transfer_queue = require;
        self
    }

    /// Requires a compute queue family with no graphics capability and no
    /// transfer capability.
    pub fn require_dedicated_compute_queue(mut self, require: bool) -> Self {
        self.selection_criteria.require_dedicated_compute_queue = require;
        self
    }

    /// Requires a transfer queue family separate from the graphics family.
    pub fn require_separate_transfer_queue(mut self, require: bool) -> Self {
        self.selection_criteria.require_separate_transfer_queue = require;
        self
    }

    /// Requires a compute queue family separate from the graphics family.
    pub fn require_separate_compute_queue(mut self, require: bool) -> Self {
        self.selection_criteria.require_separate_compute_queue = require;
        self
    }

    /// Requires at least `required` bytes of device memory.
    pub fn required_device_memory_size(mut self, required: vk::DeviceSize) -> Self {
        self.selection_criteria.required_mem_size = required;
        self
    }

    /// Replaces the list of formats the device must support.
    pub fn required_formats(mut self, required: impl IntoIterator<Item = vk::Format>) -> Self {
        self.selection_criteria.required_formats = required.into_iter().collect();
        self
    }

    /// Skips all suitability checks and takes the first enumerated GPU.
    pub fn select_first_device_unconditionally(mut self, select: bool) -> Self {
        self.selection_criteria.use_first_gpu_unconditionally = select;
        self
    }
1000
1001 fn set_is_suitable(&self, device: &mut PhysicalDevice) {
1002 let criteria = &self.selection_criteria;
1003
1004 let device_name = device
1005 .properties
1006 .device_name_as_c_str()
1007 .expect("device name should be correct cstr")
1008 .to_string_lossy();
1009
1010 if !criteria.name.is_empty() && Cow::Borrowed(&criteria.name) != device_name {
1011 #[cfg(feature = "enable_tracing")]
1012 {
1013 tracing::warn!(
1014 "Device {} is not suitable. Name requested: {}",
1015 device_name,
1016 criteria.name
1017 );
1018 }
1019 device.suitable = Suitable::No;
1020 return;
1021 };
1022
1023 if criteria.required_version > device.properties.api_version {
1024 #[cfg(feature = "enable_tracing")]
1025 {
1026 use crate::version::Version;
1027 let requested_version = Version::new(criteria.required_version);
1028 let available_version = Version::new(device.properties.api_version);
1029 tracing::warn!(
1030 "Device {} is not suitable. Requested version: {}, Available version: {}",
1031 device_name,
1032 requested_version,
1033 available_version
1034 );
1035 }
1036 device.suitable = Suitable::No;
1037 return;
1038 }
1039
1040 let dedicated_compute = get_dedicated_queue_index(
1041 &device.queue_families,
1042 vk::QueueFlags::COMPUTE,
1043 vk::QueueFlags::TRANSFER,
1044 );
1045
1046 let dedicated_transfer = get_dedicated_queue_index(
1047 &device.queue_families,
1048 vk::QueueFlags::TRANSFER,
1049 vk::QueueFlags::COMPUTE,
1050 );
1051
1052 let separate_compute = get_separate_queue_index(
1053 &device.queue_families,
1054 vk::QueueFlags::COMPUTE,
1055 vk::QueueFlags::TRANSFER,
1056 );
1057
1058 let separate_transfer = get_separate_queue_index(
1059 &device.queue_families,
1060 vk::QueueFlags::TRANSFER,
1061 vk::QueueFlags::COMPUTE,
1062 );
1063
1064 let present_queue = get_present_queue_index(
1065 &self.instance.surface_instance,
1066 device.physical_device,
1067 self.surface,
1068 &device.queue_families,
1069 );
1070
1071 if criteria.require_dedicated_compute_queue && dedicated_compute.is_none() {
1072 device.suitable = Suitable::No;
1073 return;
1074 }
1075
1076 if criteria.require_dedicated_transfer_queue && dedicated_transfer.is_none() {
1077 device.suitable = Suitable::No;
1078 return;
1079 }
1080
1081 if criteria.require_separate_transfer_queue && separate_transfer.is_none() {
1082 device.suitable = Suitable::No;
1083 return;
1084 }
1085
1086 if criteria.require_separate_compute_queue && separate_compute.is_none() {
1087 device.suitable = Suitable::No;
1088 return;
1089 }
1090
1091 if criteria.require_present
1092 && present_queue.is_none()
1093 && !criteria.defer_surface_initialization
1094 {
1095 device.suitable = Suitable::No;
1096 return;
1097 }
1098
1099 let required_extensions_supported = check_device_extension_support(
1100 &device.available_extensions,
1101 &criteria.required_extensions,
1102 );
1103
1104 if required_extensions_supported.len() != criteria.required_extensions.len() {
1105 device.suitable = Suitable::No;
1106 return;
1107 }
1108
1109 if !criteria.defer_surface_initialization && criteria.require_present {
1110 let instance = self.instance.as_ref();
1111 if let Some((surface_instance, surface)) =
1112 instance.surface_instance.as_ref().zip(self.surface)
1113 {
1114 let formats = unsafe {
1115 surface_instance
1116 .get_physical_device_surface_formats(device.physical_device, surface)
1117 };
1118 let Ok(formats) = formats else {
1119 device.suitable = Suitable::No;
1120 return;
1121 };
1122
1123 let present_modes = unsafe {
1124 surface_instance
1125 .get_physical_device_surface_present_modes(device.physical_device, surface)
1126 };
1127 let Ok(present_modes) = present_modes else {
1128 device.suitable = Suitable::No;
1129 return;
1130 };
1131
1132 if present_modes.is_empty() || formats.is_empty() {
1133 device.suitable = Suitable::No;
1134 return;
1135 }
1136 };
1137 };
1138
1139 let preferred_device_type =
1140 vk::PhysicalDeviceType::from_raw(criteria.preferred_device_type as u8 as i32);
1141 if !criteria.allow_any_type && device.properties.device_type != preferred_device_type {
1142 device.suitable = Suitable::Partial;
1143 }
1144
1145 let required_features_supported = supports_features(
1146 &device.features,
1147 &criteria.required_features,
1148 &device.supported_features_chain,
1149 &criteria.requested_features_chain.borrow(),
1150 );
1151
1152 if !required_features_supported {
1153 device.suitable = Suitable::No;
1154 return;
1155 }
1156
1157 for memory_heap in device.memory_properties.memory_heaps {
1160 if memory_heap
1161 .flags
1162 .contains(vk::MemoryHeapFlags::DEVICE_LOCAL)
1163 && memory_heap.size < criteria.required_mem_size
1164 {
1165 device.suitable = Suitable::No;
1166 return;
1167 }
1168 }
1169 }
1170
1171 fn populate_device_details(
1172 &self,
1173 vk_phys_device: vk::PhysicalDevice,
1174 ) -> crate::Result<PhysicalDevice> {
1175 let instance = self.instance.as_ref();
1176 let criteria = &self.selection_criteria;
1177
1178 let mut physical_device = PhysicalDevice {
1179 physical_device: vk_phys_device,
1180 surface: instance.surface,
1181 defer_surface_initialization: criteria.defer_surface_initialization,
1182 queue_families: unsafe {
1183 instance
1184 .instance
1185 .get_physical_device_queue_family_properties(vk_phys_device)
1186 },
1187 properties: unsafe {
1188 instance
1189 .instance
1190 .get_physical_device_properties(vk_phys_device)
1191 },
1192 features: unsafe {
1193 instance
1194 .instance
1195 .get_physical_device_features(vk_phys_device)
1196 },
1197 memory_properties: unsafe {
1198 instance
1199 .instance
1200 .get_physical_device_memory_properties(vk_phys_device)
1201 },
1202 properties2_ext_enabled: instance.properties2_ext_enabled,
1225 requested_features_chain: criteria.requested_features_chain.clone().into_inner(),
1226 ..Default::default()
1227 };
1228
1229 physical_device.name = physical_device
1230 .properties
1231 .clone()
1232 .device_name_as_c_str()
1233 .map_err(anyhow::Error::msg)?
1234 .to_string_lossy()
1235 .to_string();
1236
1237 let available_extensions = unsafe {
1238 instance
1239 .instance
1240 .enumerate_device_extension_properties(vk_phys_device)
1241 };
1242
1243 let Ok(available_extensions) = available_extensions else {
1244 return Ok(physical_device);
1245 };
1246
1247 let available_extensions_names = available_extensions
1248 .into_iter()
1249 .map(|e| {
1250 e.extension_name_as_c_str()
1251 .expect("Extension name should be correct null-terminated string")
1252 .to_string_lossy()
1253 .to_string()
1254 })
1255 .collect::<BTreeSet<_>>();
1256
1257 physical_device.available_extensions.extend(
1258 available_extensions_names
1259 .iter()
1260 .map(|s| Cow::Owned(s.clone())),
1261 );
1262
1263 physical_device.properties2_ext_enabled = instance.properties2_ext_enabled;
1264
1265 let requested_features_chain = criteria.requested_features_chain.borrow();
1266 let instance_is_11 = instance.instance_version >= vk::API_VERSION_1_1;
1267 if !requested_features_chain.is_empty()
1268 && (instance_is_11 || instance.properties2_ext_enabled)
1269 {
1270 let mut supported_features = requested_features_chain.clone();
1271 let mut local_features = vk::PhysicalDeviceFeatures2::default();
1272
1273 for node in supported_features.nodes.iter_mut() {
1274 local_features = local_features.push_next(node.as_mut());
1275 }
1276
1277 unsafe {
1278 instance.instance.get_physical_device_features2(
1279 physical_device.physical_device,
1280 &mut local_features,
1281 )
1282 };
1283
1284 physical_device.supported_features_chain = supported_features.clone();
1285 }
1286
1287 Ok(physical_device)
1288 }
1289
1290 fn select_devices(&self) -> crate::Result<BTreeSet<PhysicalDevice>> {
1291 let criteria = &self.selection_criteria;
1292 let instance = self.instance.as_ref();
1293 if criteria.require_present
1294 && !criteria.defer_surface_initialization
1295 && instance.surface.is_none()
1296 {
1297 return Err(crate::PhysicalDeviceError::NoSurfaceProvided.into());
1298 };
1299
1300 let physical_devices = unsafe { instance.instance.enumerate_physical_devices() }
1301 .map_err(|_| crate::PhysicalDeviceError::FailedToEnumeratePhysicalDevices)?;
1302 if physical_devices.is_empty() {
1303 return Err(crate::PhysicalDeviceError::NoPhysicalDevicesFound.into());
1304 };
1305
1306 let fill_out_phys_dev_with_criteria = |physical_device: &mut PhysicalDevice| {
1307 physical_device.features = criteria.required_features;
1308 let mut portability_ext_available = false;
1309 let portability_name = vk::KHR_PORTABILITY_SUBSET_NAME.to_string_lossy();
1310 for ext in &physical_device.available_extensions {
1311 if criteria.enable_portability_subset && ext == &portability_name {
1312 portability_ext_available = true;
1313 }
1314 }
1315
1316 physical_device.extensions_to_enable.clear();
1317 physical_device.extensions_to_enable.extend(
1318 criteria
1319 .required_extensions
1320 .iter()
1321 .map(|s| Cow::Owned(s.clone())),
1322 );
1323
1324 if portability_ext_available {
1325 physical_device
1326 .extensions_to_enable
1327 .insert(portability_name);
1328 }
1329 };
1330
1331 if criteria.use_first_gpu_unconditionally {
1332 let mut device = self.populate_device_details(physical_devices[0])?;
1333 fill_out_phys_dev_with_criteria(&mut device);
1334 return Ok(BTreeSet::from([device]));
1335 };
1336
1337 let physical_devices = physical_devices
1338 .into_iter()
1339 .filter_map(|p| {
1340 let mut phys_dev = self.populate_device_details(p).ok();
1341
1342 if let Some(phys_dev) = phys_dev.as_mut() {
1343 self.set_is_suitable(phys_dev);
1344 }
1345
1346 phys_dev.and_then(|mut phys_dev| {
1347 if phys_dev.suitable == Suitable::No {
1348 None
1349 } else {
1350 fill_out_phys_dev_with_criteria(&mut phys_dev);
1351
1352 Some(phys_dev)
1353 }
1354 })
1355 })
1356 .collect::<BTreeSet<_>>();
1357
1358 Ok(physical_devices)
1359 }
1360
1361 pub fn select(self) -> crate::Result<PhysicalDevice> {
1362 let devices = self.select_devices()?;
1363 #[cfg(feature = "enable_tracing")]
1364 {
1365 tracing::debug!(
1366 "Device suitability: {:#?}",
1367 devices
1368 .iter()
1369 .map(|d| (&d.name, &d.suitable))
1370 .collect::<Vec<_>>()
1371 );
1372 }
1373
1374 if devices.is_empty() {
1375 Err(crate::PhysicalDeviceError::NoSuitableDevice.into())
1376 } else {
1377 Ok(unsafe { devices.into_iter().next().unwrap_unchecked() })
1378 }
1379 }
1380}
1381
/// Converts a UTF-8 string `Cow` into a C-string `Cow`, borrowing when the
/// input already is a valid NUL-terminated byte sequence and allocating a
/// `CString` otherwise.
///
/// # Panics
/// Panics if the string contains an interior NUL byte, which cannot be
/// represented as a `CString`.
fn cow_to_c_cow(cow: Cow<'_, str>) -> Cow<'_, CStr> {
    match cow {
        Cow::Borrowed(s) => match CStr::from_bytes_with_nul(s.as_bytes()) {
            // Already "text\0": reuse the borrow, no allocation.
            Ok(c_str) => Cow::Borrowed(c_str),
            // Missing terminator (or malformed): build an owned CString.
            Err(_) => Cow::Owned(CString::new(s).expect("Invalid C string")),
        },
        Cow::Owned(s) => Cow::Owned(CString::new(s).expect("Invalid C string")),
    }
}
1401
1402pub struct DeviceBuilder {
1403 instance: Arc<Instance>,
1404 physical_device: PhysicalDevice,
1405 allocation_callbacks: Option<AllocationCallbacks<'static>>,
1406 }
1409
1410impl DeviceBuilder {
1411 pub fn new(physical_device: PhysicalDevice, instance: Arc<Instance>) -> DeviceBuilder {
1412 Self {
1413 physical_device,
1414 allocation_callbacks: None,
1415 instance,
1416 }
1417 }
1418
1419 pub fn allocation_callbacks(
1420 mut self,
1421 allocation_callbacks: AllocationCallbacks<'static>,
1422 ) -> Self {
1423 self.allocation_callbacks.replace(allocation_callbacks);
1424 self
1425 }
1426
1427 pub fn build(mut self) -> crate::Result<Device> {
1428 let queue_descriptions = self
1431 .physical_device
1432 .queue_families
1433 .iter()
1434 .enumerate()
1435 .map(|(index, _)| (index, [1.]))
1436 .collect::<Vec<_>>();
1437
1438 let queue_create_infos = queue_descriptions
1439 .iter()
1440 .map(|(index, priorities)| {
1441 vk::DeviceQueueCreateInfo::default()
1442 .queue_family_index(*index as u32)
1443 .queue_priorities(priorities)
1444 })
1445 .collect::<Vec<_>>();
1446 let extensions_to_enable = self
1447 .physical_device
1448 .extensions_to_enable
1449 .iter()
1450 .map(|e| cow_to_c_cow(e.clone()))
1451 .collect::<Vec<_>>();
1452
1453 let mut extensions_to_enable = extensions_to_enable
1454 .iter()
1455 .map(|e| e.as_ptr())
1456 .collect::<Vec<_>>();
1457 if self.physical_device.surface.is_some()
1458 || self.physical_device.defer_surface_initialization
1459 {
1460 extensions_to_enable.push(vk::KHR_SWAPCHAIN_NAME.as_ptr());
1461 }
1462
1463 let mut device_create_info = vk::DeviceCreateInfo::default()
1464 .queue_create_infos(&queue_create_infos)
1465 .enabled_extension_names(&extensions_to_enable);
1466
1467 let requested_features_chain = &mut self.physical_device.requested_features_chain;
1468
1469 let mut features2 =
1470 vk::PhysicalDeviceFeatures2::default().features(self.physical_device.features);
1471
1472 if self.instance.instance_version >= vk::API_VERSION_1_1
1473 || self.physical_device.properties2_ext_enabled
1474 {
1475 device_create_info = device_create_info.push_next(&mut features2);
1476
1477 for node in requested_features_chain.nodes.iter_mut() {
1478 match node {
1479 VulkanPhysicalDeviceFeature2::PhysicalDeviceVulkan11(f) => {
1480 device_create_info = device_create_info.push_next(f)
1481 }
1482 VulkanPhysicalDeviceFeature2::PhysicalDeviceVulkan12(f) => {
1483 device_create_info = device_create_info.push_next(f)
1484 }
1485 VulkanPhysicalDeviceFeature2::PhysicalDeviceVulkan13(f) => {
1486 device_create_info = device_create_info.push_next(f)
1487 }
1488 }
1489 }
1490 }
1491
1492 let device = unsafe {
1493 self.instance.instance.create_device(
1494 self.physical_device.physical_device,
1495 &device_create_info,
1496 self.allocation_callbacks.as_ref(),
1497 )
1498 }?;
1499
1500 let physical_device = self.physical_device;
1501
1502 let surface = physical_device.surface;
1503 let allocation_callbacks = self.allocation_callbacks;
1504
1505 Ok(Device {
1506 device,
1507 surface,
1508 surface_instance: self.instance.surface_instance.clone(),
1509 physical_device,
1510 allocation_callbacks,
1511 })
1512 }
1513}
1514
1515pub struct Device {
1516 device: ash::Device,
1517 physical_device: PhysicalDevice,
1518 surface: Option<vk::SurfaceKHR>,
1519 surface_instance: Option<khr::surface::Instance>,
1520 allocation_callbacks: Option<AllocationCallbacks<'static>>,
1521}
1522
/// Kind of device queue to retrieve from a [`Device`].
///
/// Fieldless, so it is cheap to copy; `Copy` and `Hash` are derived in
/// addition to the ordering/equality traits so values can be passed by value
/// and used as map keys.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum QueueType {
    /// Queue able to present to the associated surface.
    Present,
    /// Queue with graphics capability.
    Graphics,
    /// Queue with compute capability.
    Compute,
    /// Queue with transfer capability.
    Transfer,
}
1530
1531impl Device {
1532 pub fn device(&self) -> &ash::Device {
1533 &self.device
1534 }
1535
1536 pub fn physical_device(&self) -> &PhysicalDevice {
1537 &self.physical_device
1538 }
1539
1540 pub fn get_queue(&self, queue: QueueType) -> crate::Result<(usize, vk::Queue)> {
1541 let index = match queue {
1542 QueueType::Present => get_present_queue_index(
1543 &self.surface_instance,
1544 self.physical_device.physical_device,
1545 self.surface,
1546 &self.physical_device.queue_families,
1547 )
1548 .ok_or(crate::QueueError::PresentUnavailable),
1549 QueueType::Graphics => get_first_queue_index(
1550 &self.physical_device.queue_families,
1551 vk::QueueFlags::GRAPHICS,
1552 )
1553 .ok_or(crate::QueueError::GraphicsUnavailable),
1554 QueueType::Compute => get_separate_queue_index(
1555 &self.physical_device.queue_families,
1556 vk::QueueFlags::COMPUTE,
1557 vk::QueueFlags::TRANSFER,
1558 )
1559 .ok_or(crate::QueueError::ComputeUnavailable),
1560 QueueType::Transfer => get_separate_queue_index(
1561 &self.physical_device.queue_families,
1562 vk::QueueFlags::TRANSFER,
1563 vk::QueueFlags::COMPUTE,
1564 )
1565 .ok_or(crate::QueueError::TransferUnavailable),
1566 }?;
1567
1568 Ok((index, unsafe {
1569 self.device.get_device_queue(index as _, 0)
1570 }))
1571 }
1572
1573 pub fn get_dedicated_queue(&self, queue: QueueType) -> crate::Result<vk::Queue> {
1574 let index = match queue {
1575 QueueType::Compute => get_dedicated_queue_index(
1576 &self.physical_device.queue_families,
1577 vk::QueueFlags::COMPUTE,
1578 vk::QueueFlags::TRANSFER,
1579 )
1580 .ok_or(crate::QueueError::ComputeUnavailable),
1581 QueueType::Transfer => get_dedicated_queue_index(
1582 &self.physical_device.queue_families,
1583 vk::QueueFlags::TRANSFER,
1584 vk::QueueFlags::COMPUTE,
1585 )
1586 .ok_or(crate::QueueError::TransferUnavailable),
1587 _ => return Err(crate::QueueError::InvalidQueueFamilyIndex.into()),
1588 }?;
1589
1590 let info = vk::DeviceQueueInfo2::default()
1591 .queue_family_index(index as _)
1592 .queue_index(0);
1593
1594 Ok(unsafe { self.device.get_device_queue2(&info) })
1595 }
1596
1597 pub fn destroy(&self) {
1598 unsafe {
1599 self.device
1600 .destroy_device(self.allocation_callbacks.as_ref());
1601 }
1602 }
1603}
1604
/// Allows APIs generic over `AsRef<ash::Device>` to accept a [`Device`].
impl AsRef<ash::Device> for Device {
    fn as_ref(&self) -> &ash::Device {
        &self.device
    }
}
1610
/// Lets a [`Device`] be used wherever `&ash::Device` is expected, exposing
/// the raw device entry points via auto-deref.
impl Deref for Device {
    type Target = ash::Device;

    fn deref(&self) -> &Self::Target {
        &self.device
    }
}