1use ash::vk;
2use naga::back::spv;
3use std::{ffi, mem, str};
4
/// When set to `Some(prefix)`, every compiled SPIR-V blob is dumped to
/// `{prefix}{entry_point}_{stage}{i}.spv` (first free `i`) for offline inspection.
const DUMP_PREFIX: Option<&str> = None;
6
/// A shader stage compiled to a Vulkan shader module, together with the
/// stage create-info needed to plug it into a pipeline.
struct CompiledShader<'a> {
    // Destroyed by the caller once the pipeline has been created.
    vk_module: vk::ShaderModule,
    // Owns the NUL-terminated entry-point name that `create_info.p_name`
    // points at; kept alive so the raw pointer stays valid.
    _entry_point: ffi::CString,
    create_info: vk::PipelineShaderStageCreateInfo<'a>,
    // Vertex attribute locations resolved against the vertex fetch states
    // (empty for non-vertex stages).
    attribute_mappings: Vec<crate::VertexAttributeMapping>,
    // Workgroup size of the entry point (only meaningful for compute).
    wg_size: [u32; 3],
}
14
15impl super::Context {
16 fn make_spv_options(&self, data_layouts: &[&crate::ShaderDataLayout]) -> spv::Options<'_> {
17 let mut binding_map = spv::BindingMap::default();
19 for (group_index, layout) in data_layouts.iter().enumerate() {
20 for (binding_index, &(_, binding)) in layout.bindings.iter().enumerate() {
21 let binding_array_size = match binding {
22 crate::ShaderBinding::TextureArray { count }
23 | crate::ShaderBinding::BufferArray { count }
24 | crate::ShaderBinding::AccelerationStructureArray { count } => Some(count),
25 _ => None,
26 };
27 let rb = naga::ResourceBinding {
28 group: group_index as u32,
29 binding: binding_index as u32,
30 };
31 binding_map.insert(
32 rb,
33 spv::BindingInfo {
34 descriptor_set: group_index as u32,
35 binding: binding_index as u32,
36 binding_array_size,
37 },
38 );
39 }
40 }
41
42 spv::Options {
43 lang_version: match self.device.ray_tracing {
44 Some(_) => (1, 4),
46 None => (1, 3),
47 },
48 flags: self.naga_flags,
49 fake_missing_bindings: false,
50 binding_map,
51 capabilities: None,
52 bounds_check_policies: naga::proc::BoundsCheckPolicies::default(),
53 zero_initialize_workgroup_memory: spv::ZeroInitializeWorkgroupMemoryMode::None,
54 force_loop_bounding: false,
55 ray_query_initialization_tracking: true,
56 use_storage_input_output_16: false,
57 debug_info: None,
58 task_dispatch_limits: None,
59 mesh_shader_primitive_indices_clamp: true,
60 }
61 }
62
63 fn load_shader(
64 &self,
65 sf: crate::ShaderFunction,
66 naga_options_base: &spv::Options,
67 group_layouts: &[&crate::ShaderDataLayout],
68 group_infos: &mut [crate::ShaderDataInfo],
69 vertex_fetch_states: &[crate::VertexFetchState],
70 ) -> CompiledShader<'_> {
71 let ep_index = sf.entry_point_index();
72 let ep = &sf.shader.module.entry_points[ep_index];
73 let ep_info = sf.shader.info.get_entry_point(ep_index);
74
75 let (mut module, module_info) = sf.shader.resolve_constants(sf.constants);
76 crate::Shader::fill_resource_bindings(
77 &mut module,
78 group_infos,
79 ep.stage,
80 ep_info,
81 group_layouts,
82 );
83 let attribute_mappings =
84 crate::Shader::fill_vertex_locations(&mut module, ep_index, vertex_fetch_states);
85
86 let pipeline_options = spv::PipelineOptions {
87 shader_stage: ep.stage,
88 entry_point: sf.entry_point.to_string(),
89 };
90 let file_path;
91 let file_name_str;
92 let mut naga_options_debug;
93 let naga_options = if let Some(ref temp_dir) = self.shader_debug_path {
94 use std::{
95 fs,
96 hash::{DefaultHasher, Hash as _, Hasher as _},
97 };
98 let mut hasher = DefaultHasher::new();
99 sf.shader.source.hash(&mut hasher);
100 file_path = temp_dir.join(format!("{}-{:x}.wgsl", sf.entry_point, hasher.finish()));
101 let _ = fs::write(&file_path, &sf.shader.source);
102
103 naga_options_debug = naga_options_base.clone();
104 file_name_str = file_path.to_string_lossy().into_owned();
105 naga_options_debug.debug_info = Some(naga::back::spv::DebugInfo {
106 source_code: &sf.shader.source,
107 file_name: &file_name_str,
108 language: naga::back::spv::SourceLanguage::GLSL,
110 });
111 &naga_options_debug
112 } else {
113 naga_options_base
114 };
115
116 let spv =
117 spv::write_vec(&module, &module_info, naga_options, Some(&pipeline_options)).unwrap();
118
119 if let Some(dump_prefix) = DUMP_PREFIX {
120 let mut file_name = String::new();
121 for i in 1.. {
122 file_name = format!("{}{}_{:?}{}.spv", dump_prefix, sf.entry_point, ep.stage, i);
123 if !std::path::Path::new(&file_name).exists() {
124 break;
125 }
126 }
127 let spv_bytes =
128 unsafe { std::slice::from_raw_parts(spv.as_ptr() as *const u8, spv.len() * 4) };
129 println!("Dumping {}", file_name);
130 std::fs::write(file_name, spv_bytes).unwrap();
131 }
132
133 let vk_info = vk::ShaderModuleCreateInfo::default().code(&spv);
134
135 let vk_module = unsafe {
136 self.device
137 .core
138 .create_shader_module(&vk_info, None)
139 .unwrap()
140 };
141
142 let vk_stage = match ep.stage {
143 naga::ShaderStage::Compute => vk::ShaderStageFlags::COMPUTE,
144 naga::ShaderStage::Vertex => vk::ShaderStageFlags::VERTEX,
145 naga::ShaderStage::Fragment => vk::ShaderStageFlags::FRAGMENT,
146 _ => panic!("Unsupported shader stage: {:?}", ep.stage),
147 };
148
149 let entry_point = ffi::CString::new(sf.entry_point).unwrap();
150 let create_info = vk::PipelineShaderStageCreateInfo {
151 stage: vk_stage,
152 module: vk_module,
153 p_name: entry_point.as_ptr(),
154 ..Default::default()
155 };
156
157 CompiledShader {
158 vk_module,
159 _entry_point: entry_point,
160 create_info,
161 attribute_mappings,
162 wg_size: ep.workgroup_size,
163 }
164 }
165
166 fn create_descriptor_set_layout(
167 &self,
168 layout: &crate::ShaderDataLayout,
169 info: &crate::ShaderDataInfo,
170 ) -> super::DescriptorSetLayout {
171 if info.visibility.is_empty() {
172 return super::DescriptorSetLayout {
174 raw: unsafe {
175 self.device
176 .core
177 .create_descriptor_set_layout(&Default::default(), None)
178 .unwrap()
179 },
180 ..Default::default()
181 };
182 }
183
184 let stage_flags = map_shader_visibility(info.visibility);
185 let mut vk_bindings = Vec::with_capacity(layout.bindings.len());
186 let mut template_entries = Vec::with_capacity(layout.bindings.len());
187 let mut template_offsets = Vec::with_capacity(layout.bindings.len());
188 let mut binding_flags = Vec::with_capacity(layout.bindings.len());
189 let mut update_offset = 0;
190 for (binding_index, (&(_, binding), &access)) in layout
191 .bindings
192 .iter()
193 .zip(info.binding_access.iter())
194 .enumerate()
195 {
196 let (descriptor_type, descriptor_size, descriptor_count, flag) = match binding {
197 crate::ShaderBinding::Texture => (
198 if access.is_empty() {
199 vk::DescriptorType::SAMPLED_IMAGE
200 } else {
201 vk::DescriptorType::STORAGE_IMAGE
202 },
203 mem::size_of::<vk::DescriptorImageInfo>(),
204 1u32,
205 vk::DescriptorBindingFlags::empty(),
206 ),
207 crate::ShaderBinding::TextureArray { count } => (
208 if access.is_empty() {
209 vk::DescriptorType::SAMPLED_IMAGE
210 } else {
211 vk::DescriptorType::STORAGE_IMAGE
212 },
213 mem::size_of::<vk::DescriptorImageInfo>(),
214 count,
215 vk::DescriptorBindingFlags::PARTIALLY_BOUND,
216 ),
217 crate::ShaderBinding::Sampler => (
218 vk::DescriptorType::SAMPLER,
219 mem::size_of::<vk::DescriptorImageInfo>(),
220 1u32,
221 vk::DescriptorBindingFlags::empty(),
222 ),
223 crate::ShaderBinding::Buffer => (
224 vk::DescriptorType::STORAGE_BUFFER,
225 mem::size_of::<vk::DescriptorBufferInfo>(),
226 1u32,
227 vk::DescriptorBindingFlags::empty(),
228 ),
229 crate::ShaderBinding::BufferArray { count } => (
230 vk::DescriptorType::STORAGE_BUFFER,
231 mem::size_of::<vk::DescriptorBufferInfo>(),
232 count,
233 vk::DescriptorBindingFlags::PARTIALLY_BOUND,
234 ),
235 crate::ShaderBinding::AccelerationStructure => (
236 vk::DescriptorType::ACCELERATION_STRUCTURE_KHR,
237 mem::size_of::<vk::AccelerationStructureKHR>(),
238 1u32,
239 vk::DescriptorBindingFlags::empty(),
240 ),
241 crate::ShaderBinding::AccelerationStructureArray { count } => (
242 vk::DescriptorType::ACCELERATION_STRUCTURE_KHR,
243 mem::size_of::<vk::AccelerationStructureKHR>(),
244 count,
245 vk::DescriptorBindingFlags::PARTIALLY_BOUND,
246 ),
247 crate::ShaderBinding::Plain { size } => {
248 if self.device.inline_uniform_blocks {
249 (
250 vk::DescriptorType::INLINE_UNIFORM_BLOCK_EXT,
251 1,
252 size,
253 vk::DescriptorBindingFlags::empty(),
254 )
255 } else {
256 (
257 vk::DescriptorType::UNIFORM_BUFFER,
258 mem::size_of::<vk::DescriptorBufferInfo>(),
259 1u32,
260 vk::DescriptorBindingFlags::empty(),
261 )
262 }
263 }
264 };
265 if descriptor_type == vk::DescriptorType::INLINE_UNIFORM_BLOCK_EXT {
266 assert_eq!(
267 descriptor_count % 4,
268 0,
269 "Inline uniform block binding {} size must be 4-byte aligned, got {}",
270 binding_index,
271 descriptor_count
272 );
273 assert!(
274 descriptor_count <= crate::limits::PLAIN_DATA_SIZE,
275 "Inline uniform block binding {} size {} exceeds blade limit {}",
276 binding_index,
277 descriptor_count,
278 crate::limits::PLAIN_DATA_SIZE
279 );
280 }
281 if descriptor_type == vk::DescriptorType::UNIFORM_BUFFER
283 && let crate::ShaderBinding::Plain { size } = binding
284 {
285 assert!(
286 size <= crate::limits::PLAIN_DATA_SIZE,
287 "UBO binding {} size {} exceeds blade limit {}",
288 binding_index,
289 size,
290 crate::limits::PLAIN_DATA_SIZE
291 );
292 }
293
294 vk_bindings.push(vk::DescriptorSetLayoutBinding {
295 binding: binding_index as u32,
296 descriptor_type,
297 descriptor_count,
298 stage_flags,
299 ..Default::default()
300 });
301 template_entries.push(vk::DescriptorUpdateTemplateEntryKHR {
302 dst_binding: binding_index as u32,
303 dst_array_element: 0,
304 descriptor_count,
305 descriptor_type,
306 offset: update_offset,
307 stride: descriptor_size,
308 });
309 binding_flags.push(flag);
310 template_offsets.push(update_offset as u32);
311 update_offset += descriptor_size * descriptor_count as usize;
312 }
313
314 let mut binding_flags_info =
315 vk::DescriptorSetLayoutBindingFlagsCreateInfo::default().binding_flags(&binding_flags);
316 let set_layout_info = vk::DescriptorSetLayoutCreateInfo::default()
317 .bindings(&vk_bindings)
318 .push_next(&mut binding_flags_info);
319 let raw = unsafe {
320 self.device
321 .core
322 .create_descriptor_set_layout(&set_layout_info, None)
323 .unwrap()
324 };
325
326 let template_create_info = vk::DescriptorUpdateTemplateCreateInfo::default()
327 .descriptor_update_entries(&template_entries)
328 .template_type(vk::DescriptorUpdateTemplateTypeKHR::DESCRIPTOR_SET)
329 .descriptor_set_layout(raw);
330 let update_template = unsafe {
331 self.device
332 .core
333 .create_descriptor_update_template(&template_create_info, None)
334 .unwrap()
335 };
336
337 super::DescriptorSetLayout {
338 raw,
339 update_template,
340 template_size: update_offset as u32,
341 template_offsets: template_offsets.into_boxed_slice(),
342 }
343 }
344
345 fn create_pipeline_layout(
346 &self,
347 group_layouts: &[&crate::ShaderDataLayout],
348 group_infos: &[crate::ShaderDataInfo],
349 ) -> super::PipelineLayout {
350 let mut descriptor_set_layouts = Vec::with_capacity(group_layouts.len());
351 let mut vk_set_layouts = Vec::with_capacity(group_layouts.len());
352 for (&layout, info) in group_layouts.iter().zip(group_infos) {
353 let dsl = self.create_descriptor_set_layout(layout, info);
354 vk_set_layouts.push(dsl.raw);
355 descriptor_set_layouts.push(dsl);
356 }
357
358 let vk_info = vk::PipelineLayoutCreateInfo::default().set_layouts(&vk_set_layouts);
359 let raw = unsafe {
360 self.device
361 .core
362 .create_pipeline_layout(&vk_info, None)
363 .unwrap()
364 };
365
366 super::PipelineLayout {
367 raw,
368 descriptor_set_layouts,
369 }
370 }
371
372 fn destroy_pipeline_layout(&self, layout: &mut super::PipelineLayout) {
373 unsafe {
374 self.device
375 .core
376 .destroy_pipeline_layout(mem::take(&mut layout.raw), None);
377 }
378 for dsl in layout.descriptor_set_layouts.drain(..) {
379 unsafe {
380 self.device
381 .core
382 .destroy_descriptor_set_layout(dsl.raw, None);
383 }
384 if !dsl.is_empty() {
385 unsafe {
386 self.device
387 .core
388 .destroy_descriptor_update_template(dsl.update_template, None);
389 }
390 }
391 }
392 }
393}
394
#[hidden_trait::expose]
impl crate::traits::ShaderDevice for super::Context {
    type ComputePipeline = super::ComputePipeline;
    type RenderPipeline = super::RenderPipeline;

    /// Create a compute pipeline: compile the shader, build the pipeline
    /// layout from the data layouts, and create the Vulkan pipeline.
    ///
    /// Panics if pipeline creation fails.
    fn create_compute_pipeline(&self, desc: crate::ComputePipelineDesc) -> super::ComputePipeline {
        let mut group_infos = desc
            .data_layouts
            .iter()
            .map(|layout| layout.to_info())
            .collect::<Vec<_>>();

        let options = self.make_spv_options(desc.data_layouts);
        // Compute shaders fetch no vertices, hence the empty fetch-state slice.
        let cs = self.load_shader(
            desc.compute,
            &options,
            desc.data_layouts,
            &mut group_infos,
            &[],
        );

        let layout = self.create_pipeline_layout(desc.data_layouts, &group_infos);

        let create_info = vk::ComputePipelineCreateInfo::default()
            .layout(layout.raw)
            .stage(cs.create_info);

        let mut raw_vec = unsafe {
            self.device
                .core
                .create_compute_pipelines(vk::PipelineCache::null(), &[create_info], None)
                .unwrap_or_else(|(_, err)| {
                    panic!("Failed to create compute pipeline '{}': {err:?}", desc.name)
                })
        };
        // Exactly one create-info was passed, so exactly one pipeline comes back.
        let raw = raw_vec.pop().unwrap();

        // The shader module is no longer needed once the pipeline exists.
        unsafe { self.device.core.destroy_shader_module(cs.vk_module, None) };

        // Optional register-usage statistics (AMD shader-info extension).
        if let Some(ref ext) = self.device.shader_info
            && let Ok(statistics) =
                unsafe { ext.get_shader_info_statistics(raw, vk::ShaderStageFlags::COMPUTE) }
        {
            let ru = &statistics.resource_usage;
            log::info!(
                "Compute pipeline '{}' uses: {} VGPRs, {} SGPRs",
                desc.name,
                ru.num_used_vgprs,
                ru.num_used_sgprs,
            );
        }

        if !desc.name.is_empty() {
            self.set_object_name(raw, desc.name);
        }
        super::ComputePipeline {
            raw,
            layout,
            wg_size: cs.wg_size,
        }
    }

    /// Destroy a compute pipeline along with its owned layout objects.
    fn destroy_compute_pipeline(&self, pipeline: &mut super::ComputePipeline) {
        self.destroy_pipeline_layout(&mut pipeline.layout);
        unsafe {
            self.device.core.destroy_pipeline(pipeline.raw, None);
        }
    }

    /// Create a render pipeline (vertex + optional fragment stage) using
    /// dynamic rendering (`VK_KHR_dynamic_rendering`-style attachment formats).
    ///
    /// Panics if pipeline creation fails.
    fn create_render_pipeline(&self, desc: crate::RenderPipelineDesc) -> super::RenderPipeline {
        let mut group_infos = desc
            .data_layouts
            .iter()
            .map(|layout| layout.to_info())
            .collect::<Vec<_>>();

        let options = self.make_spv_options(desc.data_layouts);
        let vs = self.load_shader(
            desc.vertex,
            &options,
            desc.data_layouts,
            &mut group_infos,
            desc.vertex_fetches,
        );
        // Fragment stage is optional (e.g. depth-only passes).
        let fs = desc.fragment.map(|desc_fragment| {
            self.load_shader(
                desc_fragment,
                &options,
                desc.data_layouts,
                &mut group_infos,
                &[],
            )
        });

        // At most two stages; slot 1 is filled only when a fragment shader exists.
        let mut stages = [vs.create_info, vk::PipelineShaderStageCreateInfo::default()];
        let mut stage_count = 1;
        if let Some(ref fs) = fs {
            stages[1] = fs.create_info;
            stage_count += 1;
        }
        let stages = &stages[..stage_count];
        let layout = self.create_pipeline_layout(desc.data_layouts, &group_infos);

        // One vertex buffer binding per fetch state, indexed by position.
        let vertex_buffers = desc
            .vertex_fetches
            .iter()
            .enumerate()
            .map(|(i, vf)| vk::VertexInputBindingDescription {
                binding: i as u32,
                stride: vf.layout.stride,
                input_rate: if vf.instanced {
                    vk::VertexInputRate::INSTANCE
                } else {
                    vk::VertexInputRate::VERTEX
                },
            })
            .collect::<Vec<_>>();
        // Attribute locations follow the order produced by `fill_vertex_locations`.
        let vertex_attributes = vs
            .attribute_mappings
            .into_iter()
            .enumerate()
            .map(|(index, mapping)| {
                let (_, ref at) = desc.vertex_fetches[mapping.buffer_index].layout.attributes
                    [mapping.attribute_index];
                vk::VertexInputAttributeDescription {
                    location: index as u32,
                    binding: mapping.buffer_index as u32,
                    format: super::map_vertex_format(at.format),
                    offset: at.offset,
                }
            })
            .collect::<Vec<_>>();

        let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::default()
            .vertex_binding_descriptions(&vertex_buffers)
            .vertex_attribute_descriptions(&vertex_attributes);
        let (raw_topology, supports_restart) = map_primitive_topology(desc.primitive.topology);
        let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::default()
            .topology(raw_topology)
            .primitive_restart_enable(supports_restart);

        let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::default()
            .polygon_mode(if desc.primitive.wireframe {
                vk::PolygonMode::LINE
            } else {
                vk::PolygonMode::FILL
            })
            .front_face(map_front_face(desc.primitive.front_face))
            .line_width(1.0);
        if let Some(face) = desc.primitive.cull_mode {
            vk_rasterization = vk_rasterization.cull_mode(map_cull_face(face));
        }

        // Chained onto rasterization state only when unclipped depth is requested.
        let mut vk_depth_clip_state =
            vk::PipelineRasterizationDepthClipStateCreateInfoEXT::default()
                .depth_clip_enable(false);
        if desc.primitive.unclipped_depth {
            vk_rasterization = vk_rasterization.push_next(&mut vk_depth_clip_state);
        }

        // Viewport/scissor/blend-constants/stencil-reference are set at draw time.
        let dynamic_states = [
            vk::DynamicState::VIEWPORT,
            vk::DynamicState::SCISSOR,
            vk::DynamicState::BLEND_CONSTANTS,
            vk::DynamicState::STENCIL_REFERENCE,
        ];
        let vk_dynamic_state =
            vk::PipelineDynamicStateCreateInfo::default().dynamic_states(&dynamic_states);

        let vk_viewport = vk::PipelineViewportStateCreateInfo::default()
            .flags(vk::PipelineViewportStateCreateFlags::empty())
            .scissor_count(1)
            .viewport_count(1);

        // Vulkan wants the 64-bit sample mask as two 32-bit words.
        let vk_sample_mask = [
            desc.multisample_state.sample_mask as u32,
            (desc.multisample_state.sample_mask >> 32) as u32,
        ];

        let vk_multisample = vk::PipelineMultisampleStateCreateInfo::default()
            .rasterization_samples(vk::SampleCountFlags::from_raw(
                desc.multisample_state.sample_count,
            ))
            .alpha_to_coverage_enable(desc.multisample_state.alpha_to_coverage)
            .sample_mask(&vk_sample_mask);

        let mut d_format = vk::Format::UNDEFINED;
        let mut s_format = vk::Format::UNDEFINED;
        let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::default();
        if let Some(ref ds) = desc.depth_stencil {
            let ds_format = super::map_texture_format(ds.format);
            if ds.format.aspects().contains(crate::TexelAspects::DEPTH) {
                d_format = ds_format;
            }
            if ds.format.aspects().contains(crate::TexelAspects::STENCIL) {
                s_format = ds_format;
            }

            // Enable depth testing only when it could have an observable effect.
            if ds.depth_write_enabled || ds.depth_compare != crate::CompareFunction::Always {
                vk_depth_stencil = vk_depth_stencil
                    .depth_test_enable(true)
                    .depth_write_enable(ds.depth_write_enabled)
                    .depth_compare_op(map_comparison(ds.depth_compare));
            }
            if ds.stencil != crate::StencilState::default() {
                let s = &ds.stencil;
                let front = map_stencil_face_state(&s.front, s.read_mask, s.write_mask);
                let back = map_stencil_face_state(&s.back, s.read_mask, s.write_mask);
                vk_depth_stencil = vk_depth_stencil
                    .stencil_test_enable(true)
                    .front(front)
                    .back(back);
            }

            if ds.bias != crate::DepthBiasState::default() {
                vk_rasterization = vk_rasterization
                    .depth_bias_enable(true)
                    .depth_bias_constant_factor(ds.bias.constant as f32)
                    .depth_bias_clamp(ds.bias.clamp)
                    .depth_bias_slope_factor(ds.bias.slope_scale);
            }
        }

        let mut color_formats = Vec::with_capacity(desc.color_targets.len());
        let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
        for ct in desc.color_targets {
            let mut vk_attachment = vk::PipelineColorBlendAttachmentState::default()
                .color_write_mask(vk::ColorComponentFlags::from_raw(ct.write_mask.bits()));
            if let Some(ref blend) = ct.blend {
                assert!(
                    !blend.uses_dual_source() || self.dual_source_blending,
                    "Dual-source blending is not supported by this Vulkan device"
                );

                let (color_op, color_src, color_dst) = map_blend_component(&blend.color);
                let (alpha_op, alpha_src, alpha_dst) = map_blend_component(&blend.alpha);
                vk_attachment = vk_attachment
                    .blend_enable(true)
                    .color_blend_op(color_op)
                    .src_color_blend_factor(color_src)
                    .dst_color_blend_factor(color_dst)
                    .alpha_blend_op(alpha_op)
                    .src_alpha_blend_factor(alpha_src)
                    .dst_alpha_blend_factor(alpha_dst);
            }

            color_formats.push(super::map_texture_format(ct.format));
            vk_attachments.push(vk_attachment);
        }
        let vk_color_blend =
            vk::PipelineColorBlendStateCreateInfo::default().attachments(&vk_attachments);

        // Dynamic-rendering attachment formats replace a render pass object.
        let mut rendering_info = vk::PipelineRenderingCreateInfo::default()
            .color_attachment_formats(&color_formats)
            .depth_attachment_format(d_format)
            .stencil_attachment_format(s_format);

        let create_info = vk::GraphicsPipelineCreateInfo::default()
            .layout(layout.raw)
            .stages(stages)
            .vertex_input_state(&vk_vertex_input)
            .input_assembly_state(&vk_input_assembly)
            .rasterization_state(&vk_rasterization)
            .viewport_state(&vk_viewport)
            .multisample_state(&vk_multisample)
            .depth_stencil_state(&vk_depth_stencil)
            .color_blend_state(&vk_color_blend)
            .dynamic_state(&vk_dynamic_state)
            .push_next(&mut rendering_info);

        let mut raw_vec = unsafe {
            self.device
                .core
                .create_graphics_pipelines(vk::PipelineCache::null(), &[create_info], None)
                .unwrap_or_else(|(_, err)| {
                    panic!("Failed to create render pipeline '{}': {err:?}", desc.name)
                })
        };
        // Exactly one create-info was passed, so exactly one pipeline comes back.
        let raw = raw_vec.pop().unwrap();

        // Shader modules are no longer needed once the pipeline exists.
        unsafe { self.device.core.destroy_shader_module(vs.vk_module, None) };
        if let Some(fs) = fs {
            unsafe { self.device.core.destroy_shader_module(fs.vk_module, None) };
        }

        if !desc.name.is_empty() {
            self.set_object_name(raw, desc.name);
        }
        super::RenderPipeline { raw, layout }
    }

    /// Destroy a render pipeline along with its owned layout objects.
    fn destroy_render_pipeline(&self, pipeline: &mut super::RenderPipeline) {
        self.destroy_pipeline_layout(&mut pipeline.layout);
        unsafe {
            self.device.core.destroy_pipeline(pipeline.raw, None);
        }
    }
}
694
695fn map_shader_visibility(visibility: crate::ShaderVisibility) -> vk::ShaderStageFlags {
696 use crate::ShaderVisibility as Sv;
697 use vk::ShaderStageFlags as Flags;
698
699 let mut flags = Flags::empty();
700 if visibility.contains(Sv::COMPUTE) {
701 flags |= Flags::COMPUTE;
702 }
703 if visibility.contains(Sv::VERTEX) {
704 flags |= Flags::VERTEX;
705 }
706 if visibility.contains(Sv::FRAGMENT) {
707 flags |= Flags::FRAGMENT;
708 }
709
710 flags
711}
712
713fn map_primitive_topology(topology: crate::PrimitiveTopology) -> (vk::PrimitiveTopology, bool) {
714 use crate::PrimitiveTopology as Pt;
715 match topology {
716 Pt::PointList => (vk::PrimitiveTopology::POINT_LIST, false),
717 Pt::LineList => (vk::PrimitiveTopology::LINE_LIST, false),
718 Pt::LineStrip => (vk::PrimitiveTopology::LINE_STRIP, true),
719 Pt::TriangleList => (vk::PrimitiveTopology::TRIANGLE_LIST, false),
720 Pt::TriangleStrip => (vk::PrimitiveTopology::TRIANGLE_STRIP, true),
721 }
722}
723
724fn map_front_face(front_face: crate::FrontFace) -> vk::FrontFace {
725 match front_face {
726 crate::FrontFace::Cw => vk::FrontFace::CLOCKWISE,
727 crate::FrontFace::Ccw => vk::FrontFace::COUNTER_CLOCKWISE,
728 }
729}
730
731fn map_cull_face(face: crate::Face) -> vk::CullModeFlags {
732 match face {
733 crate::Face::Front => vk::CullModeFlags::FRONT,
734 crate::Face::Back => vk::CullModeFlags::BACK,
735 }
736}
737
738fn map_comparison(fun: crate::CompareFunction) -> vk::CompareOp {
739 use crate::CompareFunction as Cf;
740 match fun {
741 Cf::Never => vk::CompareOp::NEVER,
742 Cf::Less => vk::CompareOp::LESS,
743 Cf::LessEqual => vk::CompareOp::LESS_OR_EQUAL,
744 Cf::Equal => vk::CompareOp::EQUAL,
745 Cf::GreaterEqual => vk::CompareOp::GREATER_OR_EQUAL,
746 Cf::Greater => vk::CompareOp::GREATER,
747 Cf::NotEqual => vk::CompareOp::NOT_EQUAL,
748 Cf::Always => vk::CompareOp::ALWAYS,
749 }
750}
751
752fn map_stencil_op(op: crate::StencilOperation) -> vk::StencilOp {
753 use crate::StencilOperation as So;
754 match op {
755 So::Keep => vk::StencilOp::KEEP,
756 So::Zero => vk::StencilOp::ZERO,
757 So::Replace => vk::StencilOp::REPLACE,
758 So::Invert => vk::StencilOp::INVERT,
759 So::IncrementClamp => vk::StencilOp::INCREMENT_AND_CLAMP,
760 So::IncrementWrap => vk::StencilOp::INCREMENT_AND_WRAP,
761 So::DecrementClamp => vk::StencilOp::DECREMENT_AND_CLAMP,
762 So::DecrementWrap => vk::StencilOp::DECREMENT_AND_WRAP,
763 }
764}
765
766fn map_stencil_face_state(
767 face: &crate::StencilFaceState,
768 compare_mask: u32,
769 write_mask: u32,
770) -> vk::StencilOpState {
771 vk::StencilOpState {
772 fail_op: map_stencil_op(face.fail_op),
773 pass_op: map_stencil_op(face.pass_op),
774 depth_fail_op: map_stencil_op(face.depth_fail_op),
775 compare_op: map_comparison(face.compare),
776 compare_mask,
777 write_mask,
778 reference: 0,
779 }
780}
781
782fn map_blend_factor(factor: crate::BlendFactor) -> vk::BlendFactor {
783 use crate::BlendFactor as Bf;
784 match factor {
785 Bf::Zero => vk::BlendFactor::ZERO,
786 Bf::One => vk::BlendFactor::ONE,
787 Bf::Src => vk::BlendFactor::SRC_COLOR,
788 Bf::OneMinusSrc => vk::BlendFactor::ONE_MINUS_SRC_COLOR,
789 Bf::SrcAlpha => vk::BlendFactor::SRC_ALPHA,
790 Bf::OneMinusSrcAlpha => vk::BlendFactor::ONE_MINUS_SRC_ALPHA,
791 Bf::Dst => vk::BlendFactor::DST_COLOR,
792 Bf::OneMinusDst => vk::BlendFactor::ONE_MINUS_DST_COLOR,
793 Bf::DstAlpha => vk::BlendFactor::DST_ALPHA,
794 Bf::OneMinusDstAlpha => vk::BlendFactor::ONE_MINUS_DST_ALPHA,
795 Bf::SrcAlphaSaturated => vk::BlendFactor::SRC_ALPHA_SATURATE,
796 Bf::Constant => vk::BlendFactor::CONSTANT_COLOR,
797 Bf::OneMinusConstant => vk::BlendFactor::ONE_MINUS_CONSTANT_COLOR,
798 Bf::Src1 => vk::BlendFactor::SRC1_COLOR,
799 Bf::OneMinusSrc1 => vk::BlendFactor::ONE_MINUS_SRC1_COLOR,
800 Bf::Src1Alpha => vk::BlendFactor::SRC1_ALPHA,
801 Bf::OneMinusSrc1Alpha => vk::BlendFactor::ONE_MINUS_SRC1_ALPHA,
802 }
803}
804
805fn map_blend_op(operation: crate::BlendOperation) -> vk::BlendOp {
806 use crate::BlendOperation as Bo;
807 match operation {
808 Bo::Add => vk::BlendOp::ADD,
809 Bo::Subtract => vk::BlendOp::SUBTRACT,
810 Bo::ReverseSubtract => vk::BlendOp::REVERSE_SUBTRACT,
811 Bo::Min => vk::BlendOp::MIN,
812 Bo::Max => vk::BlendOp::MAX,
813 }
814}
815
816fn map_blend_component(
817 component: &crate::BlendComponent,
818) -> (vk::BlendOp, vk::BlendFactor, vk::BlendFactor) {
819 let op = map_blend_op(component.operation);
820 let src = map_blend_factor(component.src_factor);
821 let dst = map_blend_factor(component.dst_factor);
822 (op, src, dst)
823}