vulkano_taskgraph/command_buffer/commands/sync.rs

use crate::{
    command_buffer::{RecordingCommandBuffer, Result},
    Id,
};
use ash::vk;
use smallvec::SmallVec;
use std::ops::Range;
use vulkano::{
    buffer::Buffer,
    device::DeviceOwned,
    image::{Image, ImageAspects, ImageLayout, ImageSubresourceRange},
    sync::{AccessFlags, DependencyFlags, PipelineStages},
    DeviceSize, Version, VulkanObject,
};

impl RecordingCommandBuffer<'_> {
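    // Records a pipeline barrier described by `dependency_info` into this command buffer.
    // As written here, this forwards directly to `pipeline_barrier_unchecked`.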
    pub unsafe fn pipeline_barrier(
        &mut self,
        dependency_info: &DependencyInfo<'_>,
    ) -> Result<&mut Self> {
        Ok(unsafe { self.pipeline_barrier_unchecked(dependency_info) })
    }

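    // Records the barrier without validation. Buffer and image ids are resolved through
    // `self.accesses` and translated into the raw Vulkan barrier structures below.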
    pub unsafe fn pipeline_barrier_unchecked(
        &mut self,
        dependency_info: &DependencyInfo<'_>,
    ) -> &mut Self {
        if dependency_info.is_empty() {
            return self;
        }

        let &DependencyInfo {
            dependency_flags,
            memory_barriers,
            buffer_memory_barriers,
            image_memory_barriers,
            _ne: _,
        } = dependency_info;

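        // With the `synchronization2` feature, every barrier carries its own stage masks,
        // so the barriers map directly onto the `*2` structures and a single
        // `vk::DependencyInfo`.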
        if self.device().enabled_features().synchronization2 {
            let memory_barriers_vk: SmallVec<[_; 2]> = memory_barriers
                .iter()
                .map(|barrier| {
                    let &MemoryBarrier {
                        src_stages,
                        src_access,
                        dst_stages,
                        dst_access,
                        _ne: _,
                    } = barrier;

                    vk::MemoryBarrier2::default()
                        .src_stage_mask(src_stages.into())
                        .src_access_mask(src_access.into())
                        .dst_stage_mask(dst_stages.into())
                        .dst_access_mask(dst_access.into())
                })
                .collect();

            let buffer_memory_barriers_vk: SmallVec<[_; 8]> = buffer_memory_barriers
                .iter()
                .map(|barrier| {
                    let &BufferMemoryBarrier {
                        src_stages,
                        src_access,
                        dst_stages,
                        dst_access,
                        buffer,
                        ref range,
                        _ne: _,
                    } = barrier;

                    let buffer = unsafe { self.accesses.buffer_unchecked(buffer) };

                    vk::BufferMemoryBarrier2::default()
                        .src_stage_mask(src_stages.into())
                        .src_access_mask(src_access.into())
                        .dst_stage_mask(dst_stages.into())
                        .dst_access_mask(dst_access.into())
                        .src_queue_family_index(vk::QUEUE_FAMILY_IGNORED)
                        .dst_queue_family_index(vk::QUEUE_FAMILY_IGNORED)
                        .buffer(buffer.handle())
                        .offset(range.start)
                        .size(range.end - range.start)
                })
                .collect();

            let image_memory_barriers_vk: SmallVec<[_; 8]> = image_memory_barriers
                .iter()
                .map(|barrier| {
                    let &ImageMemoryBarrier {
                        src_stages,
                        src_access,
                        dst_stages,
                        dst_access,
                        old_layout,
                        new_layout,
                        image,
                        ref subresource_range,
                        _ne: _,
                    } = barrier;

                    let image = unsafe { self.accesses.image_unchecked(image) };

                    vk::ImageMemoryBarrier2::default()
                        .src_stage_mask(src_stages.into())
                        .src_access_mask(src_access.into())
                        .dst_stage_mask(dst_stages.into())
                        .dst_access_mask(dst_access.into())
                        .old_layout(old_layout.into())
                        .new_layout(new_layout.into())
                        .src_queue_family_index(vk::QUEUE_FAMILY_IGNORED)
                        .dst_queue_family_index(vk::QUEUE_FAMILY_IGNORED)
                        .image(image.handle())
                        .subresource_range(subresource_range.clone().to_vk())
                })
                .collect();

            let dependency_info_vk = vk::DependencyInfo::default()
                .dependency_flags(dependency_flags.into())
                .memory_barriers(&memory_barriers_vk)
                .buffer_memory_barriers(&buffer_memory_barriers_vk)
                .image_memory_barriers(&image_memory_barriers_vk);

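            // `vkCmdPipelineBarrier2` is core as of Vulkan 1.3; on older API versions the
            // same entry point is provided by the `VK_KHR_synchronization2` extension.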
            let fns = self.device().fns();
            let cmd_pipeline_barrier2 = if self.device().api_version() >= Version::V1_3 {
                fns.v1_3.cmd_pipeline_barrier2
            } else {
                fns.khr_synchronization2.cmd_pipeline_barrier2_khr
            };

            unsafe { cmd_pipeline_barrier2(self.handle(), &dependency_info_vk) };
        } else {
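            // Legacy path: `vkCmdPipelineBarrier` takes a single source and destination
            // stage mask for the whole command, so the per-barrier stage masks are folded
            // into these two accumulators while the access masks stay per barrier.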
            let mut src_stage_mask = vk::PipelineStageFlags::empty();
            let mut dst_stage_mask = vk::PipelineStageFlags::empty();

            let memory_barriers_vk: SmallVec<[_; 2]> = memory_barriers
                .iter()
                .map(|barrier| {
                    let &MemoryBarrier {
                        src_stages,
                        src_access,
                        dst_stages,
                        dst_access,
                        _ne: _,
                    } = barrier;

                    src_stage_mask |= src_stages.into();
                    dst_stage_mask |= dst_stages.into();

                    vk::MemoryBarrier::default()
                        .src_access_mask(src_access.into())
                        .dst_access_mask(dst_access.into())
                })
                .collect();

            let buffer_memory_barriers_vk: SmallVec<[_; 8]> = buffer_memory_barriers
                .iter()
                .map(|barrier| {
                    let &BufferMemoryBarrier {
                        src_stages,
                        src_access,
                        dst_stages,
                        dst_access,
                        buffer,
                        ref range,
                        _ne: _,
                    } = barrier;

                    src_stage_mask |= src_stages.into();
                    dst_stage_mask |= dst_stages.into();

                    let buffer = unsafe { self.accesses.buffer_unchecked(buffer) };

                    vk::BufferMemoryBarrier::default()
                        .src_access_mask(src_access.into())
                        .dst_access_mask(dst_access.into())
                        .src_queue_family_index(vk::QUEUE_FAMILY_IGNORED)
                        .dst_queue_family_index(vk::QUEUE_FAMILY_IGNORED)
                        .buffer(buffer.handle())
                        .offset(range.start)
                        .size(range.end - range.start)
                })
                .collect();

            let image_memory_barriers_vk: SmallVec<[_; 8]> = image_memory_barriers
                .iter()
                .map(|barrier| {
                    let &ImageMemoryBarrier {
                        src_stages,
                        src_access,
                        dst_stages,
                        dst_access,
                        old_layout,
                        new_layout,
                        image,
                        ref subresource_range,
                        _ne: _,
                    } = barrier;

                    src_stage_mask |= src_stages.into();
                    dst_stage_mask |= dst_stages.into();

                    let image = unsafe { self.accesses.image_unchecked(image) };

                    vk::ImageMemoryBarrier::default()
                        .src_access_mask(src_access.into())
                        .dst_access_mask(dst_access.into())
                        .old_layout(old_layout.into())
                        .new_layout(new_layout.into())
                        .src_queue_family_index(vk::QUEUE_FAMILY_IGNORED)
                        .dst_queue_family_index(vk::QUEUE_FAMILY_IGNORED)
                        .image(image.handle())
                        .subresource_range(subresource_range.clone().to_vk())
                })
                .collect();

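            // The legacy command does not allow empty stage masks, so fall back to the
            // "wait on nothing" / "block nothing" equivalents.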
            if src_stage_mask.is_empty() {
                src_stage_mask |= vk::PipelineStageFlags::TOP_OF_PIPE;
            }

            if dst_stage_mask.is_empty() {
                dst_stage_mask |= vk::PipelineStageFlags::BOTTOM_OF_PIPE;
            }

            let fns = self.device().fns();
            unsafe {
                (fns.v1_0.cmd_pipeline_barrier)(
                    self.handle(),
                    src_stage_mask,
                    dst_stage_mask,
                    dependency_flags.into(),
                    memory_barriers_vk.len() as u32,
                    memory_barriers_vk.as_ptr(),
                    buffer_memory_barriers_vk.len() as u32,
                    buffer_memory_barriers_vk.as_ptr(),
                    image_memory_barriers_vk.len() as u32,
                    image_memory_barriers_vk.as_ptr(),
                )
            };
        }

        self
    }
}
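// The dependency info for a pipeline barrier: the sets of global memory, buffer, and
// image memory barriers to record, plus flags modifying how the dependency is formed.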
#[derive(Clone, Debug)]
pub struct DependencyInfo<'a> {
    pub dependency_flags: DependencyFlags,

    pub memory_barriers: &'a [MemoryBarrier<'a>],

    pub buffer_memory_barriers: &'a [BufferMemoryBarrier<'a>],

    pub image_memory_barriers: &'a [ImageMemoryBarrier<'a>],

    pub _ne: crate::NonExhaustive<'a>,
}

impl DependencyInfo<'_> {
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.memory_barriers.is_empty()
            && self.buffer_memory_barriers.is_empty()
            && self.image_memory_barriers.is_empty()
    }
}

impl Default for DependencyInfo<'_> {
    #[inline]
    fn default() -> Self {
        DependencyInfo {
            dependency_flags: DependencyFlags::default(),
            memory_barriers: &[],
            buffer_memory_barriers: &[],
            image_memory_barriers: &[],
            _ne: crate::NE,
        }
    }
}

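// A global memory barrier: an execution and memory dependency between the given source
// and destination stages that applies to all resources.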
#[derive(Clone, Debug)]
pub struct MemoryBarrier<'a> {
    pub src_stages: PipelineStages,

    pub src_access: AccessFlags,

    pub dst_stages: PipelineStages,

    pub dst_access: AccessFlags,

    pub _ne: crate::NonExhaustive<'a>,
}

impl Default for MemoryBarrier<'_> {
    #[inline]
    fn default() -> Self {
        Self {
            src_stages: PipelineStages::empty(),
            src_access: AccessFlags::empty(),
            dst_stages: PipelineStages::empty(),
            dst_access: AccessFlags::empty(),
            _ne: crate::NE,
        }
    }
}

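// A memory barrier scoped to a byte `range` of the buffer identified by `buffer`; the id
// is resolved against the command buffer's accesses at record time.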
#[derive(Clone, Debug)]
pub struct BufferMemoryBarrier<'a> {
    pub src_stages: PipelineStages,

    pub src_access: AccessFlags,

    pub dst_stages: PipelineStages,

    pub dst_access: AccessFlags,

    pub buffer: Id<Buffer>,

    pub range: Range<DeviceSize>,

    pub _ne: crate::NonExhaustive<'a>,
}

impl Default for BufferMemoryBarrier<'_> {
    #[inline]
    fn default() -> Self {
        BufferMemoryBarrier {
            src_stages: PipelineStages::empty(),
            src_access: AccessFlags::empty(),
            dst_stages: PipelineStages::empty(),
            dst_access: AccessFlags::empty(),
            buffer: Id::INVALID,
            range: 0..0,
            _ne: crate::NE,
        }
    }
}

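// A memory barrier scoped to a subresource range of the image identified by `image`,
// optionally performing a layout transition from `old_layout` to `new_layout`.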
#[derive(Clone, Debug)]
pub struct ImageMemoryBarrier<'a> {
    pub src_stages: PipelineStages,

    pub src_access: AccessFlags,

    pub dst_stages: PipelineStages,

    pub dst_access: AccessFlags,

    pub old_layout: ImageLayout,

    pub new_layout: ImageLayout,

    pub image: Id<Image>,

    pub subresource_range: ImageSubresourceRange,

    pub _ne: crate::NonExhaustive<'a>,
}

impl Default for ImageMemoryBarrier<'_> {
    #[inline]
    fn default() -> Self {
        ImageMemoryBarrier {
            src_stages: PipelineStages::empty(),
            src_access: AccessFlags::empty(),
            dst_stages: PipelineStages::empty(),
            dst_access: AccessFlags::empty(),
            old_layout: ImageLayout::Undefined,
            new_layout: ImageLayout::Undefined,
            image: Id::INVALID,
            subresource_range: ImageSubresourceRange {
                aspects: ImageAspects::empty(),
                mip_levels: 0..0,
                array_layers: 0..0,
            },
            _ne: crate::NE,
        }
    }
}
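
// A minimal usage sketch: recording a layout transition for the first mip level and array
// layer of an image before sampling it. The `recorder` and `image_id` names are assumed to
// come from the caller's recording context and task-graph resources; they are not defined
// in this module.
//
//     unsafe {
//         recorder.pipeline_barrier(&DependencyInfo {
//             image_memory_barriers: &[ImageMemoryBarrier {
//                 src_stages: PipelineStages::TOP_OF_PIPE,
//                 dst_stages: PipelineStages::FRAGMENT_SHADER,
//                 dst_access: AccessFlags::SHADER_SAMPLED_READ,
//                 old_layout: ImageLayout::Undefined,
//                 new_layout: ImageLayout::ShaderReadOnlyOptimal,
//                 image: image_id,
//                 subresource_range: ImageSubresourceRange {
//                     aspects: ImageAspects::COLOR,
//                     mip_levels: 0..1,
//                     array_layers: 0..1,
//                 },
//                 ..Default::default()
//             }],
//             ..Default::default()
//         })?;
//     }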