vk_sync_fork/
cmd.rs

use super::*;
use ash;

4/// Simplified wrapper around `vkCmdPipelineBarrier`.
5/// The mapping functions defined above are used to translate the passed in
6/// barrier definitions into a set of pipeline stages and native Vulkan memory
7/// barriers to be passed to `vkCmdPipelineBarrier`.
8/// `command_buffer` is passed unmodified to `vkCmdPipelineBarrier`.
9pub fn pipeline_barrier(
10    device: &ash::Device,
11    command_buffer: vk::CommandBuffer,
12    global_barrier: Option<GlobalBarrier>,
13    buffer_barriers: &[BufferBarrier],
14    image_barriers: &[ImageBarrier],
15) {
16    let mut src_stage_mask = vk::PipelineStageFlags::TOP_OF_PIPE;
17    let mut dst_stage_mask = vk::PipelineStageFlags::BOTTOM_OF_PIPE;
18
19    // TODO: Optimize out the Vec heap allocations
20    let mut vk_memory_barriers: Vec<vk::MemoryBarrier> = Vec::with_capacity(1);
21    let mut vk_buffer_barriers: Vec<vk::BufferMemoryBarrier> =
22        Vec::with_capacity(buffer_barriers.len());
23    let mut vk_image_barriers: Vec<vk::ImageMemoryBarrier> =
24        Vec::with_capacity(image_barriers.len());
25
26    // Global memory barrier
27    if let Some(ref barrier) = global_barrier {
28        let (src_mask, dst_mask, barrier) = get_memory_barrier(barrier);
29        src_stage_mask |= src_mask;
30        dst_stage_mask |= dst_mask;
31        vk_memory_barriers.push(barrier);
32    }
33
34    // Buffer memory barriers
35    for buffer_barrier in buffer_barriers {
36        let (src_mask, dst_mask, barrier) = get_buffer_memory_barrier(buffer_barrier);
37        src_stage_mask |= src_mask;
38        dst_stage_mask |= dst_mask;
39        vk_buffer_barriers.push(barrier);
40    }
41
42    // Image memory barriers
43    for image_barrier in image_barriers {
44        let (src_mask, dst_mask, barrier) = get_image_memory_barrier(image_barrier);
45        src_stage_mask |= src_mask;
46        dst_stage_mask |= dst_mask;
47        vk_image_barriers.push(barrier);
48    }
49
50    unsafe {
51        device.cmd_pipeline_barrier(
52            command_buffer,
53            src_stage_mask,
54            dst_stage_mask,
55            vk::DependencyFlags::empty(),
56            &vk_memory_barriers,
57            &vk_buffer_barriers,
58            &vk_image_barriers,
59        );
60    }
61}
62
63/// Wrapper around `vkCmdSetEvent`.
64/// Sets an event when the accesses defined by `previous_accesses` are completed.
65/// `command_buffer` and `event` are passed unmodified to `vkCmdSetEvent`.
66pub fn set_event(
67    device: &ash::Device,
68    command_buffer: vk::CommandBuffer,
69    event: vk::Event,
70    previous_accesses: &[AccessType],
71) {
72    let mut stage_mask = vk::PipelineStageFlags::TOP_OF_PIPE;
73    for previous_access in previous_accesses {
74        let previous_info = get_access_info(*previous_access);
75        stage_mask |= previous_info.stage_mask;
76    }
77
78    unsafe {
79        device.cmd_set_event(command_buffer, event, stage_mask);
80    }
81}
82
83/// Wrapper around `vkCmdResetEvent`.
84/// Resets an event when the accesses defined by `previous_accesses` are completed.
85/// `command_buffer` and `event` are passed unmodified to `vkCmdResetEvent`.
86pub fn reset_event(
87    device: &ash::Device,
88    command_buffer: vk::CommandBuffer,
89    event: vk::Event,
90    previous_accesses: &[AccessType],
91) {
92    let mut stage_mask = vk::PipelineStageFlags::TOP_OF_PIPE;
93    for previous_access in previous_accesses {
94        let previous_info = get_access_info(*previous_access);
95        stage_mask |= previous_info.stage_mask;
96    }
97
98    unsafe {
99        device.cmd_reset_event(command_buffer, event, stage_mask);
100    }
101}
102
103/// Simplified wrapper around `vkCmdWaitEvents`.
104/// The mapping functions defined above are used to translate the passed in
105/// barrier definitions into a set of pipeline stages and native Vulkan memory
106/// barriers to be passed to `vkCmdPipelineBarrier`.
107///
108/// `commandBuffer` and `events` are passed unmodified to `vkCmdWaitEvents`.
109pub fn wait_events(
110    device: &ash::Device,
111    command_buffer: vk::CommandBuffer,
112    events: &[vk::Event],
113    global_barrier: Option<GlobalBarrier>,
114    buffer_barriers: &[BufferBarrier],
115    image_barriers: &[ImageBarrier],
116) {
117    let mut src_stage_mask = vk::PipelineStageFlags::TOP_OF_PIPE;
118    let mut dst_stage_mask = vk::PipelineStageFlags::BOTTOM_OF_PIPE;
119
120    // TODO: Optimize out the Vec heap allocations
121    let mut vk_memory_barriers: Vec<vk::MemoryBarrier> = Vec::with_capacity(1);
122    let mut vk_buffer_barriers: Vec<vk::BufferMemoryBarrier> =
123        Vec::with_capacity(buffer_barriers.len());
124    let mut vk_image_barriers: Vec<vk::ImageMemoryBarrier> =
125        Vec::with_capacity(image_barriers.len());
126
127    // Global memory barrier
128    if let Some(ref barrier) = global_barrier {
129        let (src_mask, dst_mask, barrier) = get_memory_barrier(barrier);
130        src_stage_mask |= src_mask;
131        dst_stage_mask |= dst_mask;
132        vk_memory_barriers.push(barrier);
133    }
134
135    // Buffer memory barriers
136    for buffer_barrier in buffer_barriers {
137        let (src_mask, dst_mask, barrier) = get_buffer_memory_barrier(buffer_barrier);
138        src_stage_mask |= src_mask;
139        dst_stage_mask |= dst_mask;
140        vk_buffer_barriers.push(barrier);
141    }
142
143    // Image memory barriers
144    for image_barrier in image_barriers {
145        let (src_mask, dst_mask, barrier) = get_image_memory_barrier(image_barrier);
146        src_stage_mask |= src_mask;
147        dst_stage_mask |= dst_mask;
148        vk_image_barriers.push(barrier);
149    }
150
151    unsafe {
152        device.cmd_wait_events(
153            command_buffer,
154            events,
155            src_stage_mask,
156            dst_stage_mask,
157            &vk_memory_barriers,
158            &vk_buffer_barriers,
159            &vk_image_barriers,
160        );
161    }
162}