vulkano_taskgraph/command_buffer/commands/bind_push.rs

1use crate::{
2    command_buffer::{RecordingCommandBuffer, Result},
3    Id,
4};
5use ash::vk;
6use smallvec::SmallVec;
7use std::{ffi::c_void, mem, ptr, sync::Arc};
8use vulkano::{
9    self,
10    buffer::{Buffer, BufferContents, IndexType},
11    device::DeviceOwned,
12    pipeline::{
13        ray_tracing::RayTracingPipeline, ComputePipeline, GraphicsPipeline, PipelineLayout,
14    },
15    DeviceSize, Version, VulkanObject,
16};
17
18/// # Commands to bind or push state for pipeline execution commands
19///
20/// These commands require a queue with a pipeline type that uses the given state.
21impl RecordingCommandBuffer<'_> {
22    /// Binds an index buffer for future indexed draw calls.
23    pub unsafe fn bind_index_buffer(
24        &mut self,
25        buffer: Id<Buffer>,
26        offset: DeviceSize,
27        size: DeviceSize,
28        index_type: IndexType,
29    ) -> Result<&mut Self> {
30        Ok(unsafe { self.bind_index_buffer_unchecked(buffer, offset, size, index_type) })
31    }
32
33    pub unsafe fn bind_index_buffer_unchecked(
34        &mut self,
35        buffer: Id<Buffer>,
36        offset: DeviceSize,
37        size: DeviceSize,
38        index_type: IndexType,
39    ) -> &mut Self {
40        let buffer = unsafe { self.accesses.buffer_unchecked(buffer) };
41
42        let fns = self.device().fns();
43
44        if self.device().enabled_extensions().khr_maintenance5 {
45            unsafe {
46                (fns.khr_maintenance5.cmd_bind_index_buffer2_khr)(
47                    self.handle(),
48                    buffer.handle(),
49                    offset,
50                    size,
51                    index_type.into(),
52                )
53            };
54        } else {
55            unsafe {
56                (fns.v1_0.cmd_bind_index_buffer)(
57                    self.handle(),
58                    buffer.handle(),
59                    offset,
60                    index_type.into(),
61                )
62            };
63        }
64
65        self
66    }
67
68    /// Binds a compute pipeline for future dispatch calls.
69    pub unsafe fn bind_pipeline_compute(
70        &mut self,
71        pipeline: &Arc<ComputePipeline>,
72    ) -> Result<&mut Self> {
73        Ok(unsafe { self.bind_pipeline_compute_unchecked(pipeline) })
74    }
75
76    pub unsafe fn bind_pipeline_compute_unchecked(
77        &mut self,
78        pipeline: &Arc<ComputePipeline>,
79    ) -> &mut Self {
80        let fns = self.device().fns();
81        unsafe {
82            (fns.v1_0.cmd_bind_pipeline)(
83                self.handle(),
84                vk::PipelineBindPoint::COMPUTE,
85                pipeline.handle(),
86            )
87        };
88
89        self.death_row.push(pipeline.clone());
90
91        self
92    }
93
94    /// Binds a graphics pipeline for future draw calls.
95    pub unsafe fn bind_pipeline_graphics(
96        &mut self,
97        pipeline: &Arc<GraphicsPipeline>,
98    ) -> Result<&mut Self> {
99        Ok(unsafe { self.bind_pipeline_graphics_unchecked(pipeline) })
100    }
101
102    pub unsafe fn bind_pipeline_graphics_unchecked(
103        &mut self,
104        pipeline: &Arc<GraphicsPipeline>,
105    ) -> &mut Self {
106        let fns = self.device().fns();
107        unsafe {
108            (fns.v1_0.cmd_bind_pipeline)(
109                self.handle(),
110                vk::PipelineBindPoint::GRAPHICS,
111                pipeline.handle(),
112            )
113        };
114
115        self.death_row.push(pipeline.clone());
116
117        self
118    }
119
120    /// Binds a ray tracing pipeline for future ray tracing calls.
121    pub unsafe fn bind_pipeline_ray_tracing(
122        &mut self,
123        pipeline: &Arc<RayTracingPipeline>,
124    ) -> Result<&mut Self> {
125        Ok(unsafe { self.bind_pipeline_ray_tracing_unchecked(pipeline) })
126    }
127
128    pub unsafe fn bind_pipeline_ray_tracing_unchecked(
129        &mut self,
130        pipeline: &Arc<RayTracingPipeline>,
131    ) -> &mut Self {
132        let fns = self.device().fns();
133        unsafe {
134            (fns.v1_0.cmd_bind_pipeline)(
135                self.handle(),
136                vk::PipelineBindPoint::RAY_TRACING_KHR,
137                pipeline.handle(),
138            )
139        };
140
141        self.death_row.push(pipeline.clone());
142
143        self
144    }
145
146    /// Binds vertex buffers for future draw calls.
147    pub unsafe fn bind_vertex_buffers(
148        &mut self,
149        first_binding: u32,
150        buffers: &[Id<Buffer>],
151        offsets: &[DeviceSize],
152        sizes: &[DeviceSize],
153        strides: &[DeviceSize],
154    ) -> Result<&mut Self> {
155        Ok(unsafe {
156            self.bind_vertex_buffers_unchecked(first_binding, buffers, offsets, sizes, strides)
157        })
158    }
159
160    pub unsafe fn bind_vertex_buffers_unchecked(
161        &mut self,
162        first_binding: u32,
163        buffers: &[Id<Buffer>],
164        offsets: &[DeviceSize],
165        sizes: &[DeviceSize],
166        strides: &[DeviceSize],
167    ) -> &mut Self {
168        if buffers.is_empty() {
169            return self;
170        }
171
172        let buffers_vk = buffers
173            .iter()
174            .map(|&buffer| unsafe { self.accesses.buffer_unchecked(buffer) }.handle())
175            .collect::<SmallVec<[_; 2]>>();
176
177        let device = self.device();
178        let fns = self.device().fns();
179
180        if device.api_version() >= Version::V1_3
181            || device.enabled_extensions().ext_extended_dynamic_state
182            || device.enabled_extensions().ext_shader_object
183        {
184            let cmd_bind_vertex_buffers2 = if device.api_version() >= Version::V1_3 {
185                fns.v1_3.cmd_bind_vertex_buffers2
186            } else if device.enabled_extensions().ext_extended_dynamic_state {
187                fns.ext_extended_dynamic_state.cmd_bind_vertex_buffers2_ext
188            } else {
189                fns.ext_shader_object.cmd_bind_vertex_buffers2_ext
190            };
191
192            unsafe {
193                cmd_bind_vertex_buffers2(
194                    self.handle(),
195                    first_binding,
196                    buffers_vk.len() as u32,
197                    buffers_vk.as_ptr(),
198                    offsets.as_ptr(),
199                    if sizes.is_empty() {
200                        ptr::null()
201                    } else {
202                        sizes.as_ptr()
203                    },
204                    if strides.is_empty() {
205                        ptr::null()
206                    } else {
207                        strides.as_ptr()
208                    },
209                )
210            };
211        } else {
212            unsafe {
213                (fns.v1_0.cmd_bind_vertex_buffers)(
214                    self.handle(),
215                    first_binding,
216                    buffers_vk.len() as u32,
217                    buffers_vk.as_ptr(),
218                    offsets.as_ptr(),
219                )
220            };
221        }
222
223        self
224    }
225
226    /// Sets push constants for future dispatch or draw calls.
227    pub unsafe fn push_constants(
228        &mut self,
229        layout: &Arc<PipelineLayout>,
230        offset: u32,
231        values: &(impl BufferContents + ?Sized),
232    ) -> Result<&mut Self> {
233        Ok(unsafe { self.push_constants_unchecked(layout, offset, values) })
234    }
235
236    pub unsafe fn push_constants_unchecked(
237        &mut self,
238        layout: &Arc<PipelineLayout>,
239        offset: u32,
240        values: &(impl BufferContents + ?Sized),
241    ) -> &mut Self {
242        unsafe {
243            self.push_constants_unchecked_inner(
244                layout,
245                offset,
246                <*const _>::cast(values),
247                mem::size_of_val(values) as u32,
248            )
249        }
250    }
251
252    unsafe fn push_constants_unchecked_inner(
253        &mut self,
254        layout: &Arc<PipelineLayout>,
255        offset: u32,
256        values: *const c_void,
257        size: u32,
258    ) -> &mut Self {
259        if size == 0 {
260            return self;
261        }
262
263        let fns = self.device().fns();
264        let mut current_offset = offset;
265        let mut remaining_size = size;
266
267        for range in layout
268            .push_constant_ranges_disjoint()
269            .iter()
270            .skip_while(|range| range.offset + range.size <= offset)
271        {
272            // There is a gap between ranges, but the passed `values` contain some bytes in this
273            // gap.
274            if range.offset > current_offset {
275                std::process::abort();
276            }
277
278            // Push the minimum of the whole remaining data and the part until the end of this
279            // range.
280            let push_size = remaining_size.min(range.offset + range.size - current_offset);
281            let push_offset = (current_offset - offset) as usize;
282            debug_assert!(push_offset < size as usize);
283            let push_values = unsafe { values.add(push_offset) };
284
285            unsafe {
286                (fns.v1_0.cmd_push_constants)(
287                    self.handle(),
288                    layout.handle(),
289                    range.stages.into(),
290                    current_offset,
291                    push_size,
292                    push_values,
293                )
294            };
295
296            current_offset += push_size;
297            remaining_size -= push_size;
298
299            if remaining_size == 0 {
300                break;
301            }
302        }
303
304        self
305    }
306}