use super::{conv, Command as C};
use arrayvec::ArrayVec;
use std::{mem, ops::Range};

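/// Description of a texture slot: the bind target it was last bound to,
/// and the index of the sampler slot it is paired with, if any.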
#[derive(Clone, Copy, Debug, Default)]
struct TextureSlotDesc {
    tex_target: super::BindTarget,
    sampler_index: Option<u8>,
}

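/// Encoder-side shadow of the GL pipeline state.
///
/// Commands are recorded against this state so redundant state changes can
/// be skipped, and deferred work (such as rebinding instance-stepped vertex
/// buffers for a new `first_instance`) is tracked via the dirty masks below
/// and replayed lazily at draw time.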
#[derive(Default)]
pub(super) struct State {
    topology: u32,
    primitive: super::PrimitiveState,
    index_format: wgt::IndexFormat,
    index_offset: wgt::BufferAddress,
    vertex_buffers:
        [(super::VertexBufferDesc, Option<super::BufferBinding>); crate::MAX_VERTEX_BUFFERS],
    vertex_attributes: ArrayVec<super::AttributeDesc, { super::MAX_VERTEX_ATTRIBUTES }>,
    color_targets: ArrayVec<super::ColorTargetDesc, { crate::MAX_COLOR_ATTACHMENTS }>,
    stencil: super::StencilState,
    depth_bias: wgt::DepthBiasState,
    alpha_to_coverage_enabled: bool,
    samplers: [Option<glow::Sampler>; super::MAX_SAMPLERS],
    texture_slots: [TextureSlotDesc; super::MAX_TEXTURE_SLOTS],
    render_size: wgt::Extent3d,
    resolve_attachments: ArrayVec<(u32, super::TextureView), { crate::MAX_COLOR_ATTACHMENTS }>,
    invalidate_attachments: ArrayVec<u32, { crate::MAX_COLOR_ATTACHMENTS + 2 }>,
    has_pass_label: bool,
    instance_vbuf_mask: usize,
    dirty_vbuf_mask: usize,
    active_first_instance: u32,
    push_offset_to_uniform: ArrayVec<super::UniformDesc, { super::MAX_PUSH_CONSTANTS }>,
    end_of_pass_timestamp: Option<glow::Query>,
}

impl super::CommandBuffer {
    fn clear(&mut self) {
        self.label = None;
        self.commands.clear();
        self.data_bytes.clear();
        self.queries.clear();
    }

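    /// Copies `marker` into `data_bytes` and returns the byte range it occupies.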
    fn add_marker(&mut self, marker: &str) -> Range<u32> {
        let start = self.data_bytes.len() as u32;
        self.data_bytes.extend(marker.as_bytes());
        start..self.data_bytes.len() as u32
    }

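    /// Appends the raw bytes of `data` to `data_bytes`, returning the byte
    /// range to be referenced later by `C::SetPushConstants`.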
    fn add_push_constant_data(&mut self, data: &[u32]) -> Range<u32> {
        let data_raw = unsafe {
            std::slice::from_raw_parts(data.as_ptr() as *const _, mem::size_of_val(data))
        };
        let start = self.data_bytes.len();
        assert!(start < u32::MAX as usize);
        self.data_bytes.extend_from_slice(data_raw);
        let end = self.data_bytes.len();
        assert!(end < u32::MAX as usize);
        (start as u32)..(end as u32)
    }
}

impl super::CommandEncoder {
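    /// Re-emits the stencil function for both faces, collapsing them into a
    /// single `FRONT_AND_BACK` command when the two sides agree.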
    fn rebind_stencil_func(&mut self) {
        fn make(s: &super::StencilSide, face: u32) -> C {
            C::SetStencilFunc {
                face,
                function: s.function,
                reference: s.reference,
                read_mask: s.mask_read,
            }
        }

        let s = &self.state.stencil;
        if s.front.function == s.back.function
            && s.front.mask_read == s.back.mask_read
            && s.front.reference == s.back.reference
        {
            self.cmd_buffer
                .commands
                .push(make(&s.front, glow::FRONT_AND_BACK));
        } else {
            self.cmd_buffer.commands.push(make(&s.front, glow::FRONT));
            self.cmd_buffer.commands.push(make(&s.back, glow::BACK));
        }
    }

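    /// Flushes dirty vertex-buffer bindings before a draw.
    ///
    /// With `VERTEX_BUFFER_LAYOUT` support, whole buffers are rebound with an
    /// extra offset that bakes in `first_instance` for instance-stepped
    /// buffers. Without it, each vertex attribute is re-specified
    /// individually, with the buffer offset (and any instance offset) folded
    /// into the attribute offset.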
    fn rebind_vertex_data(&mut self, first_instance: u32) {
        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            for (index, pair) in self.state.vertex_buffers.iter().enumerate() {
                if self.state.dirty_vbuf_mask & (1 << index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) = match *pair {
                    (_, None) => continue,
                    (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                };
                let instance_offset = match buffer_desc.step {
                    wgt::VertexStepMode::Vertex => 0,
                    wgt::VertexStepMode::Instance => first_instance * buffer_desc.stride,
                };

                self.cmd_buffer.commands.push(C::SetVertexBuffer {
                    index: index as u32,
                    buffer: super::BufferBinding {
                        raw: vb.raw,
                        offset: vb.offset + instance_offset as wgt::BufferAddress,
                    },
                    buffer_desc,
                });
                self.state.dirty_vbuf_mask ^= 1 << index;
            }
        } else {
            let mut vbuf_mask = 0;
            for attribute in self.state.vertex_attributes.iter() {
                if self.state.dirty_vbuf_mask & (1 << attribute.buffer_index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) =
                    match self.state.vertex_buffers[attribute.buffer_index as usize] {
                        (_, None) => continue,
                        (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                    };

                let mut attribute_desc = attribute.clone();
                attribute_desc.offset += vb.offset as u32;
                if buffer_desc.step == wgt::VertexStepMode::Instance {
                    attribute_desc.offset += buffer_desc.stride * first_instance;
                }

                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: Some(vb.raw),
                    buffer_desc,
                    attribute_desc,
                });
                vbuf_mask |= 1 << attribute.buffer_index;
            }
            self.state.dirty_vbuf_mask ^= vbuf_mask;
        }
    }

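    /// Re-issues `BindSampler` commands for every texture slot whose texture
    /// or associated sampler is flagged dirty.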
    fn rebind_sampler_states(&mut self, dirty_textures: u32, dirty_samplers: u32) {
        for (texture_index, slot) in self.state.texture_slots.iter().enumerate() {
            if dirty_textures & (1 << texture_index) != 0
                || slot
                    .sampler_index
                    .map_or(false, |si| dirty_samplers & (1 << si) != 0)
            {
                let sampler = slot
                    .sampler_index
                    .and_then(|si| self.state.samplers[si as usize]);
                self.cmd_buffer
                    .commands
                    .push(C::BindSampler(texture_index as u32, sampler));
            }
        }
    }

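    /// Called before every draw: if `first_instance` changed, all
    /// instance-stepped vertex buffers are marked dirty so their baked-in
    /// offsets can be recomputed, then any dirty vertex data is rebound.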
    fn prepare_draw(&mut self, first_instance: u32) {
        if first_instance != self.state.active_first_instance {
            self.state.dirty_vbuf_mask |= self.state.instance_vbuf_mask;
            self.state.active_first_instance = first_instance;
        }
        if self.state.dirty_vbuf_mask != 0 {
            self.rebind_vertex_data(first_instance);
        }
    }

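    /// Shared program setup for render and compute pipelines: binds the
    /// program, refreshes the push-constant uniform table, and rebinds
    /// samplers whose texture/sampler pairing changed.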
    fn set_pipeline_inner(&mut self, inner: &super::PipelineInner) {
        self.cmd_buffer.commands.push(C::SetProgram(inner.program));

        self.state.push_offset_to_uniform.clear();
        self.state
            .push_offset_to_uniform
            .extend(inner.uniforms.iter().cloned());

        let mut dirty_textures = 0u32;
        for (texture_index, (slot, &sampler_index)) in self
            .state
            .texture_slots
            .iter_mut()
            .zip(inner.sampler_map.iter())
            .enumerate()
        {
            if slot.sampler_index != sampler_index {
                slot.sampler_index = sampler_index;
                dirty_textures |= 1 << texture_index;
            }
        }
        if dirty_textures != 0 {
            self.rebind_sampler_states(dirty_textures, 0);
        }
    }
}

impl crate::CommandEncoder<super::Api> for super::CommandEncoder {
    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
        self.state = State::default();
        self.cmd_buffer.label = label.map(str::to_string);
        Ok(())
    }
    unsafe fn discard_encoding(&mut self) {
        self.cmd_buffer.clear();
    }
    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        Ok(mem::take(&mut self.cmd_buffer))
    }
    unsafe fn reset_all<I>(&mut self, _command_buffers: I) {}

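    // Buffer barriers are only emitted for transitions out of writable
    // storage use, and only when the backend supports memory barriers.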
    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Api>>,
    {
        if !self
            .private_caps
            .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
        {
            return;
        }
        for bar in barriers {
            if !bar
                .usage
                .start
                .contains(crate::BufferUses::STORAGE_READ_WRITE)
            {
                continue;
            }
            self.cmd_buffer
                .commands
                .push(C::BufferBarrier(bar.buffer.raw.unwrap(), bar.usage.end));
        }
    }

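    // Unlike buffer barriers, texture transitions out of writable storage
    // use are folded into a single combined `TextureBarrier` command rather
    // than being emitted per texture.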
    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Api>>,
    {
        if !self
            .private_caps
            .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
        {
            return;
        }

        let mut combined_usage = crate::TextureUses::empty();
        for bar in barriers {
            if !bar
                .usage
                .start
                .contains(crate::TextureUses::STORAGE_READ_WRITE)
            {
                continue;
            }
            combined_usage |= bar.usage.end;
        }

        if !combined_usage.is_empty() {
            self.cmd_buffer
                .commands
                .push(C::TextureBarrier(combined_usage));
        }
    }

    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        self.cmd_buffer.commands.push(C::ClearBuffer {
            dst: buffer.clone(),
            dst_target: buffer.target,
            range,
        });
    }

    unsafe fn copy_buffer_to_buffer<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferCopy>,
    {
        let (src_target, dst_target) = if src.target == dst.target {
            (glow::COPY_READ_BUFFER, glow::COPY_WRITE_BUFFER)
        } else {
            (src.target, dst.target)
        };
        for copy in regions {
            self.cmd_buffer.commands.push(C::CopyBufferToBuffer {
                src: src.clone(),
                src_target,
                dst: dst.clone(),
                dst_target,
                copy,
            })
        }
    }

    #[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))]
    unsafe fn copy_external_image_to_texture<T>(
        &mut self,
        src: &wgt::ImageCopyExternalImage,
        dst: &super::Texture,
        dst_premultiplication: bool,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();
        for copy in regions {
            self.cmd_buffer
                .commands
                .push(C::CopyExternalImageToTexture {
                    src: src.clone(),
                    dst: dst_raw,
                    dst_target,
                    dst_format: dst.format,
                    dst_premultiplication,
                    copy,
                })
        }
    }

    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: crate::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let (src_raw, src_target) = src.inner.as_native();
        let (dst_raw, dst_target) = dst.inner.as_native();
        for mut copy in regions {
            copy.clamp_size_to_virtual(&src.copy_size, &dst.copy_size);
            self.cmd_buffer.commands.push(C::CopyTextureToTexture {
                src: src_raw,
                src_target,
                dst: dst_raw,
                dst_target,
                copy,
            })
        }
    }

    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();

        for mut copy in regions {
            copy.clamp_size_to_virtual(&dst.copy_size);
            self.cmd_buffer.commands.push(C::CopyBufferToTexture {
                src: src.clone(),
                src_target: src.target,
                dst: dst_raw,
                dst_target,
                dst_format: dst.format,
                copy,
            })
        }
    }

    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: crate::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (src_raw, src_target) = src.inner.as_native();
        for mut copy in regions {
            copy.clamp_size_to_virtual(&src.copy_size);
            self.cmd_buffer.commands.push(C::CopyTextureToBuffer {
                src: src_raw,
                src_target,
                src_format: src.format,
                dst: dst.clone(),
                dst_target: dst.target,
                copy,
            })
        }
    }

    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        let query = set.queries[index as usize];
        self.cmd_buffer
            .commands
            .push(C::BeginQuery(query, set.target));
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, _index: u32) {
        self.cmd_buffer.commands.push(C::EndQuery(set.target));
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        let query = set.queries[index as usize];
        self.cmd_buffer.commands.push(C::TimestampQuery(query));
    }
    unsafe fn reset_queries(&mut self, _set: &super::QuerySet, _range: Range<u32>) {}
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        _stride: wgt::BufferSize,
    ) {
        let start = self.cmd_buffer.queries.len();
        self.cmd_buffer
            .queries
            .extend_from_slice(&set.queries[range.start as usize..range.end as usize]);
        let query_range = start as u32..self.cmd_buffer.queries.len() as u32;
        self.cmd_buffer.commands.push(C::CopyQueryResults {
            query_range,
            dst: buffer.clone(),
            dst_target: buffer.target,
            dst_offset: offset,
        });
    }

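    // Sets up the framebuffer for the pass: either the default framebuffer,
    // or a fresh internal one with each attachment bound. Resolve targets
    // and discard (invalidate) lists are recorded here for `end_render_pass`,
    // followed by viewport/scissor setup and any requested clears.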
    unsafe fn begin_render_pass(&mut self, desc: &crate::RenderPassDescriptor<super::Api>) {
        debug_assert!(self.state.end_of_pass_timestamp.is_none());
        if let Some(ref t) = desc.timestamp_writes {
            if let Some(index) = t.beginning_of_pass_write_index {
                unsafe { self.write_timestamp(t.query_set, index) }
            }
            self.state.end_of_pass_timestamp = t
                .end_of_pass_write_index
                .map(|index| t.query_set.queries[index as usize]);
        }

        self.state.render_size = desc.extent;
        self.state.resolve_attachments.clear();
        self.state.invalidate_attachments.clear();
        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }

        let rendering_to_external_framebuffer = desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .any(|at| match at.target.view.inner {
                #[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))]
                super::TextureInner::ExternalFramebuffer { .. } => true,
                _ => false,
            });

        if rendering_to_external_framebuffer && desc.color_attachments.len() != 1 {
            panic!("Multiple render attachments with external framebuffers are not supported.");
        }

        match desc
            .color_attachments
            .first()
            .filter(|at| at.is_some())
            .and_then(|at| at.as_ref().map(|at| &at.target.view.inner))
        {
            Some(&super::TextureInner::DefaultRenderbuffer) => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: true });
            }
            _ => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: false });

                for (i, cat) in desc.color_attachments.iter().enumerate() {
                    if let Some(cat) = cat.as_ref() {
                        let attachment = glow::COLOR_ATTACHMENT0 + i as u32;
                        self.cmd_buffer.commands.push(C::BindAttachment {
                            attachment,
                            view: cat.target.view.clone(),
                        });
                        if let Some(ref rat) = cat.resolve_target {
                            self.state
                                .resolve_attachments
                                .push((attachment, rat.view.clone()));
                        }
                        if !cat.ops.contains(crate::AttachmentOps::STORE) {
                            self.state.invalidate_attachments.push(attachment);
                        }
                    }
                }
                if let Some(ref dsat) = desc.depth_stencil_attachment {
                    let aspects = dsat.target.view.aspects;
                    let attachment = match aspects {
                        crate::FormatAspects::DEPTH => glow::DEPTH_ATTACHMENT,
                        crate::FormatAspects::STENCIL => glow::STENCIL_ATTACHMENT,
                        _ => glow::DEPTH_STENCIL_ATTACHMENT,
                    };
                    self.cmd_buffer.commands.push(C::BindAttachment {
                        attachment,
                        view: dsat.target.view.clone(),
                    });
                    if aspects.contains(crate::FormatAspects::DEPTH)
                        && !dsat.depth_ops.contains(crate::AttachmentOps::STORE)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::DEPTH_ATTACHMENT);
                    }
                    if aspects.contains(crate::FormatAspects::STENCIL)
                        && !dsat.stencil_ops.contains(crate::AttachmentOps::STORE)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::STENCIL_ATTACHMENT);
                    }
                }

                if !rendering_to_external_framebuffer {
                    self.cmd_buffer
                        .commands
                        .push(C::SetDrawColorBuffers(desc.color_attachments.len() as u8));
                }
            }
        }

        let rect = crate::Rect {
            x: 0,
            y: 0,
            w: desc.extent.width as i32,
            h: desc.extent.height as i32,
        };
        self.cmd_buffer.commands.push(C::SetScissor(rect.clone()));
        self.cmd_buffer.commands.push(C::SetViewport {
            rect,
            depth: 0.0..1.0,
        });

        for (i, cat) in desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .enumerate()
        {
            if !cat.ops.contains(crate::AttachmentOps::LOAD) {
                let c = &cat.clear_value;
                self.cmd_buffer.commands.push(
                    match cat.target.view.format.sample_type(None).unwrap() {
                        wgt::TextureSampleType::Float { .. } => C::ClearColorF {
                            draw_buffer: i as u32,
                            color: [c.r as f32, c.g as f32, c.b as f32, c.a as f32],
                            is_srgb: cat.target.view.format.is_srgb(),
                        },
                        wgt::TextureSampleType::Uint => C::ClearColorU(
                            i as u32,
                            [c.r as u32, c.g as u32, c.b as u32, c.a as u32],
                        ),
                        wgt::TextureSampleType::Sint => C::ClearColorI(
                            i as u32,
                            [c.r as i32, c.g as i32, c.b as i32, c.a as i32],
                        ),
                        wgt::TextureSampleType::Depth => unreachable!(),
                    },
                );
            }
        }
        if let Some(ref dsat) = desc.depth_stencil_attachment {
            let clear_depth = !dsat.depth_ops.contains(crate::AttachmentOps::LOAD);
            let clear_stencil = !dsat.stencil_ops.contains(crate::AttachmentOps::LOAD);

            if clear_depth && clear_stencil {
                self.cmd_buffer.commands.push(C::ClearDepthAndStencil(
                    dsat.clear_value.0,
                    dsat.clear_value.1,
                ));
            } else if clear_depth {
                self.cmd_buffer
                    .commands
                    .push(C::ClearDepth(dsat.clear_value.0));
            } else if clear_stencil {
                self.cmd_buffer
                    .commands
                    .push(C::ClearStencil(dsat.clear_value.1));
            }
        }
    }
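    // Finishes the pass: the multisample resolves and attachment
    // invalidations recorded by `begin_render_pass` are flushed here, and
    // per-pass state (labels, vertex attributes, primitive state) is reset.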
    unsafe fn end_render_pass(&mut self) {
        for (attachment, dst) in self.state.resolve_attachments.drain(..) {
            self.cmd_buffer.commands.push(C::ResolveAttachment {
                attachment,
                dst,
                size: self.state.render_size,
            });
        }
        if !self.state.invalidate_attachments.is_empty() {
            self.cmd_buffer.commands.push(C::InvalidateAttachments(
                self.state.invalidate_attachments.clone(),
            ));
            self.state.invalidate_attachments.clear();
        }
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }
        self.state.instance_vbuf_mask = 0;
        self.state.dirty_vbuf_mask = 0;
        self.state.active_first_instance = 0;
        self.state.color_targets.clear();
        for vat in &self.state.vertex_attributes {
            self.cmd_buffer
                .commands
                .push(C::UnsetVertexAttribute(vat.location));
        }
        self.state.vertex_attributes.clear();
        self.state.primitive = super::PrimitiveState::default();

        if let Some(query) = self.state.end_of_pass_timestamp.take() {
            self.cmd_buffer.commands.push(C::TimestampQuery(query));
        }
    }

    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let mut do_index = 0;
        let mut dirty_textures = 0u32;
        let mut dirty_samplers = 0u32;
        let group_info = &layout.group_infos[index as usize];

        for (binding_layout, raw_binding) in group_info.entries.iter().zip(group.contents.iter()) {
            let slot = group_info.binding_to_slot[binding_layout.binding as usize] as u32;
            match *raw_binding {
                super::RawBinding::Buffer {
                    raw,
                    offset: base_offset,
                    size,
                } => {
                    let mut offset = base_offset;
                    let target = match binding_layout.ty {
                        wgt::BindingType::Buffer {
                            ty,
                            has_dynamic_offset,
                            min_binding_size: _,
                        } => {
                            if has_dynamic_offset {
                                offset += dynamic_offsets[do_index] as i32;
                                do_index += 1;
                            }
                            match ty {
                                wgt::BufferBindingType::Uniform => glow::UNIFORM_BUFFER,
                                wgt::BufferBindingType::Storage { .. } => {
                                    glow::SHADER_STORAGE_BUFFER
                                }
                            }
                        }
                        _ => unreachable!(),
                    };
                    self.cmd_buffer.commands.push(C::BindBuffer {
                        target,
                        slot,
                        buffer: raw,
                        offset,
                        size,
                    });
                }
                super::RawBinding::Sampler(sampler) => {
                    dirty_samplers |= 1 << slot;
                    self.state.samplers[slot as usize] = Some(sampler);
                }
                super::RawBinding::Texture {
                    raw,
                    target,
                    aspects,
                } => {
                    dirty_textures |= 1 << slot;
                    self.state.texture_slots[slot as usize].tex_target = target;
                    self.cmd_buffer.commands.push(C::BindTexture {
                        slot,
                        texture: raw,
                        target,
                        aspects,
                    });
                }
                super::RawBinding::Image(ref binding) => {
                    self.cmd_buffer.commands.push(C::BindImage {
                        slot,
                        binding: binding.clone(),
                    });
                }
            }
        }

        self.rebind_sampler_states(dirty_textures, dirty_samplers);
    }

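    // Push constants are emulated with plain uniforms: the data is staged in
    // the command buffer, then one `SetPushConstants` command is emitted per
    // uniform covering the written range.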
    unsafe fn set_push_constants(
        &mut self,
        _layout: &super::PipelineLayout,
        _stages: wgt::ShaderStages,
        start_offset: u32,
        data: &[u32],
    ) {
        let range = self.cmd_buffer.add_push_constant_data(data);

        let end = start_offset + data.len() as u32 * 4;
        let mut offset = start_offset;
        while offset < end {
            let uniform = self.state.push_offset_to_uniform[offset as usize / 4].clone();
            let size = uniform.size;
            if uniform.location.is_none() {
                panic!("No uniform for push constant");
            }
            self.cmd_buffer.commands.push(C::SetPushConstants {
                uniform,
                offset: range.start + offset,
            });
            offset += size;
        }
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {
        let range = self.cmd_buffer.add_marker(label);
        self.cmd_buffer.commands.push(C::InsertDebugMarker(range));
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        let range = self.cmd_buffer.add_marker(group_label);
        self.cmd_buffer.commands.push(C::PushDebugGroup(range));
    }
    unsafe fn end_debug_marker(&mut self) {
        self.cmd_buffer.commands.push(C::PopDebugGroup);
    }

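    // Applies a render pipeline by diffing it against the tracked state and
    // emitting commands only for what actually changed (vertex layout,
    // primitive, depth/stencil, blending, color targets).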
    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        self.state.topology = conv::map_primitive_topology(pipeline.primitive.topology);

        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            for vat in pipeline.vertex_attributes.iter() {
                let vb = &pipeline.vertex_buffers[vat.buffer_index as usize];
                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: None,
                    buffer_desc: vb.clone(),
                    attribute_desc: vat.clone(),
                });
            }
        } else {
            for vat in &self.state.vertex_attributes {
                self.cmd_buffer
                    .commands
                    .push(C::UnsetVertexAttribute(vat.location));
            }
            self.state.vertex_attributes.clear();

            self.state.dirty_vbuf_mask = 0;
            for vat in pipeline.vertex_attributes.iter() {
                self.state.dirty_vbuf_mask |= 1 << vat.buffer_index;
                self.state.vertex_attributes.push(vat.clone());
            }
        }

        self.state.instance_vbuf_mask = 0;
        for (index, (&mut (ref mut state_desc, _), pipe_desc)) in self
            .state
            .vertex_buffers
            .iter_mut()
            .zip(pipeline.vertex_buffers.iter())
            .enumerate()
        {
            if pipe_desc.step == wgt::VertexStepMode::Instance {
                self.state.instance_vbuf_mask |= 1 << index;
            }
            if state_desc != pipe_desc {
                self.state.dirty_vbuf_mask |= 1 << index;
                *state_desc = pipe_desc.clone();
            }
        }

        self.set_pipeline_inner(&pipeline.inner);

        let prim_state = conv::map_primitive_state(&pipeline.primitive);
        if prim_state != self.state.primitive {
            self.cmd_buffer
                .commands
                .push(C::SetPrimitive(prim_state.clone()));
            self.state.primitive = prim_state;
        }

        let mut aspects = crate::FormatAspects::empty();
        if pipeline.depth_bias != self.state.depth_bias {
            self.state.depth_bias = pipeline.depth_bias;
            self.cmd_buffer
                .commands
                .push(C::SetDepthBias(pipeline.depth_bias));
        }
        if let Some(ref depth) = pipeline.depth {
            aspects |= crate::FormatAspects::DEPTH;
            self.cmd_buffer.commands.push(C::SetDepth(depth.clone()));
        }
        if let Some(ref stencil) = pipeline.stencil {
            aspects |= crate::FormatAspects::STENCIL;
            self.state.stencil = stencil.clone();
            self.rebind_stencil_func();
            if stencil.front.ops == stencil.back.ops
                && stencil.front.mask_write == stencil.back.mask_write
            {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT_AND_BACK,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
            } else {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::BACK,
                    write_mask: stencil.back.mask_write,
                    ops: stencil.back.ops.clone(),
                });
            }
        }
        self.cmd_buffer
            .commands
            .push(C::ConfigureDepthStencil(aspects));

        if pipeline.alpha_to_coverage_enabled != self.state.alpha_to_coverage_enabled {
            self.state.alpha_to_coverage_enabled = pipeline.alpha_to_coverage_enabled;
            self.cmd_buffer
                .commands
                .push(C::SetAlphaToCoverage(pipeline.alpha_to_coverage_enabled));
        }

        if self.state.color_targets[..] != pipeline.color_targets[..] {
            if pipeline
                .color_targets
                .iter()
                .skip(1)
                .any(|ct| *ct != pipeline.color_targets[0])
            {
                for (index, ct) in pipeline.color_targets.iter().enumerate() {
                    self.cmd_buffer.commands.push(C::SetColorTarget {
                        draw_buffer_index: Some(index as u32),
                        desc: ct.clone(),
                    });
                }
            } else {
                self.cmd_buffer.commands.push(C::SetColorTarget {
                    draw_buffer_index: None,
                    desc: pipeline.color_targets.first().cloned().unwrap_or_default(),
                });
            }
        }
        self.state.color_targets.clear();
        for ct in pipeline.color_targets.iter() {
            self.state.color_targets.push(ct.clone());
        }
    }

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Api>,
        format: wgt::IndexFormat,
    ) {
        self.state.index_offset = binding.offset;
        self.state.index_format = format;
        self.cmd_buffer
            .commands
            .push(C::SetIndexBuffer(binding.buffer.raw.unwrap()));
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Api>,
    ) {
        self.state.dirty_vbuf_mask |= 1 << index;
        let (_, ref mut vb) = self.state.vertex_buffers[index as usize];
        *vb = Some(super::BufferBinding {
            raw: binding.buffer.raw.unwrap(),
            offset: binding.offset,
        });
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth: Range<f32>) {
        self.cmd_buffer.commands.push(C::SetViewport {
            rect: crate::Rect {
                x: rect.x as i32,
                y: rect.y as i32,
                w: rect.w as i32,
                h: rect.h as i32,
            },
            depth,
        });
    }
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        self.cmd_buffer.commands.push(C::SetScissor(crate::Rect {
            x: rect.x as i32,
            y: rect.y as i32,
            w: rect.w as i32,
            h: rect.h as i32,
        }));
    }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        self.state.stencil.front.reference = value;
        self.state.stencil.back.reference = value;
        self.rebind_stencil_func();
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        self.cmd_buffer.commands.push(C::SetBlendConstant(*color));
    }

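    // All draws go through `prepare_draw`: the first instance is applied by
    // offsetting instance-stepped vertex buffers rather than being passed to
    // the draw call itself.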
    unsafe fn draw(
        &mut self,
        start_vertex: u32,
        vertex_count: u32,
        start_instance: u32,
        instance_count: u32,
    ) {
        self.prepare_draw(start_instance);
        self.cmd_buffer.commands.push(C::Draw {
            topology: self.state.topology,
            start_vertex,
            vertex_count,
            instance_count,
        });
    }
    unsafe fn draw_indexed(
        &mut self,
        start_index: u32,
        index_count: u32,
        base_vertex: i32,
        start_instance: u32,
        instance_count: u32,
    ) {
        self.prepare_draw(start_instance);
        let (index_size, index_type) = match self.state.index_format {
            wgt::IndexFormat::Uint16 => (2, glow::UNSIGNED_SHORT),
            wgt::IndexFormat::Uint32 => (4, glow::UNSIGNED_INT),
        };
        let index_offset = self.state.index_offset + index_size * start_index as wgt::BufferAddress;
        self.cmd_buffer.commands.push(C::DrawIndexed {
            topology: self.state.topology,
            index_type,
            index_offset,
            index_count,
            base_vertex,
            instance_count,
        });
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0);
        for draw in 0..draw_count as wgt::BufferAddress {
            let indirect_offset =
                offset + draw * mem::size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
            self.cmd_buffer.commands.push(C::DrawIndirect {
                topology: self.state.topology,
                indirect_buf: buffer.raw.unwrap(),
                indirect_offset,
            });
        }
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0);
        let index_type = match self.state.index_format {
            wgt::IndexFormat::Uint16 => glow::UNSIGNED_SHORT,
            wgt::IndexFormat::Uint32 => glow::UNSIGNED_INT,
        };
        for draw in 0..draw_count as wgt::BufferAddress {
            let indirect_offset = offset
                + draw * mem::size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
            self.cmd_buffer.commands.push(C::DrawIndexedIndirect {
                topology: self.state.topology,
                index_type,
                indirect_buf: buffer.raw.unwrap(),
                indirect_offset,
            });
        }
    }
    unsafe fn draw_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }

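    // Compute passes mirror render passes for labels and timestamp writes,
    // but carry no attachment state.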
    unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor<super::Api>) {
        debug_assert!(self.state.end_of_pass_timestamp.is_none());
        if let Some(ref t) = desc.timestamp_writes {
            if let Some(index) = t.beginning_of_pass_write_index {
                unsafe { self.write_timestamp(t.query_set, index) }
            }
            self.state.end_of_pass_timestamp = t
                .end_of_pass_write_index
                .map(|index| t.query_set.queries[index as usize]);
        }

        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }
    }
    unsafe fn end_compute_pass(&mut self) {
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }

        if let Some(query) = self.state.end_of_pass_timestamp.take() {
            self.cmd_buffer.commands.push(C::TimestampQuery(query));
        }
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        self.set_pipeline_inner(&pipeline.inner);
    }

    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        self.cmd_buffer.commands.push(C::Dispatch(count));
    }
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        self.cmd_buffer.commands.push(C::DispatchIndirect {
            indirect_buf: buffer.raw.unwrap(),
            indirect_offset: offset,
        });
    }
}