1use alloc::string::String;
2use core::{mem, ops::Range};
3
4use arrayvec::ArrayVec;
5
6use super::{conv, Command as C};
7
/// Per-texture-slot bookkeeping: the GL bind target the slot's texture uses,
/// plus which sampler slot (if any) is paired with it.
#[derive(Clone, Copy, Debug, Default)]
struct TextureSlotDesc {
    // GL bind target for the texture currently occupying this slot.
    tex_target: super::BindTarget,
    // Index into `State::samplers` of the paired sampler, when one is mapped.
    sampler_index: Option<u8>,
}
13
/// Shadow of the encoder-side pipeline/binding state, accumulated while
/// recording. Used to diff against newly-set pipelines and to re-emit
/// bindings lazily just before draws.
pub(super) struct State {
    // Primitive topology as a raw GL enum value.
    topology: u32,
    primitive: super::PrimitiveState,
    index_format: wgt::IndexFormat,
    // Base byte offset into the currently bound index buffer.
    index_offset: wgt::BufferAddress,
    // Per-slot vertex buffer layout plus the bound buffer, if any.
    vertex_buffers:
        [(super::VertexBufferDesc, Option<super::BufferBinding>); crate::MAX_VERTEX_BUFFERS],
    vertex_attributes: ArrayVec<super::AttributeDesc, { super::MAX_VERTEX_ATTRIBUTES }>,
    color_targets: ArrayVec<super::ColorTargetDesc, { crate::MAX_COLOR_ATTACHMENTS }>,
    stencil: super::StencilState,
    depth_bias: wgt::DepthBiasState,
    alpha_to_coverage_enabled: bool,
    samplers: [Option<glow::Sampler>; super::MAX_SAMPLERS],
    texture_slots: [TextureSlotDesc; super::MAX_TEXTURE_SLOTS],
    render_size: wgt::Extent3d,
    // (color attachment enum, destination view) pairs resolved at pass end.
    resolve_attachments: ArrayVec<(u32, super::TextureView), { crate::MAX_COLOR_ATTACHMENTS }>,
    // Attachment enums to invalidate at pass end (+2 for depth and stencil).
    invalidate_attachments: ArrayVec<u32, { crate::MAX_COLOR_ATTACHMENTS + 2 }>,
    // True while a pass-level debug group pushed from the pass label is open.
    has_pass_label: bool,
    // Bit i set when vertex buffer slot i steps per instance.
    instance_vbuf_mask: usize,
    // Bit i set when vertex buffer slot i must be rebound before drawing.
    dirty_vbuf_mask: usize,
    // The first-instance value the current vertex bindings were emitted for.
    active_first_instance: u32,
    first_instance_location: Option<glow::UniformLocation>,
    immediates_descs: ArrayVec<super::ImmediateDesc, { super::MAX_IMMEDIATES_COMMANDS }>,
    // CPU-side shadow of the immediates (push-constant style) data words.
    current_immediates_data: [u32; super::MAX_IMMEDIATES],
    end_of_pass_timestamp: Option<glow::Query>,
    clip_distance_count: u32,
}
42
// Hand-written rather than derived: `current_immediates_data` is a fixed-size
// array, and std only provides `Default` for array lengths up to 32 —
// presumably `MAX_IMMEDIATES` exceeds that (TODO confirm).
impl Default for State {
    fn default() -> Self {
        Self {
            topology: Default::default(),
            primitive: Default::default(),
            index_format: Default::default(),
            index_offset: Default::default(),
            vertex_buffers: Default::default(),
            vertex_attributes: Default::default(),
            color_targets: Default::default(),
            stencil: Default::default(),
            depth_bias: Default::default(),
            alpha_to_coverage_enabled: Default::default(),
            samplers: Default::default(),
            texture_slots: Default::default(),
            render_size: Default::default(),
            resolve_attachments: Default::default(),
            invalidate_attachments: Default::default(),
            has_pass_label: Default::default(),
            instance_vbuf_mask: Default::default(),
            dirty_vbuf_mask: Default::default(),
            active_first_instance: Default::default(),
            first_instance_location: Default::default(),
            immediates_descs: Default::default(),
            // Explicit zero-fill; see the note above about array `Default`.
            current_immediates_data: [0; super::MAX_IMMEDIATES],
            end_of_pass_timestamp: Default::default(),
            clip_distance_count: Default::default(),
        }
    }
}
73
74impl super::CommandBuffer {
75 fn clear(&mut self) {
76 self.label = None;
77 self.commands.clear();
78 self.data_bytes.clear();
79 self.queries.clear();
80 }
81
82 fn add_marker(&mut self, marker: &str) -> Range<u32> {
83 let start = self.data_bytes.len() as u32;
84 self.data_bytes.extend(marker.as_bytes());
85 start..self.data_bytes.len() as u32
86 }
87
88 fn add_immediates_data(&mut self, data: &[u32]) -> Range<u32> {
89 let data_raw = bytemuck::cast_slice(data);
90 let start = self.data_bytes.len();
91 assert!(start < u32::MAX as usize);
92 self.data_bytes.extend_from_slice(data_raw);
93 let end = self.data_bytes.len();
94 assert!(end < u32::MAX as usize);
95 (start as u32)..(end as u32)
96 }
97}
98
impl Drop for super::CommandEncoder {
    fn drop(&mut self) {
        use crate::CommandEncoder;
        // Throw away any half-recorded commands so they are never submitted.
        unsafe { self.discard_encoding() }
        self.counters.command_encoders.sub(1);
    }
}
106
impl super::CommandEncoder {
    /// Re-emit stencil-function commands from the cached `state.stencil`,
    /// collapsing to a single FRONT_AND_BACK command when both faces share the
    /// same function, read mask, and reference.
    fn rebind_stencil_func(&mut self) {
        // Build a SetStencilFunc command for one face from the cached state.
        fn make(s: &super::StencilSide, face: u32) -> C {
            C::SetStencilFunc {
                face,
                function: s.function,
                reference: s.reference,
                read_mask: s.mask_read,
            }
        }

        let s = &self.state.stencil;
        if s.front.function == s.back.function
            && s.front.mask_read == s.back.mask_read
            && s.front.reference == s.back.reference
        {
            self.cmd_buffer
                .commands
                .push(make(&s.front, glow::FRONT_AND_BACK));
        } else {
            self.cmd_buffer.commands.push(make(&s.front, glow::FRONT));
            self.cmd_buffer.commands.push(make(&s.back, glow::BACK));
        }
    }

    /// Re-emit bindings for every vertex buffer flagged in `dirty_vbuf_mask`.
    ///
    /// When the backend lacks native base-instance support, `first_instance`
    /// is folded into the buffer offset (layout path) or the attribute offset
    /// (per-attribute path) for instance-stepped buffers.
    fn rebind_vertex_data(&mut self, first_instance: u32) {
        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            // Path with VAO-style buffer layouts: one command per dirty slot.
            for (index, pair) in self.state.vertex_buffers.iter().enumerate() {
                if self.state.dirty_vbuf_mask & (1 << index) == 0 {
                    continue;
                }
                // Skip slots with a layout but no buffer bound yet.
                let (buffer_desc, vb) = match *pair {
                    (_, None) => continue,
                    (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                };
                let instance_offset = match buffer_desc.step {
                    wgt::VertexStepMode::Vertex => 0,
                    wgt::VertexStepMode::Instance => first_instance * buffer_desc.stride,
                };

                self.cmd_buffer.commands.push(C::SetVertexBuffer {
                    index: index as u32,
                    buffer: super::BufferBinding {
                        raw: vb.raw,
                        offset: vb.offset + instance_offset as wgt::BufferAddress,
                    },
                    buffer_desc,
                });
                // Clear the dirty bit for this slot.
                self.state.dirty_vbuf_mask ^= 1 << index;
            }
        } else {
            // Fallback path: emit per-attribute pointers, baking the buffer
            // offset (and instance offset) directly into each attribute.
            let mut vbuf_mask = 0;
            for attribute in self.state.vertex_attributes.iter() {
                if self.state.dirty_vbuf_mask & (1 << attribute.buffer_index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) =
                    match self.state.vertex_buffers[attribute.buffer_index as usize] {
                        (_, None) => continue,
                        (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                    };

                let mut attribute_desc = attribute.clone();
                attribute_desc.offset += vb.offset as u32;
                if buffer_desc.step == wgt::VertexStepMode::Instance {
                    attribute_desc.offset += buffer_desc.stride * first_instance;
                }

                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: Some(vb.raw),
                    buffer_desc,
                    attribute_desc,
                });
                vbuf_mask |= 1 << attribute.buffer_index;
            }
            // Clear only the bits we actually serviced above.
            self.state.dirty_vbuf_mask ^= vbuf_mask;
        }
    }

    /// Re-emit sampler bindings for texture slots whose texture or paired
    /// sampler changed, per the given dirty bitmasks.
    fn rebind_sampler_states(&mut self, dirty_textures: u32, dirty_samplers: u32) {
        for (texture_index, slot) in self.state.texture_slots.iter().enumerate() {
            if dirty_textures & (1 << texture_index) != 0
                || slot
                    .sampler_index
                    .is_some_and(|si| dirty_samplers & (1 << si) != 0)
            {
                let sampler = slot
                    .sampler_index
                    .and_then(|si| self.state.samplers[si as usize]);
                self.cmd_buffer
                    .commands
                    .push(C::BindSampler(texture_index as u32, sampler));
            }
        }
    }

    /// Flush vertex-binding state before a draw, re-emitting instance-stepped
    /// buffers when the (possibly emulated) first-instance value changed.
    fn prepare_draw(&mut self, first_instance: u32) {
        // With fully featured instancing the driver handles first_instance,
        // so the emulation offset stays at zero.
        let emulated_first_instance_value = if self
            .private_caps
            .contains(super::PrivateCapabilities::FULLY_FEATURED_INSTANCING)
        {
            0
        } else {
            first_instance
        };

        if emulated_first_instance_value != self.state.active_first_instance {
            // Every instance-stepped buffer must be rebound with new offsets.
            self.state.dirty_vbuf_mask |= self.state.instance_vbuf_mask;
            self.state.active_first_instance = emulated_first_instance_value;
        }
        if self.state.dirty_vbuf_mask != 0 {
            self.rebind_vertex_data(emulated_first_instance_value);
        }
    }

    /// Switch to a pipeline's program and adopt its uniform/sampler metadata,
    /// rebinding samplers whose texture↔sampler pairing changed.
    fn set_pipeline_inner(&mut self, inner: &super::PipelineInner) {
        self.cmd_buffer.commands.push(C::SetProgram(inner.program));

        self.state
            .first_instance_location
            .clone_from(&inner.first_instance_location);
        self.state
            .immediates_descs
            .clone_from(&inner.immediates_descs);

        // Rebind textures whose sampler pairing differs under the new program.
        let mut dirty_textures = 0u32;
        for (texture_index, (slot, &sampler_index)) in self
            .state
            .texture_slots
            .iter_mut()
            .zip(inner.sampler_map.iter())
            .enumerate()
        {
            if slot.sampler_index != sampler_index {
                slot.sampler_index = sampler_index;
                dirty_textures |= 1 << texture_index;
            }
        }
        if dirty_textures != 0 {
            self.rebind_sampler_states(dirty_textures, 0);
        }
    }
}
259
260impl crate::CommandEncoder for super::CommandEncoder {
261 type A = super::Api;
262
    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
        // Start from clean shadow state; anything from a prior recording is stale.
        self.state = State::default();
        self.cmd_buffer.label = label.map(String::from);
        Ok(())
    }
    unsafe fn discard_encoding(&mut self) {
        // Drop everything recorded since `begin_encoding`.
        self.cmd_buffer.clear();
    }
    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        // Hand the recorded buffer to the caller, leaving a fresh one behind.
        Ok(mem::take(&mut self.cmd_buffer))
    }
    unsafe fn reset_all<I>(&mut self, _command_buffers: I) {
        // Intentionally a no-op on this backend.
    }
277
278 unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
279 where
280 T: Iterator<Item = crate::BufferBarrier<'a, super::Buffer>>,
281 {
282 if !self
283 .private_caps
284 .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
285 {
286 return;
287 }
288 for bar in barriers {
289 if !bar.usage.from.contains(wgt::BufferUses::STORAGE_READ_WRITE) {
291 continue;
292 }
293 self.cmd_buffer
294 .commands
295 .push(C::BufferBarrier(bar.buffer.raw.unwrap(), bar.usage.to));
296 }
297 }
298
299 unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
300 where
301 T: Iterator<Item = crate::TextureBarrier<'a, super::Texture>>,
302 {
303 if !self
304 .private_caps
305 .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
306 {
307 return;
308 }
309
310 let mut combined_usage = wgt::TextureUses::empty();
311 for bar in barriers {
312 if !bar.usage.from.intersects(
315 wgt::TextureUses::STORAGE_READ_WRITE | wgt::TextureUses::STORAGE_WRITE_ONLY,
316 ) {
317 continue;
318 }
319 combined_usage |= bar.usage.to;
322 }
323
324 if !combined_usage.is_empty() {
325 self.cmd_buffer
326 .commands
327 .push(C::TextureBarrier(combined_usage));
328 }
329 }
330
331 unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
332 self.cmd_buffer.commands.push(C::ClearBuffer {
333 dst: buffer.clone(),
334 dst_target: buffer.target,
335 range,
336 });
337 }
338
339 unsafe fn copy_buffer_to_buffer<T>(
340 &mut self,
341 src: &super::Buffer,
342 dst: &super::Buffer,
343 regions: T,
344 ) where
345 T: Iterator<Item = crate::BufferCopy>,
346 {
347 let (src_target, dst_target) = if src.target == dst.target {
348 (glow::COPY_READ_BUFFER, glow::COPY_WRITE_BUFFER)
349 } else {
350 (src.target, dst.target)
351 };
352 for copy in regions {
353 self.cmd_buffer.commands.push(C::CopyBufferToBuffer {
354 src: src.clone(),
355 src_target,
356 dst: dst.clone(),
357 dst_target,
358 copy,
359 })
360 }
361 }
362
    // Web-only path: copies from a browser-side image source into a texture.
    #[cfg(webgl)]
    unsafe fn copy_external_image_to_texture<T>(
        &mut self,
        src: &wgt::CopyExternalImageSourceInfo,
        dst: &super::Texture,
        dst_premultiplication: bool,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();
        for copy in regions {
            self.cmd_buffer
                .commands
                .push(C::CopyExternalImageToTexture {
                    src: src.clone(),
                    dst: dst_raw,
                    dst_target,
                    dst_format: dst.format,
                    dst_premultiplication,
                    copy,
                })
        }
    }
387
    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: wgt::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let (src_raw, src_target) = src.inner.as_native();
        let (dst_raw, dst_target) = dst.inner.as_native();
        for mut copy in regions {
            // Clamp the region against both textures' physical copy sizes.
            copy.clamp_size_to_virtual(&src.copy_size, &dst.copy_size);
            self.cmd_buffer.commands.push(C::CopyTextureToTexture {
                src: src_raw,
                src_target,
                dst: dst_raw,
                dst_target,
                copy,
            })
        }
    }
410
    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();

        for mut copy in regions {
            // Clamp the region against the destination's physical copy size.
            copy.clamp_size_to_virtual(&dst.copy_size);
            self.cmd_buffer.commands.push(C::CopyBufferToTexture {
                src: src.clone(),
                src_target: src.target,
                dst: dst_raw,
                dst_target,
                dst_format: dst.format,
                copy,
            })
        }
    }
433
    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: wgt::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (src_raw, src_target) = src.inner.as_native();
        for mut copy in regions {
            // Clamp the region against the source's physical copy size.
            copy.clamp_size_to_virtual(&src.copy_size);
            self.cmd_buffer.commands.push(C::CopyTextureToBuffer {
                src: src_raw,
                src_target,
                src_format: src.format,
                dst: dst.clone(),
                dst_target: dst.target,
                copy,
            })
        }
    }
456
    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        let query = set.queries[index as usize];
        self.cmd_buffer
            .commands
            .push(C::BeginQuery(query, set.target));
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, _index: u32) {
        // GL ends the active query per target, so the index is not needed.
        self.cmd_buffer.commands.push(C::EndQuery(set.target));
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        let query = set.queries[index as usize];
        self.cmd_buffer.commands.push(C::TimestampQuery(query));
    }
    unsafe fn reset_queries(&mut self, _set: &super::QuerySet, _range: Range<u32>) {
        // Intentionally a no-op on this backend.
    }
    /// Record a copy of the results for `range` of queries into `buffer`,
    /// stashing the query handles in the command buffer's side list and
    /// referencing them by index range.
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        _stride: wgt::BufferSize,
    ) {
        let start = self.cmd_buffer.queries.len();
        self.cmd_buffer
            .queries
            .extend_from_slice(&set.queries[range.start as usize..range.end as usize]);
        let query_range = start as u32..self.cmd_buffer.queries.len() as u32;
        self.cmd_buffer.commands.push(C::CopyQueryResults {
            query_range,
            dst: buffer.clone(),
            dst_target: buffer.target,
            dst_offset: offset,
        });
    }
493
    /// Record the start of a render pass: timestamps, debug label, framebuffer
    /// setup, attachment binding, viewport/scissor, and load-op clears.
    unsafe fn begin_render_pass(
        &mut self,
        desc: &crate::RenderPassDescriptor<super::QuerySet, super::TextureView>,
    ) -> Result<(), crate::DeviceError> {
        debug_assert!(self.state.end_of_pass_timestamp.is_none());
        if let Some(ref t) = desc.timestamp_writes {
            if let Some(index) = t.beginning_of_pass_write_index {
                unsafe { self.write_timestamp(t.query_set, index) }
            }
            // The end-of-pass timestamp is deferred until `end_render_pass`.
            self.state.end_of_pass_timestamp = t
                .end_of_pass_write_index
                .map(|index| t.query_set.queries[index as usize]);
        }

        self.state.render_size = desc.extent;
        self.state.resolve_attachments.clear();
        self.state.invalidate_attachments.clear();
        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }

        // Rendering into an externally owned framebuffer takes a restricted
        // path: only a single color attachment is supported.
        let rendering_to_external_framebuffer = desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .any(|at| match at.target.view.inner {
                #[cfg(webgl)]
                super::TextureInner::ExternalFramebuffer { .. } => true,
                #[cfg(native)]
                super::TextureInner::ExternalNativeFramebuffer { .. } => true,
                _ => false,
            });

        if rendering_to_external_framebuffer && desc.color_attachments.len() != 1 {
            panic!("Multiple render attachments with external framebuffers are not supported.");
        }

        assert!(desc.color_attachments.len() <= 32);

        match desc
            .color_attachments
            .first()
            .filter(|at| at.is_some())
            .and_then(|at| at.as_ref().map(|at| &at.target.view.inner))
        {
            // Rendering to the default (window) framebuffer: nothing to attach.
            Some(&super::TextureInner::DefaultRenderbuffer) => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: true });
            }
            _ => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: false });

                for (i, cat) in desc.color_attachments.iter().enumerate() {
                    if let Some(cat) = cat.as_ref() {
                        let attachment = glow::COLOR_ATTACHMENT0 + i as u32;
                        if let Some(ref rat) = cat.resolve_target {
                            // With multisampled-render-to-texture support and a
                            // discarded MSAA target, render straight into the
                            // resolve texture instead (first attachment only).
                            if matches!(rat.view.inner, super::TextureInner::Texture { .. })
                                && self.private_caps.contains(
                                    super::PrivateCapabilities::MULTISAMPLED_RENDER_TO_TEXTURE,
                                )
                                && !cat.ops.contains(crate::AttachmentOps::STORE)
                                && i == 0
                            {
                                self.cmd_buffer.commands.push(C::BindAttachment {
                                    attachment,
                                    view: rat.view.clone(),
                                    depth_slice: None,
                                    sample_count: desc.sample_count,
                                });
                                continue;
                            }
                        }
                        self.cmd_buffer.commands.push(C::BindAttachment {
                            attachment,
                            view: cat.target.view.clone(),
                            depth_slice: cat.depth_slice,
                            sample_count: 1,
                        });
                        if let Some(ref rat) = cat.resolve_target {
                            // Resolve happens at end of pass; remember the pair.
                            self.state
                                .resolve_attachments
                                .push((attachment, rat.view.clone()));
                        }
                        if cat.ops.contains(crate::AttachmentOps::STORE_DISCARD) {
                            self.state.invalidate_attachments.push(attachment);
                        }
                    }
                }
                if let Some(ref dsat) = desc.depth_stencil_attachment {
                    let aspects = dsat.target.view.aspects;
                    let attachment = match aspects {
                        crate::FormatAspects::DEPTH => glow::DEPTH_ATTACHMENT,
                        crate::FormatAspects::STENCIL => glow::STENCIL_ATTACHMENT,
                        _ => glow::DEPTH_STENCIL_ATTACHMENT,
                    };
                    self.cmd_buffer.commands.push(C::BindAttachment {
                        attachment,
                        view: dsat.target.view.clone(),
                        depth_slice: None,
                        sample_count: 1,
                    });
                    if aspects.contains(crate::FormatAspects::DEPTH)
                        && dsat.depth_ops.contains(crate::AttachmentOps::STORE_DISCARD)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::DEPTH_ATTACHMENT);
                    }
                    if aspects.contains(crate::FormatAspects::STENCIL)
                        && dsat
                            .stencil_ops
                            .contains(crate::AttachmentOps::STORE_DISCARD)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::STENCIL_ATTACHMENT);
                    }
                }
            }
        }

        // Default viewport/scissor to the full render extent.
        let rect = crate::Rect {
            x: 0,
            y: 0,
            w: desc.extent.width as i32,
            h: desc.extent.height as i32,
        };
        self.cmd_buffer.commands.push(C::SetScissor(rect.clone()));
        self.cmd_buffer.commands.push(C::SetViewport {
            rect,
            depth: 0.0..1.0,
        });

        if !rendering_to_external_framebuffer {
            // `TextureView::clear` counts on the scissor being set here.
            self.cmd_buffer
                .commands
                .push(C::SetDrawColorBuffers(desc.color_attachments.len() as u8));
        }

        // Issue load-op clears per color attachment, typed by sample type.
        for (i, cat) in desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .enumerate()
        {
            if cat.ops.contains(crate::AttachmentOps::LOAD_CLEAR) {
                let c = &cat.clear_value;
                self.cmd_buffer.commands.push(
                    match cat.target.view.format.sample_type(None, None).unwrap() {
                        wgt::TextureSampleType::Float { .. } => C::ClearColorF {
                            draw_buffer: i as u32,
                            color: [c.r as f32, c.g as f32, c.b as f32, c.a as f32],
                            is_srgb: cat.target.view.format.is_srgb(),
                        },
                        wgt::TextureSampleType::Uint => C::ClearColorU(
                            i as u32,
                            [c.r as u32, c.g as u32, c.b as u32, c.a as u32],
                        ),
                        wgt::TextureSampleType::Sint => C::ClearColorI(
                            i as u32,
                            [c.r as i32, c.g as i32, c.b as i32, c.a as i32],
                        ),
                        wgt::TextureSampleType::Depth => unreachable!(),
                    },
                );
            }
        }

        // Depth/stencil load-op clears, combined when both are requested.
        if let Some(ref dsat) = desc.depth_stencil_attachment {
            let clear_depth = dsat.depth_ops.contains(crate::AttachmentOps::LOAD_CLEAR);
            let clear_stencil = dsat.stencil_ops.contains(crate::AttachmentOps::LOAD_CLEAR);

            if clear_depth && clear_stencil {
                self.cmd_buffer.commands.push(C::ClearDepthAndStencil(
                    dsat.clear_value.0,
                    dsat.clear_value.1,
                ));
            } else if clear_depth {
                self.cmd_buffer
                    .commands
                    .push(C::ClearDepth(dsat.clear_value.0));
            } else if clear_stencil {
                self.cmd_buffer
                    .commands
                    .push(C::ClearStencil(dsat.clear_value.1));
            }
        }
        Ok(())
    }
    /// Record the end of a render pass: resolves, attachment invalidation,
    /// debug-group pop, per-pass state reset, and the deferred end timestamp.
    unsafe fn end_render_pass(&mut self) {
        for (attachment, dst) in self.state.resolve_attachments.drain(..) {
            self.cmd_buffer.commands.push(C::ResolveAttachment {
                attachment,
                dst,
                size: self.state.render_size,
            });
        }
        if !self.state.invalidate_attachments.is_empty() {
            self.cmd_buffer.commands.push(C::InvalidateAttachments(
                self.state.invalidate_attachments.clone(),
            ));
            self.state.invalidate_attachments.clear();
        }
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }
        // Reset per-pass vertex state so the next pass starts clean.
        self.state.instance_vbuf_mask = 0;
        self.state.dirty_vbuf_mask = 0;
        self.state.active_first_instance = 0;
        self.state.color_targets.clear();
        for vat in &self.state.vertex_attributes {
            self.cmd_buffer
                .commands
                .push(C::UnsetVertexAttribute(vat.location));
        }
        self.state.vertex_attributes.clear();
        self.state.primitive = super::PrimitiveState::default();

        // Emit the end-of-pass timestamp requested in `begin_render_pass`.
        if let Some(query) = self.state.end_of_pass_timestamp.take() {
            self.cmd_buffer.commands.push(C::TimestampQuery(query));
        }
    }
731
    /// Bind a group's resources, consuming `dynamic_offsets` in binding order
    /// for buffers declared with a dynamic offset.
    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        // Cursor into `dynamic_offsets`, advanced per dynamic buffer binding.
        let mut do_index = 0;
        let mut dirty_textures = 0u32;
        let mut dirty_samplers = 0u32;
        let group_info = layout.group_infos[index as usize].as_ref().unwrap();

        for (binding_layout, raw_binding) in group_info.entries.iter().zip(group.contents.iter()) {
            // Translate the group-local binding number into a flat slot index.
            let slot = group_info.binding_to_slot[binding_layout.binding as usize] as u32;
            match *raw_binding {
                super::RawBinding::Buffer {
                    raw,
                    offset: base_offset,
                    size,
                } => {
                    let mut offset = base_offset;
                    let target = match binding_layout.ty {
                        wgt::BindingType::Buffer {
                            ty,
                            has_dynamic_offset,
                            min_binding_size: _,
                        } => {
                            if has_dynamic_offset {
                                offset += dynamic_offsets[do_index] as i32;
                                do_index += 1;
                            }
                            match ty {
                                wgt::BufferBindingType::Uniform => glow::UNIFORM_BUFFER,
                                wgt::BufferBindingType::Storage { .. } => {
                                    glow::SHADER_STORAGE_BUFFER
                                }
                            }
                        }
                        _ => unreachable!(),
                    };
                    self.cmd_buffer.commands.push(C::BindBuffer {
                        target,
                        slot,
                        buffer: raw,
                        offset,
                        size,
                    });
                }
                super::RawBinding::Sampler(sampler) => {
                    // Samplers are applied lazily via `rebind_sampler_states`.
                    dirty_samplers |= 1 << slot;
                    self.state.samplers[slot as usize] = Some(sampler);
                }
                super::RawBinding::Texture {
                    raw,
                    target,
                    aspects,
                    ref mip_levels,
                } => {
                    dirty_textures |= 1 << slot;
                    self.state.texture_slots[slot as usize].tex_target = target;
                    self.cmd_buffer.commands.push(C::BindTexture {
                        slot,
                        texture: raw,
                        target,
                        aspects,
                        mip_levels: mip_levels.clone(),
                    });
                }
                super::RawBinding::Image(ref binding) => {
                    self.cmd_buffer.commands.push(C::BindImage {
                        slot,
                        binding: binding.clone(),
                    });
                }
            }
        }

        self.rebind_sampler_states(dirty_textures, dirty_samplers);
    }
811
    /// Write `data` into the CPU-side immediates shadow at `offset_bytes`,
    /// then record upload commands for the affected uniform bindings.
    unsafe fn set_immediates(
        &mut self,
        _layout: &super::PipelineLayout,
        offset_bytes: u32,
        data: &[u32],
    ) {
        // Offsets/sizes are in bytes but storage is u32 words.
        let start_words = offset_bytes / 4;
        let end_words = start_words + data.len() as u32;
        self.state.current_immediates_data[start_words as usize..end_words as usize]
            .copy_from_slice(data);

        for uniform in self.state.immediates_descs.iter().cloned() {
            let uniform_size_words = uniform.size_bytes / 4;
            let uniform_start_words = uniform.offset / 4;
            let uniform_end_words = uniform_start_words + uniform_size_words;

            // NOTE(review): for non-empty ranges this disjunction is always
            // true (if the first test fails, the uniform ends at or before
            // `start_words`, which forces the second test true), so every
            // uniform is re-uploaded on each call. A strict overlap test would
            // be `start_words < uniform_end_words && uniform_start_words <
            // end_words`, but skipping uploads may be unsafe across program
            // switches — confirm intent before tightening.
            let needs_updating =
                start_words < uniform_end_words || uniform_start_words <= end_words;

            if needs_updating {
                let uniform_data = &self.state.current_immediates_data
                    [uniform_start_words as usize..uniform_end_words as usize];

                let range = self.cmd_buffer.add_immediates_data(uniform_data);

                self.cmd_buffer.commands.push(C::SetImmediates {
                    uniform,
                    offset: range.start,
                });
            }
        }
    }
857
858 unsafe fn insert_debug_marker(&mut self, label: &str) {
859 let range = self.cmd_buffer.add_marker(label);
860 self.cmd_buffer.commands.push(C::InsertDebugMarker(range));
861 }
862 unsafe fn begin_debug_marker(&mut self, group_label: &str) {
863 let range = self.cmd_buffer.add_marker(group_label);
864 self.cmd_buffer.commands.push(C::PushDebugGroup(range));
865 }
866 unsafe fn end_debug_marker(&mut self) {
867 self.cmd_buffer.commands.push(C::PopDebugGroup);
868 }
869
    /// Adopt a render pipeline: vertex layout, program, primitive state,
    /// depth/stencil, blend targets, and clip distances — diffing against the
    /// cached state to skip redundant commands.
    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        self.state.topology = conv::map_primitive_topology(pipeline.primitive.topology);

        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            // Layout path: emit attributes directly; buffers bind separately.
            for vat in pipeline.vertex_attributes.iter() {
                let vb = &pipeline.vertex_buffers[vat.buffer_index as usize];
                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: None,
                    buffer_desc: vb.clone(),
                    attribute_desc: vat.clone(),
                });
            }
        } else {
            // Fallback path: clear old attributes, cache new ones, and mark
            // their buffers dirty so `rebind_vertex_data` re-emits pointers.
            for vat in &self.state.vertex_attributes {
                self.cmd_buffer
                    .commands
                    .push(C::UnsetVertexAttribute(vat.location));
            }
            self.state.vertex_attributes.clear();

            self.state.dirty_vbuf_mask = 0;
            for vat in pipeline.vertex_attributes.iter() {
                self.state.dirty_vbuf_mask |= 1 << vat.buffer_index;
                self.state.vertex_attributes.push(vat.clone());
            }
        }

        // Re-derive the instance-stepped mask and flag layout changes dirty.
        self.state.instance_vbuf_mask = 0;
        for (index, (&mut (ref mut state_desc, _), pipe_desc)) in self
            .state
            .vertex_buffers
            .iter_mut()
            .zip(pipeline.vertex_buffers.iter())
            .enumerate()
        {
            if pipe_desc.step == wgt::VertexStepMode::Instance {
                self.state.instance_vbuf_mask |= 1 << index;
            }
            if state_desc != pipe_desc {
                self.state.dirty_vbuf_mask |= 1 << index;
                *state_desc = pipe_desc.clone();
            }
        }

        self.set_pipeline_inner(&pipeline.inner);

        // Rasterizer state, only when it changed.
        let prim_state = conv::map_primitive_state(&pipeline.primitive);
        if prim_state != self.state.primitive {
            self.cmd_buffer
                .commands
                .push(C::SetPrimitive(prim_state.clone()));
            self.state.primitive = prim_state;
        }

        // Depth & stencil state; `aspects` records which parts are enabled.
        let mut aspects = crate::FormatAspects::empty();
        if pipeline.depth_bias != self.state.depth_bias {
            self.state.depth_bias = pipeline.depth_bias;
            self.cmd_buffer
                .commands
                .push(C::SetDepthBias(pipeline.depth_bias));
        }
        if let Some(ref depth) = pipeline.depth {
            aspects |= crate::FormatAspects::DEPTH;
            self.cmd_buffer.commands.push(C::SetDepth(depth.clone()));
        }
        if let Some(ref stencil) = pipeline.stencil {
            aspects |= crate::FormatAspects::STENCIL;
            self.state.stencil = stencil.clone();
            self.rebind_stencil_func();
            // Collapse to FRONT_AND_BACK when both faces are identical.
            if stencil.front.ops == stencil.back.ops
                && stencil.front.mask_write == stencil.back.mask_write
            {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT_AND_BACK,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
            } else {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::BACK,
                    write_mask: stencil.back.mask_write,
                    ops: stencil.back.ops.clone(),
                });
            }
        }
        self.cmd_buffer
            .commands
            .push(C::ConfigureDepthStencil(aspects));

        if pipeline.alpha_to_coverage_enabled != self.state.alpha_to_coverage_enabled {
            self.state.alpha_to_coverage_enabled = pipeline.alpha_to_coverage_enabled;
            self.cmd_buffer
                .commands
                .push(C::SetAlphaToCoverage(pipeline.alpha_to_coverage_enabled));
        }

        // Color targets: a per-target command sequence when targets differ,
        // otherwise one broadcast command for all draw buffers.
        if self.state.color_targets[..] != pipeline.color_targets[..] {
            if pipeline
                .color_targets
                .iter()
                .skip(1)
                .any(|ct| *ct != pipeline.color_targets[0])
            {
                for (index, ct) in pipeline.color_targets.iter().enumerate() {
                    self.cmd_buffer.commands.push(C::SetColorTarget {
                        draw_buffer_index: Some(index as u32),
                        desc: ct.clone(),
                    });
                }
            } else {
                self.cmd_buffer.commands.push(C::SetColorTarget {
                    draw_buffer_index: None,
                    desc: pipeline.color_targets.first().cloned().unwrap_or_default(),
                });
            }
        }
        self.state.color_targets.clear();
        for ct in pipeline.color_targets.iter() {
            self.state.color_targets.push(ct.clone());
        }

        if pipeline.inner.clip_distance_count != self.state.clip_distance_count {
            self.cmd_buffer.commands.push(C::SetClipDistances {
                old_count: self.state.clip_distance_count,
                new_count: pipeline.inner.clip_distance_count,
            });
            self.state.clip_distance_count = pipeline.inner.clip_distance_count;
        }
    }
1016
    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Buffer>,
        format: wgt::IndexFormat,
    ) {
        // The binding offset is applied at draw time, not here.
        self.state.index_offset = binding.offset;
        self.state.index_format = format;
        self.cmd_buffer
            .commands
            .push(C::SetIndexBuffer(binding.buffer.raw.unwrap()));
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Buffer>,
    ) {
        // Only cache the binding; it is emitted lazily in `prepare_draw`.
        self.state.dirty_vbuf_mask |= 1 << index;
        let (_, ref mut vb) = self.state.vertex_buffers[index as usize];
        *vb = Some(super::BufferBinding {
            raw: binding.buffer.raw.unwrap(),
            offset: binding.offset,
        });
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth: Range<f32>) {
        // GL viewports are integer rectangles; fractional parts are truncated.
        self.cmd_buffer.commands.push(C::SetViewport {
            rect: crate::Rect {
                x: rect.x as i32,
                y: rect.y as i32,
                w: rect.w as i32,
                h: rect.h as i32,
            },
            depth,
        });
    }
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        self.cmd_buffer.commands.push(C::SetScissor(crate::Rect {
            x: rect.x as i32,
            y: rect.y as i32,
            w: rect.w as i32,
            h: rect.h as i32,
        }));
    }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        // Both faces share one reference; re-emit the stencil func commands.
        self.state.stencil.front.reference = value;
        self.state.stencil.back.reference = value;
        self.rebind_stencil_func();
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        self.cmd_buffer.commands.push(C::SetBlendConstant(*color));
    }
1067
    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
        // Flush vertex bindings (and base-instance emulation) before drawing.
        self.prepare_draw(first_instance);
        #[allow(clippy::clone_on_copy)] // UniformLocation is Copy on some targets only
        self.cmd_buffer.commands.push(C::Draw {
            topology: self.state.topology,
            first_vertex,
            vertex_count,
            first_instance,
            instance_count,
            first_instance_location: self.state.first_instance_location.clone(),
        });
    }
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        // Flush vertex bindings (and base-instance emulation) before drawing.
        self.prepare_draw(first_instance);
        let (index_size, index_type) = match self.state.index_format {
            wgt::IndexFormat::Uint16 => (2, glow::UNSIGNED_SHORT),
            wgt::IndexFormat::Uint32 => (4, glow::UNSIGNED_INT),
        };
        // Fold the buffer-binding offset and first_index into one byte offset.
        let index_offset = self.state.index_offset + index_size * first_index as wgt::BufferAddress;
        #[allow(clippy::clone_on_copy)] // UniformLocation is Copy on some targets only
        self.cmd_buffer.commands.push(C::DrawIndexed {
            topology: self.state.topology,
            index_type,
            index_offset,
            index_count,
            base_vertex,
            first_instance,
            instance_count,
            first_instance_location: self.state.first_instance_location.clone(),
        });
    }
    unsafe fn draw_mesh_tasks(
        &mut self,
        _group_count_x: u32,
        _group_count_y: u32,
        _group_count_z: u32,
    ) {
        // Mesh shaders are not supported on this backend.
        unreachable!()
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0);
        // No multi-draw-indirect here: emit one command per stride step.
        for draw in 0..draw_count as wgt::BufferAddress {
            let indirect_offset =
                offset + draw * size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
            #[allow(clippy::clone_on_copy)] // UniformLocation is Copy on some targets only
            self.cmd_buffer.commands.push(C::DrawIndirect {
                topology: self.state.topology,
                indirect_buf: buffer.raw.unwrap(),
                indirect_offset,
                first_instance_location: self.state.first_instance_location.clone(),
            });
        }
    }
1139 unsafe fn draw_indexed_indirect(
1140 &mut self,
1141 buffer: &super::Buffer,
1142 offset: wgt::BufferAddress,
1143 draw_count: u32,
1144 ) {
1145 self.prepare_draw(0);
1146 let index_type = match self.state.index_format {
1147 wgt::IndexFormat::Uint16 => glow::UNSIGNED_SHORT,
1148 wgt::IndexFormat::Uint32 => glow::UNSIGNED_INT,
1149 };
1150 for draw in 0..draw_count as wgt::BufferAddress {
1151 let indirect_offset =
1152 offset + draw * size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
1153 #[allow(clippy::clone_on_copy)] self.cmd_buffer.commands.push(C::DrawIndexedIndirect {
1155 topology: self.state.topology,
1156 index_type,
1157 indirect_buf: buffer.raw.unwrap(),
1158 indirect_offset,
1159 first_instance_location: self.state.first_instance_location.clone(),
1160 });
1161 }
1162 }
    /// Indirect mesh-shader draw entry point.
    ///
    /// Presumably unreachable on this backend because mesh shading is never
    /// advertised — TODO confirm against the backend's reported features.
    unsafe fn draw_mesh_tasks_indirect(
        &mut self,
        _buffer: &<Self::A as crate::Api>::Buffer,
        _offset: wgt::BufferAddress,
        _draw_count: u32,
    ) {
        unreachable!()
    }
    /// Multi-draw-indirect with a GPU-side count.
    ///
    /// Presumably the corresponding feature is never exposed by this backend,
    /// so callers can never reach this — TODO confirm upstream gating.
    unsafe fn draw_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
    /// Indexed multi-draw-indirect with a GPU-side count.
    ///
    /// Presumably the corresponding feature is never exposed by this backend,
    /// so callers can never reach this — TODO confirm upstream gating.
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
    /// Mesh-shader multi-draw-indirect with a GPU-side count.
    ///
    /// Presumably unreachable: neither mesh shading nor indirect-count is
    /// advertised by this backend — TODO confirm upstream gating.
    unsafe fn draw_mesh_tasks_indirect_count(
        &mut self,
        _buffer: &<Self::A as crate::Api>::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &<Self::A as crate::Api>::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
1201
1202 unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor<super::QuerySet>) {
1205 debug_assert!(self.state.end_of_pass_timestamp.is_none());
1206 if let Some(ref t) = desc.timestamp_writes {
1207 if let Some(index) = t.beginning_of_pass_write_index {
1208 unsafe { self.write_timestamp(t.query_set, index) }
1209 }
1210 self.state.end_of_pass_timestamp = t
1211 .end_of_pass_write_index
1212 .map(|index| t.query_set.queries[index as usize]);
1213 }
1214
1215 if let Some(label) = desc.label {
1216 let range = self.cmd_buffer.add_marker(label);
1217 self.cmd_buffer.commands.push(C::PushDebugGroup(range));
1218 self.state.has_pass_label = true;
1219 }
1220 }
1221 unsafe fn end_compute_pass(&mut self) {
1222 if self.state.has_pass_label {
1223 self.cmd_buffer.commands.push(C::PopDebugGroup);
1224 self.state.has_pass_label = false;
1225 }
1226
1227 if let Some(query) = self.state.end_of_pass_timestamp.take() {
1228 self.cmd_buffer.commands.push(C::TimestampQuery(query));
1229 }
1230 }
1231
1232 unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
1233 self.set_pipeline_inner(&pipeline.inner);
1234 }
1235
1236 unsafe fn dispatch(&mut self, count: [u32; 3]) {
1237 if count.contains(&0) {
1239 return;
1240 }
1241 self.cmd_buffer.commands.push(C::Dispatch(count));
1242 }
1243 unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
1244 self.cmd_buffer.commands.push(C::DispatchIndirect {
1245 indirect_buf: buffer.raw.unwrap(),
1246 indirect_offset: offset,
1247 });
1248 }
1249
    /// Ray-tracing acceleration-structure builds.
    ///
    /// Not implemented for this backend; presumably the ray-tracing features
    /// are never advertised, so this cannot be reached through validated use
    /// — TODO confirm upstream gating.
    unsafe fn build_acceleration_structures<'a, T>(
        &mut self,
        _descriptor_count: u32,
        _descriptors: T,
    ) where
        super::Api: 'a,
        T: IntoIterator<
            Item = crate::BuildAccelerationStructureDescriptor<
                'a,
                super::Buffer,
                super::AccelerationStructure,
            >,
        >,
    {
        unimplemented!()
    }
1266
    /// Acceleration-structure barrier; unimplemented — ray tracing is not
    /// supported by this backend.
    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        _barriers: crate::AccelerationStructureBarrier,
    ) {
        unimplemented!()
    }
1273
    /// Acceleration-structure copy; unimplemented — ray tracing is not
    /// supported by this backend.
    unsafe fn copy_acceleration_structure_to_acceleration_structure(
        &mut self,
        _src: &super::AccelerationStructure,
        _dst: &super::AccelerationStructure,
        _copy: wgt::AccelerationStructureCopy,
    ) {
        unimplemented!()
    }
1282
    /// Acceleration-structure compacted-size query; unimplemented — ray
    /// tracing is not supported by this backend.
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        _acceleration_structure: &super::AccelerationStructure,
        _buf: &super::Buffer,
    ) {
        unimplemented!()
    }
1290
    /// Records acceleration-structure dependencies for command buffers;
    /// unimplemented — ray tracing is not supported by this backend.
    /// Note this is an associated function (no `self`).
    unsafe fn set_acceleration_structure_dependencies(
        _command_buffers: &[&super::CommandBuffer],
        _dependencies: &[&super::AccelerationStructure],
    ) {
        unimplemented!()
    }
1297}