use super::{conv, Command as C};
use arrayvec::ArrayVec;
use std::{mem, ops::Range};

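/// Binding state of one texture unit: the texture's bind target and the
/// sampler slot it is paired with, if any.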
#[derive(Clone, Copy, Debug, Default)]
struct TextureSlotDesc {
    tex_target: super::BindTarget,
    sampler_index: Option<u8>,
}

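/// Mirror of the GL state that commands have been emitted for so far.
/// Tracking it here lets the encoder skip redundant state changes and
/// re-emit vertex bindings when a draw uses a different first instance.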
#[derive(Default)]
pub(super) struct State {
    topology: u32,
    primitive: super::PrimitiveState,
    index_format: wgt::IndexFormat,
    index_offset: wgt::BufferAddress,
    vertex_buffers:
        [(super::VertexBufferDesc, Option<super::BufferBinding>); crate::MAX_VERTEX_BUFFERS],
    vertex_attributes: ArrayVec<super::AttributeDesc, { super::MAX_VERTEX_ATTRIBUTES }>,
    color_targets: ArrayVec<super::ColorTargetDesc, { crate::MAX_COLOR_ATTACHMENTS }>,
    stencil: super::StencilState,
    depth_bias: wgt::DepthBiasState,
    alpha_to_coverage_enabled: bool,
    samplers: [Option<glow::Sampler>; super::MAX_SAMPLERS],
    texture_slots: [TextureSlotDesc; super::MAX_TEXTURE_SLOTS],
    render_size: wgt::Extent3d,
    resolve_attachments: ArrayVec<(u32, super::TextureView), { crate::MAX_COLOR_ATTACHMENTS }>,
    invalidate_attachments: ArrayVec<u32, { crate::MAX_COLOR_ATTACHMENTS + 2 }>,
    has_pass_label: bool,
    instance_vbuf_mask: usize,
    dirty_vbuf_mask: usize,
    active_first_instance: u32,
    push_offset_to_uniform: ArrayVec<super::UniformDesc, { super::MAX_PUSH_CONSTANTS }>,
}

impl super::CommandBuffer {
    fn clear(&mut self) {
        self.label = None;
        self.commands.clear();
        self.data_bytes.clear();
        self.queries.clear();
    }

    fn add_marker(&mut self, marker: &str) -> Range<u32> {
        let start = self.data_bytes.len() as u32;
        self.data_bytes.extend(marker.as_bytes());
        start..self.data_bytes.len() as u32
    }

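    /// Stores `data` in the command buffer's byte arena, reinterpreting the
    /// `u32` words as raw bytes, and returns the byte range they occupy.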
    fn add_push_constant_data(&mut self, data: &[u32]) -> Range<u32> {
        let data_raw = unsafe {
            std::slice::from_raw_parts(
                data.as_ptr() as *const _,
                data.len() * mem::size_of::<u32>(),
            )
        };
        let start = self.data_bytes.len();
        assert!(start < u32::MAX as usize);
        self.data_bytes.extend_from_slice(data_raw);
        let end = self.data_bytes.len();
        assert!(end < u32::MAX as usize);
        (start as u32)..(end as u32)
    }
}

impl super::CommandEncoder {
    fn rebind_stencil_func(&mut self) {
        fn make(s: &super::StencilSide, face: u32) -> C {
            C::SetStencilFunc {
                face,
                function: s.function,
                reference: s.reference,
                read_mask: s.mask_read,
            }
        }

        let s = &self.state.stencil;
        if s.front.function == s.back.function
            && s.front.mask_read == s.back.mask_read
            && s.front.reference == s.back.reference
        {
            self.cmd_buffer
                .commands
                .push(make(&s.front, glow::FRONT_AND_BACK));
        } else {
            self.cmd_buffer.commands.push(make(&s.front, glow::FRONT));
            self.cmd_buffer.commands.push(make(&s.back, glow::BACK));
        }
    }

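    /// Re-emits bindings for every vertex buffer flagged in `dirty_vbuf_mask`.
    /// With native vertex-buffer-layout support, whole buffers are bound;
    /// otherwise each attribute is bound separately with the buffer offset
    /// folded into the attribute offset. In both paths a non-zero
    /// `first_instance` is applied by offsetting instance-rate buffers, since
    /// a base instance for vertex fetch cannot be assumed on all GL targets.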
    fn rebind_vertex_data(&mut self, first_instance: u32) {
        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            for (index, pair) in self.state.vertex_buffers.iter().enumerate() {
                if self.state.dirty_vbuf_mask & (1 << index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) = match *pair {
                    (_, None) => continue,
                    (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                };
                let instance_offset = match buffer_desc.step {
                    wgt::VertexStepMode::Vertex => 0,
                    wgt::VertexStepMode::Instance => first_instance * buffer_desc.stride,
                };

                self.cmd_buffer.commands.push(C::SetVertexBuffer {
                    index: index as u32,
                    buffer: super::BufferBinding {
                        raw: vb.raw,
                        offset: vb.offset + instance_offset as wgt::BufferAddress,
                    },
                    buffer_desc,
                });
                self.state.dirty_vbuf_mask ^= 1 << index;
            }
        } else {
            let mut vbuf_mask = 0;
            for attribute in self.state.vertex_attributes.iter() {
                if self.state.dirty_vbuf_mask & (1 << attribute.buffer_index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) =
                    match self.state.vertex_buffers[attribute.buffer_index as usize] {
                        (_, None) => continue,
                        (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                    };

                let mut attribute_desc = attribute.clone();
                attribute_desc.offset += vb.offset as u32;
                if buffer_desc.step == wgt::VertexStepMode::Instance {
                    attribute_desc.offset += buffer_desc.stride * first_instance;
                }

                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: Some(vb.raw),
                    buffer_desc,
                    attribute_desc,
                });
                vbuf_mask |= 1 << attribute.buffer_index;
            }
            self.state.dirty_vbuf_mask ^= vbuf_mask;
        }
    }

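    /// Emits `BindSampler` for every texture unit whose texture or paired
    /// sampler changed, resolving the slot's sampler index to the currently
    /// bound sampler object.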
    fn rebind_sampler_states(&mut self, dirty_textures: u32, dirty_samplers: u32) {
        for (texture_index, slot) in self.state.texture_slots.iter().enumerate() {
            if dirty_textures & (1 << texture_index) != 0
                || slot
                    .sampler_index
                    .map_or(false, |si| dirty_samplers & (1 << si) != 0)
            {
                let sampler = slot
                    .sampler_index
                    .and_then(|si| self.state.samplers[si as usize]);
                self.cmd_buffer
                    .commands
                    .push(C::BindSampler(texture_index as u32, sampler));
            }
        }
    }

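    /// Runs before every draw: a change of first instance marks all
    /// instance-rate vertex buffers dirty so their offsets are recomputed,
    /// then any dirty vertex bindings are re-emitted.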
    fn prepare_draw(&mut self, first_instance: u32) {
        if first_instance != self.state.active_first_instance {
            self.state.dirty_vbuf_mask |= self.state.instance_vbuf_mask;
            self.state.active_first_instance = first_instance;
        }
        if self.state.dirty_vbuf_mask != 0 {
            self.rebind_vertex_data(first_instance);
        }
    }

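    /// Pipeline setup shared by render and compute: binds the program,
    /// refreshes the push-constant-to-uniform table, and rebinds samplers for
    /// texture slots whose sampler assignment changed.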
    fn set_pipeline_inner(&mut self, inner: &super::PipelineInner) {
        self.cmd_buffer.commands.push(C::SetProgram(inner.program));

        self.state.push_offset_to_uniform.clear();
        self.state
            .push_offset_to_uniform
            .extend(inner.uniforms.iter().cloned());

        let mut dirty_textures = 0u32;
        for (texture_index, (slot, &sampler_index)) in self
            .state
            .texture_slots
            .iter_mut()
            .zip(inner.sampler_map.iter())
            .enumerate()
        {
            if slot.sampler_index != sampler_index {
                slot.sampler_index = sampler_index;
                dirty_textures |= 1 << texture_index;
            }
        }
        if dirty_textures != 0 {
            self.rebind_sampler_states(dirty_textures, 0);
        }
    }
}

impl crate::CommandEncoder<super::Api> for super::CommandEncoder {
    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
        self.state = State::default();
        self.cmd_buffer.label = label.map(str::to_string);
        Ok(())
    }
    unsafe fn discard_encoding(&mut self) {
        self.cmd_buffer.clear();
    }
    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        Ok(mem::take(&mut self.cmd_buffer))
    }
    unsafe fn reset_all<I>(&mut self, _command_buffers: I) {}

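    // Barriers are only tracked for buffers that were last used as writable
    // storage; every other GL buffer access is synchronized by the driver.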
    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Api>>,
    {
        if !self
            .private_caps
            .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
        {
            return;
        }
        for bar in barriers {
            if !bar
                .usage
                .start
                .contains(crate::BufferUses::STORAGE_READ_WRITE)
            {
                continue;
            }
            self.cmd_buffer
                .commands
                .push(C::BufferBarrier(bar.buffer.raw.unwrap(), bar.usage.end));
        }
    }

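    // glMemoryBarrier is global rather than per-resource, so the usages that
    // need a barrier are OR-ed together and flushed as one TextureBarrier.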
    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Api>>,
    {
        if !self
            .private_caps
            .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
        {
            return;
        }

        let mut combined_usage = crate::TextureUses::empty();
        for bar in barriers {
            if !bar
                .usage
                .start
                .contains(crate::TextureUses::STORAGE_READ_WRITE)
            {
                continue;
            }
            combined_usage |= bar.usage.end;
        }

        if !combined_usage.is_empty() {
            self.cmd_buffer
                .commands
                .push(C::TextureBarrier(combined_usage));
        }
    }

    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        self.cmd_buffer.commands.push(C::ClearBuffer {
            dst: buffer.clone(),
            dst_target: buffer.target,
            range,
        });
    }

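    // When source and destination share a bind target, go through the
    // dedicated COPY_READ/COPY_WRITE targets so both stay bound at once.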
    unsafe fn copy_buffer_to_buffer<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferCopy>,
    {
        let (src_target, dst_target) = if src.target == dst.target {
            (glow::COPY_READ_BUFFER, glow::COPY_WRITE_BUFFER)
        } else {
            (src.target, dst.target)
        };
        for copy in regions {
            self.cmd_buffer.commands.push(C::CopyBufferToBuffer {
                src: src.clone(),
                src_target,
                dst: dst.clone(),
                dst_target,
                copy,
            })
        }
    }

    #[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))]
    unsafe fn copy_external_image_to_texture<T>(
        &mut self,
        src: &wgt::ImageCopyExternalImage,
        dst: &super::Texture,
        dst_premultiplication: bool,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();
        for copy in regions {
            self.cmd_buffer
                .commands
                .push(C::CopyExternalImageToTexture {
                    src: src.clone(),
                    dst: dst_raw,
                    dst_target,
                    dst_format: dst.format,
                    dst_premultiplication,
                    copy,
                })
        }
    }

    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: crate::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let (src_raw, src_target) = src.inner.as_native();
        let (dst_raw, dst_target) = dst.inner.as_native();
        for mut copy in regions {
            copy.clamp_size_to_virtual(&src.copy_size, &dst.copy_size);
            self.cmd_buffer.commands.push(C::CopyTextureToTexture {
                src: src_raw,
                src_target,
                dst: dst_raw,
                dst_target,
                copy,
                dst_is_cubemap: dst.is_cubemap,
            })
        }
    }

    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();

        for mut copy in regions {
            copy.clamp_size_to_virtual(&dst.copy_size);
            self.cmd_buffer.commands.push(C::CopyBufferToTexture {
                src: src.clone(),
                src_target: src.target,
                dst: dst_raw,
                dst_target,
                dst_format: dst.format,
                copy,
            })
        }
    }

    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: crate::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (src_raw, src_target) = src.inner.as_native();
        for mut copy in regions {
            copy.clamp_size_to_virtual(&src.copy_size);
            self.cmd_buffer.commands.push(C::CopyTextureToBuffer {
                src: src_raw,
                src_target,
                src_format: src.format,
                dst: dst.clone(),
                dst_target: dst.target,
                copy,
            })
        }
    }

    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        let query = set.queries[index as usize];
        self.cmd_buffer
            .commands
            .push(C::BeginQuery(query, set.target));
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, _index: u32) {
        self.cmd_buffer.commands.push(C::EndQuery(set.target));
    }
    unsafe fn write_timestamp(&mut self, _set: &super::QuerySet, _index: u32) {
        unimplemented!()
    }
    unsafe fn reset_queries(&mut self, _set: &super::QuerySet, _range: Range<u32>) {}
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        _stride: wgt::BufferSize,
    ) {
        let start = self.cmd_buffer.queries.len();
        self.cmd_buffer
            .queries
            .extend_from_slice(&set.queries[range.start as usize..range.end as usize]);
        let query_range = start as u32..self.cmd_buffer.queries.len() as u32;
        self.cmd_buffer.commands.push(C::CopyQueryResults {
            query_range,
            dst: buffer.clone(),
            dst_target: buffer.target,
            dst_offset: offset,
        });
    }

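    // GL has no render pass objects: a pass is lowered to framebuffer setup,
    // clears for attachments that don't LOAD, and bookkeeping for resolves
    // and invalidations that end_render_pass replays.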
    unsafe fn begin_render_pass(&mut self, desc: &crate::RenderPassDescriptor<super::Api>) {
        self.state.render_size = desc.extent;
        self.state.resolve_attachments.clear();
        self.state.invalidate_attachments.clear();
        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }

        let rendering_to_external_framebuffer = desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .any(|at| match at.target.view.inner {
                #[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))]
                super::TextureInner::ExternalFramebuffer { .. } => true,
                _ => false,
            });

        if rendering_to_external_framebuffer && desc.color_attachments.len() != 1 {
            panic!("Multiple render attachments with external framebuffers are not supported.");
        }

        match desc
            .color_attachments
            .first()
            .filter(|at| at.is_some())
            .and_then(|at| at.as_ref().map(|at| &at.target.view.inner))
        {
            Some(&super::TextureInner::DefaultRenderbuffer) => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: true });
            }
            _ => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: false });

                for (i, cat) in desc.color_attachments.iter().enumerate() {
                    if let Some(cat) = cat.as_ref() {
                        let attachment = glow::COLOR_ATTACHMENT0 + i as u32;
                        self.cmd_buffer.commands.push(C::BindAttachment {
                            attachment,
                            view: cat.target.view.clone(),
                        });
                        if let Some(ref rat) = cat.resolve_target {
                            self.state
                                .resolve_attachments
                                .push((attachment, rat.view.clone()));
                        }
                        if !cat.ops.contains(crate::AttachmentOps::STORE) {
                            self.state.invalidate_attachments.push(attachment);
                        }
                    }
                }
                if let Some(ref dsat) = desc.depth_stencil_attachment {
                    let aspects = dsat.target.view.aspects;
                    let attachment = match aspects {
                        crate::FormatAspects::DEPTH => glow::DEPTH_ATTACHMENT,
                        crate::FormatAspects::STENCIL => glow::STENCIL_ATTACHMENT,
                        _ => glow::DEPTH_STENCIL_ATTACHMENT,
                    };
                    self.cmd_buffer.commands.push(C::BindAttachment {
                        attachment,
                        view: dsat.target.view.clone(),
                    });
                    if aspects.contains(crate::FormatAspects::DEPTH)
                        && !dsat.depth_ops.contains(crate::AttachmentOps::STORE)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::DEPTH_ATTACHMENT);
                    }
                    if aspects.contains(crate::FormatAspects::STENCIL)
                        && !dsat.stencil_ops.contains(crate::AttachmentOps::STORE)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::STENCIL_ATTACHMENT);
                    }
                }

                if !rendering_to_external_framebuffer {
                    self.cmd_buffer
                        .commands
                        .push(C::SetDrawColorBuffers(desc.color_attachments.len() as u8));
                }
            }
        }

        let rect = crate::Rect {
            x: 0,
            y: 0,
            w: desc.extent.width as i32,
            h: desc.extent.height as i32,
        };
        self.cmd_buffer.commands.push(C::SetScissor(rect.clone()));
        self.cmd_buffer.commands.push(C::SetViewport {
            rect,
            depth: 0.0..1.0,
        });

        for (i, cat) in desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .enumerate()
        {
            if !cat.ops.contains(crate::AttachmentOps::LOAD) {
                let c = &cat.clear_value;
                self.cmd_buffer.commands.push(
                    match cat.target.view.format.sample_type(None).unwrap() {
                        wgt::TextureSampleType::Float { .. } => C::ClearColorF {
                            draw_buffer: i as u32,
                            color: [c.r as f32, c.g as f32, c.b as f32, c.a as f32],
                            is_srgb: cat.target.view.format.is_srgb(),
                        },
                        wgt::TextureSampleType::Uint => C::ClearColorU(
                            i as u32,
                            [c.r as u32, c.g as u32, c.b as u32, c.a as u32],
                        ),
                        wgt::TextureSampleType::Sint => C::ClearColorI(
                            i as u32,
                            [c.r as i32, c.g as i32, c.b as i32, c.a as i32],
                        ),
                        wgt::TextureSampleType::Depth => unreachable!(),
                    },
                );
            }
        }
        if let Some(ref dsat) = desc.depth_stencil_attachment {
            let clear_depth = !dsat.depth_ops.contains(crate::AttachmentOps::LOAD);
            let clear_stencil = !dsat.stencil_ops.contains(crate::AttachmentOps::LOAD);

            if clear_depth && clear_stencil {
                self.cmd_buffer.commands.push(C::ClearDepthAndStencil(
                    dsat.clear_value.0,
                    dsat.clear_value.1,
                ));
            } else if clear_depth {
                self.cmd_buffer
                    .commands
                    .push(C::ClearDepth(dsat.clear_value.0));
            } else if clear_stencil {
                self.cmd_buffer
                    .commands
                    .push(C::ClearStencil(dsat.clear_value.1));
            }
        }
    }
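    // Flushes work deferred by begin_render_pass: multisample resolves,
    // attachment invalidation, and resetting per-pass state.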
    unsafe fn end_render_pass(&mut self) {
        for (attachment, dst) in self.state.resolve_attachments.drain(..) {
            self.cmd_buffer.commands.push(C::ResolveAttachment {
                attachment,
                dst,
                size: self.state.render_size,
            });
        }
        if !self.state.invalidate_attachments.is_empty() {
            self.cmd_buffer.commands.push(C::InvalidateAttachments(
                self.state.invalidate_attachments.clone(),
            ));
            self.state.invalidate_attachments.clear();
        }
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }
        self.state.instance_vbuf_mask = 0;
        self.state.dirty_vbuf_mask = 0;
        self.state.active_first_instance = 0;
        self.state.color_targets.clear();
        for vat in &self.state.vertex_attributes {
            self.cmd_buffer
                .commands
                .push(C::UnsetVertexAttribute(vat.location));
        }
        self.state.vertex_attributes.clear();
        self.state.primitive = super::PrimitiveState::default();
    }

    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let mut do_index = 0;
        let mut dirty_textures = 0u32;
        let mut dirty_samplers = 0u32;
        let group_info = &layout.group_infos[index as usize];

        for (binding_layout, raw_binding) in group_info.entries.iter().zip(group.contents.iter()) {
            let slot = group_info.binding_to_slot[binding_layout.binding as usize] as u32;
            match *raw_binding {
                super::RawBinding::Buffer {
                    raw,
                    offset: base_offset,
                    size,
                } => {
                    let mut offset = base_offset;
                    let target = match binding_layout.ty {
                        wgt::BindingType::Buffer {
                            ty,
                            has_dynamic_offset,
                            min_binding_size: _,
                        } => {
                            if has_dynamic_offset {
                                offset += dynamic_offsets[do_index] as i32;
                                do_index += 1;
                            }
                            match ty {
                                wgt::BufferBindingType::Uniform => glow::UNIFORM_BUFFER,
                                wgt::BufferBindingType::Storage { .. } => {
                                    glow::SHADER_STORAGE_BUFFER
                                }
                            }
                        }
                        _ => unreachable!(),
                    };
                    self.cmd_buffer.commands.push(C::BindBuffer {
                        target,
                        slot,
                        buffer: raw,
                        offset,
                        size,
                    });
                }
                super::RawBinding::Sampler(sampler) => {
                    dirty_samplers |= 1 << slot;
                    self.state.samplers[slot as usize] = Some(sampler);
                }
                super::RawBinding::Texture {
                    raw,
                    target,
                    aspects,
                } => {
                    dirty_textures |= 1 << slot;
                    self.state.texture_slots[slot as usize].tex_target = target;
                    self.cmd_buffer.commands.push(C::BindTexture {
                        slot,
                        texture: raw,
                        target,
                        aspects,
                    });
                }
                super::RawBinding::Image(ref binding) => {
                    self.cmd_buffer.commands.push(C::BindImage {
                        slot,
                        binding: binding.clone(),
                    });
                }
            }
        }

        self.rebind_sampler_states(dirty_textures, dirty_samplers);
    }

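    // Push constants are emulated with plain uniforms: each 4-byte offset
    // maps to a uniform recorded at pipeline creation, and the raw words are
    // staged in the command buffer's byte arena.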
    unsafe fn set_push_constants(
        &mut self,
        _layout: &super::PipelineLayout,
        _stages: wgt::ShaderStages,
        start_offset: u32,
        data: &[u32],
    ) {
        let range = self.cmd_buffer.add_push_constant_data(data);

        let end = start_offset + data.len() as u32 * 4;
        let mut offset = start_offset;
        while offset < end {
            let uniform = self.state.push_offset_to_uniform[offset as usize / 4].clone();
            let size = uniform.size;
            if uniform.location.is_none() {
                panic!("No uniform for push constant");
            }
            self.cmd_buffer.commands.push(C::SetPushConstants {
                uniform,
                offset: range.start + offset,
            });
            offset += size;
        }
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {
        let range = self.cmd_buffer.add_marker(label);
        self.cmd_buffer.commands.push(C::InsertDebugMarker(range));
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        let range = self.cmd_buffer.add_marker(group_label);
        self.cmd_buffer.commands.push(C::PushDebugGroup(range));
    }
    unsafe fn end_debug_marker(&mut self) {
        self.cmd_buffer.commands.push(C::PopDebugGroup);
    }

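    // Applies a render pipeline by diffing against the cached state and
    // emitting only the GL state commands that actually changed.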
    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        self.state.topology = conv::map_primitive_topology(pipeline.primitive.topology);

        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            for vat in pipeline.vertex_attributes.iter() {
                let vb = &pipeline.vertex_buffers[vat.buffer_index as usize];
                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: None,
                    buffer_desc: vb.clone(),
                    attribute_desc: vat.clone(),
                });
            }
        } else {
            for vat in &self.state.vertex_attributes {
                self.cmd_buffer
                    .commands
                    .push(C::UnsetVertexAttribute(vat.location));
            }
            self.state.vertex_attributes.clear();

            self.state.dirty_vbuf_mask = 0;
            for vat in pipeline.vertex_attributes.iter() {
                self.state.dirty_vbuf_mask |= 1 << vat.buffer_index;
                self.state.vertex_attributes.push(vat.clone());
            }
        }

        self.state.instance_vbuf_mask = 0;
        for (index, (&mut (ref mut state_desc, _), pipe_desc)) in self
            .state
            .vertex_buffers
            .iter_mut()
            .zip(pipeline.vertex_buffers.iter())
            .enumerate()
        {
            if pipe_desc.step == wgt::VertexStepMode::Instance {
                self.state.instance_vbuf_mask |= 1 << index;
            }
            if state_desc != pipe_desc {
                self.state.dirty_vbuf_mask |= 1 << index;
                *state_desc = pipe_desc.clone();
            }
        }

        self.set_pipeline_inner(&pipeline.inner);

        let prim_state = conv::map_primitive_state(&pipeline.primitive);
        if prim_state != self.state.primitive {
            self.cmd_buffer
                .commands
                .push(C::SetPrimitive(prim_state.clone()));
            self.state.primitive = prim_state;
        }

        let mut aspects = crate::FormatAspects::empty();
        if pipeline.depth_bias != self.state.depth_bias {
            self.state.depth_bias = pipeline.depth_bias;
            self.cmd_buffer
                .commands
                .push(C::SetDepthBias(pipeline.depth_bias));
        }
        if let Some(ref depth) = pipeline.depth {
            aspects |= crate::FormatAspects::DEPTH;
            self.cmd_buffer.commands.push(C::SetDepth(depth.clone()));
        }
        if let Some(ref stencil) = pipeline.stencil {
            aspects |= crate::FormatAspects::STENCIL;
            self.state.stencil = stencil.clone();
            self.rebind_stencil_func();
            if stencil.front.ops == stencil.back.ops
                && stencil.front.mask_write == stencil.back.mask_write
            {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT_AND_BACK,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
            } else {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::BACK,
                    write_mask: stencil.back.mask_write,
                    ops: stencil.back.ops.clone(),
                });
            }
        }
        self.cmd_buffer
            .commands
            .push(C::ConfigureDepthStencil(aspects));

        if pipeline.alpha_to_coverage_enabled != self.state.alpha_to_coverage_enabled {
            self.state.alpha_to_coverage_enabled = pipeline.alpha_to_coverage_enabled;
            self.cmd_buffer
                .commands
                .push(C::SetAlphaToCoverage(pipeline.alpha_to_coverage_enabled));
        }

        if self.state.color_targets[..] != pipeline.color_targets[..] {
            if pipeline
                .color_targets
                .iter()
                .skip(1)
                .any(|ct| *ct != pipeline.color_targets[0])
            {
                for (index, ct) in pipeline.color_targets.iter().enumerate() {
                    self.cmd_buffer.commands.push(C::SetColorTarget {
                        draw_buffer_index: Some(index as u32),
                        desc: ct.clone(),
                    });
                }
            } else {
                self.cmd_buffer.commands.push(C::SetColorTarget {
                    draw_buffer_index: None,
                    desc: pipeline.color_targets.first().cloned().unwrap_or_default(),
                });
            }
        }
        self.state.color_targets.clear();
        for ct in pipeline.color_targets.iter() {
            self.state.color_targets.push(ct.clone());
        }
    }

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Api>,
        format: wgt::IndexFormat,
    ) {
        self.state.index_offset = binding.offset;
        self.state.index_format = format;
        self.cmd_buffer
            .commands
            .push(C::SetIndexBuffer(binding.buffer.raw.unwrap()));
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Api>,
    ) {
        self.state.dirty_vbuf_mask |= 1 << index;
        let (_, ref mut vb) = self.state.vertex_buffers[index as usize];
        *vb = Some(super::BufferBinding {
            raw: binding.buffer.raw.unwrap(),
            offset: binding.offset,
        });
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth: Range<f32>) {
        self.cmd_buffer.commands.push(C::SetViewport {
            rect: crate::Rect {
                x: rect.x as i32,
                y: rect.y as i32,
                w: rect.w as i32,
                h: rect.h as i32,
            },
            depth,
        });
    }
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        self.cmd_buffer.commands.push(C::SetScissor(crate::Rect {
            x: rect.x as i32,
            y: rect.y as i32,
            w: rect.w as i32,
            h: rect.h as i32,
        }));
    }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        self.state.stencil.front.reference = value;
        self.state.stencil.back.reference = value;
        self.rebind_stencil_func();
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        self.cmd_buffer.commands.push(C::SetBlendConstant(*color));
    }

    unsafe fn draw(
        &mut self,
        start_vertex: u32,
        vertex_count: u32,
        start_instance: u32,
        instance_count: u32,
    ) {
        self.prepare_draw(start_instance);
        self.cmd_buffer.commands.push(C::Draw {
            topology: self.state.topology,
            start_vertex,
            vertex_count,
            instance_count,
        });
    }
    unsafe fn draw_indexed(
        &mut self,
        start_index: u32,
        index_count: u32,
        base_vertex: i32,
        start_instance: u32,
        instance_count: u32,
    ) {
        self.prepare_draw(start_instance);
        let (index_size, index_type) = match self.state.index_format {
            wgt::IndexFormat::Uint16 => (2, glow::UNSIGNED_SHORT),
            wgt::IndexFormat::Uint32 => (4, glow::UNSIGNED_INT),
        };
        let index_offset = self.state.index_offset + index_size * start_index as wgt::BufferAddress;
        self.cmd_buffer.commands.push(C::DrawIndexed {
            topology: self.state.topology,
            index_type,
            index_offset,
            index_count,
            base_vertex,
            instance_count,
        });
    }
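    // Multi-draw indirect isn't assumed to be available, so draw_count > 1 is
    // emulated by one indirect draw per argument-buffer element.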
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0);
        for draw in 0..draw_count as wgt::BufferAddress {
            let indirect_offset =
                offset + draw * mem::size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
            self.cmd_buffer.commands.push(C::DrawIndirect {
                topology: self.state.topology,
                indirect_buf: buffer.raw.unwrap(),
                indirect_offset,
            });
        }
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0);
        let index_type = match self.state.index_format {
            wgt::IndexFormat::Uint16 => glow::UNSIGNED_SHORT,
            wgt::IndexFormat::Uint32 => glow::UNSIGNED_INT,
        };
        for draw in 0..draw_count as wgt::BufferAddress {
            let indirect_offset = offset
                + draw * mem::size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
            self.cmd_buffer.commands.push(C::DrawIndexedIndirect {
                topology: self.state.topology,
                index_type,
                indirect_buf: buffer.raw.unwrap(),
                indirect_offset,
            });
        }
    }
    unsafe fn draw_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }

    unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor) {
        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }
    }
    unsafe fn end_compute_pass(&mut self) {
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        self.set_pipeline_inner(&pipeline.inner);
    }

    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        self.cmd_buffer.commands.push(C::Dispatch(count));
    }
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        self.cmd_buffer.commands.push(C::DispatchIndirect {
            indirect_buf: buffer.raw.unwrap(),
            indirect_offset: offset,
        });
    }
}