wgpu_hal/vulkan/command.rs

use super::conv;

use arrayvec::ArrayVec;
use ash::{extensions::ext, vk};

use std::{mem, ops::Range, slice};

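// `ALLOCATION_GRANULARITY` is how many command buffers `begin_encoding`
// allocates at once when the free list runs dry; `DST_IMAGE_LAYOUT` is the
// layout copy destinations are assumed to be in (see `check_dst_image_layout`
// at the bottom of this file).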
const ALLOCATION_GRANULARITY: u32 = 16;
const DST_IMAGE_LAYOUT: vk::ImageLayout = vk::ImageLayout::TRANSFER_DST_OPTIMAL;

impl super::Texture {
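    /// Converts `BufferTextureCopy` regions into `vk::BufferImageCopy` entries,
    /// clamping each extent to the texture's copy size and translating the byte
    /// strides of the buffer layout into the texel counts Vulkan expects.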
    fn map_buffer_copies<T>(&self, regions: T) -> impl Iterator<Item = vk::BufferImageCopy>
    where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (block_width, block_height) = self.format.block_dimensions();
        let format = self.format;
        let copy_size = self.copy_size;
        regions.map(move |r| {
            let extent = r.texture_base.max_copy_size(&copy_size).min(&r.size);
            let (image_subresource, image_offset) = conv::map_subresource_layers(&r.texture_base);
            vk::BufferImageCopy {
                buffer_offset: r.buffer_layout.offset,
                buffer_row_length: r.buffer_layout.bytes_per_row.map_or(0, |bpr| {
                    let block_size = format
                        .block_size(Some(r.texture_base.aspect.map()))
                        .unwrap();
                    block_width * (bpr / block_size)
                }),
                buffer_image_height: r
                    .buffer_layout
                    .rows_per_image
                    .map_or(0, |rpi| rpi * block_height),
                image_subresource,
                image_offset,
                image_extent: conv::map_copy_extent(&extent),
            }
        })
    }
}

impl super::DeviceShared {
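    /// Returns the `DebugUtils` extension functions, if the instance was
    /// created with debug utils enabled.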
    fn debug_messenger(&self) -> Option<&ext::DebugUtils> {
        Some(&self.instance.debug_utils.as_ref()?.extension)
    }
}

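// A minimal sketch of the lifecycle this impl expects from its caller (the
// `encoder` value is assumed to come from the surrounding wgpu-hal device
// setup; submission and synchronization are elided):
//
//     unsafe {
//         encoder.begin_encoding(Some("pass"))?;  // takes a buffer from `free`
//         /* ...record commands... */
//         let cmd_buf = encoder.end_encoding()?;  // submit it, then eventually:
//         encoder.reset_all(std::iter::once(cmd_buf));
//     }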
impl crate::CommandEncoder<super::Api> for super::CommandEncoder {
    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
        if self.free.is_empty() {
            let vk_info = vk::CommandBufferAllocateInfo::builder()
                .command_pool(self.raw)
                .command_buffer_count(ALLOCATION_GRANULARITY)
                .build();
            let cmd_buf_vec = unsafe { self.device.raw.allocate_command_buffers(&vk_info)? };
            self.free.extend(cmd_buf_vec);
        }
        let raw = self.free.pop().unwrap();

        // Set the name unconditionally, since a previous use of this command
        // buffer may have left a name assigned to it.
        unsafe {
            self.device.set_object_name(
                vk::ObjectType::COMMAND_BUFFER,
                raw,
                label.unwrap_or_default(),
            )
        };

        // Reset this in case the last render pass was never ended.
        self.rpass_debug_marker_active = false;

        let vk_info = vk::CommandBufferBeginInfo::builder()
            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT)
            .build();
        unsafe { self.device.raw.begin_command_buffer(raw, &vk_info) }?;
        self.active = raw;

        Ok(())
    }

    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        let raw = self.active;
        self.active = vk::CommandBuffer::null();
        unsafe { self.device.raw.end_command_buffer(raw) }?;
        Ok(super::CommandBuffer { raw })
    }

    unsafe fn discard_encoding(&mut self) {
        self.discarded.push(self.active);
        self.active = vk::CommandBuffer::null();
    }

    unsafe fn reset_all<I>(&mut self, cmd_bufs: I)
    where
        I: Iterator<Item = super::CommandBuffer>,
    {
        self.temp.clear();
        self.free.extend(cmd_bufs.map(|cmd_buf| cmd_buf.raw));
        self.free.append(&mut self.discarded);
        let _ = unsafe {
            self.device
                .raw
                .reset_command_pool(self.raw, vk::CommandPoolResetFlags::default())
        };
    }

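    // Both `transition_*` methods batch their barriers into `self.temp` so
    // that each call issues at most one `vkCmdPipelineBarrier`.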
    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Api>>,
    {
        // Note: seed the stage masks so that we never end up with empty stage
        // flags, which Vulkan disallows.
        let mut src_stages = vk::PipelineStageFlags::TOP_OF_PIPE;
        let mut dst_stages = vk::PipelineStageFlags::BOTTOM_OF_PIPE;
        let vk_barriers = &mut self.temp.buffer_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let (src_stage, src_access) = conv::map_buffer_usage_to_barrier(bar.usage.start);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_buffer_usage_to_barrier(bar.usage.end);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::BufferMemoryBarrier::builder()
                    .buffer(bar.buffer.raw)
                    .size(vk::WHOLE_SIZE)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .build(),
            )
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    vk_barriers,
                    &[],
                )
            };
        }
    }

    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Api>>,
    {
        let mut src_stages = vk::PipelineStageFlags::empty();
        let mut dst_stages = vk::PipelineStageFlags::empty();
        let vk_barriers = &mut self.temp.image_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let range = conv::map_subresource_range_combined_aspect(
                &bar.range,
                bar.texture.format,
                &self.device.private_caps,
            );
            let (src_stage, src_access) = conv::map_texture_usage_to_barrier(bar.usage.start);
            let src_layout = conv::derive_image_layout(bar.usage.start, bar.texture.format);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_texture_usage_to_barrier(bar.usage.end);
            let dst_layout = conv::derive_image_layout(bar.usage.end, bar.texture.format);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::ImageMemoryBarrier::builder()
                    .image(bar.texture.raw)
                    .subresource_range(range)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .old_layout(src_layout)
                    .new_layout(dst_layout)
                    .build(),
            );
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    &[],
                    vk_barriers,
                )
            };
        }
    }

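    // `vkCmdFillBuffer` requires a 4-byte aligned offset; the workaround below
    // additionally splits fills of 4096+ bytes whose offset is not 16-byte
    // aligned into a small prefix fill plus an aligned remainder.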
    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        let range_size = range.end - range.start;
        if self.device.workarounds.contains(
            super::Workarounds::FORCE_FILL_BUFFER_WITH_SIZE_GREATER_4096_ALIGNED_OFFSET_16,
        ) && range_size >= 4096
            && range.start % 16 != 0
        {
            let rounded_start = wgt::math::align_to(range.start, 16);
            let prefix_size = rounded_start - range.start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    range.start,
                    prefix_size,
                    0,
                )
            };

            // This will never be zero: rounding the 4-byte-aligned offset up to
            // 16 can only add up to 12 bytes, and the total size is at least 4096.
            let suffix_size = range.end - rounded_start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    rounded_start,
                    suffix_size,
                    0,
                )
            };
        } else {
            unsafe {
                self.device
                    .raw
                    .cmd_fill_buffer(self.active, buffer.raw, range.start, range_size, 0)
            };
        }
    }

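    // The copy methods below collect their region iterators into stack-allocated
    // `SmallVec`s (spilling to the heap past 32 entries), since the raw Vulkan
    // commands take slices.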
    unsafe fn copy_buffer_to_buffer<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferCopy>,
    {
        let vk_regions_iter = regions.map(|r| vk::BufferCopy {
            src_offset: r.src_offset,
            dst_offset: r.dst_offset,
            size: r.size.get(),
        });

        unsafe {
            self.device.raw.cmd_copy_buffer(
                self.active,
                src.raw,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        src_usage: crate::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);

        let vk_regions_iter = regions.map(|r| {
            let (src_subresource, src_offset) = conv::map_subresource_layers(&r.src_base);
            let (dst_subresource, dst_offset) = conv::map_subresource_layers(&r.dst_base);
            let extent = r
                .size
                .min(&r.src_base.max_copy_size(&src.copy_size))
                .min(&r.dst_base.max_copy_size(&dst.copy_size));
            vk::ImageCopy {
                src_subresource,
                src_offset,
                dst_subresource,
                dst_offset,
                extent: conv::map_copy_extent(&extent),
            }
        });

        unsafe {
            self.device.raw.cmd_copy_image(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::ImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let vk_regions_iter = dst.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_buffer_to_image(
                self.active,
                src.raw,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        src_usage: crate::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);
        let vk_regions_iter = src.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_image_to_buffer(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

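    // Queries: each wgpu-hal query set maps onto a Vulkan query pool, and
    // results are copied out as 64-bit values with `WAIT` semantics.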
    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_begin_query(
                self.active,
                set.raw,
                index,
                vk::QueryControlFlags::empty(),
            )
        };
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe { self.device.raw.cmd_end_query(self.active, set.raw, index) };
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_write_timestamp(
                self.active,
                vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                set.raw,
                index,
            )
        };
    }
    unsafe fn reset_queries(&mut self, set: &super::QuerySet, range: Range<u32>) {
        unsafe {
            self.device.raw.cmd_reset_query_pool(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
            )
        };
    }
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    ) {
        unsafe {
            self.device.raw.cmd_copy_query_pool_results(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
                buffer.raw,
                offset,
                stride.get(),
                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
            )
        };
    }

    // render

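    // `begin_render_pass` builds keys describing the attachments, resolves
    // them into a `vk::RenderPass` and framebuffer via the device's
    // `make_render_pass`/`make_framebuffer` helpers, and records the viewport,
    // scissor, and pass begin in one go.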
    unsafe fn begin_render_pass(&mut self, desc: &crate::RenderPassDescriptor<super::Api>) {
        let mut vk_clear_values =
            ArrayVec::<vk::ClearValue, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut vk_image_views = ArrayVec::<vk::ImageView, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut rp_key = super::RenderPassKey::default();
        let mut fb_key = super::FramebufferKey {
            attachments: ArrayVec::default(),
            extent: desc.extent,
            sample_count: desc.sample_count,
        };
        let caps = &self.device.private_caps;

        for cat in desc.color_attachments {
            if let Some(cat) = cat.as_ref() {
                vk_clear_values.push(vk::ClearValue {
                    color: unsafe { cat.make_vk_clear_color() },
                });
                vk_image_views.push(cat.target.view.raw);
                let color = super::ColorAttachmentKey {
                    base: cat.target.make_attachment_key(cat.ops, caps),
                    resolve: cat.resolve_target.as_ref().map(|target| {
                        target.make_attachment_key(crate::AttachmentOps::STORE, caps)
                    }),
                };

                rp_key.colors.push(Some(color));
                fb_key.attachments.push(cat.target.view.attachment.clone());
                if let Some(ref at) = cat.resolve_target {
                    vk_clear_values.push(unsafe { mem::zeroed() });
                    vk_image_views.push(at.view.raw);
                    fb_key.attachments.push(at.view.attachment.clone());
                }

                // Sanity-check that this attachment matches the detected
                // multiview layer count; a mismatch crashes the driver
                // particularly badly on AMD, so the check is worth it.
                if let Some(multiview) = desc.multiview {
                    assert_eq!(cat.target.view.layers, multiview);
                    if let Some(ref resolve_target) = cat.resolve_target {
                        assert_eq!(resolve_target.view.layers, multiview);
                    }
                }
            } else {
                rp_key.colors.push(None);
            }
        }
        if let Some(ref ds) = desc.depth_stencil_attachment {
            vk_clear_values.push(vk::ClearValue {
                depth_stencil: vk::ClearDepthStencilValue {
                    depth: ds.clear_value.0,
                    stencil: ds.clear_value.1,
                },
            });
            vk_image_views.push(ds.target.view.raw);
            rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: ds.target.make_attachment_key(ds.depth_ops, caps),
                stencil_ops: ds.stencil_ops,
            });
            fb_key.attachments.push(ds.target.view.attachment.clone());

            // Sanity-check that this attachment matches the detected
            // multiview layer count; a mismatch crashes the driver
            // particularly badly on AMD, so the check is worth it.
            if let Some(multiview) = desc.multiview {
                assert_eq!(ds.target.view.layers, multiview);
            }
        }
        rp_key.sample_count = fb_key.sample_count;
        rp_key.multiview = desc.multiview;

        let render_area = vk::Rect2D {
            offset: vk::Offset2D { x: 0, y: 0 },
            extent: vk::Extent2D {
                width: desc.extent.width,
                height: desc.extent.height,
            },
        };
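        // Flip the Y axis with a negative-height viewport so the coordinate
        // space matches wgpu's; depending on which negative-viewport semantics
        // the device follows, the origin may also need shifting to the bottom
        // edge (`flip_y_requires_shift`).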
        let vk_viewports = [vk::Viewport {
            x: 0.0,
            y: if self.device.private_caps.flip_y_requires_shift {
                desc.extent.height as f32
            } else {
                0.0
            },
            width: desc.extent.width as f32,
            height: -(desc.extent.height as f32),
            min_depth: 0.0,
            max_depth: 1.0,
        }];

        let raw_pass = self.device.make_render_pass(rp_key).unwrap();
        let raw_framebuffer = self
            .device
            .make_framebuffer(fb_key, raw_pass, desc.label)
            .unwrap();

        let mut vk_info = vk::RenderPassBeginInfo::builder()
            .render_pass(raw_pass)
            .render_area(render_area)
            .clear_values(&vk_clear_values)
            .framebuffer(raw_framebuffer);
        let mut vk_attachment_info = if caps.imageless_framebuffers {
            Some(
                vk::RenderPassAttachmentBeginInfo::builder()
                    .attachments(&vk_image_views)
                    .build(),
            )
        } else {
            None
        };
        if let Some(attachment_info) = vk_attachment_info.as_mut() {
            vk_info = vk_info.push_next(attachment_info);
        }

        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports);
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &[render_area]);
            self.device.raw.cmd_begin_render_pass(
                self.active,
                &vk_info,
                vk::SubpassContents::INLINE,
            );
        };

        self.bind_point = vk::PipelineBindPoint::GRAPHICS;
    }
    unsafe fn end_render_pass(&mut self) {
        unsafe {
            self.device.raw.cmd_end_render_pass(self.active);
            if self.rpass_debug_marker_active {
                self.end_debug_marker();
                self.rpass_debug_marker_active = false;
            }
        }
    }

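    // The state setters below translate directly to the corresponding vkCmd*
    // calls; `bind_point` selects graphics vs. compute descriptor binding.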
    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let sets = [*group.set.raw()];
        unsafe {
            self.device.raw.cmd_bind_descriptor_sets(
                self.active,
                self.bind_point,
                layout.raw,
                index,
                &sets,
                dynamic_offsets,
            )
        };
    }
    unsafe fn set_push_constants(
        &mut self,
        layout: &super::PipelineLayout,
        stages: wgt::ShaderStages,
        offset: u32,
        data: &[u32],
    ) {
        unsafe {
            self.device.raw.cmd_push_constants(
                self.active,
                layout.raw,
                conv::map_shader_stage(stages),
                offset,
                // Vulkan takes the push-constant data as a byte slice.
                slice::from_raw_parts(data.as_ptr() as _, data.len() * 4),
            )
        };
    }

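    // Debug markers are silently skipped unless the instance was created with
    // the VK_EXT_debug_utils extension.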
    unsafe fn insert_debug_marker(&mut self, label: &str) {
        if let Some(ext) = self.device.debug_messenger() {
            let cstr = self.temp.make_c_str(label);
            let vk_label = vk::DebugUtilsLabelEXT::builder().label_name(cstr).build();
            unsafe { ext.cmd_insert_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        if let Some(ext) = self.device.debug_messenger() {
            let cstr = self.temp.make_c_str(group_label);
            let vk_label = vk::DebugUtilsLabelEXT::builder().label_name(cstr).build();
            unsafe { ext.cmd_begin_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn end_debug_marker(&mut self) {
        if let Some(ext) = self.device.debug_messenger() {
            unsafe { ext.cmd_end_debug_utils_label(self.active) };
        }
    }

    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::GRAPHICS,
                pipeline.raw,
            )
        };
    }

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Api>,
        format: wgt::IndexFormat,
    ) {
        unsafe {
            self.device.raw.cmd_bind_index_buffer(
                self.active,
                binding.buffer.raw,
                binding.offset,
                conv::map_index_format(format),
            )
        };
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Api>,
    ) {
        let vk_buffers = [binding.buffer.raw];
        let vk_offsets = [binding.offset];
        unsafe {
            self.device
                .raw
                .cmd_bind_vertex_buffers(self.active, index, &vk_buffers, &vk_offsets)
        };
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {
        let vk_viewports = [vk::Viewport {
            x: rect.x,
            y: if self.device.private_caps.flip_y_requires_shift {
                rect.y + rect.h
            } else {
                rect.y
            },
            width: rect.w,
            height: -rect.h, // flip Y
            min_depth: depth_range.start,
            max_depth: depth_range.end,
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports)
        };
    }
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        let vk_scissors = [vk::Rect2D {
            offset: vk::Offset2D {
                x: rect.x as i32,
                y: rect.y as i32,
            },
            extent: vk::Extent2D {
                width: rect.w,
                height: rect.h,
            },
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &vk_scissors)
        };
    }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        unsafe {
            self.device.raw.cmd_set_stencil_reference(
                self.active,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                value,
            )
        };
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        unsafe { self.device.raw.cmd_set_blend_constants(self.active, color) };
    }

    unsafe fn draw(
        &mut self,
        start_vertex: u32,
        vertex_count: u32,
        start_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw(
                self.active,
                vertex_count,
                instance_count,
                start_vertex,
                start_instance,
            )
        };
    }
    unsafe fn draw_indexed(
        &mut self,
        start_index: u32,
        index_count: u32,
        base_vertex: i32,
        start_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indexed(
                self.active,
                index_count,
                instance_count,
                start_index,
                base_vertex,
                start_instance,
            )
        };
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                mem::size_of::<wgt::DrawIndirectArgs>() as u32,
            )
        };
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indexed_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                mem::size_of::<wgt::DrawIndexedIndirectArgs>() as u32,
            )
        };
    }
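    // The `*_indirect_count` variants go through an optional extension function
    // table, which is populated only when the `DRAW_INDIRECT_COUNT` feature was
    // enabled at device creation; calling them without it is a programming error.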
    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = mem::size_of::<wgt::DrawIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = mem::size_of::<wgt::DrawIndexedIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indexed_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }

    // compute

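    // Compute passes have no Vulkan-level begin/end commands; only the bind
    // point changes, and the render-pass debug-marker flag is reused for the
    // optional pass label.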
    unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor) {
        self.bind_point = vk::PipelineBindPoint::COMPUTE;
        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }
    }
    unsafe fn end_compute_pass(&mut self) {
        if self.rpass_debug_marker_active {
            unsafe { self.end_debug_marker() };
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::COMPUTE,
                pipeline.raw,
            )
        };
    }

    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch(self.active, count[0], count[1], count[2])
        };
    }
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch_indirect(self.active, buffer.raw, offset)
        }
    }
}

#[test]
fn check_dst_image_layout() {
    assert_eq!(
        conv::derive_image_layout(crate::TextureUses::COPY_DST, wgt::TextureFormat::Rgba8Unorm),
        DST_IMAGE_LAYOUT
    );
}