1#![allow(clippy::trivially_copy_pass_by_ref)]
2use crate::prelude::*;
3use crate::vk;
4use crate::RawPtr;
5use std::mem;
6use std::os::raw::c_void;
7use std::ptr;
8
/// Owning wrapper over a raw `VkDevice` handle together with the loaded
/// device-level function pointer tables for Vulkan core versions 1.0–1.3.
#[derive(Clone)]
pub struct Device {
    // Raw Vulkan device handle; exposed read-only via `Device::handle()`.
    pub(crate) handle: vk::Device,

    // Per-core-version function pointer tables, resolved in `Device::load`.
    pub(crate) device_fn_1_0: vk::DeviceFnV1_0,
    pub(crate) device_fn_1_1: vk::DeviceFnV1_1,
    pub(crate) device_fn_1_2: vk::DeviceFnV1_2,
    pub(crate) device_fn_1_3: vk::DeviceFnV1_3,
}
19
impl Device {
    /// Resolves all device-level entry points for Vulkan 1.0–1.3 through
    /// `vkGetDeviceProcAddr` and wraps them together with `device`.
    ///
    /// # Safety
    /// `device` must be a valid handle created from the instance that
    /// `instance_fn` was loaded from, and must outlive the returned `Device`.
    pub unsafe fn load(instance_fn: &vk::InstanceFnV1_0, device: vk::Device) -> Self {
        // `transmute` reinterprets the generic `PFN_vkVoidFunction` returned by
        // the loader as the concrete function-pointer type each table expects.
        let load_fn = |name: &std::ffi::CStr| {
            mem::transmute((instance_fn.get_device_proc_addr)(device, name.as_ptr()))
        };

        Self {
            handle: device,

            device_fn_1_0: vk::DeviceFnV1_0::load(load_fn),
            device_fn_1_1: vk::DeviceFnV1_1::load(load_fn),
            device_fn_1_2: vk::DeviceFnV1_2::load(load_fn),
            device_fn_1_3: vk::DeviceFnV1_3::load(load_fn),
        }
    }

    /// Returns the raw `VkDevice` handle wrapped by this struct.
    #[inline]
    pub fn handle(&self) -> vk::Device {
        self.handle
    }
}
41
42#[allow(non_camel_case_types)]
44impl Device {
45 #[inline]
46 pub fn fp_v1_3(&self) -> &vk::DeviceFnV1_3 {
47 &self.device_fn_1_3
48 }
49
50 #[inline]
52 pub unsafe fn create_private_data_slot(
53 &self,
54 create_info: &vk::PrivateDataSlotCreateInfo,
55 allocation_callbacks: Option<&vk::AllocationCallbacks>,
56 ) -> VkResult<vk::PrivateDataSlot> {
57 let mut private_data_slot = mem::zeroed();
58 (self.device_fn_1_3.create_private_data_slot)(
59 self.handle,
60 create_info,
61 allocation_callbacks.as_raw_ptr(),
62 &mut private_data_slot,
63 )
64 .result_with_success(private_data_slot)
65 }
66
67 #[inline]
69 pub unsafe fn destroy_private_data_slot(
70 &self,
71 private_data_slot: vk::PrivateDataSlot,
72 allocation_callbacks: Option<&vk::AllocationCallbacks>,
73 ) {
74 (self.device_fn_1_3.destroy_private_data_slot)(
75 self.handle,
76 private_data_slot,
77 allocation_callbacks.as_raw_ptr(),
78 )
79 }
80
81 #[inline]
83 pub unsafe fn set_private_data<T: vk::Handle>(
84 &self,
85 object: T,
86 private_data_slot: vk::PrivateDataSlot,
87 data: u64,
88 ) -> VkResult<()> {
89 (self.device_fn_1_3.set_private_data)(
90 self.handle,
91 T::TYPE,
92 object.as_raw(),
93 private_data_slot,
94 data,
95 )
96 .result()
97 }
98
99 #[inline]
101 pub unsafe fn get_private_data<T: vk::Handle>(
102 &self,
103 object: T,
104 private_data_slot: vk::PrivateDataSlot,
105 ) -> u64 {
106 let mut data = mem::zeroed();
107 (self.device_fn_1_3.get_private_data)(
108 self.handle,
109 T::TYPE,
110 object.as_raw(),
111 private_data_slot,
112 &mut data,
113 );
114 data
115 }
116
117 #[inline]
119 pub unsafe fn cmd_pipeline_barrier2(
120 &self,
121 command_buffer: vk::CommandBuffer,
122 dependency_info: &vk::DependencyInfo,
123 ) {
124 (self.device_fn_1_3.cmd_pipeline_barrier2)(command_buffer, dependency_info)
125 }
126
127 #[inline]
129 pub unsafe fn cmd_reset_event2(
130 &self,
131 command_buffer: vk::CommandBuffer,
132 event: vk::Event,
133 stage_mask: vk::PipelineStageFlags2,
134 ) {
135 (self.device_fn_1_3.cmd_reset_event2)(command_buffer, event, stage_mask)
136 }
137
138 #[inline]
140 pub unsafe fn cmd_set_event2(
141 &self,
142 command_buffer: vk::CommandBuffer,
143 event: vk::Event,
144 dependency_info: &vk::DependencyInfo,
145 ) {
146 (self.device_fn_1_3.cmd_set_event2)(command_buffer, event, dependency_info)
147 }
148
149 #[inline]
151 pub unsafe fn cmd_wait_events2(
152 &self,
153 command_buffer: vk::CommandBuffer,
154 events: &[vk::Event],
155 dependency_infos: &[vk::DependencyInfo],
156 ) {
157 assert_eq!(events.len(), dependency_infos.len());
158 (self.device_fn_1_3.cmd_wait_events2)(
159 command_buffer,
160 events.len() as u32,
161 events.as_ptr(),
162 dependency_infos.as_ptr(),
163 )
164 }
165
166 #[inline]
168 pub unsafe fn cmd_write_timestamp2(
169 &self,
170 command_buffer: vk::CommandBuffer,
171 stage: vk::PipelineStageFlags2,
172 query_pool: vk::QueryPool,
173 query: u32,
174 ) {
175 (self.device_fn_1_3.cmd_write_timestamp2)(command_buffer, stage, query_pool, query)
176 }
177
178 #[inline]
180 pub unsafe fn queue_submit2(
181 &self,
182 queue: vk::Queue,
183 submits: &[vk::SubmitInfo2],
184 fence: vk::Fence,
185 ) -> VkResult<()> {
186 (self.device_fn_1_3.queue_submit2)(queue, submits.len() as u32, submits.as_ptr(), fence)
187 .result()
188 }
189
190 #[inline]
192 pub unsafe fn cmd_copy_buffer2(
193 &self,
194 command_buffer: vk::CommandBuffer,
195 copy_buffer_info: &vk::CopyBufferInfo2,
196 ) {
197 (self.device_fn_1_3.cmd_copy_buffer2)(command_buffer, copy_buffer_info)
198 }
199 #[inline]
201 pub unsafe fn cmd_copy_image2(
202 &self,
203 command_buffer: vk::CommandBuffer,
204 copy_image_info: &vk::CopyImageInfo2,
205 ) {
206 (self.device_fn_1_3.cmd_copy_image2)(command_buffer, copy_image_info)
207 }
208 #[inline]
210 pub unsafe fn cmd_copy_buffer_to_image2(
211 &self,
212 command_buffer: vk::CommandBuffer,
213 copy_buffer_to_image_info: &vk::CopyBufferToImageInfo2,
214 ) {
215 (self.device_fn_1_3.cmd_copy_buffer_to_image2)(command_buffer, copy_buffer_to_image_info)
216 }
217 #[inline]
219 pub unsafe fn cmd_copy_image_to_buffer2(
220 &self,
221 command_buffer: vk::CommandBuffer,
222 copy_image_to_buffer_info: &vk::CopyImageToBufferInfo2,
223 ) {
224 (self.device_fn_1_3.cmd_copy_image_to_buffer2)(command_buffer, copy_image_to_buffer_info)
225 }
226 #[inline]
228 pub unsafe fn cmd_blit_image2(
229 &self,
230 command_buffer: vk::CommandBuffer,
231 blit_image_info: &vk::BlitImageInfo2,
232 ) {
233 (self.device_fn_1_3.cmd_blit_image2)(command_buffer, blit_image_info)
234 }
235 #[inline]
237 pub unsafe fn cmd_resolve_image2(
238 &self,
239 command_buffer: vk::CommandBuffer,
240 resolve_image_info: &vk::ResolveImageInfo2,
241 ) {
242 (self.device_fn_1_3.cmd_resolve_image2)(command_buffer, resolve_image_info)
243 }
244
245 #[inline]
247 pub unsafe fn cmd_begin_rendering(
248 &self,
249 command_buffer: vk::CommandBuffer,
250 rendering_info: &vk::RenderingInfo,
251 ) {
252 (self.device_fn_1_3.cmd_begin_rendering)(command_buffer, rendering_info)
253 }
254
255 #[inline]
257 pub unsafe fn cmd_end_rendering(&self, command_buffer: vk::CommandBuffer) {
258 (self.device_fn_1_3.cmd_end_rendering)(command_buffer)
259 }
260
261 #[inline]
263 pub unsafe fn cmd_set_cull_mode(
264 &self,
265 command_buffer: vk::CommandBuffer,
266 cull_mode: vk::CullModeFlags,
267 ) {
268 (self.device_fn_1_3.cmd_set_cull_mode)(command_buffer, cull_mode)
269 }
270
271 #[inline]
273 pub unsafe fn cmd_set_front_face(
274 &self,
275 command_buffer: vk::CommandBuffer,
276 front_face: vk::FrontFace,
277 ) {
278 (self.device_fn_1_3.cmd_set_front_face)(command_buffer, front_face)
279 }
280
281 #[inline]
283 pub unsafe fn cmd_set_primitive_topology(
284 &self,
285 command_buffer: vk::CommandBuffer,
286 primitive_topology: vk::PrimitiveTopology,
287 ) {
288 (self.device_fn_1_3.cmd_set_primitive_topology)(command_buffer, primitive_topology)
289 }
290
291 #[inline]
293 pub unsafe fn cmd_set_viewport_with_count(
294 &self,
295 command_buffer: vk::CommandBuffer,
296 viewports: &[vk::Viewport],
297 ) {
298 (self.device_fn_1_3.cmd_set_viewport_with_count)(
299 command_buffer,
300 viewports.len() as u32,
301 viewports.as_ptr(),
302 )
303 }
304
305 #[inline]
307 pub unsafe fn cmd_set_scissor_with_count(
308 &self,
309 command_buffer: vk::CommandBuffer,
310 scissors: &[vk::Rect2D],
311 ) {
312 (self.device_fn_1_3.cmd_set_scissor_with_count)(
313 command_buffer,
314 scissors.len() as u32,
315 scissors.as_ptr(),
316 )
317 }
318
319 #[inline]
321 pub unsafe fn cmd_bind_vertex_buffers2(
322 &self,
323 command_buffer: vk::CommandBuffer,
324 first_binding: u32,
325 buffers: &[vk::Buffer],
326 offsets: &[vk::DeviceSize],
327 sizes: Option<&[vk::DeviceSize]>,
328 strides: Option<&[vk::DeviceSize]>,
329 ) {
330 assert_eq!(offsets.len(), buffers.len());
331 let p_sizes = if let Some(sizes) = sizes {
332 assert_eq!(sizes.len(), buffers.len());
333 sizes.as_ptr()
334 } else {
335 ptr::null()
336 };
337 let p_strides = if let Some(strides) = strides {
338 assert_eq!(strides.len(), buffers.len());
339 strides.as_ptr()
340 } else {
341 ptr::null()
342 };
343 (self.device_fn_1_3.cmd_bind_vertex_buffers2)(
344 command_buffer,
345 first_binding,
346 buffers.len() as u32,
347 buffers.as_ptr(),
348 offsets.as_ptr(),
349 p_sizes,
350 p_strides,
351 )
352 }
353
354 #[inline]
356 pub unsafe fn cmd_set_depth_test_enable(
357 &self,
358 command_buffer: vk::CommandBuffer,
359 depth_test_enable: bool,
360 ) {
361 (self.device_fn_1_3.cmd_set_depth_test_enable)(command_buffer, depth_test_enable.into())
362 }
363
364 #[inline]
366 pub unsafe fn cmd_set_depth_write_enable(
367 &self,
368 command_buffer: vk::CommandBuffer,
369 depth_write_enable: bool,
370 ) {
371 (self.device_fn_1_3.cmd_set_depth_write_enable)(command_buffer, depth_write_enable.into())
372 }
373
374 #[inline]
376 pub unsafe fn cmd_set_depth_compare_op(
377 &self,
378 command_buffer: vk::CommandBuffer,
379 depth_compare_op: vk::CompareOp,
380 ) {
381 (self.device_fn_1_3.cmd_set_depth_compare_op)(command_buffer, depth_compare_op)
382 }
383
384 #[inline]
386 pub unsafe fn cmd_set_depth_bounds_test_enable(
387 &self,
388 command_buffer: vk::CommandBuffer,
389 depth_bounds_test_enable: bool,
390 ) {
391 (self.device_fn_1_3.cmd_set_depth_bounds_test_enable)(
392 command_buffer,
393 depth_bounds_test_enable.into(),
394 )
395 }
396
397 #[inline]
399 pub unsafe fn cmd_set_stencil_test_enable(
400 &self,
401 command_buffer: vk::CommandBuffer,
402 stencil_test_enable: bool,
403 ) {
404 (self.device_fn_1_3.cmd_set_stencil_test_enable)(command_buffer, stencil_test_enable.into())
405 }
406
407 #[inline]
409 pub unsafe fn cmd_set_stencil_op(
410 &self,
411 command_buffer: vk::CommandBuffer,
412 face_mask: vk::StencilFaceFlags,
413 fail_op: vk::StencilOp,
414 pass_op: vk::StencilOp,
415 depth_fail_op: vk::StencilOp,
416 compare_op: vk::CompareOp,
417 ) {
418 (self.device_fn_1_3.cmd_set_stencil_op)(
419 command_buffer,
420 face_mask,
421 fail_op,
422 pass_op,
423 depth_fail_op,
424 compare_op,
425 )
426 }
427
428 #[inline]
430 pub unsafe fn cmd_set_rasterizer_discard_enable(
431 &self,
432 command_buffer: vk::CommandBuffer,
433 rasterizer_discard_enable: bool,
434 ) {
435 (self.device_fn_1_3.cmd_set_rasterizer_discard_enable)(
436 command_buffer,
437 rasterizer_discard_enable.into(),
438 )
439 }
440
441 #[inline]
443 pub unsafe fn cmd_set_depth_bias_enable(
444 &self,
445 command_buffer: vk::CommandBuffer,
446 depth_bias_enable: bool,
447 ) {
448 (self.device_fn_1_3.cmd_set_depth_bias_enable)(command_buffer, depth_bias_enable.into())
449 }
450
451 #[inline]
453 pub unsafe fn cmd_set_primitive_restart_enable(
454 &self,
455 command_buffer: vk::CommandBuffer,
456 primitive_restart_enable: bool,
457 ) {
458 (self.device_fn_1_3.cmd_set_primitive_restart_enable)(
459 command_buffer,
460 primitive_restart_enable.into(),
461 )
462 }
463
464 #[inline]
466 pub unsafe fn get_device_buffer_memory_requirements(
467 &self,
468 memory_requirements: &vk::DeviceBufferMemoryRequirements,
469 out: &mut vk::MemoryRequirements2,
470 ) {
471 (self.device_fn_1_3.get_device_buffer_memory_requirements)(
472 self.handle,
473 memory_requirements,
474 out,
475 )
476 }
477
478 #[inline]
480 pub unsafe fn get_device_image_memory_requirements(
481 &self,
482 memory_requirements: &vk::DeviceImageMemoryRequirements,
483 out: &mut vk::MemoryRequirements2,
484 ) {
485 (self.device_fn_1_3.get_device_image_memory_requirements)(
486 self.handle,
487 memory_requirements,
488 out,
489 )
490 }
491
492 #[inline]
494 pub unsafe fn get_device_image_sparse_memory_requirements_len(
495 &self,
496 memory_requirements: &vk::DeviceImageMemoryRequirements,
497 ) -> usize {
498 let mut count = 0;
499 (self
500 .device_fn_1_3
501 .get_device_image_sparse_memory_requirements)(
502 self.handle,
503 memory_requirements,
504 &mut count,
505 std::ptr::null_mut(),
506 );
507 count as usize
508 }
509
510 #[inline]
515 pub unsafe fn get_device_image_sparse_memory_requirements(
516 &self,
517 memory_requirements: &vk::DeviceImageMemoryRequirements,
518 out: &mut [vk::SparseImageMemoryRequirements2],
519 ) {
520 let mut count = out.len() as u32;
521 (self
522 .device_fn_1_3
523 .get_device_image_sparse_memory_requirements)(
524 self.handle,
525 memory_requirements,
526 &mut count,
527 out.as_mut_ptr(),
528 );
529 assert_eq!(count as usize, out.len());
530 }
531}
532
#[allow(non_camel_case_types)]
/// Vulkan core 1.2 device-level commands.
impl Device {
    /// Returns the raw function pointer table for Vulkan core 1.2 commands.
    #[inline]
    pub fn fp_v1_2(&self) -> &vk::DeviceFnV1_2 {
        &self.device_fn_1_2
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndirectCount.html>
    #[inline]
    pub unsafe fn cmd_draw_indirect_count(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        count_buffer: vk::Buffer,
        count_buffer_offset: vk::DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        (self.device_fn_1_2.cmd_draw_indirect_count)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndexedIndirectCount.html>
    #[inline]
    pub unsafe fn cmd_draw_indexed_indirect_count(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        count_buffer: vk::Buffer,
        count_buffer_offset: vk::DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        (self.device_fn_1_2.cmd_draw_indexed_indirect_count)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateRenderPass2.html>
    #[inline]
    pub unsafe fn create_render_pass2(
        &self,
        create_info: &vk::RenderPassCreateInfo2,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::RenderPass> {
        let mut renderpass = mem::zeroed();
        (self.device_fn_1_2.create_render_pass2)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut renderpass,
        )
        .result_with_success(renderpass)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBeginRenderPass2.html>
    #[inline]
    pub unsafe fn cmd_begin_render_pass2(
        &self,
        command_buffer: vk::CommandBuffer,
        render_pass_begin_info: &vk::RenderPassBeginInfo,
        subpass_begin_info: &vk::SubpassBeginInfo,
    ) {
        (self.device_fn_1_2.cmd_begin_render_pass2)(
            command_buffer,
            render_pass_begin_info,
            subpass_begin_info,
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdNextSubpass2.html>
    #[inline]
    pub unsafe fn cmd_next_subpass2(
        &self,
        command_buffer: vk::CommandBuffer,
        subpass_begin_info: &vk::SubpassBeginInfo,
        subpass_end_info: &vk::SubpassEndInfo,
    ) {
        (self.device_fn_1_2.cmd_next_subpass2)(
            command_buffer,
            subpass_begin_info,
            subpass_end_info,
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdEndRenderPass2.html>
    #[inline]
    pub unsafe fn cmd_end_render_pass2(
        &self,
        command_buffer: vk::CommandBuffer,
        subpass_end_info: &vk::SubpassEndInfo,
    ) {
        (self.device_fn_1_2.cmd_end_render_pass2)(command_buffer, subpass_end_info);
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkResetQueryPool.html>
    #[inline]
    pub unsafe fn reset_query_pool(
        &self,
        query_pool: vk::QueryPool,
        first_query: u32,
        query_count: u32,
    ) {
        (self.device_fn_1_2.reset_query_pool)(self.handle(), query_pool, first_query, query_count);
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetSemaphoreCounterValue.html>
    #[inline]
    pub unsafe fn get_semaphore_counter_value(&self, semaphore: vk::Semaphore) -> VkResult<u64> {
        let mut value = 0;
        (self.device_fn_1_2.get_semaphore_counter_value)(self.handle(), semaphore, &mut value)
            .result_with_success(value)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkWaitSemaphores.html>
    #[inline]
    pub unsafe fn wait_semaphores(
        &self,
        wait_info: &vk::SemaphoreWaitInfo,
        timeout: u64,
    ) -> VkResult<()> {
        (self.device_fn_1_2.wait_semaphores)(self.handle(), wait_info, timeout).result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSignalSemaphore.html>
    #[inline]
    pub unsafe fn signal_semaphore(&self, signal_info: &vk::SemaphoreSignalInfo) -> VkResult<()> {
        (self.device_fn_1_2.signal_semaphore)(self.handle(), signal_info).result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetBufferDeviceAddress.html>
    #[inline]
    pub unsafe fn get_buffer_device_address(
        &self,
        info: &vk::BufferDeviceAddressInfo,
    ) -> vk::DeviceAddress {
        (self.device_fn_1_2.get_buffer_device_address)(self.handle(), info)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetBufferOpaqueCaptureAddress.html>
    #[inline]
    pub unsafe fn get_buffer_opaque_capture_address(
        &self,
        info: &vk::BufferDeviceAddressInfo,
    ) -> u64 {
        (self.device_fn_1_2.get_buffer_opaque_capture_address)(self.handle(), info)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceMemoryOpaqueCaptureAddress.html>
    #[inline]
    pub unsafe fn get_device_memory_opaque_capture_address(
        &self,
        info: &vk::DeviceMemoryOpaqueCaptureAddressInfo,
    ) -> u64 {
        (self.device_fn_1_2.get_device_memory_opaque_capture_address)(self.handle(), info)
    }
}
706
#[allow(non_camel_case_types)]
/// Vulkan core 1.1 device-level commands.
impl Device {
    /// Returns the raw function pointer table for Vulkan core 1.1 commands.
    #[inline]
    pub fn fp_v1_1(&self) -> &vk::DeviceFnV1_1 {
        &self.device_fn_1_1
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkBindBufferMemory2.html>
    #[inline]
    pub unsafe fn bind_buffer_memory2(
        &self,
        bind_infos: &[vk::BindBufferMemoryInfo],
    ) -> VkResult<()> {
        (self.device_fn_1_1.bind_buffer_memory2)(
            self.handle(),
            bind_infos.len() as _,
            bind_infos.as_ptr(),
        )
        .result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkBindImageMemory2.html>
    #[inline]
    pub unsafe fn bind_image_memory2(
        &self,
        bind_infos: &[vk::BindImageMemoryInfo],
    ) -> VkResult<()> {
        (self.device_fn_1_1.bind_image_memory2)(
            self.handle(),
            bind_infos.len() as _,
            bind_infos.as_ptr(),
        )
        .result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceGroupPeerMemoryFeatures.html>
    #[inline]
    pub unsafe fn get_device_group_peer_memory_features(
        &self,
        heap_index: u32,
        local_device_index: u32,
        remote_device_index: u32,
    ) -> vk::PeerMemoryFeatureFlags {
        let mut peer_memory_features = mem::zeroed();
        (self.device_fn_1_1.get_device_group_peer_memory_features)(
            self.handle(),
            heap_index,
            local_device_index,
            remote_device_index,
            &mut peer_memory_features,
        );
        peer_memory_features
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDeviceMask.html>
    #[inline]
    pub unsafe fn cmd_set_device_mask(&self, command_buffer: vk::CommandBuffer, device_mask: u32) {
        (self.device_fn_1_1.cmd_set_device_mask)(command_buffer, device_mask);
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDispatchBase.html>
    #[inline]
    pub unsafe fn cmd_dispatch_base(
        &self,
        command_buffer: vk::CommandBuffer,
        base_group_x: u32,
        base_group_y: u32,
        base_group_z: u32,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        (self.device_fn_1_1.cmd_dispatch_base)(
            command_buffer,
            base_group_x,
            base_group_y,
            base_group_z,
            group_count_x,
            group_count_y,
            group_count_z,
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageMemoryRequirements2.html>
    #[inline]
    pub unsafe fn get_image_memory_requirements2(
        &self,
        info: &vk::ImageMemoryRequirementsInfo2,
        out: &mut vk::MemoryRequirements2,
    ) {
        (self.device_fn_1_1.get_image_memory_requirements2)(self.handle(), info, out);
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetBufferMemoryRequirements2.html>
    #[inline]
    pub unsafe fn get_buffer_memory_requirements2(
        &self,
        info: &vk::BufferMemoryRequirementsInfo2,
        out: &mut vk::MemoryRequirements2,
    ) {
        (self.device_fn_1_1.get_buffer_memory_requirements2)(self.handle(), info, out);
    }

    /// Retrieve the number of elements to pass to
    /// [`get_image_sparse_memory_requirements2()`][Self::get_image_sparse_memory_requirements2()]
    #[inline]
    pub unsafe fn get_image_sparse_memory_requirements2_len(
        &self,
        info: &vk::ImageSparseMemoryRequirementsInfo2,
    ) -> usize {
        let mut count = 0;
        // Passing NULL for the output array queries only the element count.
        (self.device_fn_1_1.get_image_sparse_memory_requirements2)(
            self.handle(),
            info,
            &mut count,
            ptr::null_mut(),
        );
        count as usize
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageSparseMemoryRequirements2.html>
    ///
    /// Call [`get_image_sparse_memory_requirements2_len()`][Self::get_image_sparse_memory_requirements2_len()]
    /// to query the number of elements to pass to `out`.
    #[inline]
    pub unsafe fn get_image_sparse_memory_requirements2(
        &self,
        info: &vk::ImageSparseMemoryRequirementsInfo2,
        out: &mut [vk::SparseImageMemoryRequirements2],
    ) {
        let mut count = out.len() as u32;
        (self.device_fn_1_1.get_image_sparse_memory_requirements2)(
            self.handle(),
            info,
            &mut count,
            out.as_mut_ptr(),
        );
        // The implementation must fill exactly `out.len()` entries when the
        // caller-provided capacity matches the queried count.
        assert_eq!(count as usize, out.len());
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkTrimCommandPool.html>
    #[inline]
    pub unsafe fn trim_command_pool(
        &self,
        command_pool: vk::CommandPool,
        flags: vk::CommandPoolTrimFlags,
    ) {
        (self.device_fn_1_1.trim_command_pool)(self.handle(), command_pool, flags);
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceQueue2.html>
    #[inline]
    pub unsafe fn get_device_queue2(&self, queue_info: &vk::DeviceQueueInfo2) -> vk::Queue {
        let mut queue = mem::zeroed();
        (self.device_fn_1_1.get_device_queue2)(self.handle(), queue_info, &mut queue);
        queue
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateSamplerYcbcrConversion.html>
    #[inline]
    pub unsafe fn create_sampler_ycbcr_conversion(
        &self,
        create_info: &vk::SamplerYcbcrConversionCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::SamplerYcbcrConversion> {
        let mut ycbcr_conversion = mem::zeroed();
        (self.device_fn_1_1.create_sampler_ycbcr_conversion)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut ycbcr_conversion,
        )
        .result_with_success(ycbcr_conversion)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroySamplerYcbcrConversion.html>
    #[inline]
    pub unsafe fn destroy_sampler_ycbcr_conversion(
        &self,
        ycbcr_conversion: vk::SamplerYcbcrConversion,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_1.destroy_sampler_ycbcr_conversion)(
            self.handle(),
            ycbcr_conversion,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateDescriptorUpdateTemplate.html>
    #[inline]
    pub unsafe fn create_descriptor_update_template(
        &self,
        create_info: &vk::DescriptorUpdateTemplateCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::DescriptorUpdateTemplate> {
        let mut descriptor_update_template = mem::zeroed();
        (self.device_fn_1_1.create_descriptor_update_template)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut descriptor_update_template,
        )
        .result_with_success(descriptor_update_template)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyDescriptorUpdateTemplate.html>
    #[inline]
    pub unsafe fn destroy_descriptor_update_template(
        &self,
        descriptor_update_template: vk::DescriptorUpdateTemplate,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_1.destroy_descriptor_update_template)(
            self.handle(),
            descriptor_update_template,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkUpdateDescriptorSetWithTemplate.html>
    ///
    /// `data` is an opaque pointer whose layout is described by the template;
    /// the caller is responsible for it pointing at valid descriptor payloads.
    #[inline]
    pub unsafe fn update_descriptor_set_with_template(
        &self,
        descriptor_set: vk::DescriptorSet,
        descriptor_update_template: vk::DescriptorUpdateTemplate,
        data: *const c_void,
    ) {
        (self.device_fn_1_1.update_descriptor_set_with_template)(
            self.handle(),
            descriptor_set,
            descriptor_update_template,
            data,
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDescriptorSetLayoutSupport.html>
    #[inline]
    pub unsafe fn get_descriptor_set_layout_support(
        &self,
        create_info: &vk::DescriptorSetLayoutCreateInfo,
        out: &mut vk::DescriptorSetLayoutSupport,
    ) {
        (self.device_fn_1_1.get_descriptor_set_layout_support)(self.handle(), create_info, out);
    }
}
953
954#[allow(non_camel_case_types)]
956impl Device {
    /// Returns the raw function pointer table for Vulkan core 1.0 commands.
    #[inline]
    pub fn fp_v1_0(&self) -> &vk::DeviceFnV1_0 {
        &self.device_fn_1_0
    }
961
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyDevice.html>
    #[inline]
    pub unsafe fn destroy_device(&self, allocation_callbacks: Option<&vk::AllocationCallbacks>) {
        (self.device_fn_1_0.destroy_device)(self.handle(), allocation_callbacks.as_raw_ptr());
    }
967
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroySampler.html>
    #[inline]
    pub unsafe fn destroy_sampler(
        &self,
        sampler: vk::Sampler,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_sampler)(
            self.handle(),
            sampler,
            allocation_callbacks.as_raw_ptr(),
        );
    }
981
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkFreeMemory.html>
    #[inline]
    pub unsafe fn free_memory(
        &self,
        memory: vk::DeviceMemory,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.free_memory)(self.handle(), memory, allocation_callbacks.as_raw_ptr());
    }
991
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkFreeCommandBuffers.html>
    #[inline]
    pub unsafe fn free_command_buffers(
        &self,
        command_pool: vk::CommandPool,
        command_buffers: &[vk::CommandBuffer],
    ) {
        (self.device_fn_1_0.free_command_buffers)(
            self.handle(),
            command_pool,
            command_buffers.len() as u32,
            command_buffers.as_ptr(),
        );
    }
1006
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateEvent.html>
    #[inline]
    pub unsafe fn create_event(
        &self,
        create_info: &vk::EventCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::Event> {
        let mut event = mem::zeroed();
        (self.device_fn_1_0.create_event)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut event,
        )
        .result_with_success(event)
    }
1023
    /// Returns `Ok(true)` if the event was signaled (`VK_EVENT_SET`) and
    /// `Ok(false)` if unsignaled (`VK_EVENT_RESET`); any other result code is
    /// propagated as an error.
    ///
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetEventStatus.html>
    #[inline]
    pub unsafe fn get_event_status(&self, event: vk::Event) -> VkResult<bool> {
        let err_code = (self.device_fn_1_0.get_event_status)(self.handle(), event);
        match err_code {
            vk::Result::EVENT_SET => Ok(true),
            vk::Result::EVENT_RESET => Ok(false),
            _ => Err(err_code),
        }
    }
1036
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSetEvent.html>
    #[inline]
    pub unsafe fn set_event(&self, event: vk::Event) -> VkResult<()> {
        (self.device_fn_1_0.set_event)(self.handle(), event).result()
    }
1042
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkResetEvent.html>
    #[inline]
    pub unsafe fn reset_event(&self, event: vk::Event) -> VkResult<()> {
        (self.device_fn_1_0.reset_event)(self.handle(), event).result()
    }
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetEvent.html>
    #[inline]
    pub unsafe fn cmd_set_event(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags,
    ) {
        (self.device_fn_1_0.cmd_set_event)(command_buffer, event, stage_mask);
    }
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdResetEvent.html>
    #[inline]
    pub unsafe fn cmd_reset_event(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags,
    ) {
        (self.device_fn_1_0.cmd_reset_event)(command_buffer, event, stage_mask);
    }
1068
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdWaitEvents.html>
    #[inline]
    pub unsafe fn cmd_wait_events(
        &self,
        command_buffer: vk::CommandBuffer,
        events: &[vk::Event],
        src_stage_mask: vk::PipelineStageFlags,
        dst_stage_mask: vk::PipelineStageFlags,
        memory_barriers: &[vk::MemoryBarrier],
        buffer_memory_barriers: &[vk::BufferMemoryBarrier],
        image_memory_barriers: &[vk::ImageMemoryBarrier],
    ) {
        (self.device_fn_1_0.cmd_wait_events)(
            command_buffer,
            events.len() as _,
            events.as_ptr(),
            src_stage_mask,
            dst_stage_mask,
            memory_barriers.len() as _,
            memory_barriers.as_ptr(),
            buffer_memory_barriers.len() as _,
            buffer_memory_barriers.as_ptr(),
            image_memory_barriers.len() as _,
            image_memory_barriers.as_ptr(),
        );
    }
1095
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyFence.html>
    #[inline]
    pub unsafe fn destroy_fence(
        &self,
        fence: vk::Fence,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_fence)(self.handle(), fence, allocation_callbacks.as_raw_ptr());
    }
1105
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyEvent.html>
    #[inline]
    pub unsafe fn destroy_event(
        &self,
        event: vk::Event,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_event)(self.handle(), event, allocation_callbacks.as_raw_ptr());
    }
1115
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyImage.html>
    #[inline]
    pub unsafe fn destroy_image(
        &self,
        image: vk::Image,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_image)(self.handle(), image, allocation_callbacks.as_raw_ptr());
    }
1125
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyCommandPool.html>
    #[inline]
    pub unsafe fn destroy_command_pool(
        &self,
        pool: vk::CommandPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_command_pool)(
            self.handle(),
            pool,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1139
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyImageView.html>
    #[inline]
    pub unsafe fn destroy_image_view(
        &self,
        image_view: vk::ImageView,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_image_view)(
            self.handle(),
            image_view,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1153
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyRenderPass.html>
    #[inline]
    pub unsafe fn destroy_render_pass(
        &self,
        renderpass: vk::RenderPass,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_render_pass)(
            self.handle(),
            renderpass,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1167
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyFramebuffer.html>
    #[inline]
    pub unsafe fn destroy_framebuffer(
        &self,
        framebuffer: vk::Framebuffer,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_framebuffer)(
            self.handle(),
            framebuffer,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1181
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyPipelineLayout.html>
    #[inline]
    pub unsafe fn destroy_pipeline_layout(
        &self,
        pipeline_layout: vk::PipelineLayout,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_pipeline_layout)(
            self.handle(),
            pipeline_layout,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1195
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyPipelineCache.html>
    #[inline]
    pub unsafe fn destroy_pipeline_cache(
        &self,
        pipeline_cache: vk::PipelineCache,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_pipeline_cache)(
            self.handle(),
            pipeline_cache,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1209
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyBuffer.html>
    #[inline]
    pub unsafe fn destroy_buffer(
        &self,
        buffer: vk::Buffer,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_buffer)(
            self.handle(),
            buffer,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1223
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyShaderModule.html>
    #[inline]
    pub unsafe fn destroy_shader_module(
        &self,
        shader: vk::ShaderModule,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_shader_module)(
            self.handle(),
            shader,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1237
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyPipeline.html>
    #[inline]
    pub unsafe fn destroy_pipeline(
        &self,
        pipeline: vk::Pipeline,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_pipeline)(
            self.handle(),
            pipeline,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1251
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroySemaphore.html>
    #[inline]
    pub unsafe fn destroy_semaphore(
        &self,
        semaphore: vk::Semaphore,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_semaphore)(
            self.handle(),
            semaphore,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1265
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyDescriptorPool.html>
    #[inline]
    pub unsafe fn destroy_descriptor_pool(
        &self,
        pool: vk::DescriptorPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_descriptor_pool)(
            self.handle(),
            pool,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1279
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyQueryPool.html>
    #[inline]
    pub unsafe fn destroy_query_pool(
        &self,
        pool: vk::QueryPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_query_pool)(
            self.handle(),
            pool,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1293
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyDescriptorSetLayout.html>
    #[inline]
    pub unsafe fn destroy_descriptor_set_layout(
        &self,
        layout: vk::DescriptorSetLayout,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_descriptor_set_layout)(
            self.handle(),
            layout,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1307
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkFreeDescriptorSets.html>
    #[inline]
    pub unsafe fn free_descriptor_sets(
        &self,
        pool: vk::DescriptorPool,
        descriptor_sets: &[vk::DescriptorSet],
    ) -> VkResult<()> {
        (self.device_fn_1_0.free_descriptor_sets)(
            self.handle(),
            pool,
            descriptor_sets.len() as u32,
            descriptor_sets.as_ptr(),
        )
        .result()
    }
1323
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkUpdateDescriptorSets.html>
    #[inline]
    pub unsafe fn update_descriptor_sets(
        &self,
        descriptor_writes: &[vk::WriteDescriptorSet],
        descriptor_copies: &[vk::CopyDescriptorSet],
    ) {
        (self.device_fn_1_0.update_descriptor_sets)(
            self.handle(),
            descriptor_writes.len() as u32,
            descriptor_writes.as_ptr(),
            descriptor_copies.len() as u32,
            descriptor_copies.as_ptr(),
        );
    }
1339
    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateSampler.html>
    #[inline]
    pub unsafe fn create_sampler(
        &self,
        create_info: &vk::SamplerCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::Sampler> {
        let mut sampler = mem::zeroed();
        (self.device_fn_1_0.create_sampler)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut sampler,
        )
        .result_with_success(sampler)
    }
1356
1357 #[inline]
1359 pub unsafe fn cmd_blit_image(
1360 &self,
1361 command_buffer: vk::CommandBuffer,
1362 src_image: vk::Image,
1363 src_image_layout: vk::ImageLayout,
1364 dst_image: vk::Image,
1365 dst_image_layout: vk::ImageLayout,
1366 regions: &[vk::ImageBlit],
1367 filter: vk::Filter,
1368 ) {
1369 (self.device_fn_1_0.cmd_blit_image)(
1370 command_buffer,
1371 src_image,
1372 src_image_layout,
1373 dst_image,
1374 dst_image_layout,
1375 regions.len() as _,
1376 regions.as_ptr(),
1377 filter,
1378 );
1379 }
1380
1381 #[inline]
1383 pub unsafe fn cmd_resolve_image(
1384 &self,
1385 command_buffer: vk::CommandBuffer,
1386 src_image: vk::Image,
1387 src_image_layout: vk::ImageLayout,
1388 dst_image: vk::Image,
1389 dst_image_layout: vk::ImageLayout,
1390 regions: &[vk::ImageResolve],
1391 ) {
1392 (self.device_fn_1_0.cmd_resolve_image)(
1393 command_buffer,
1394 src_image,
1395 src_image_layout,
1396 dst_image,
1397 dst_image_layout,
1398 regions.len() as u32,
1399 regions.as_ptr(),
1400 );
1401 }
1402
1403 #[inline]
1405 pub unsafe fn cmd_fill_buffer(
1406 &self,
1407 command_buffer: vk::CommandBuffer,
1408 buffer: vk::Buffer,
1409 offset: vk::DeviceSize,
1410 size: vk::DeviceSize,
1411 data: u32,
1412 ) {
1413 (self.device_fn_1_0.cmd_fill_buffer)(command_buffer, buffer, offset, size, data);
1414 }
1415
1416 #[inline]
1418 pub unsafe fn cmd_update_buffer(
1419 &self,
1420 command_buffer: vk::CommandBuffer,
1421 buffer: vk::Buffer,
1422 offset: vk::DeviceSize,
1423 data: &[u8],
1424 ) {
1425 (self.device_fn_1_0.cmd_update_buffer)(
1426 command_buffer,
1427 buffer,
1428 offset,
1429 data.len() as u64,
1430 data.as_ptr() as _,
1431 );
1432 }
1433
1434 #[inline]
1436 pub unsafe fn cmd_copy_buffer(
1437 &self,
1438 command_buffer: vk::CommandBuffer,
1439 src_buffer: vk::Buffer,
1440 dst_buffer: vk::Buffer,
1441 regions: &[vk::BufferCopy],
1442 ) {
1443 (self.device_fn_1_0.cmd_copy_buffer)(
1444 command_buffer,
1445 src_buffer,
1446 dst_buffer,
1447 regions.len() as u32,
1448 regions.as_ptr(),
1449 );
1450 }
1451
1452 #[inline]
1454 pub unsafe fn cmd_copy_image_to_buffer(
1455 &self,
1456 command_buffer: vk::CommandBuffer,
1457 src_image: vk::Image,
1458 src_image_layout: vk::ImageLayout,
1459 dst_buffer: vk::Buffer,
1460 regions: &[vk::BufferImageCopy],
1461 ) {
1462 (self.device_fn_1_0.cmd_copy_image_to_buffer)(
1463 command_buffer,
1464 src_image,
1465 src_image_layout,
1466 dst_buffer,
1467 regions.len() as u32,
1468 regions.as_ptr(),
1469 );
1470 }
1471
1472 #[inline]
1474 pub unsafe fn cmd_copy_buffer_to_image(
1475 &self,
1476 command_buffer: vk::CommandBuffer,
1477 src_buffer: vk::Buffer,
1478 dst_image: vk::Image,
1479 dst_image_layout: vk::ImageLayout,
1480 regions: &[vk::BufferImageCopy],
1481 ) {
1482 (self.device_fn_1_0.cmd_copy_buffer_to_image)(
1483 command_buffer,
1484 src_buffer,
1485 dst_image,
1486 dst_image_layout,
1487 regions.len() as u32,
1488 regions.as_ptr(),
1489 );
1490 }
1491
1492 #[inline]
1494 pub unsafe fn cmd_copy_image(
1495 &self,
1496 command_buffer: vk::CommandBuffer,
1497 src_image: vk::Image,
1498 src_image_layout: vk::ImageLayout,
1499 dst_image: vk::Image,
1500 dst_image_layout: vk::ImageLayout,
1501 regions: &[vk::ImageCopy],
1502 ) {
1503 (self.device_fn_1_0.cmd_copy_image)(
1504 command_buffer,
1505 src_image,
1506 src_image_layout,
1507 dst_image,
1508 dst_image_layout,
1509 regions.len() as u32,
1510 regions.as_ptr(),
1511 );
1512 }
1513
1514 #[inline]
1516 pub unsafe fn allocate_descriptor_sets(
1517 &self,
1518 allocate_info: &vk::DescriptorSetAllocateInfo,
1519 ) -> VkResult<Vec<vk::DescriptorSet>> {
1520 let mut desc_set = Vec::with_capacity(allocate_info.descriptor_set_count as usize);
1521 (self.device_fn_1_0.allocate_descriptor_sets)(
1522 self.handle(),
1523 allocate_info,
1524 desc_set.as_mut_ptr(),
1525 )
1526 .result()?;
1527
1528 desc_set.set_len(allocate_info.descriptor_set_count as usize);
1529 Ok(desc_set)
1530 }
1531
1532 #[inline]
1534 pub unsafe fn create_descriptor_set_layout(
1535 &self,
1536 create_info: &vk::DescriptorSetLayoutCreateInfo,
1537 allocation_callbacks: Option<&vk::AllocationCallbacks>,
1538 ) -> VkResult<vk::DescriptorSetLayout> {
1539 let mut layout = mem::zeroed();
1540 (self.device_fn_1_0.create_descriptor_set_layout)(
1541 self.handle(),
1542 create_info,
1543 allocation_callbacks.as_raw_ptr(),
1544 &mut layout,
1545 )
1546 .result_with_success(layout)
1547 }
1548
1549 #[inline]
1551 pub unsafe fn device_wait_idle(&self) -> VkResult<()> {
1552 (self.device_fn_1_0.device_wait_idle)(self.handle()).result()
1553 }
1554
1555 #[inline]
1557 pub unsafe fn create_descriptor_pool(
1558 &self,
1559 create_info: &vk::DescriptorPoolCreateInfo,
1560 allocation_callbacks: Option<&vk::AllocationCallbacks>,
1561 ) -> VkResult<vk::DescriptorPool> {
1562 let mut pool = mem::zeroed();
1563 (self.device_fn_1_0.create_descriptor_pool)(
1564 self.handle(),
1565 create_info,
1566 allocation_callbacks.as_raw_ptr(),
1567 &mut pool,
1568 )
1569 .result_with_success(pool)
1570 }
1571
1572 #[inline]
1574 pub unsafe fn reset_descriptor_pool(
1575 &self,
1576 pool: vk::DescriptorPool,
1577 flags: vk::DescriptorPoolResetFlags,
1578 ) -> VkResult<()> {
1579 (self.device_fn_1_0.reset_descriptor_pool)(self.handle(), pool, flags).result()
1580 }
1581
1582 #[inline]
1584 pub unsafe fn reset_command_pool(
1585 &self,
1586 command_pool: vk::CommandPool,
1587 flags: vk::CommandPoolResetFlags,
1588 ) -> VkResult<()> {
1589 (self.device_fn_1_0.reset_command_pool)(self.handle(), command_pool, flags).result()
1590 }
1591
1592 #[inline]
1594 pub unsafe fn reset_command_buffer(
1595 &self,
1596 command_buffer: vk::CommandBuffer,
1597 flags: vk::CommandBufferResetFlags,
1598 ) -> VkResult<()> {
1599 (self.device_fn_1_0.reset_command_buffer)(command_buffer, flags).result()
1600 }
1601
1602 #[inline]
1604 pub unsafe fn reset_fences(&self, fences: &[vk::Fence]) -> VkResult<()> {
1605 (self.device_fn_1_0.reset_fences)(self.handle(), fences.len() as u32, fences.as_ptr())
1606 .result()
1607 }
1608
1609 #[inline]
1611 pub unsafe fn cmd_bind_index_buffer(
1612 &self,
1613 command_buffer: vk::CommandBuffer,
1614 buffer: vk::Buffer,
1615 offset: vk::DeviceSize,
1616 index_type: vk::IndexType,
1617 ) {
1618 (self.device_fn_1_0.cmd_bind_index_buffer)(command_buffer, buffer, offset, index_type);
1619 }
1620
1621 #[inline]
1623 pub unsafe fn cmd_clear_color_image(
1624 &self,
1625 command_buffer: vk::CommandBuffer,
1626 image: vk::Image,
1627 image_layout: vk::ImageLayout,
1628 clear_color_value: &vk::ClearColorValue,
1629 ranges: &[vk::ImageSubresourceRange],
1630 ) {
1631 (self.device_fn_1_0.cmd_clear_color_image)(
1632 command_buffer,
1633 image,
1634 image_layout,
1635 clear_color_value,
1636 ranges.len() as u32,
1637 ranges.as_ptr(),
1638 );
1639 }
1640
1641 #[inline]
1643 pub unsafe fn cmd_clear_depth_stencil_image(
1644 &self,
1645 command_buffer: vk::CommandBuffer,
1646 image: vk::Image,
1647 image_layout: vk::ImageLayout,
1648 clear_depth_stencil_value: &vk::ClearDepthStencilValue,
1649 ranges: &[vk::ImageSubresourceRange],
1650 ) {
1651 (self.device_fn_1_0.cmd_clear_depth_stencil_image)(
1652 command_buffer,
1653 image,
1654 image_layout,
1655 clear_depth_stencil_value,
1656 ranges.len() as u32,
1657 ranges.as_ptr(),
1658 );
1659 }
1660
1661 #[inline]
1663 pub unsafe fn cmd_clear_attachments(
1664 &self,
1665 command_buffer: vk::CommandBuffer,
1666 attachments: &[vk::ClearAttachment],
1667 rects: &[vk::ClearRect],
1668 ) {
1669 (self.device_fn_1_0.cmd_clear_attachments)(
1670 command_buffer,
1671 attachments.len() as u32,
1672 attachments.as_ptr(),
1673 rects.len() as u32,
1674 rects.as_ptr(),
1675 );
1676 }
1677
1678 #[inline]
1680 pub unsafe fn cmd_draw_indexed(
1681 &self,
1682 command_buffer: vk::CommandBuffer,
1683 index_count: u32,
1684 instance_count: u32,
1685 first_index: u32,
1686 vertex_offset: i32,
1687 first_instance: u32,
1688 ) {
1689 (self.device_fn_1_0.cmd_draw_indexed)(
1690 command_buffer,
1691 index_count,
1692 instance_count,
1693 first_index,
1694 vertex_offset,
1695 first_instance,
1696 );
1697 }
1698
1699 #[inline]
1701 pub unsafe fn cmd_draw_indexed_indirect(
1702 &self,
1703 command_buffer: vk::CommandBuffer,
1704 buffer: vk::Buffer,
1705 offset: vk::DeviceSize,
1706 draw_count: u32,
1707 stride: u32,
1708 ) {
1709 (self.device_fn_1_0.cmd_draw_indexed_indirect)(
1710 command_buffer,
1711 buffer,
1712 offset,
1713 draw_count,
1714 stride,
1715 );
1716 }
1717
1718 #[inline]
1720 pub unsafe fn cmd_execute_commands(
1721 &self,
1722 primary_command_buffer: vk::CommandBuffer,
1723 secondary_command_buffers: &[vk::CommandBuffer],
1724 ) {
1725 (self.device_fn_1_0.cmd_execute_commands)(
1726 primary_command_buffer,
1727 secondary_command_buffers.len() as u32,
1728 secondary_command_buffers.as_ptr(),
1729 );
1730 }
1731
1732 #[inline]
1734 pub unsafe fn cmd_bind_descriptor_sets(
1735 &self,
1736 command_buffer: vk::CommandBuffer,
1737 pipeline_bind_point: vk::PipelineBindPoint,
1738 layout: vk::PipelineLayout,
1739 first_set: u32,
1740 descriptor_sets: &[vk::DescriptorSet],
1741 dynamic_offsets: &[u32],
1742 ) {
1743 (self.device_fn_1_0.cmd_bind_descriptor_sets)(
1744 command_buffer,
1745 pipeline_bind_point,
1746 layout,
1747 first_set,
1748 descriptor_sets.len() as u32,
1749 descriptor_sets.as_ptr(),
1750 dynamic_offsets.len() as u32,
1751 dynamic_offsets.as_ptr(),
1752 );
1753 }
1754
1755 #[inline]
1757 pub unsafe fn cmd_copy_query_pool_results(
1758 &self,
1759 command_buffer: vk::CommandBuffer,
1760 query_pool: vk::QueryPool,
1761 first_query: u32,
1762 query_count: u32,
1763 dst_buffer: vk::Buffer,
1764 dst_offset: vk::DeviceSize,
1765 stride: vk::DeviceSize,
1766 flags: vk::QueryResultFlags,
1767 ) {
1768 (self.device_fn_1_0.cmd_copy_query_pool_results)(
1769 command_buffer,
1770 query_pool,
1771 first_query,
1772 query_count,
1773 dst_buffer,
1774 dst_offset,
1775 stride,
1776 flags,
1777 );
1778 }
1779
1780 #[inline]
1782 pub unsafe fn cmd_push_constants(
1783 &self,
1784 command_buffer: vk::CommandBuffer,
1785 layout: vk::PipelineLayout,
1786 stage_flags: vk::ShaderStageFlags,
1787 offset: u32,
1788 constants: &[u8],
1789 ) {
1790 (self.device_fn_1_0.cmd_push_constants)(
1791 command_buffer,
1792 layout,
1793 stage_flags,
1794 offset,
1795 constants.len() as _,
1796 constants.as_ptr() as _,
1797 );
1798 }
1799
1800 #[inline]
1802 pub unsafe fn cmd_begin_render_pass(
1803 &self,
1804 command_buffer: vk::CommandBuffer,
1805 render_pass_begin: &vk::RenderPassBeginInfo,
1806 contents: vk::SubpassContents,
1807 ) {
1808 (self.device_fn_1_0.cmd_begin_render_pass)(command_buffer, render_pass_begin, contents);
1809 }
1810
1811 #[inline]
1813 pub unsafe fn cmd_next_subpass(
1814 &self,
1815 command_buffer: vk::CommandBuffer,
1816 contents: vk::SubpassContents,
1817 ) {
1818 (self.device_fn_1_0.cmd_next_subpass)(command_buffer, contents);
1819 }
1820
1821 #[inline]
1823 pub unsafe fn cmd_bind_pipeline(
1824 &self,
1825 command_buffer: vk::CommandBuffer,
1826 pipeline_bind_point: vk::PipelineBindPoint,
1827 pipeline: vk::Pipeline,
1828 ) {
1829 (self.device_fn_1_0.cmd_bind_pipeline)(command_buffer, pipeline_bind_point, pipeline);
1830 }
1831
1832 #[inline]
1834 pub unsafe fn cmd_set_scissor(
1835 &self,
1836 command_buffer: vk::CommandBuffer,
1837 first_scissor: u32,
1838 scissors: &[vk::Rect2D],
1839 ) {
1840 (self.device_fn_1_0.cmd_set_scissor)(
1841 command_buffer,
1842 first_scissor,
1843 scissors.len() as u32,
1844 scissors.as_ptr(),
1845 );
1846 }
1847
1848 #[inline]
1850 pub unsafe fn cmd_set_line_width(&self, command_buffer: vk::CommandBuffer, line_width: f32) {
1851 (self.device_fn_1_0.cmd_set_line_width)(command_buffer, line_width);
1852 }
1853
1854 #[inline]
1856 pub unsafe fn cmd_bind_vertex_buffers(
1857 &self,
1858 command_buffer: vk::CommandBuffer,
1859 first_binding: u32,
1860 buffers: &[vk::Buffer],
1861 offsets: &[vk::DeviceSize],
1862 ) {
1863 debug_assert_eq!(buffers.len(), offsets.len());
1864 (self.device_fn_1_0.cmd_bind_vertex_buffers)(
1865 command_buffer,
1866 first_binding,
1867 buffers.len() as u32,
1868 buffers.as_ptr(),
1869 offsets.as_ptr(),
1870 );
1871 }
1872
1873 #[inline]
1875 pub unsafe fn cmd_end_render_pass(&self, command_buffer: vk::CommandBuffer) {
1876 (self.device_fn_1_0.cmd_end_render_pass)(command_buffer);
1877 }
1878
1879 #[inline]
1881 pub unsafe fn cmd_draw(
1882 &self,
1883 command_buffer: vk::CommandBuffer,
1884 vertex_count: u32,
1885 instance_count: u32,
1886 first_vertex: u32,
1887 first_instance: u32,
1888 ) {
1889 (self.device_fn_1_0.cmd_draw)(
1890 command_buffer,
1891 vertex_count,
1892 instance_count,
1893 first_vertex,
1894 first_instance,
1895 );
1896 }
1897
1898 #[inline]
1900 pub unsafe fn cmd_draw_indirect(
1901 &self,
1902 command_buffer: vk::CommandBuffer,
1903 buffer: vk::Buffer,
1904 offset: vk::DeviceSize,
1905 draw_count: u32,
1906 stride: u32,
1907 ) {
1908 (self.device_fn_1_0.cmd_draw_indirect)(command_buffer, buffer, offset, draw_count, stride);
1909 }
1910
1911 #[inline]
1913 pub unsafe fn cmd_dispatch(
1914 &self,
1915 command_buffer: vk::CommandBuffer,
1916 group_count_x: u32,
1917 group_count_y: u32,
1918 group_count_z: u32,
1919 ) {
1920 (self.device_fn_1_0.cmd_dispatch)(
1921 command_buffer,
1922 group_count_x,
1923 group_count_y,
1924 group_count_z,
1925 );
1926 }
1927
1928 #[inline]
1930 pub unsafe fn cmd_dispatch_indirect(
1931 &self,
1932 command_buffer: vk::CommandBuffer,
1933 buffer: vk::Buffer,
1934 offset: vk::DeviceSize,
1935 ) {
1936 (self.device_fn_1_0.cmd_dispatch_indirect)(command_buffer, buffer, offset);
1937 }
1938
1939 #[inline]
1941 pub unsafe fn cmd_set_viewport(
1942 &self,
1943 command_buffer: vk::CommandBuffer,
1944 first_viewport: u32,
1945 viewports: &[vk::Viewport],
1946 ) {
1947 (self.device_fn_1_0.cmd_set_viewport)(
1948 command_buffer,
1949 first_viewport,
1950 viewports.len() as u32,
1951 viewports.as_ptr(),
1952 );
1953 }
1954
1955 #[inline]
1957 pub unsafe fn cmd_set_depth_bias(
1958 &self,
1959 command_buffer: vk::CommandBuffer,
1960 constant_factor: f32,
1961 clamp: f32,
1962 slope_factor: f32,
1963 ) {
1964 (self.device_fn_1_0.cmd_set_depth_bias)(
1965 command_buffer,
1966 constant_factor,
1967 clamp,
1968 slope_factor,
1969 );
1970 }
1971
1972 #[inline]
1974 pub unsafe fn cmd_set_blend_constants(
1975 &self,
1976 command_buffer: vk::CommandBuffer,
1977 blend_constants: &[f32; 4],
1978 ) {
1979 (self.device_fn_1_0.cmd_set_blend_constants)(command_buffer, blend_constants);
1980 }
1981
1982 #[inline]
1984 pub unsafe fn cmd_set_depth_bounds(
1985 &self,
1986 command_buffer: vk::CommandBuffer,
1987 min_depth_bounds: f32,
1988 max_depth_bounds: f32,
1989 ) {
1990 (self.device_fn_1_0.cmd_set_depth_bounds)(
1991 command_buffer,
1992 min_depth_bounds,
1993 max_depth_bounds,
1994 );
1995 }
1996
1997 #[inline]
1999 pub unsafe fn cmd_set_stencil_compare_mask(
2000 &self,
2001 command_buffer: vk::CommandBuffer,
2002 face_mask: vk::StencilFaceFlags,
2003 compare_mask: u32,
2004 ) {
2005 (self.device_fn_1_0.cmd_set_stencil_compare_mask)(command_buffer, face_mask, compare_mask);
2006 }
2007
2008 #[inline]
2010 pub unsafe fn cmd_set_stencil_write_mask(
2011 &self,
2012 command_buffer: vk::CommandBuffer,
2013 face_mask: vk::StencilFaceFlags,
2014 write_mask: u32,
2015 ) {
2016 (self.device_fn_1_0.cmd_set_stencil_write_mask)(command_buffer, face_mask, write_mask);
2017 }
2018
2019 #[inline]
2021 pub unsafe fn cmd_set_stencil_reference(
2022 &self,
2023 command_buffer: vk::CommandBuffer,
2024 face_mask: vk::StencilFaceFlags,
2025 reference: u32,
2026 ) {
2027 (self.device_fn_1_0.cmd_set_stencil_reference)(command_buffer, face_mask, reference);
2028 }
2029
2030 #[inline]
2032 pub unsafe fn get_query_pool_results<T>(
2033 &self,
2034 query_pool: vk::QueryPool,
2035 first_query: u32,
2036 query_count: u32,
2037 data: &mut [T],
2038 flags: vk::QueryResultFlags,
2039 ) -> VkResult<()> {
2040 let data_length = query_count as usize;
2041 assert!(
2042 data_length <= data.len(),
2043 "query_count was higher than the length of the slice"
2044 );
2045 let data_size = mem::size_of::<T>() * data_length;
2046 (self.device_fn_1_0.get_query_pool_results)(
2047 self.handle(),
2048 query_pool,
2049 first_query,
2050 query_count,
2051 data_size,
2052 data.as_mut_ptr().cast(),
2053 mem::size_of::<T>() as _,
2054 flags,
2055 )
2056 .result()
2057 }
2058
2059 #[inline]
2061 pub unsafe fn cmd_begin_query(
2062 &self,
2063 command_buffer: vk::CommandBuffer,
2064 query_pool: vk::QueryPool,
2065 query: u32,
2066 flags: vk::QueryControlFlags,
2067 ) {
2068 (self.device_fn_1_0.cmd_begin_query)(command_buffer, query_pool, query, flags);
2069 }
2070
2071 #[inline]
2073 pub unsafe fn cmd_end_query(
2074 &self,
2075 command_buffer: vk::CommandBuffer,
2076 query_pool: vk::QueryPool,
2077 query: u32,
2078 ) {
2079 (self.device_fn_1_0.cmd_end_query)(command_buffer, query_pool, query);
2080 }
2081
2082 #[inline]
2084 pub unsafe fn cmd_reset_query_pool(
2085 &self,
2086 command_buffer: vk::CommandBuffer,
2087 pool: vk::QueryPool,
2088 first_query: u32,
2089 query_count: u32,
2090 ) {
2091 (self.device_fn_1_0.cmd_reset_query_pool)(command_buffer, pool, first_query, query_count);
2092 }
2093
2094 #[inline]
2096 pub unsafe fn cmd_write_timestamp(
2097 &self,
2098 command_buffer: vk::CommandBuffer,
2099 pipeline_stage: vk::PipelineStageFlags,
2100 query_pool: vk::QueryPool,
2101 query: u32,
2102 ) {
2103 (self.device_fn_1_0.cmd_write_timestamp)(command_buffer, pipeline_stage, query_pool, query);
2104 }
2105
2106 #[inline]
2108 pub unsafe fn create_semaphore(
2109 &self,
2110 create_info: &vk::SemaphoreCreateInfo,
2111 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2112 ) -> VkResult<vk::Semaphore> {
2113 let mut semaphore = mem::zeroed();
2114 (self.device_fn_1_0.create_semaphore)(
2115 self.handle(),
2116 create_info,
2117 allocation_callbacks.as_raw_ptr(),
2118 &mut semaphore,
2119 )
2120 .result_with_success(semaphore)
2121 }
2122
2123 #[inline]
2125 pub unsafe fn create_graphics_pipelines(
2126 &self,
2127 pipeline_cache: vk::PipelineCache,
2128 create_infos: &[vk::GraphicsPipelineCreateInfo],
2129 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2130 ) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
2131 let mut pipelines = Vec::with_capacity(create_infos.len());
2132 let err_code = (self.device_fn_1_0.create_graphics_pipelines)(
2133 self.handle(),
2134 pipeline_cache,
2135 create_infos.len() as u32,
2136 create_infos.as_ptr(),
2137 allocation_callbacks.as_raw_ptr(),
2138 pipelines.as_mut_ptr(),
2139 );
2140 pipelines.set_len(create_infos.len());
2141 match err_code {
2142 vk::Result::SUCCESS => Ok(pipelines),
2143 _ => Err((pipelines, err_code)),
2144 }
2145 }
2146
2147 #[inline]
2149 pub unsafe fn create_compute_pipelines(
2150 &self,
2151 pipeline_cache: vk::PipelineCache,
2152 create_infos: &[vk::ComputePipelineCreateInfo],
2153 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2154 ) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
2155 let mut pipelines = Vec::with_capacity(create_infos.len());
2156 let err_code = (self.device_fn_1_0.create_compute_pipelines)(
2157 self.handle(),
2158 pipeline_cache,
2159 create_infos.len() as u32,
2160 create_infos.as_ptr(),
2161 allocation_callbacks.as_raw_ptr(),
2162 pipelines.as_mut_ptr(),
2163 );
2164 pipelines.set_len(create_infos.len());
2165 match err_code {
2166 vk::Result::SUCCESS => Ok(pipelines),
2167 _ => Err((pipelines, err_code)),
2168 }
2169 }
2170
2171 #[inline]
2173 pub unsafe fn create_buffer(
2174 &self,
2175 create_info: &vk::BufferCreateInfo,
2176 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2177 ) -> VkResult<vk::Buffer> {
2178 let mut buffer = mem::zeroed();
2179 (self.device_fn_1_0.create_buffer)(
2180 self.handle(),
2181 create_info,
2182 allocation_callbacks.as_raw_ptr(),
2183 &mut buffer,
2184 )
2185 .result_with_success(buffer)
2186 }
2187
2188 #[inline]
2190 pub unsafe fn create_pipeline_layout(
2191 &self,
2192 create_info: &vk::PipelineLayoutCreateInfo,
2193 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2194 ) -> VkResult<vk::PipelineLayout> {
2195 let mut pipeline_layout = mem::zeroed();
2196 (self.device_fn_1_0.create_pipeline_layout)(
2197 self.handle(),
2198 create_info,
2199 allocation_callbacks.as_raw_ptr(),
2200 &mut pipeline_layout,
2201 )
2202 .result_with_success(pipeline_layout)
2203 }
2204
2205 #[inline]
2207 pub unsafe fn create_pipeline_cache(
2208 &self,
2209 create_info: &vk::PipelineCacheCreateInfo,
2210 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2211 ) -> VkResult<vk::PipelineCache> {
2212 let mut pipeline_cache = mem::zeroed();
2213 (self.device_fn_1_0.create_pipeline_cache)(
2214 self.handle(),
2215 create_info,
2216 allocation_callbacks.as_raw_ptr(),
2217 &mut pipeline_cache,
2218 )
2219 .result_with_success(pipeline_cache)
2220 }
2221
2222 #[inline]
2224 pub unsafe fn get_pipeline_cache_data(
2225 &self,
2226 pipeline_cache: vk::PipelineCache,
2227 ) -> VkResult<Vec<u8>> {
2228 read_into_uninitialized_vector(|count, data| {
2229 (self.device_fn_1_0.get_pipeline_cache_data)(
2230 self.handle(),
2231 pipeline_cache,
2232 count,
2233 data as _,
2234 )
2235 })
2236 }
2237
2238 #[inline]
2240 pub unsafe fn merge_pipeline_caches(
2241 &self,
2242 dst_cache: vk::PipelineCache,
2243 src_caches: &[vk::PipelineCache],
2244 ) -> VkResult<()> {
2245 (self.device_fn_1_0.merge_pipeline_caches)(
2246 self.handle(),
2247 dst_cache,
2248 src_caches.len() as u32,
2249 src_caches.as_ptr(),
2250 )
2251 .result()
2252 }
2253
2254 #[inline]
2256 pub unsafe fn map_memory(
2257 &self,
2258 memory: vk::DeviceMemory,
2259 offset: vk::DeviceSize,
2260 size: vk::DeviceSize,
2261 flags: vk::MemoryMapFlags,
2262 ) -> VkResult<*mut c_void> {
2263 let mut data: *mut c_void = ptr::null_mut();
2264 (self.device_fn_1_0.map_memory)(self.handle(), memory, offset, size, flags, &mut data)
2265 .result_with_success(data)
2266 }
2267
2268 #[inline]
2270 pub unsafe fn unmap_memory(&self, memory: vk::DeviceMemory) {
2271 (self.device_fn_1_0.unmap_memory)(self.handle(), memory);
2272 }
2273
2274 #[inline]
2276 pub unsafe fn invalidate_mapped_memory_ranges(
2277 &self,
2278 ranges: &[vk::MappedMemoryRange],
2279 ) -> VkResult<()> {
2280 (self.device_fn_1_0.invalidate_mapped_memory_ranges)(
2281 self.handle(),
2282 ranges.len() as u32,
2283 ranges.as_ptr(),
2284 )
2285 .result()
2286 }
2287
2288 #[inline]
2290 pub unsafe fn flush_mapped_memory_ranges(
2291 &self,
2292 ranges: &[vk::MappedMemoryRange],
2293 ) -> VkResult<()> {
2294 (self.device_fn_1_0.flush_mapped_memory_ranges)(
2295 self.handle(),
2296 ranges.len() as u32,
2297 ranges.as_ptr(),
2298 )
2299 .result()
2300 }
2301
2302 #[inline]
2304 pub unsafe fn create_framebuffer(
2305 &self,
2306 create_info: &vk::FramebufferCreateInfo,
2307 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2308 ) -> VkResult<vk::Framebuffer> {
2309 let mut framebuffer = mem::zeroed();
2310 (self.device_fn_1_0.create_framebuffer)(
2311 self.handle(),
2312 create_info,
2313 allocation_callbacks.as_raw_ptr(),
2314 &mut framebuffer,
2315 )
2316 .result_with_success(framebuffer)
2317 }
2318
2319 #[inline]
2321 pub unsafe fn get_device_queue(&self, queue_family_index: u32, queue_index: u32) -> vk::Queue {
2322 let mut queue = mem::zeroed();
2323 (self.device_fn_1_0.get_device_queue)(
2324 self.handle(),
2325 queue_family_index,
2326 queue_index,
2327 &mut queue,
2328 );
2329 queue
2330 }
2331
2332 #[inline]
2334 pub unsafe fn cmd_pipeline_barrier(
2335 &self,
2336 command_buffer: vk::CommandBuffer,
2337 src_stage_mask: vk::PipelineStageFlags,
2338 dst_stage_mask: vk::PipelineStageFlags,
2339 dependency_flags: vk::DependencyFlags,
2340 memory_barriers: &[vk::MemoryBarrier],
2341 buffer_memory_barriers: &[vk::BufferMemoryBarrier],
2342 image_memory_barriers: &[vk::ImageMemoryBarrier],
2343 ) {
2344 (self.device_fn_1_0.cmd_pipeline_barrier)(
2345 command_buffer,
2346 src_stage_mask,
2347 dst_stage_mask,
2348 dependency_flags,
2349 memory_barriers.len() as u32,
2350 memory_barriers.as_ptr(),
2351 buffer_memory_barriers.len() as u32,
2352 buffer_memory_barriers.as_ptr(),
2353 image_memory_barriers.len() as u32,
2354 image_memory_barriers.as_ptr(),
2355 );
2356 }
2357
2358 #[inline]
2360 pub unsafe fn create_render_pass(
2361 &self,
2362 create_info: &vk::RenderPassCreateInfo,
2363 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2364 ) -> VkResult<vk::RenderPass> {
2365 let mut renderpass = mem::zeroed();
2366 (self.device_fn_1_0.create_render_pass)(
2367 self.handle(),
2368 create_info,
2369 allocation_callbacks.as_raw_ptr(),
2370 &mut renderpass,
2371 )
2372 .result_with_success(renderpass)
2373 }
2374
2375 #[inline]
2377 pub unsafe fn begin_command_buffer(
2378 &self,
2379 command_buffer: vk::CommandBuffer,
2380 begin_info: &vk::CommandBufferBeginInfo,
2381 ) -> VkResult<()> {
2382 (self.device_fn_1_0.begin_command_buffer)(command_buffer, begin_info).result()
2383 }
2384
2385 #[inline]
2387 pub unsafe fn end_command_buffer(&self, command_buffer: vk::CommandBuffer) -> VkResult<()> {
2388 (self.device_fn_1_0.end_command_buffer)(command_buffer).result()
2389 }
2390
2391 #[inline]
2393 pub unsafe fn wait_for_fences(
2394 &self,
2395 fences: &[vk::Fence],
2396 wait_all: bool,
2397 timeout: u64,
2398 ) -> VkResult<()> {
2399 (self.device_fn_1_0.wait_for_fences)(
2400 self.handle(),
2401 fences.len() as u32,
2402 fences.as_ptr(),
2403 wait_all as u32,
2404 timeout,
2405 )
2406 .result()
2407 }
2408
2409 #[inline]
2411 pub unsafe fn get_fence_status(&self, fence: vk::Fence) -> VkResult<bool> {
2412 let err_code = (self.device_fn_1_0.get_fence_status)(self.handle(), fence);
2413 match err_code {
2414 vk::Result::SUCCESS => Ok(true),
2415 vk::Result::NOT_READY => Ok(false),
2416 _ => Err(err_code),
2417 }
2418 }
2419
2420 #[inline]
2422 pub unsafe fn queue_wait_idle(&self, queue: vk::Queue) -> VkResult<()> {
2423 (self.device_fn_1_0.queue_wait_idle)(queue).result()
2424 }
2425
2426 #[inline]
2428 pub unsafe fn queue_submit(
2429 &self,
2430 queue: vk::Queue,
2431 submits: &[vk::SubmitInfo],
2432 fence: vk::Fence,
2433 ) -> VkResult<()> {
2434 (self.device_fn_1_0.queue_submit)(queue, submits.len() as u32, submits.as_ptr(), fence)
2435 .result()
2436 }
2437
2438 #[inline]
2440 pub unsafe fn queue_bind_sparse(
2441 &self,
2442 queue: vk::Queue,
2443 bind_info: &[vk::BindSparseInfo],
2444 fence: vk::Fence,
2445 ) -> VkResult<()> {
2446 (self.device_fn_1_0.queue_bind_sparse)(
2447 queue,
2448 bind_info.len() as u32,
2449 bind_info.as_ptr(),
2450 fence,
2451 )
2452 .result()
2453 }
2454
2455 #[inline]
2457 pub unsafe fn create_buffer_view(
2458 &self,
2459 create_info: &vk::BufferViewCreateInfo,
2460 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2461 ) -> VkResult<vk::BufferView> {
2462 let mut buffer_view = mem::zeroed();
2463 (self.device_fn_1_0.create_buffer_view)(
2464 self.handle(),
2465 create_info,
2466 allocation_callbacks.as_raw_ptr(),
2467 &mut buffer_view,
2468 )
2469 .result_with_success(buffer_view)
2470 }
2471
2472 #[inline]
2474 pub unsafe fn destroy_buffer_view(
2475 &self,
2476 buffer_view: vk::BufferView,
2477 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2478 ) {
2479 (self.device_fn_1_0.destroy_buffer_view)(
2480 self.handle(),
2481 buffer_view,
2482 allocation_callbacks.as_raw_ptr(),
2483 );
2484 }
2485
2486 #[inline]
2488 pub unsafe fn create_image_view(
2489 &self,
2490 create_info: &vk::ImageViewCreateInfo,
2491 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2492 ) -> VkResult<vk::ImageView> {
2493 let mut image_view = mem::zeroed();
2494 (self.device_fn_1_0.create_image_view)(
2495 self.handle(),
2496 create_info,
2497 allocation_callbacks.as_raw_ptr(),
2498 &mut image_view,
2499 )
2500 .result_with_success(image_view)
2501 }
2502
2503 #[inline]
2505 pub unsafe fn allocate_command_buffers(
2506 &self,
2507 allocate_info: &vk::CommandBufferAllocateInfo,
2508 ) -> VkResult<Vec<vk::CommandBuffer>> {
2509 let mut buffers = Vec::with_capacity(allocate_info.command_buffer_count as usize);
2510 (self.device_fn_1_0.allocate_command_buffers)(
2511 self.handle(),
2512 allocate_info,
2513 buffers.as_mut_ptr(),
2514 )
2515 .result()?;
2516 buffers.set_len(allocate_info.command_buffer_count as usize);
2517 Ok(buffers)
2518 }
2519
2520 #[inline]
2522 pub unsafe fn create_command_pool(
2523 &self,
2524 create_info: &vk::CommandPoolCreateInfo,
2525 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2526 ) -> VkResult<vk::CommandPool> {
2527 let mut pool = mem::zeroed();
2528 (self.device_fn_1_0.create_command_pool)(
2529 self.handle(),
2530 create_info,
2531 allocation_callbacks.as_raw_ptr(),
2532 &mut pool,
2533 )
2534 .result_with_success(pool)
2535 }
2536
2537 #[inline]
2539 pub unsafe fn create_query_pool(
2540 &self,
2541 create_info: &vk::QueryPoolCreateInfo,
2542 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2543 ) -> VkResult<vk::QueryPool> {
2544 let mut pool = mem::zeroed();
2545 (self.device_fn_1_0.create_query_pool)(
2546 self.handle(),
2547 create_info,
2548 allocation_callbacks.as_raw_ptr(),
2549 &mut pool,
2550 )
2551 .result_with_success(pool)
2552 }
2553
2554 #[inline]
2556 pub unsafe fn create_image(
2557 &self,
2558 create_info: &vk::ImageCreateInfo,
2559 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2560 ) -> VkResult<vk::Image> {
2561 let mut image = mem::zeroed();
2562 (self.device_fn_1_0.create_image)(
2563 self.handle(),
2564 create_info,
2565 allocation_callbacks.as_raw_ptr(),
2566 &mut image,
2567 )
2568 .result_with_success(image)
2569 }
2570
2571 #[inline]
2573 pub unsafe fn get_image_subresource_layout(
2574 &self,
2575 image: vk::Image,
2576 subresource: vk::ImageSubresource,
2577 ) -> vk::SubresourceLayout {
2578 let mut layout = mem::zeroed();
2579 (self.device_fn_1_0.get_image_subresource_layout)(
2580 self.handle(),
2581 image,
2582 &subresource,
2583 &mut layout,
2584 );
2585 layout
2586 }
2587
2588 #[inline]
2590 pub unsafe fn get_image_memory_requirements(&self, image: vk::Image) -> vk::MemoryRequirements {
2591 let mut mem_req = mem::zeroed();
2592 (self.device_fn_1_0.get_image_memory_requirements)(self.handle(), image, &mut mem_req);
2593 mem_req
2594 }
2595
2596 #[inline]
2598 pub unsafe fn get_buffer_memory_requirements(
2599 &self,
2600 buffer: vk::Buffer,
2601 ) -> vk::MemoryRequirements {
2602 let mut mem_req = mem::zeroed();
2603 (self.device_fn_1_0.get_buffer_memory_requirements)(self.handle(), buffer, &mut mem_req);
2604 mem_req
2605 }
2606
2607 #[inline]
2609 pub unsafe fn allocate_memory(
2610 &self,
2611 allocate_info: &vk::MemoryAllocateInfo,
2612 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2613 ) -> VkResult<vk::DeviceMemory> {
2614 let mut memory = mem::zeroed();
2615 (self.device_fn_1_0.allocate_memory)(
2616 self.handle(),
2617 allocate_info,
2618 allocation_callbacks.as_raw_ptr(),
2619 &mut memory,
2620 )
2621 .result_with_success(memory)
2622 }
2623
2624 #[inline]
2626 pub unsafe fn create_shader_module(
2627 &self,
2628 create_info: &vk::ShaderModuleCreateInfo,
2629 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2630 ) -> VkResult<vk::ShaderModule> {
2631 let mut shader = mem::zeroed();
2632 (self.device_fn_1_0.create_shader_module)(
2633 self.handle(),
2634 create_info,
2635 allocation_callbacks.as_raw_ptr(),
2636 &mut shader,
2637 )
2638 .result_with_success(shader)
2639 }
2640
2641 #[inline]
2643 pub unsafe fn create_fence(
2644 &self,
2645 create_info: &vk::FenceCreateInfo,
2646 allocation_callbacks: Option<&vk::AllocationCallbacks>,
2647 ) -> VkResult<vk::Fence> {
2648 let mut fence = mem::zeroed();
2649 (self.device_fn_1_0.create_fence)(
2650 self.handle(),
2651 create_info,
2652 allocation_callbacks.as_raw_ptr(),
2653 &mut fence,
2654 )
2655 .result_with_success(fence)
2656 }
2657
2658 #[inline]
2660 pub unsafe fn bind_buffer_memory(
2661 &self,
2662 buffer: vk::Buffer,
2663 device_memory: vk::DeviceMemory,
2664 offset: vk::DeviceSize,
2665 ) -> VkResult<()> {
2666 (self.device_fn_1_0.bind_buffer_memory)(self.handle(), buffer, device_memory, offset)
2667 .result()
2668 }
2669
2670 #[inline]
2672 pub unsafe fn bind_image_memory(
2673 &self,
2674 image: vk::Image,
2675 device_memory: vk::DeviceMemory,
2676 offset: vk::DeviceSize,
2677 ) -> VkResult<()> {
2678 (self.device_fn_1_0.bind_image_memory)(self.handle(), image, device_memory, offset).result()
2679 }
2680
2681 #[inline]
2683 pub unsafe fn get_render_area_granularity(&self, render_pass: vk::RenderPass) -> vk::Extent2D {
2684 let mut granularity = mem::zeroed();
2685 (self.device_fn_1_0.get_render_area_granularity)(
2686 self.handle(),
2687 render_pass,
2688 &mut granularity,
2689 );
2690 granularity
2691 }
2692
2693 #[inline]
2695 pub unsafe fn get_device_memory_commitment(&self, memory: vk::DeviceMemory) -> vk::DeviceSize {
2696 let mut committed_memory_in_bytes = 0;
2697 (self.device_fn_1_0.get_device_memory_commitment)(
2698 self.handle(),
2699 memory,
2700 &mut committed_memory_in_bytes,
2701 );
2702 committed_memory_in_bytes
2703 }
2704
2705 #[inline]
2707 pub unsafe fn get_image_sparse_memory_requirements(
2708 &self,
2709 image: vk::Image,
2710 ) -> Vec<vk::SparseImageMemoryRequirements> {
2711 read_into_uninitialized_vector(|count, data| {
2712 (self.device_fn_1_0.get_image_sparse_memory_requirements)(
2713 self.handle(),
2714 image,
2715 count,
2716 data,
2717 );
2718 vk::Result::SUCCESS
2719 })
2720 .unwrap()
2722 }
2723}