use super::conv;

use arrayvec::ArrayVec;
use ash::{extensions::khr, vk};
use parking_lot::Mutex;

use std::{
    borrow::Cow,
    collections::{hash_map::Entry, BTreeMap},
    ffi::{CStr, CString},
    num::NonZeroU32,
    ptr,
    sync::Arc,
};

impl super::DeviceShared {
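    /// Attaches a debug name to a Vulkan object via `VK_EXT_debug_utils`.
    ///
    /// This is a no-op when the debug-utils extension was not enabled on the
    /// instance. Short names are null-terminated in a stack buffer; longer
    /// names fall back to a heap allocation.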
    pub(super) unsafe fn set_object_name(
        &self,
        object_type: vk::ObjectType,
        object: impl vk::Handle,
        name: &str,
    ) {
        let extension = match self.instance.debug_utils {
            Some(ref debug_utils) => &debug_utils.extension,
            None => return,
        };

        let mut buffer: [u8; 64] = [0u8; 64];
        let buffer_vec: Vec<u8>;

        let name_bytes = if name.len() < buffer.len() {
            buffer[..name.len()].copy_from_slice(name.as_bytes());
            buffer[name.len()] = 0;
            &buffer[..name.len() + 1]
        } else {
            buffer_vec = name
                .as_bytes()
                .iter()
                .cloned()
                .chain(std::iter::once(0))
                .collect();
            &buffer_vec
        };

        let name = unsafe { CStr::from_bytes_with_nul_unchecked(name_bytes) };

        let _result = unsafe {
            extension.set_debug_utils_object_name(
                self.raw.handle(),
                &vk::DebugUtilsObjectNameInfoEXT::builder()
                    .object_type(object_type)
                    .object_handle(object.as_raw())
                    .object_name(name),
            )
        };
    }

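    /// Returns a render pass compatible with `key`, creating and caching it
    /// on first use. The key captures attachment formats, layouts, ops,
    /// sample count and multiview state, so compatible passes can be shared.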
    pub fn make_render_pass(
        &self,
        key: super::RenderPassKey,
    ) -> Result<vk::RenderPass, crate::DeviceError> {
        Ok(match self.render_passes.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let mut vk_attachments = Vec::new();
                let mut color_refs = Vec::with_capacity(e.key().colors.len());
                let mut resolve_refs = Vec::with_capacity(color_refs.capacity());
                let mut ds_ref = None;
                let samples = vk::SampleCountFlags::from_raw(e.key().sample_count);
                let unused = vk::AttachmentReference {
                    attachment: vk::ATTACHMENT_UNUSED,
                    layout: vk::ImageLayout::UNDEFINED,
                };
                for cat in e.key().colors.iter() {
                    let (color_ref, resolve_ref) = if let Some(cat) = cat.as_ref() {
                        let color_ref = vk::AttachmentReference {
                            attachment: vk_attachments.len() as u32,
                            layout: cat.base.layout,
                        };
                        vk_attachments.push({
                            let (load_op, store_op) = conv::map_attachment_ops(cat.base.ops);
                            vk::AttachmentDescription::builder()
                                .format(cat.base.format)
                                .samples(samples)
                                .load_op(load_op)
                                .store_op(store_op)
                                .initial_layout(cat.base.layout)
                                .final_layout(cat.base.layout)
                                .build()
                        });
                        let resolve_ref = if let Some(ref rat) = cat.resolve {
                            let (load_op, store_op) = conv::map_attachment_ops(rat.ops);
                            let vk_attachment = vk::AttachmentDescription::builder()
                                .format(rat.format)
                                .samples(vk::SampleCountFlags::TYPE_1)
                                .load_op(load_op)
                                .store_op(store_op)
                                .initial_layout(rat.layout)
                                .final_layout(rat.layout)
                                .build();
                            vk_attachments.push(vk_attachment);

                            vk::AttachmentReference {
                                attachment: vk_attachments.len() as u32 - 1,
                                layout: rat.layout,
                            }
                        } else {
                            unused
                        };

                        (color_ref, resolve_ref)
                    } else {
                        (unused, unused)
                    };

                    color_refs.push(color_ref);
                    resolve_refs.push(resolve_ref);
                }

                if let Some(ref ds) = e.key().depth_stencil {
                    ds_ref = Some(vk::AttachmentReference {
                        attachment: vk_attachments.len() as u32,
                        layout: ds.base.layout,
                    });
                    let (load_op, store_op) = conv::map_attachment_ops(ds.base.ops);
                    let (stencil_load_op, stencil_store_op) =
                        conv::map_attachment_ops(ds.stencil_ops);
                    let vk_attachment = vk::AttachmentDescription::builder()
                        .format(ds.base.format)
                        .samples(samples)
                        .load_op(load_op)
                        .store_op(store_op)
                        .stencil_load_op(stencil_load_op)
                        .stencil_store_op(stencil_store_op)
                        .initial_layout(ds.base.layout)
                        .final_layout(ds.base.layout)
                        .build();
                    vk_attachments.push(vk_attachment);
                }

                let vk_subpasses = [{
                    let mut vk_subpass = vk::SubpassDescription::builder()
                        .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
                        .color_attachments(&color_refs)
                        .resolve_attachments(&resolve_refs);

                    if self
                        .workarounds
                        .contains(super::Workarounds::EMPTY_RESOLVE_ATTACHMENT_LISTS)
                        && resolve_refs.is_empty()
                    {
                        vk_subpass.p_resolve_attachments = ptr::null();
                    }

                    if let Some(ref reference) = ds_ref {
                        vk_subpass = vk_subpass.depth_stencil_attachment(reference)
                    }
                    vk_subpass.build()
                }];

                let mut vk_info = vk::RenderPassCreateInfo::builder()
                    .attachments(&vk_attachments)
                    .subpasses(&vk_subpasses);

                let mut multiview_info;
                let mask;
                if let Some(multiview) = e.key().multiview {
                    assert!(multiview.get() <= 8);
                    assert!(multiview.get() > 1);

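                    // Enable views 0..multiview (e.g. 4 views => 0b1111); the
                    // same mask doubles as the correlation mask for all views.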
                    mask = [(1 << multiview.get()) - 1];

                    multiview_info = vk::RenderPassMultiviewCreateInfoKHR::builder()
                        .view_masks(&mask)
                        .correlation_masks(&mask)
                        .build();
                    vk_info = vk_info.push_next(&mut multiview_info);
                }

                let raw = unsafe { self.raw.create_render_pass(&vk_info, None)? };

                *e.insert(raw)
            }
        })
    }

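    /// Returns a cached framebuffer for `key` and `raw_pass`, creating it on
    /// first use. When imageless framebuffers are supported, only attachment
    /// metadata is recorded here and the actual image views are supplied at
    /// render time.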
    pub fn make_framebuffer(
        &self,
        key: super::FramebufferKey,
        raw_pass: vk::RenderPass,
        pass_label: crate::Label,
    ) -> Result<vk::Framebuffer, crate::DeviceError> {
        Ok(match self.framebuffers.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let vk_views = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| at.raw)
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
                let vk_view_formats = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| self.private_caps.map_texture_format(at.view_format))
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
                let vk_view_formats_list = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| at.raw_view_formats.clone())
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();

                let vk_image_infos = e
                    .key()
                    .attachments
                    .iter()
                    .enumerate()
                    .map(|(i, at)| {
                        let mut info = vk::FramebufferAttachmentImageInfo::builder()
                            .usage(conv::map_texture_usage(at.view_usage))
                            .flags(at.raw_image_flags)
                            .width(e.key().extent.width)
                            .height(e.key().extent.height)
                            .layer_count(e.key().extent.depth_or_array_layers);
                        if vk_view_formats_list[i].is_empty() {
                            info = info.view_formats(&vk_view_formats[i..i + 1]);
                        } else {
                            info = info.view_formats(&vk_view_formats_list[i]);
                        };
                        info.build()
                    })
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();

                let mut vk_attachment_info = vk::FramebufferAttachmentsCreateInfo::builder()
                    .attachment_image_infos(&vk_image_infos)
                    .build();
                let mut vk_info = vk::FramebufferCreateInfo::builder()
                    .render_pass(raw_pass)
                    .width(e.key().extent.width)
                    .height(e.key().extent.height)
                    .layers(e.key().extent.depth_or_array_layers);

                if self.private_caps.imageless_framebuffers {
                    vk_info = vk_info
                        .flags(vk::FramebufferCreateFlags::IMAGELESS_KHR)
                        .push_next(&mut vk_attachment_info);
                    vk_info.attachment_count = e.key().attachments.len() as u32;
                } else {
                    vk_info = vk_info.attachments(&vk_views);
                }

                *e.insert(unsafe {
                    let raw = self.raw.create_framebuffer(&vk_info, None).unwrap();
                    if let Some(label) = pass_label {
                        self.set_object_name(vk::ObjectType::FRAMEBUFFER, raw, label);
                    }
                    raw
                })
            }
        })
    }

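    /// Expands `ranges` into `VkMappedMemoryRange`s aligned to the device's
    /// non-coherent atom size: the offset is rounded down and the size is
    /// rounded up to the mask. For example, with a mask of 63 (atom size 64)
    /// and a block starting at offset 0, the range 100..200 becomes offset 64
    /// and size 128. Returns `None` for buffers without a memory block
    /// (e.g. externally created buffers).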
    fn make_memory_ranges<'a, I: 'a + Iterator<Item = crate::MemoryRange>>(
        &self,
        buffer: &'a super::Buffer,
        ranges: I,
    ) -> Option<impl 'a + Iterator<Item = vk::MappedMemoryRange>> {
        let block = buffer.block.as_ref()?.lock();
        let mask = self.private_caps.non_coherent_map_mask;
        Some(ranges.map(move |range| {
            vk::MappedMemoryRange::builder()
                .memory(*block.memory())
                .offset((block.offset() + range.start) & !mask)
                .size((range.end - range.start + mask) & !mask)
                .build()
        }))
    }

    unsafe fn free_resources(&self) {
        for &raw in self.render_passes.lock().values() {
            unsafe { self.raw.destroy_render_pass(raw, None) };
        }
        for &raw in self.framebuffers.lock().values() {
            unsafe { self.raw.destroy_framebuffer(raw, None) };
        }
        if self.handle_is_owned {
            unsafe { self.raw.destroy_device(None) };
        }
    }
}

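// `gpu_alloc` drives raw Vulkan device memory through this trait; Vulkan
// errors are mapped to its own out-of-memory variants so allocation failures
// stay recoverable.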
impl gpu_alloc::MemoryDevice<vk::DeviceMemory> for super::DeviceShared {
    unsafe fn allocate_memory(
        &self,
        size: u64,
        memory_type: u32,
        flags: gpu_alloc::AllocationFlags,
    ) -> Result<vk::DeviceMemory, gpu_alloc::OutOfMemory> {
        let mut info = vk::MemoryAllocateInfo::builder()
            .allocation_size(size)
            .memory_type_index(memory_type);

        let mut info_flags;

        if flags.contains(gpu_alloc::AllocationFlags::DEVICE_ADDRESS) {
            info_flags = vk::MemoryAllocateFlagsInfo::builder()
                .flags(vk::MemoryAllocateFlags::DEVICE_ADDRESS);
            info = info.push_next(&mut info_flags);
        }

        match unsafe { self.raw.allocate_memory(&info, None) } {
            Ok(memory) => Ok(memory),
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_alloc::OutOfMemory::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_alloc::OutOfMemory::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_TOO_MANY_OBJECTS) => panic!("Too many objects"),
            Err(err) => panic!("Unexpected Vulkan error: `{err}`"),
        }
    }

    unsafe fn deallocate_memory(&self, memory: vk::DeviceMemory) {
        unsafe { self.raw.free_memory(memory, None) };
    }

    unsafe fn map_memory(
        &self,
        memory: &mut vk::DeviceMemory,
        offset: u64,
        size: u64,
    ) -> Result<ptr::NonNull<u8>, gpu_alloc::DeviceMapError> {
        match unsafe {
            self.raw
                .map_memory(*memory, offset, size, vk::MemoryMapFlags::empty())
        } {
            Ok(ptr) => Ok(ptr::NonNull::new(ptr as *mut u8)
                .expect("Pointer to memory mapping must not be null")),
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_alloc::DeviceMapError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_alloc::DeviceMapError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_MEMORY_MAP_FAILED) => Err(gpu_alloc::DeviceMapError::MapFailed),
            Err(err) => panic!("Unexpected Vulkan error: `{err}`"),
        }
    }

    unsafe fn unmap_memory(&self, memory: &mut vk::DeviceMemory) {
        unsafe { self.raw.unmap_memory(*memory) };
    }

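    // Flushing/invalidation is handled by the HAL itself via
    // `DeviceShared::make_memory_ranges` (see `flush_mapped_ranges` and
    // `invalidate_mapped_ranges` below), so gpu-alloc is not expected to call
    // these hooks.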
    unsafe fn invalidate_memory_ranges(
        &self,
        _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
    ) -> Result<(), gpu_alloc::OutOfMemory> {
        unimplemented!()
    }

    unsafe fn flush_memory_ranges(
        &self,
        _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
    ) -> Result<(), gpu_alloc::OutOfMemory> {
        unimplemented!()
    }
}

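// `gpu_descriptor` manages descriptor pools and sets through this trait; pool
// sizes are derived from the per-type counts it supplies.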
impl
    gpu_descriptor::DescriptorDevice<vk::DescriptorSetLayout, vk::DescriptorPool, vk::DescriptorSet>
    for super::DeviceShared
{
    unsafe fn create_descriptor_pool(
        &self,
        descriptor_count: &gpu_descriptor::DescriptorTotalCount,
        max_sets: u32,
        flags: gpu_descriptor::DescriptorPoolCreateFlags,
    ) -> Result<vk::DescriptorPool, gpu_descriptor::CreatePoolError> {
        let unfiltered_counts = [
            (vk::DescriptorType::SAMPLER, descriptor_count.sampler),
            (
                vk::DescriptorType::SAMPLED_IMAGE,
                descriptor_count.sampled_image,
            ),
            (
                vk::DescriptorType::STORAGE_IMAGE,
                descriptor_count.storage_image,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER,
                descriptor_count.uniform_buffer,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC,
                descriptor_count.uniform_buffer_dynamic,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER,
                descriptor_count.storage_buffer,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER_DYNAMIC,
                descriptor_count.storage_buffer_dynamic,
            ),
        ];

        let filtered_counts = unfiltered_counts
            .iter()
            .cloned()
            .filter(|&(_, count)| count != 0)
            .map(|(ty, count)| vk::DescriptorPoolSize {
                ty,
                descriptor_count: count,
            })
            .collect::<ArrayVec<_, 8>>();

        let mut vk_flags =
            if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND) {
                vk::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND
            } else {
                vk::DescriptorPoolCreateFlags::empty()
            };
        if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET) {
            vk_flags |= vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET;
        }
        let vk_info = vk::DescriptorPoolCreateInfo::builder()
            .max_sets(max_sets)
            .flags(vk_flags)
            .pool_sizes(&filtered_counts)
            .build();

        match unsafe { self.raw.create_descriptor_pool(&vk_info, None) } {
            Ok(pool) => Ok(pool),
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTATION) => {
                Err(gpu_descriptor::CreatePoolError::Fragmentation)
            }
            Err(other) => {
                log::error!("create_descriptor_pool: {:?}", other);
                Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
            }
        }
    }

    unsafe fn destroy_descriptor_pool(&self, pool: vk::DescriptorPool) {
        unsafe { self.raw.destroy_descriptor_pool(pool, None) }
    }

    unsafe fn alloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        layouts: impl ExactSizeIterator<Item = &'a vk::DescriptorSetLayout>,
        sets: &mut impl Extend<vk::DescriptorSet>,
    ) -> Result<(), gpu_descriptor::DeviceAllocationError> {
        let result = unsafe {
            self.raw.allocate_descriptor_sets(
                &vk::DescriptorSetAllocateInfo::builder()
                    .descriptor_pool(*pool)
                    .set_layouts(
                        &smallvec::SmallVec::<[vk::DescriptorSetLayout; 32]>::from_iter(
                            layouts.cloned(),
                        ),
                    )
                    .build(),
            )
        };

        match result {
            Ok(vk_sets) => {
                sets.extend(vk_sets);
                Ok(())
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY)
            | Err(vk::Result::ERROR_OUT_OF_POOL_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTED_POOL) => {
                Err(gpu_descriptor::DeviceAllocationError::FragmentedPool)
            }
            Err(other) => {
                log::error!("allocate_descriptor_sets: {:?}", other);
                Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
            }
        }
    }

    unsafe fn dealloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        sets: impl Iterator<Item = vk::DescriptorSet>,
    ) {
        let result = unsafe {
            self.raw.free_descriptor_sets(
                *pool,
                &smallvec::SmallVec::<[vk::DescriptorSet; 32]>::from_iter(sets),
            )
        };
        match result {
            Ok(()) => {}
            Err(err) => log::error!("free_descriptor_sets: {:?}", err),
        }
    }
}

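/// A compiled shader stage ready to be plugged into a pipeline create-info.
/// `_entry_point` keeps the entry-point name alive for the raw pointer stored
/// in `create_info`; `temp_raw_module` holds a shader module generated on the
/// fly from naga IR, which the caller destroys once the pipeline is built.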
struct CompiledStage {
    create_info: vk::PipelineShaderStageCreateInfo,
    _entry_point: CString,
    temp_raw_module: Option<vk::ShaderModule>,
}

impl super::Device {
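    /// (Re)creates the swapchain for `surface` according to `config`.
    ///
    /// Any `provided_old_swapchain` is handed to Vulkan for resource reuse and
    /// then destroyed. `Rgba16Float` surfaces use the extended sRGB linear
    /// color space, the typical pairing for HDR output.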
    pub(super) unsafe fn create_swapchain(
        &self,
        surface: &mut super::Surface,
        config: &crate::SurfaceConfiguration,
        provided_old_swapchain: Option<super::Swapchain>,
    ) -> Result<super::Swapchain, crate::SurfaceError> {
        profiling::scope!("Device::create_swapchain");
        let functor = khr::Swapchain::new(&surface.instance.raw, &self.shared.raw);

        let old_swapchain = match provided_old_swapchain {
            Some(osc) => osc.raw,
            None => vk::SwapchainKHR::null(),
        };

        let color_space = if config.format == wgt::TextureFormat::Rgba16Float {
            vk::ColorSpaceKHR::EXTENDED_SRGB_LINEAR_EXT
        } else {
            vk::ColorSpaceKHR::SRGB_NONLINEAR
        };

        let original_format = self.shared.private_caps.map_texture_format(config.format);
        let mut raw_flags = vk::SwapchainCreateFlagsKHR::empty();
        let mut raw_view_formats: Vec<vk::Format> = vec![];
        let mut wgt_view_formats = vec![];
        if !config.view_formats.is_empty() {
            raw_flags |= vk::SwapchainCreateFlagsKHR::MUTABLE_FORMAT;
            raw_view_formats = config
                .view_formats
                .iter()
                .map(|f| self.shared.private_caps.map_texture_format(*f))
                .collect();
            raw_view_formats.push(original_format);

            wgt_view_formats = config.view_formats.clone();
            wgt_view_formats.push(config.format);
        }

        let mut info = vk::SwapchainCreateInfoKHR::builder()
            .flags(raw_flags)
            .surface(surface.raw)
            .min_image_count(config.swap_chain_size)
            .image_format(original_format)
            .image_color_space(color_space)
            .image_extent(vk::Extent2D {
                width: config.extent.width,
                height: config.extent.height,
            })
            .image_array_layers(config.extent.depth_or_array_layers)
            .image_usage(conv::map_texture_usage(config.usage))
            .image_sharing_mode(vk::SharingMode::EXCLUSIVE)
            .pre_transform(vk::SurfaceTransformFlagsKHR::IDENTITY)
            .composite_alpha(conv::map_composite_alpha_mode(config.composite_alpha_mode))
            .present_mode(conv::map_present_mode(config.present_mode))
            .clipped(true)
            .old_swapchain(old_swapchain);

        let mut format_list_info = vk::ImageFormatListCreateInfo::builder();
        if !raw_view_formats.is_empty() {
            format_list_info = format_list_info.view_formats(&raw_view_formats);
            info = info.push_next(&mut format_list_info);
        }

        let result = {
            profiling::scope!("vkCreateSwapchainKHR");
            unsafe { functor.create_swapchain(&info, None) }
        };

        if old_swapchain != vk::SwapchainKHR::null() {
            unsafe { functor.destroy_swapchain(old_swapchain, None) }
        }

        let raw = match result {
            Ok(swapchain) => swapchain,
            Err(error) => {
                return Err(match error {
                    vk::Result::ERROR_SURFACE_LOST_KHR => crate::SurfaceError::Lost,
                    vk::Result::ERROR_NATIVE_WINDOW_IN_USE_KHR => {
                        crate::SurfaceError::Other("Native window is in use")
                    }
                    other => crate::DeviceError::from(other).into(),
                })
            }
        };

        let images =
            unsafe { functor.get_swapchain_images(raw) }.map_err(crate::DeviceError::from)?;
        let vk_info = vk::FenceCreateInfo::builder().build();
        let fence = unsafe { self.shared.raw.create_fence(&vk_info, None) }
            .map_err(crate::DeviceError::from)?;

        Ok(super::Swapchain {
            raw,
            raw_flags,
            functor,
            device: Arc::clone(&self.shared),
            fence,
            images,
            config: config.clone(),
            view_formats: wgt_view_formats,
        })
    }

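    /// # Safety
    ///
    /// - `vk_image` must be a valid image whose creation parameters are
    ///   compatible with `desc`.
    /// - If `drop_guard` is `Some`, `destroy_texture` will not destroy the raw
    ///   image; its lifetime is the caller's (or the guard's) responsibility.
    ///
    /// A minimal usage sketch (hypothetical `vk_image`/`desc` values):
    ///
    /// ```ignore
    /// let hal_texture = unsafe {
    ///     wgpu_hal::vulkan::Device::texture_from_raw(vk_image, &desc, None)
    /// };
    /// ```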
    pub unsafe fn texture_from_raw(
        vk_image: vk::Image,
        desc: &crate::TextureDescriptor,
        drop_guard: Option<crate::DropGuard>,
    ) -> super::Texture {
        let mut raw_flags = vk::ImageCreateFlags::empty();
        let mut view_formats = vec![];
        for tf in desc.view_formats.iter() {
            if *tf == desc.format {
                continue;
            }
            view_formats.push(*tf);
        }
        if !view_formats.is_empty() {
            raw_flags |=
                vk::ImageCreateFlags::MUTABLE_FORMAT | vk::ImageCreateFlags::EXTENDED_USAGE;
            view_formats.push(desc.format)
        }

        super::Texture {
            raw: vk_image,
            drop_guard,
            block: None,
            usage: desc.usage,
            format: desc.format,
            raw_flags,
            copy_size: desc.copy_extent(),
            view_formats,
        }
    }

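    /// # Safety
    ///
    /// - `vk_buffer` must be a valid buffer; its backing memory is managed by
    ///   the caller, and the returned `Buffer` has no memory block, so it
    ///   cannot be mapped through this API.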
    pub unsafe fn buffer_from_raw(vk_buffer: vk::Buffer) -> super::Buffer {
        super::Buffer {
            raw: vk_buffer,
            block: None,
        }
    }

    fn create_shader_module_impl(
        &self,
        spv: &[u32],
    ) -> Result<vk::ShaderModule, crate::DeviceError> {
        let vk_info = vk::ShaderModuleCreateInfo::builder()
            .flags(vk::ShaderModuleCreateFlags::empty())
            .code(spv);

        let raw = unsafe {
            profiling::scope!("vkCreateShaderModule");
            self.shared.raw.create_shader_module(&vk_info, None)?
        };
        Ok(raw)
    }

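    /// Lowers a programmable stage to a `VkPipelineShaderStageCreateInfo`.
    ///
    /// Raw SPIR-V modules are used as-is; naga IR is written out to SPIR-V
    /// here (with bounds checks disabled and/or a binding map applied when
    /// needed), producing a temporary module that the caller destroys after
    /// pipeline creation via `CompiledStage::temp_raw_module`.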
    fn compile_stage(
        &self,
        stage: &crate::ProgrammableStage<super::Api>,
        naga_stage: naga::ShaderStage,
        binding_map: &naga::back::spv::BindingMap,
    ) -> Result<CompiledStage, crate::PipelineError> {
        let stage_flags = crate::auxil::map_naga_stage(naga_stage);
        let vk_module = match *stage.module {
            super::ShaderModule::Raw(raw) => raw,
            super::ShaderModule::Intermediate {
                ref naga_shader,
                runtime_checks,
            } => {
                let pipeline_options = naga::back::spv::PipelineOptions {
                    entry_point: stage.entry_point.to_string(),
                    shader_stage: naga_stage,
                };
                let needs_temp_options = !runtime_checks || !binding_map.is_empty();
                let mut temp_options;
                let options = if needs_temp_options {
                    temp_options = self.naga_options.clone();
                    if !runtime_checks {
                        temp_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
                            index: naga::proc::BoundsCheckPolicy::Unchecked,
                            buffer: naga::proc::BoundsCheckPolicy::Unchecked,
                            image_load: naga::proc::BoundsCheckPolicy::Unchecked,
                            image_store: naga::proc::BoundsCheckPolicy::Unchecked,
                            binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
                        };
                    }
                    if !binding_map.is_empty() {
                        temp_options.binding_map = binding_map.clone();
                    }
                    &temp_options
                } else {
                    &self.naga_options
                };
                let spv = {
                    profiling::scope!("naga::spv::write_vec");
                    naga::back::spv::write_vec(
                        &naga_shader.module,
                        &naga_shader.info,
                        options,
                        Some(&pipeline_options),
                    )
                }
                .map_err(|e| crate::PipelineError::Linkage(stage_flags, format!("{e}")))?;
                self.create_shader_module_impl(&spv)?
            }
        };

        let entry_point = CString::new(stage.entry_point).unwrap();
        let create_info = vk::PipelineShaderStageCreateInfo::builder()
            .stage(conv::map_shader_stage(stage_flags))
            .module(vk_module)
            .name(&entry_point)
            .build();

        Ok(CompiledStage {
            create_info,
            _entry_point: entry_point,
            temp_raw_module: match *stage.module {
                super::ShaderModule::Raw(_) => None,
                super::ShaderModule::Intermediate { .. } => Some(vk_module),
            },
        })
    }

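    /// Returns the queue family index of the device's internal queue, e.g.
    /// for recording queue family ownership transfers around externally
    /// shared resources.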
    pub fn queue_family_index(&self) -> u32 {
        self.shared.family_index
    }

    pub fn queue_index(&self) -> u32 {
        self.shared.queue_index
    }

    pub fn raw_device(&self) -> &ash::Device {
        &self.shared.raw
    }

    pub fn raw_physical_device(&self) -> ash::vk::PhysicalDevice {
        self.shared.physical_device
    }

    pub fn raw_queue(&self) -> ash::vk::Queue {
        self.shared.raw_queue
    }

    pub fn enabled_device_extensions(&self) -> &[&'static CStr] {
        &self.shared.enabled_extensions
    }

    pub fn shared_instance(&self) -> &super::InstanceShared {
        &self.shared.instance
    }
}

impl crate::Device<super::Api> for super::Device {
    unsafe fn exit(self, queue: super::Queue) {
        unsafe { self.mem_allocator.into_inner().cleanup(&*self.shared) };
        unsafe { self.desc_allocator.into_inner().cleanup(&*self.shared) };
        for &sem in queue.relay_semaphores.iter() {
            unsafe { self.shared.raw.destroy_semaphore(sem, None) };
        }
        unsafe { self.shared.free_resources() };
    }

    unsafe fn create_buffer(
        &self,
        desc: &crate::BufferDescriptor,
    ) -> Result<super::Buffer, crate::DeviceError> {
        let vk_info = vk::BufferCreateInfo::builder()
            .size(desc.size)
            .usage(conv::map_buffer_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE);

        let raw = unsafe { self.shared.raw.create_buffer(&vk_info, None)? };
        let req = unsafe { self.shared.raw.get_buffer_memory_requirements(raw) };

        let mut alloc_usage = if desc
            .usage
            .intersects(crate::BufferUses::MAP_READ | crate::BufferUses::MAP_WRITE)
        {
            let mut flags = gpu_alloc::UsageFlags::HOST_ACCESS;
            flags.set(
                gpu_alloc::UsageFlags::DOWNLOAD,
                desc.usage.contains(crate::BufferUses::MAP_READ),
            );
            flags.set(
                gpu_alloc::UsageFlags::UPLOAD,
                desc.usage.contains(crate::BufferUses::MAP_WRITE),
            );
            flags
        } else {
            gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS
        };
        alloc_usage.set(
            gpu_alloc::UsageFlags::TRANSIENT,
            desc.memory_flags.contains(crate::MemoryFlags::TRANSIENT),
        );

        let block = unsafe {
            self.mem_allocator.lock().alloc(
                &*self.shared,
                gpu_alloc::Request {
                    size: req.size,
                    align_mask: req.alignment - 1,
                    usage: alloc_usage,
                    memory_types: req.memory_type_bits & self.valid_ash_memory_types,
                },
            )?
        };

        unsafe {
            self.shared
                .raw
                .bind_buffer_memory(raw, *block.memory(), block.offset())?
        };

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::BUFFER, raw, label)
            };
        }

        Ok(super::Buffer {
            raw,
            block: Some(Mutex::new(block)),
        })
    }
    unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
        unsafe { self.shared.raw.destroy_buffer(buffer.raw, None) };
        if let Some(block) = buffer.block {
            unsafe {
                self.mem_allocator
                    .lock()
                    .dealloc(&*self.shared, block.into_inner())
            };
        }
    }

    unsafe fn map_buffer(
        &self,
        buffer: &super::Buffer,
        range: crate::MemoryRange,
    ) -> Result<crate::BufferMapping, crate::DeviceError> {
        if let Some(ref block) = buffer.block {
            let size = range.end - range.start;
            let mut block = block.lock();
            let ptr = unsafe { block.map(&*self.shared, range.start, size as usize)? };
            let is_coherent = block
                .props()
                .contains(gpu_alloc::MemoryPropertyFlags::HOST_COHERENT);
            Ok(crate::BufferMapping { ptr, is_coherent })
        } else {
            Err(crate::DeviceError::OutOfMemory)
        }
    }
    unsafe fn unmap_buffer(&self, buffer: &super::Buffer) -> Result<(), crate::DeviceError> {
        if let Some(ref block) = buffer.block {
            unsafe { block.lock().unmap(&*self.shared) };
            Ok(())
        } else {
            Err(crate::DeviceError::OutOfMemory)
        }
    }

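    // Only needed for non-coherent memory; ranges are rounded to the
    // non-coherent atom size by `DeviceShared::make_memory_ranges`.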
    unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
    where
        I: Iterator<Item = crate::MemoryRange>,
    {
        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
            unsafe {
                self.shared
                    .raw
                    .flush_mapped_memory_ranges(
                        &smallvec::SmallVec::<[vk::MappedMemoryRange; 32]>::from_iter(vk_ranges),
                    )
            }
            .unwrap();
        }
    }
    unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
    where
        I: Iterator<Item = crate::MemoryRange>,
    {
        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
            unsafe {
                self.shared
                    .raw
                    .invalidate_mapped_memory_ranges(&smallvec::SmallVec::<
                        [vk::MappedMemoryRange; 32],
                    >::from_iter(vk_ranges))
            }
            .unwrap();
        }
    }

    unsafe fn create_texture(
        &self,
        desc: &crate::TextureDescriptor,
    ) -> Result<super::Texture, crate::DeviceError> {
        let copy_size = desc.copy_extent();

        let mut raw_flags = vk::ImageCreateFlags::empty();
        if desc.is_cube_compatible() {
            raw_flags |= vk::ImageCreateFlags::CUBE_COMPATIBLE;
        }

        let original_format = self.shared.private_caps.map_texture_format(desc.format);
        let mut vk_view_formats = vec![];
        let mut wgt_view_formats = vec![];
        if !desc.view_formats.is_empty() {
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
            wgt_view_formats = desc.view_formats.clone();
            wgt_view_formats.push(desc.format);

            if self.shared_instance().driver_api_version >= vk::API_VERSION_1_2
                || self
                    .enabled_device_extensions()
                    .contains(&vk::KhrImageFormatListFn::name())
            {
                vk_view_formats = desc
                    .view_formats
                    .iter()
                    .map(|f| self.shared.private_caps.map_texture_format(*f))
                    .collect();
                vk_view_formats.push(original_format)
            }
        }

        let mut vk_info = vk::ImageCreateInfo::builder()
            .flags(raw_flags)
            .image_type(conv::map_texture_dimension(desc.dimension))
            .format(original_format)
            .extent(conv::map_copy_extent(&copy_size))
            .mip_levels(desc.mip_level_count)
            .array_layers(desc.array_layer_count())
            .samples(vk::SampleCountFlags::from_raw(desc.sample_count))
            .tiling(vk::ImageTiling::OPTIMAL)
            .usage(conv::map_texture_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE)
            .initial_layout(vk::ImageLayout::UNDEFINED);

        let mut format_list_info = vk::ImageFormatListCreateInfo::builder();
        if !vk_view_formats.is_empty() {
            format_list_info = format_list_info.view_formats(&vk_view_formats);
            vk_info = vk_info.push_next(&mut format_list_info);
        }

        let raw = unsafe { self.shared.raw.create_image(&vk_info, None)? };
        let req = unsafe { self.shared.raw.get_image_memory_requirements(raw) };

        let block = unsafe {
            self.mem_allocator.lock().alloc(
                &*self.shared,
                gpu_alloc::Request {
                    size: req.size,
                    align_mask: req.alignment - 1,
                    usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
                    memory_types: req.memory_type_bits & self.valid_ash_memory_types,
                },
            )?
        };

        unsafe {
            self.shared
                .raw
                .bind_image_memory(raw, *block.memory(), block.offset())?
        };

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::IMAGE, raw, label)
            };
        }

        Ok(super::Texture {
            raw,
            drop_guard: None,
            block: Some(block),
            usage: desc.usage,
            format: desc.format,
            raw_flags,
            copy_size,
            view_formats: wgt_view_formats,
        })
    }
    unsafe fn destroy_texture(&self, texture: super::Texture) {
        if texture.drop_guard.is_none() {
            unsafe { self.shared.raw.destroy_image(texture.raw, None) };
        }
        if let Some(block) = texture.block {
            unsafe { self.mem_allocator.lock().dealloc(&*self.shared, block) };
        }
    }

    unsafe fn create_texture_view(
        &self,
        texture: &super::Texture,
        desc: &crate::TextureViewDescriptor,
    ) -> Result<super::TextureView, crate::DeviceError> {
        let subresource_range = conv::map_subresource_range(&desc.range, desc.format);
        let mut vk_info = vk::ImageViewCreateInfo::builder()
            .flags(vk::ImageViewCreateFlags::empty())
            .image(texture.raw)
            .view_type(conv::map_view_dimension(desc.dimension))
            .format(self.shared.private_caps.map_texture_format(desc.format))
            .subresource_range(subresource_range);
        let layers =
            NonZeroU32::new(subresource_range.layer_count).expect("Unexpected zero layer count");

        let mut image_view_info;
        let view_usage = if self.shared.private_caps.image_view_usage && !desc.usage.is_empty() {
            image_view_info = vk::ImageViewUsageCreateInfo::builder()
                .usage(conv::map_texture_usage(desc.usage))
                .build();
            vk_info = vk_info.push_next(&mut image_view_info);
            desc.usage
        } else {
            texture.usage
        };

        let raw = unsafe { self.shared.raw.create_image_view(&vk_info, None) }?;

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::IMAGE_VIEW, raw, label)
            };
        }

        let attachment = super::FramebufferAttachment {
            raw: if self.shared.private_caps.imageless_framebuffers {
                vk::ImageView::null()
            } else {
                raw
            },
            raw_image_flags: texture.raw_flags,
            view_usage,
            view_format: desc.format,
            raw_view_formats: texture
                .view_formats
                .iter()
                .map(|tf| self.shared.private_caps.map_texture_format(*tf))
                .collect(),
        };

        Ok(super::TextureView {
            raw,
            layers,
            attachment,
        })
    }
    unsafe fn destroy_texture_view(&self, view: super::TextureView) {
        if !self.shared.private_caps.imageless_framebuffers {
            let mut fbuf_lock = self.shared.framebuffers.lock();
            for (key, &raw_fbuf) in fbuf_lock.iter() {
                if key.attachments.iter().any(|at| at.raw == view.raw) {
                    unsafe { self.shared.raw.destroy_framebuffer(raw_fbuf, None) };
                }
            }
            fbuf_lock.retain(|key, _| !key.attachments.iter().any(|at| at.raw == view.raw));
        }
        unsafe { self.shared.raw.destroy_image_view(view.raw, None) };
    }

    unsafe fn create_sampler(
        &self,
        desc: &crate::SamplerDescriptor,
    ) -> Result<super::Sampler, crate::DeviceError> {
        let mut vk_info = vk::SamplerCreateInfo::builder()
            .flags(vk::SamplerCreateFlags::empty())
            .mag_filter(conv::map_filter_mode(desc.mag_filter))
            .min_filter(conv::map_filter_mode(desc.min_filter))
            .mipmap_mode(conv::map_mip_filter_mode(desc.mipmap_filter))
            .address_mode_u(conv::map_address_mode(desc.address_modes[0]))
            .address_mode_v(conv::map_address_mode(desc.address_modes[1]))
            .address_mode_w(conv::map_address_mode(desc.address_modes[2]))
            .min_lod(desc.lod_clamp.start)
            .max_lod(desc.lod_clamp.end);

        if let Some(fun) = desc.compare {
            vk_info = vk_info
                .compare_enable(true)
                .compare_op(conv::map_comparison(fun));
        }

        if desc.anisotropy_clamp != 1 {
            vk_info = vk_info
                .anisotropy_enable(true)
                .max_anisotropy(desc.anisotropy_clamp as f32);
        }

        if let Some(color) = desc.border_color {
            vk_info = vk_info.border_color(conv::map_border_color(color));
        }

        let raw = unsafe { self.shared.raw.create_sampler(&vk_info, None)? };

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::SAMPLER, raw, label)
            };
        }

        Ok(super::Sampler { raw })
    }
    unsafe fn destroy_sampler(&self, sampler: super::Sampler) {
        unsafe { self.shared.raw.destroy_sampler(sampler.raw, None) };
    }

    unsafe fn create_command_encoder(
        &self,
        desc: &crate::CommandEncoderDescriptor<super::Api>,
    ) -> Result<super::CommandEncoder, crate::DeviceError> {
        let vk_info = vk::CommandPoolCreateInfo::builder()
            .queue_family_index(desc.queue.family_index)
            .flags(vk::CommandPoolCreateFlags::TRANSIENT)
            .build();
        let raw = unsafe { self.shared.raw.create_command_pool(&vk_info, None)? };

        Ok(super::CommandEncoder {
            raw,
            device: Arc::clone(&self.shared),
            active: vk::CommandBuffer::null(),
            bind_point: vk::PipelineBindPoint::default(),
            temp: super::Temp::default(),
            free: Vec::new(),
            discarded: Vec::new(),
            rpass_debug_marker_active: false,
        })
    }
    unsafe fn destroy_command_encoder(&self, cmd_encoder: super::CommandEncoder) {
        unsafe {
            if !cmd_encoder.free.is_empty() {
                self.shared
                    .raw
                    .free_command_buffers(cmd_encoder.raw, &cmd_encoder.free)
            }
            if !cmd_encoder.discarded.is_empty() {
                self.shared
                    .raw
                    .free_command_buffers(cmd_encoder.raw, &cmd_encoder.discarded)
            }
            self.shared.raw.destroy_command_pool(cmd_encoder.raw, None);
        }
    }

    unsafe fn create_bind_group_layout(
        &self,
        desc: &crate::BindGroupLayoutDescriptor,
    ) -> Result<super::BindGroupLayout, crate::DeviceError> {
        let mut desc_count = gpu_descriptor::DescriptorTotalCount::default();
        let mut types = Vec::new();
        for entry in desc.entries {
            let count = entry.count.map_or(1, |c| c.get());
            if entry.binding as usize >= types.len() {
                types.resize(
                    entry.binding as usize + 1,
                    (vk::DescriptorType::INPUT_ATTACHMENT, 0),
                );
            }
            types[entry.binding as usize] = (
                conv::map_binding_type(entry.ty),
                entry.count.map_or(1, |c| c.get()),
            );

            match entry.ty {
                wgt::BindingType::Buffer {
                    ty,
                    has_dynamic_offset,
                    ..
                } => match ty {
                    wgt::BufferBindingType::Uniform => {
                        if has_dynamic_offset {
                            desc_count.uniform_buffer_dynamic += count;
                        } else {
                            desc_count.uniform_buffer += count;
                        }
                    }
                    wgt::BufferBindingType::Storage { .. } => {
                        if has_dynamic_offset {
                            desc_count.storage_buffer_dynamic += count;
                        } else {
                            desc_count.storage_buffer += count;
                        }
                    }
                },
                wgt::BindingType::Sampler { .. } => {
                    desc_count.sampler += count;
                }
                wgt::BindingType::Texture { .. } => {
                    desc_count.sampled_image += count;
                }
                wgt::BindingType::StorageTexture { .. } => {
                    desc_count.storage_image += count;
                }
            }
        }

        let vk_bindings = desc
            .entries
            .iter()
            .map(|entry| vk::DescriptorSetLayoutBinding {
                binding: entry.binding,
                descriptor_type: types[entry.binding as usize].0,
                descriptor_count: types[entry.binding as usize].1,
                stage_flags: conv::map_shader_stage(entry.visibility),
                p_immutable_samplers: ptr::null(),
            })
            .collect::<Vec<_>>();

        let vk_info = vk::DescriptorSetLayoutCreateInfo::builder().bindings(&vk_bindings);

        let binding_arrays = desc
            .entries
            .iter()
            .enumerate()
            .filter_map(|(idx, entry)| entry.count.map(|count| (idx as u32, count)))
            .collect();

        let mut binding_flag_info;
        let binding_flag_vec;

        let partially_bound = desc
            .flags
            .contains(crate::BindGroupLayoutFlags::PARTIALLY_BOUND);

        let vk_info = if partially_bound {
            binding_flag_vec = desc
                .entries
                .iter()
                .map(|entry| {
                    let mut flags = vk::DescriptorBindingFlags::empty();

                    if partially_bound && entry.count.is_some() {
                        flags |= vk::DescriptorBindingFlags::PARTIALLY_BOUND;
                    }

                    flags
                })
                .collect::<Vec<_>>();

            binding_flag_info = vk::DescriptorSetLayoutBindingFlagsCreateInfo::builder()
                .binding_flags(&binding_flag_vec);

            vk_info.push_next(&mut binding_flag_info)
        } else {
            vk_info
        };

        let raw = unsafe {
            self.shared
                .raw
                .create_descriptor_set_layout(&vk_info, None)?
        };

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::DESCRIPTOR_SET_LAYOUT, raw, label)
            };
        }

        Ok(super::BindGroupLayout {
            raw,
            desc_count,
            types: types.into_boxed_slice(),
            binding_arrays,
        })
    }
    unsafe fn destroy_bind_group_layout(&self, bg_layout: super::BindGroupLayout) {
        unsafe {
            self.shared
                .raw
                .destroy_descriptor_set_layout(bg_layout.raw, None)
        };
    }

    unsafe fn create_pipeline_layout(
        &self,
        desc: &crate::PipelineLayoutDescriptor<super::Api>,
    ) -> Result<super::PipelineLayout, crate::DeviceError> {
        let vk_set_layouts = desc
            .bind_group_layouts
            .iter()
            .map(|bgl| bgl.raw)
            .collect::<Vec<_>>();
        let vk_push_constant_ranges = desc
            .push_constant_ranges
            .iter()
            .map(|pcr| vk::PushConstantRange {
                stage_flags: conv::map_shader_stage(pcr.stages),
                offset: pcr.range.start,
                size: pcr.range.end - pcr.range.start,
            })
            .collect::<Vec<_>>();

        let vk_info = vk::PipelineLayoutCreateInfo::builder()
            .flags(vk::PipelineLayoutCreateFlags::empty())
            .set_layouts(&vk_set_layouts)
            .push_constant_ranges(&vk_push_constant_ranges);

        let raw = {
            profiling::scope!("vkCreatePipelineLayout");
            unsafe { self.shared.raw.create_pipeline_layout(&vk_info, None)? }
        };

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::PIPELINE_LAYOUT, raw, label)
            };
        }

        let mut binding_arrays = BTreeMap::new();
        for (group, &layout) in desc.bind_group_layouts.iter().enumerate() {
            for &(binding, binding_array_size) in &layout.binding_arrays {
                binding_arrays.insert(
                    naga::ResourceBinding {
                        group: group as u32,
                        binding,
                    },
                    naga::back::spv::BindingInfo {
                        binding_array_size: Some(binding_array_size.get()),
                    },
                );
            }
        }

        Ok(super::PipelineLayout {
            raw,
            binding_arrays,
        })
    }
    unsafe fn destroy_pipeline_layout(&self, pipeline_layout: super::PipelineLayout) {
        unsafe {
            self.shared
                .raw
                .destroy_pipeline_layout(pipeline_layout.raw, None)
        };
    }

    unsafe fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<super::Api>,
    ) -> Result<super::BindGroup, crate::DeviceError> {
        let mut vk_sets = unsafe {
            self.desc_allocator.lock().allocate(
                &*self.shared,
                &desc.layout.raw,
                gpu_descriptor::DescriptorSetLayoutCreateFlags::empty(),
                &desc.layout.desc_count,
                1,
            )?
        };

        let set = vk_sets.pop().unwrap();
        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::DESCRIPTOR_SET, *set.raw(), label)
            };
        }

        let mut writes = Vec::with_capacity(desc.entries.len());
        let mut buffer_infos = Vec::with_capacity(desc.buffers.len());
        let mut sampler_infos = Vec::with_capacity(desc.samplers.len());
        let mut image_infos = Vec::with_capacity(desc.textures.len());
        for entry in desc.entries {
            let (ty, size) = desc.layout.types[entry.binding as usize];
            if size == 0 {
                continue;
            }
            let mut write = vk::WriteDescriptorSet::builder()
                .dst_set(*set.raw())
                .dst_binding(entry.binding)
                .descriptor_type(ty);
            write = match ty {
                vk::DescriptorType::SAMPLER => {
                    let index = sampler_infos.len();
                    let start = entry.resource_index;
                    let end = start + entry.count;
                    sampler_infos.extend(desc.samplers[start as usize..end as usize].iter().map(
                        |binding| {
                            vk::DescriptorImageInfo::builder()
                                .sampler(binding.raw)
                                .build()
                        },
                    ));
                    write.image_info(&sampler_infos[index..])
                }
                vk::DescriptorType::SAMPLED_IMAGE | vk::DescriptorType::STORAGE_IMAGE => {
                    let index = image_infos.len();
                    let start = entry.resource_index;
                    let end = start + entry.count;
                    image_infos.extend(desc.textures[start as usize..end as usize].iter().map(
                        |binding| {
                            let layout = conv::derive_image_layout(
                                binding.usage,
                                binding.view.attachment.view_format,
                            );
                            vk::DescriptorImageInfo::builder()
                                .image_view(binding.view.raw)
                                .image_layout(layout)
                                .build()
                        },
                    ));
                    write.image_info(&image_infos[index..])
                }
                vk::DescriptorType::UNIFORM_BUFFER
                | vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC
                | vk::DescriptorType::STORAGE_BUFFER
                | vk::DescriptorType::STORAGE_BUFFER_DYNAMIC => {
                    let index = buffer_infos.len();
                    let start = entry.resource_index;
                    let end = start + entry.count;
                    buffer_infos.extend(desc.buffers[start as usize..end as usize].iter().map(
                        |binding| {
                            vk::DescriptorBufferInfo::builder()
                                .buffer(binding.buffer.raw)
                                .offset(binding.offset)
                                .range(binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get))
                                .build()
                        },
                    ));
                    write.buffer_info(&buffer_infos[index..])
                }
                _ => unreachable!(),
            };
            writes.push(write.build());
        }

        unsafe { self.shared.raw.update_descriptor_sets(&writes, &[]) };
        Ok(super::BindGroup { set })
    }
    unsafe fn destroy_bind_group(&self, group: super::BindGroup) {
        unsafe {
            self.desc_allocator
                .lock()
                .free(&*self.shared, Some(group.set))
        };
    }

    unsafe fn create_shader_module(
        &self,
        desc: &crate::ShaderModuleDescriptor,
        shader: crate::ShaderInput,
    ) -> Result<super::ShaderModule, crate::ShaderError> {
        let spv = match shader {
            crate::ShaderInput::Naga(naga_shader) => {
                if self
                    .shared
                    .workarounds
                    .contains(super::Workarounds::SEPARATE_ENTRY_POINTS)
                {
                    return Ok(super::ShaderModule::Intermediate {
                        naga_shader,
                        runtime_checks: desc.runtime_checks,
                    });
                }
                let mut naga_options = self.naga_options.clone();
                if !desc.runtime_checks {
                    naga_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
                        index: naga::proc::BoundsCheckPolicy::Unchecked,
                        buffer: naga::proc::BoundsCheckPolicy::Unchecked,
                        image_load: naga::proc::BoundsCheckPolicy::Unchecked,
                        image_store: naga::proc::BoundsCheckPolicy::Unchecked,
                        binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
                    };
                }
                Cow::Owned(
                    naga::back::spv::write_vec(
                        &naga_shader.module,
                        &naga_shader.info,
                        &naga_options,
                        None,
                    )
                    .map_err(|e| crate::ShaderError::Compilation(format!("{e}")))?,
                )
            }
            crate::ShaderInput::SpirV(spv) => Cow::Borrowed(spv),
        };

        let raw = self.create_shader_module_impl(&spv)?;

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::SHADER_MODULE, raw, label)
            };
        }

        Ok(super::ShaderModule::Raw(raw))
    }
    unsafe fn destroy_shader_module(&self, module: super::ShaderModule) {
        match module {
            super::ShaderModule::Raw(raw) => {
                unsafe { self.shared.raw.destroy_shader_module(raw, None) };
            }
            super::ShaderModule::Intermediate { .. } => {}
        }
    }

    unsafe fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<super::Api>,
    ) -> Result<super::RenderPipeline, crate::PipelineError> {
        let dynamic_states = [
            vk::DynamicState::VIEWPORT,
            vk::DynamicState::SCISSOR,
            vk::DynamicState::BLEND_CONSTANTS,
            vk::DynamicState::STENCIL_REFERENCE,
        ];
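        // Collect only the attachment formats/layouts here; the actual
        // (cached) VkRenderPass compatible with this key is resolved below
        // via `make_render_pass`.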
        let mut compatible_rp_key = super::RenderPassKey {
            sample_count: desc.multisample.count,
            multiview: desc.multiview,
            ..Default::default()
        };
        let mut stages = ArrayVec::<_, 2>::new();
        let mut vertex_buffers = Vec::with_capacity(desc.vertex_buffers.len());
        let mut vertex_attributes = Vec::new();

        for (i, vb) in desc.vertex_buffers.iter().enumerate() {
            vertex_buffers.push(vk::VertexInputBindingDescription {
                binding: i as u32,
                stride: vb.array_stride as u32,
                input_rate: match vb.step_mode {
                    wgt::VertexStepMode::Vertex => vk::VertexInputRate::VERTEX,
                    wgt::VertexStepMode::Instance => vk::VertexInputRate::INSTANCE,
                },
            });
            for at in vb.attributes {
                vertex_attributes.push(vk::VertexInputAttributeDescription {
                    location: at.shader_location,
                    binding: i as u32,
                    format: conv::map_vertex_format(at.format),
                    offset: at.offset as u32,
                });
            }
        }

        let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::builder()
            .vertex_binding_descriptions(&vertex_buffers)
            .vertex_attribute_descriptions(&vertex_attributes)
            .build();

        let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::builder()
            .topology(conv::map_topology(desc.primitive.topology))
            .primitive_restart_enable(desc.primitive.strip_index_format.is_some())
            .build();

        let compiled_vs = self.compile_stage(
            &desc.vertex_stage,
            naga::ShaderStage::Vertex,
            &desc.layout.binding_arrays,
        )?;
        stages.push(compiled_vs.create_info);
        let compiled_fs = match desc.fragment_stage {
            Some(ref stage) => {
                let compiled = self.compile_stage(
                    stage,
                    naga::ShaderStage::Fragment,
                    &desc.layout.binding_arrays,
                )?;
                stages.push(compiled.create_info);
                Some(compiled)
            }
            None => None,
        };

        let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::builder()
            .polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
            .front_face(conv::map_front_face(desc.primitive.front_face))
            .line_width(1.0)
            .depth_clamp_enable(desc.primitive.unclipped_depth);
        if let Some(face) = desc.primitive.cull_mode {
            vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face))
        }
        let mut vk_rasterization_conservative_state =
            vk::PipelineRasterizationConservativeStateCreateInfoEXT::builder()
                .conservative_rasterization_mode(vk::ConservativeRasterizationModeEXT::OVERESTIMATE)
                .build();
        if desc.primitive.conservative {
            vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
        }

        let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::builder();
        if let Some(ref ds) = desc.depth_stencil {
            let vk_format = self.shared.private_caps.map_texture_format(ds.format);
            let vk_layout = if ds.is_read_only(desc.primitive.cull_mode) {
                vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL
            } else {
                vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL
            };
            compatible_rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: super::AttachmentKey::compatible(vk_format, vk_layout),
                stencil_ops: crate::AttachmentOps::all(),
            });

            if ds.is_depth_enabled() {
                vk_depth_stencil = vk_depth_stencil
                    .depth_test_enable(true)
                    .depth_write_enable(ds.depth_write_enabled)
                    .depth_compare_op(conv::map_comparison(ds.depth_compare));
            }
            if ds.stencil.is_enabled() {
                let s = &ds.stencil;
                let front = conv::map_stencil_face(&s.front, s.read_mask, s.write_mask);
                let back = conv::map_stencil_face(&s.back, s.read_mask, s.write_mask);
                vk_depth_stencil = vk_depth_stencil
                    .stencil_test_enable(true)
                    .front(front)
                    .back(back);
            }

            if ds.bias.is_enabled() {
                vk_rasterization = vk_rasterization
                    .depth_bias_enable(true)
                    .depth_bias_constant_factor(ds.bias.constant as f32)
                    .depth_bias_clamp(ds.bias.clamp)
                    .depth_bias_slope_factor(ds.bias.slope_scale);
            }
        }

        let vk_viewport = vk::PipelineViewportStateCreateInfo::builder()
            .flags(vk::PipelineViewportStateCreateFlags::empty())
            .scissor_count(1)
            .viewport_count(1)
            .build();

        let vk_sample_mask = [
            desc.multisample.mask as u32,
            (desc.multisample.mask >> 32) as u32,
        ];
        let vk_multisample = vk::PipelineMultisampleStateCreateInfo::builder()
            .rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
            .alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
            .sample_mask(&vk_sample_mask)
            .build();

        let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
        for cat in desc.color_targets {
            let (key, attachment) = if let Some(cat) = cat.as_ref() {
                let mut vk_attachment = vk::PipelineColorBlendAttachmentState::builder()
                    .color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
                if let Some(ref blend) = cat.blend {
                    let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
                    let (alpha_op, alpha_src, alpha_dst) = conv::map_blend_component(&blend.alpha);
                    vk_attachment = vk_attachment
                        .blend_enable(true)
                        .color_blend_op(color_op)
                        .src_color_blend_factor(color_src)
                        .dst_color_blend_factor(color_dst)
                        .alpha_blend_op(alpha_op)
                        .src_alpha_blend_factor(alpha_src)
                        .dst_alpha_blend_factor(alpha_dst);
                }

                let vk_format = self.shared.private_caps.map_texture_format(cat.format);
                (
                    Some(super::ColorAttachmentKey {
                        base: super::AttachmentKey::compatible(
                            vk_format,
                            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
                        ),
                        resolve: None,
                    }),
                    vk_attachment.build(),
                )
            } else {
                (None, vk::PipelineColorBlendAttachmentState::default())
            };

            compatible_rp_key.colors.push(key);
            vk_attachments.push(attachment);
        }

        let vk_color_blend = vk::PipelineColorBlendStateCreateInfo::builder()
            .attachments(&vk_attachments)
            .build();

        let vk_dynamic_state = vk::PipelineDynamicStateCreateInfo::builder()
            .dynamic_states(&dynamic_states)
            .build();

        let raw_pass = self
            .shared
            .make_render_pass(compatible_rp_key)
            .map_err(crate::DeviceError::from)?;

        let vk_infos = [{
            vk::GraphicsPipelineCreateInfo::builder()
                .layout(desc.layout.raw)
                .stages(&stages)
                .vertex_input_state(&vk_vertex_input)
                .input_assembly_state(&vk_input_assembly)
                .rasterization_state(&vk_rasterization)
                .viewport_state(&vk_viewport)
                .multisample_state(&vk_multisample)
                .depth_stencil_state(&vk_depth_stencil)
                .color_blend_state(&vk_color_blend)
                .dynamic_state(&vk_dynamic_state)
                .render_pass(raw_pass)
                .build()
        }];

        let mut raw_vec = {
            profiling::scope!("vkCreateGraphicsPipelines");
            unsafe {
                self.shared
                    .raw
                    .create_graphics_pipelines(vk::PipelineCache::null(), &vk_infos, None)
                    .map_err(|(_, e)| crate::DeviceError::from(e))
            }?
        };

        let raw = raw_vec.pop().unwrap();
        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::PIPELINE, raw, label)
            };
        }

        if let Some(raw_module) = compiled_vs.temp_raw_module {
            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
        }
        if let Some(CompiledStage {
            temp_raw_module: Some(raw_module),
            ..
        }) = compiled_fs
        {
            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
        }

        Ok(super::RenderPipeline { raw })
    }
    unsafe fn destroy_render_pipeline(&self, pipeline: super::RenderPipeline) {
        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
    }

    unsafe fn create_compute_pipeline(
        &self,
        desc: &crate::ComputePipelineDescriptor<super::Api>,
    ) -> Result<super::ComputePipeline, crate::PipelineError> {
        let compiled = self.compile_stage(
            &desc.stage,
            naga::ShaderStage::Compute,
            &desc.layout.binding_arrays,
        )?;

        let vk_infos = [{
            vk::ComputePipelineCreateInfo::builder()
                .layout(desc.layout.raw)
                .stage(compiled.create_info)
                .build()
        }];

        let mut raw_vec = {
            profiling::scope!("vkCreateComputePipelines");
            unsafe {
                self.shared
                    .raw
                    .create_compute_pipelines(vk::PipelineCache::null(), &vk_infos, None)
                    .map_err(|(_, e)| crate::DeviceError::from(e))
            }?
        };

        let raw = raw_vec.pop().unwrap();
        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::PIPELINE, raw, label)
            };
        }

        if let Some(raw_module) = compiled.temp_raw_module {
            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
        }

        Ok(super::ComputePipeline { raw })
    }
    unsafe fn destroy_compute_pipeline(&self, pipeline: super::ComputePipeline) {
        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
    }

    unsafe fn create_query_set(
        &self,
        desc: &wgt::QuerySetDescriptor<crate::Label>,
    ) -> Result<super::QuerySet, crate::DeviceError> {
        let (vk_type, pipeline_statistics) = match desc.ty {
            wgt::QueryType::Occlusion => (
                vk::QueryType::OCCLUSION,
                vk::QueryPipelineStatisticFlags::empty(),
            ),
            wgt::QueryType::PipelineStatistics(statistics) => (
                vk::QueryType::PIPELINE_STATISTICS,
                conv::map_pipeline_statistics(statistics),
            ),
            wgt::QueryType::Timestamp => (
                vk::QueryType::TIMESTAMP,
                vk::QueryPipelineStatisticFlags::empty(),
            ),
        };

        let vk_info = vk::QueryPoolCreateInfo::builder()
            .query_type(vk_type)
            .query_count(desc.count)
            .pipeline_statistics(pipeline_statistics)
            .build();

        let raw = unsafe { self.shared.raw.create_query_pool(&vk_info, None) }?;
        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::QUERY_POOL, raw, label)
            };
        }

        Ok(super::QuerySet { raw })
    }
    unsafe fn destroy_query_set(&self, set: super::QuerySet) {
        unsafe { self.shared.raw.destroy_query_pool(set.raw, None) };
    }

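    /// Creates a fence: a timeline semaphore when the device supports them,
    /// otherwise a pool of binary `VkFence`s tracking the last completed
    /// value (see `Fence::FencePool`).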
    unsafe fn create_fence(&self) -> Result<super::Fence, crate::DeviceError> {
        Ok(if self.shared.private_caps.timeline_semaphores {
            let mut sem_type_info =
                vk::SemaphoreTypeCreateInfo::builder().semaphore_type(vk::SemaphoreType::TIMELINE);
            let vk_info = vk::SemaphoreCreateInfo::builder().push_next(&mut sem_type_info);
            let raw = unsafe { self.shared.raw.create_semaphore(&vk_info, None) }?;
            super::Fence::TimelineSemaphore(raw)
        } else {
            super::Fence::FencePool {
                last_completed: 0,
                active: Vec::new(),
                free: Vec::new(),
            }
        })
    }
    unsafe fn destroy_fence(&self, fence: super::Fence) {
        match fence {
            super::Fence::TimelineSemaphore(raw) => {
                unsafe { self.shared.raw.destroy_semaphore(raw, None) };
            }
            super::Fence::FencePool {
                active,
                free,
                last_completed: _,
            } => {
                for (_, raw) in active {
                    unsafe { self.shared.raw.destroy_fence(raw, None) };
                }
                for raw in free {
                    unsafe { self.shared.raw.destroy_fence(raw, None) };
                }
            }
        }
    }
    unsafe fn get_fence_value(
        &self,
        fence: &super::Fence,
    ) -> Result<crate::FenceValue, crate::DeviceError> {
        fence.get_latest(
            &self.shared.raw,
            self.shared.extension_fns.timeline_semaphore.as_ref(),
        )
    }
    unsafe fn wait(
        &self,
        fence: &super::Fence,
        wait_value: crate::FenceValue,
        timeout_ms: u32,
    ) -> Result<bool, crate::DeviceError> {
        let timeout_ns = timeout_ms as u64 * super::MILLIS_TO_NANOS;
        match *fence {
            super::Fence::TimelineSemaphore(raw) => {
                let semaphores = [raw];
                let values = [wait_value];
                let vk_info = vk::SemaphoreWaitInfo::builder()
                    .semaphores(&semaphores)
                    .values(&values);
                let result = match self.shared.extension_fns.timeline_semaphore {
                    Some(super::ExtensionFn::Extension(ref ext)) => unsafe {
                        ext.wait_semaphores(&vk_info, timeout_ns)
                    },
                    Some(super::ExtensionFn::Promoted) => unsafe {
                        self.shared.raw.wait_semaphores(&vk_info, timeout_ns)
                    },
                    None => unreachable!(),
                };
                match result {
                    Ok(()) => Ok(true),
                    Err(vk::Result::TIMEOUT) => Ok(false),
                    Err(other) => Err(other.into()),
                }
            }
            super::Fence::FencePool {
                last_completed,
                ref active,
                free: _,
            } => {
                if wait_value <= last_completed {
                    Ok(true)
                } else {
                    match active.iter().find(|&&(value, _)| value >= wait_value) {
                        Some(&(_, raw)) => {
                            match unsafe {
                                self.shared.raw.wait_for_fences(&[raw], true, timeout_ns)
                            } {
                                Ok(()) => Ok(true),
                                Err(vk::Result::TIMEOUT) => Ok(false),
                                Err(other) => Err(other.into()),
                            }
                        }
                        None => {
                            log::error!("No signals reached value {}", wait_value);
                            Err(crate::DeviceError::Lost)
                        }
                    }
                }
            }
        }
    }

    unsafe fn start_capture(&self) -> bool {
        #[cfg(feature = "renderdoc")]
        {
            let raw_vk_instance =
                ash::vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
            unsafe {
                self.render_doc
                    .start_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
            }
        }
        #[cfg(not(feature = "renderdoc"))]
        false
    }
    unsafe fn stop_capture(&self) {
        #[cfg(feature = "renderdoc")]
        {
            let raw_vk_instance =
                ash::vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };

            unsafe {
                self.render_doc
                    .end_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
            }
        }
    }
}

impl From<gpu_alloc::AllocationError> for crate::DeviceError {
    fn from(error: gpu_alloc::AllocationError) -> Self {
        use gpu_alloc::AllocationError as Ae;
        match error {
            Ae::OutOfDeviceMemory | Ae::OutOfHostMemory => Self::OutOfMemory,
            _ => {
                log::error!("memory allocation: {:?}", error);
                Self::Lost
            }
        }
    }
}
impl From<gpu_alloc::MapError> for crate::DeviceError {
    fn from(error: gpu_alloc::MapError) -> Self {
        use gpu_alloc::MapError as Me;
        match error {
            Me::OutOfDeviceMemory | Me::OutOfHostMemory => Self::OutOfMemory,
            _ => {
                log::error!("memory mapping: {:?}", error);
                Self::Lost
            }
        }
    }
}
impl From<gpu_descriptor::AllocationError> for crate::DeviceError {
    fn from(error: gpu_descriptor::AllocationError) -> Self {
        log::error!("descriptor allocation: {:?}", error);
        Self::OutOfMemory
    }
}