wgpu_core/device/
global.rs

1#[cfg(feature = "trace")]
2use crate::device::trace;
3use crate::{
4    binding_model, command, conv,
5    device::{life::WaitIdleError, map_buffer, queue, Device, DeviceError, HostMap},
6    global::Global,
7    hal_api::HalApi,
8    hub::Token,
9    id::{self, AdapterId, DeviceId, SurfaceId},
10    identity::{GlobalIdentityHandlerFactory, Input},
11    init_tracker::TextureInitTracker,
12    instance::{self, Adapter, Surface},
13    pipeline, present,
14    resource::{self, Buffer, BufferAccessResult, BufferMapState},
15    resource::{BufferAccessError, BufferMapOperation, TextureClearMode},
16    storage::InvalidId,
17    validation::check_buffer_usage,
18    FastHashMap, Label, LabelHelpers as _, Stored,
19};
20
21use hal::{CommandEncoder as _, Device as _};
22use smallvec::SmallVec;
23
24use wgt::{BufferAddress, TextureFormat};
25
26use std::{borrow::Cow, iter, mem, ops::Range, ptr};
27
28use super::{BufferMapPendingClosure, ImplicitPipelineIds, InvalidDevice, UserClosures};
29
30impl<G: GlobalIdentityHandlerFactory> Global<G> {
31    pub fn adapter_is_surface_supported<A: HalApi>(
32        &self,
33        adapter_id: AdapterId,
34        surface_id: SurfaceId,
35    ) -> Result<bool, instance::IsSurfaceSupportedError> {
36        let hub = A::hub(self);
37        let mut token = Token::root();
38
39        let (surface_guard, mut token) = self.surfaces.read(&mut token);
40        let (adapter_guard, mut _token) = hub.adapters.read(&mut token);
41        let adapter = adapter_guard
42            .get(adapter_id)
43            .map_err(|_| instance::IsSurfaceSupportedError::InvalidAdapter)?;
44        let surface = surface_guard
45            .get(surface_id)
46            .map_err(|_| instance::IsSurfaceSupportedError::InvalidSurface)?;
47        Ok(adapter.is_surface_supported(surface))
48    }
49
50    pub fn surface_get_capabilities<A: HalApi>(
51        &self,
52        surface_id: SurfaceId,
53        adapter_id: AdapterId,
54    ) -> Result<wgt::SurfaceCapabilities, instance::GetSurfaceSupportError> {
55        profiling::scope!("Surface::get_capabilities");
56        self.fetch_adapter_and_surface::<A, _, _>(surface_id, adapter_id, |adapter, surface| {
57            let mut hal_caps = surface.get_capabilities(adapter)?;
58
59            hal_caps.formats.sort_by_key(|f| !f.is_srgb());
60
61            let usages = conv::map_texture_usage_from_hal(hal_caps.usage);
62
63            Ok(wgt::SurfaceCapabilities {
64                formats: hal_caps.formats,
65                present_modes: hal_caps.present_modes,
66                alpha_modes: hal_caps.composite_alpha_modes,
67                usages,
68            })
69        })
70    }
71
72    fn fetch_adapter_and_surface<
73        A: HalApi,
74        F: FnOnce(&Adapter<A>, &Surface) -> Result<B, instance::GetSurfaceSupportError>,
75        B,
76    >(
77        &self,
78        surface_id: SurfaceId,
79        adapter_id: AdapterId,
80        get_supported_callback: F,
81    ) -> Result<B, instance::GetSurfaceSupportError> {
82        let hub = A::hub(self);
83        let mut token = Token::root();
84
85        let (surface_guard, mut token) = self.surfaces.read(&mut token);
86        let (adapter_guard, mut _token) = hub.adapters.read(&mut token);
87        let adapter = adapter_guard
88            .get(adapter_id)
89            .map_err(|_| instance::GetSurfaceSupportError::InvalidAdapter)?;
90        let surface = surface_guard
91            .get(surface_id)
92            .map_err(|_| instance::GetSurfaceSupportError::InvalidSurface)?;
93
94        get_supported_callback(adapter, surface)
95    }
96
97    pub fn device_features<A: HalApi>(
98        &self,
99        device_id: DeviceId,
100    ) -> Result<wgt::Features, InvalidDevice> {
101        let hub = A::hub(self);
102        let mut token = Token::root();
103        let (device_guard, _) = hub.devices.read(&mut token);
104        let device = device_guard.get(device_id).map_err(|_| InvalidDevice)?;
105
106        Ok(device.features)
107    }
108
109    pub fn device_limits<A: HalApi>(
110        &self,
111        device_id: DeviceId,
112    ) -> Result<wgt::Limits, InvalidDevice> {
113        let hub = A::hub(self);
114        let mut token = Token::root();
115        let (device_guard, _) = hub.devices.read(&mut token);
116        let device = device_guard.get(device_id).map_err(|_| InvalidDevice)?;
117
118        Ok(device.limits.clone())
119    }
120
121    pub fn device_downlevel_properties<A: HalApi>(
122        &self,
123        device_id: DeviceId,
124    ) -> Result<wgt::DownlevelCapabilities, InvalidDevice> {
125        let hub = A::hub(self);
126        let mut token = Token::root();
127        let (device_guard, _) = hub.devices.read(&mut token);
128        let device = device_guard.get(device_id).map_err(|_| InvalidDevice)?;
129
130        Ok(device.downlevel.clone())
131    }
132
133    pub fn device_create_buffer<A: HalApi>(
134        &self,
135        device_id: DeviceId,
136        desc: &resource::BufferDescriptor,
137        id_in: Input<G, id::BufferId>,
138    ) -> (id::BufferId, Option<resource::CreateBufferError>) {
139        profiling::scope!("Device::create_buffer");
140
141        let hub = A::hub(self);
142        let mut token = Token::root();
143        let fid = hub.buffers.prepare(id_in);
144
145        let (device_guard, mut token) = hub.devices.read(&mut token);
146        let error = loop {
147            let device = match device_guard.get(device_id) {
148                Ok(device) => device,
149                Err(_) => break DeviceError::Invalid.into(),
150            };
151
152            if desc.usage.is_empty() {
153                // Per spec, `usage` must not be zero.
154                break resource::CreateBufferError::InvalidUsage(desc.usage);
155            }
156
157            #[cfg(feature = "trace")]
158            if let Some(ref trace) = device.trace {
159                let mut desc = desc.clone();
160                let mapped_at_creation = mem::replace(&mut desc.mapped_at_creation, false);
161                if mapped_at_creation && !desc.usage.contains(wgt::BufferUsages::MAP_WRITE) {
162                    desc.usage |= wgt::BufferUsages::COPY_DST;
163                }
164                trace
165                    .lock()
166                    .add(trace::Action::CreateBuffer(fid.id(), desc));
167            }
168
169            let mut buffer = match device.create_buffer(device_id, desc, false) {
170                Ok(buffer) => buffer,
171                Err(e) => break e,
172            };
173            let ref_count = buffer.life_guard.add_ref();
174
175            let buffer_use = if !desc.mapped_at_creation {
176                hal::BufferUses::empty()
177            } else if desc.usage.contains(wgt::BufferUsages::MAP_WRITE) {
178                // buffer is mappable, so we are just doing that at start
179                let map_size = buffer.size;
180                let ptr = if map_size == 0 {
181                    std::ptr::NonNull::dangling()
182                } else {
183                    match map_buffer(&device.raw, &mut buffer, 0, map_size, HostMap::Write) {
184                        Ok(ptr) => ptr,
185                        Err(e) => {
186                            let raw = buffer.raw.unwrap();
187                            device.lock_life(&mut token).schedule_resource_destruction(
188                                queue::TempResource::Buffer(raw),
189                                !0,
190                            );
191                            break e.into();
192                        }
193                    }
194                };
195                buffer.map_state = resource::BufferMapState::Active {
196                    ptr,
197                    range: 0..map_size,
198                    host: HostMap::Write,
199                };
200                hal::BufferUses::MAP_WRITE
201            } else {
202                // buffer needs staging area for initialization only
203                let stage_desc = wgt::BufferDescriptor {
204                    label: Some(Cow::Borrowed(
205                        "(wgpu internal) initializing unmappable buffer",
206                    )),
207                    size: desc.size,
208                    usage: wgt::BufferUsages::MAP_WRITE | wgt::BufferUsages::COPY_SRC,
209                    mapped_at_creation: false,
210                };
211                let mut stage = match device.create_buffer(device_id, &stage_desc, true) {
212                    Ok(stage) => stage,
213                    Err(e) => {
214                        let raw = buffer.raw.unwrap();
215                        device
216                            .lock_life(&mut token)
217                            .schedule_resource_destruction(queue::TempResource::Buffer(raw), !0);
218                        break e;
219                    }
220                };
221                let stage_buffer = stage.raw.unwrap();
222                let mapping = match unsafe { device.raw.map_buffer(&stage_buffer, 0..stage.size) } {
223                    Ok(mapping) => mapping,
224                    Err(e) => {
225                        let raw = buffer.raw.unwrap();
226                        let mut life_lock = device.lock_life(&mut token);
227                        life_lock
228                            .schedule_resource_destruction(queue::TempResource::Buffer(raw), !0);
229                        life_lock.schedule_resource_destruction(
230                            queue::TempResource::Buffer(stage_buffer),
231                            !0,
232                        );
233                        break DeviceError::from(e).into();
234                    }
235                };
236
237                assert_eq!(buffer.size % wgt::COPY_BUFFER_ALIGNMENT, 0);
238                // Zero initialize memory and then mark both staging and buffer as initialized
239                // (it's guaranteed that this is the case by the time the buffer is usable)
240                unsafe { ptr::write_bytes(mapping.ptr.as_ptr(), 0, buffer.size as usize) };
241                buffer.initialization_status.drain(0..buffer.size);
242                stage.initialization_status.drain(0..buffer.size);
243
244                buffer.map_state = resource::BufferMapState::Init {
245                    ptr: mapping.ptr,
246                    needs_flush: !mapping.is_coherent,
247                    stage_buffer,
248                };
249                hal::BufferUses::COPY_DST
250            };
251
252            let id = fid.assign(buffer, &mut token);
253            log::info!("Created buffer {:?} with {:?}", id, desc);
254
255            device
256                .trackers
257                .lock()
258                .buffers
259                .insert_single(id, ref_count, buffer_use);
260
261            return (id.0, None);
262        };
263
264        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
265        (id, Some(error))
266    }
267
268    /// Assign `id_in` an error with the given `label`.
269    ///
270    /// Ensure that future attempts to use `id_in` as a buffer ID will propagate
271    /// the error, following the WebGPU ["contagious invalidity"] style.
272    ///
273    /// Firefox uses this function to comply strictly with the WebGPU spec,
274    /// which requires [`GPUBufferDescriptor`] validation to be generated on the
275    /// Device timeline and leave the newly created [`GPUBuffer`] invalid.
276    ///
277    /// Ideally, we would simply let [`device_create_buffer`] take care of all
278    /// of this, but some errors must be detected before we can even construct a
279    /// [`wgpu_types::BufferDescriptor`] to give it. For example, the WebGPU API
280    /// allows a `GPUBufferDescriptor`'s [`usage`] property to be any WebIDL
281    /// `unsigned long` value, but we can't construct a
282    /// [`wgpu_types::BufferUsages`] value from values with unassigned bits
283    /// set. This means we must validate `usage` before we can call
284    /// `device_create_buffer`.
285    ///
286    /// When that validation fails, we must arrange for the buffer id to be
287    /// considered invalid. This method provides the means to do so.
288    ///
289    /// ["contagious invalidity"]: https://www.w3.org/TR/webgpu/#invalidity
290    /// [`GPUBufferDescriptor`]: https://www.w3.org/TR/webgpu/#dictdef-gpubufferdescriptor
291    /// [`GPUBuffer`]: https://www.w3.org/TR/webgpu/#gpubuffer
292    /// [`wgpu_types::BufferDescriptor`]: wgt::BufferDescriptor
293    /// [`device_create_buffer`]: Global::device_create_buffer
294    /// [`usage`]: https://www.w3.org/TR/webgpu/#dom-gputexturedescriptor-usage
295    /// [`wgpu_types::BufferUsages`]: wgt::BufferUsages
296    pub fn create_buffer_error<A: HalApi>(&self, id_in: Input<G, id::BufferId>, label: Label) {
297        let hub = A::hub(self);
298        let mut token = Token::root();
299        let fid = hub.buffers.prepare(id_in);
300
301        fid.assign_error(label.borrow_or_default(), &mut token);
302    }
303
304    pub fn create_render_bundle_error<A: HalApi>(
305        &self,
306        id_in: Input<G, id::RenderBundleId>,
307        label: Label,
308    ) {
309        let hub = A::hub(self);
310        let mut token = Token::root();
311        let fid = hub.render_bundles.prepare(id_in);
312
313        let (_, mut token) = hub.devices.read(&mut token);
314        fid.assign_error(label.borrow_or_default(), &mut token);
315    }
316
317    /// Assign `id_in` an error with the given `label`.
318    ///
319    /// See `create_buffer_error` for more context and explaination.
320    pub fn create_texture_error<A: HalApi>(&self, id_in: Input<G, id::TextureId>, label: Label) {
321        let hub = A::hub(self);
322        let mut token = Token::root();
323        let fid = hub.textures.prepare(id_in);
324
325        fid.assign_error(label.borrow_or_default(), &mut token);
326    }
327
328    #[cfg(feature = "replay")]
329    pub fn device_wait_for_buffer<A: HalApi>(
330        &self,
331        device_id: DeviceId,
332        buffer_id: id::BufferId,
333    ) -> Result<(), WaitIdleError> {
334        let hub = A::hub(self);
335        let mut token = Token::root();
336        let (device_guard, mut token) = hub.devices.read(&mut token);
337        let last_submission = {
338            let (buffer_guard, _) = hub.buffers.write(&mut token);
339            match buffer_guard.get(buffer_id) {
340                Ok(buffer) => buffer.life_guard.life_count(),
341                Err(_) => return Ok(()),
342            }
343        };
344
345        device_guard
346            .get(device_id)
347            .map_err(|_| DeviceError::Invalid)?
348            .wait_for_submit(last_submission, &mut token)
349    }
350
351    #[doc(hidden)]
352    pub fn device_set_buffer_sub_data<A: HalApi>(
353        &self,
354        device_id: DeviceId,
355        buffer_id: id::BufferId,
356        offset: BufferAddress,
357        data: &[u8],
358    ) -> BufferAccessResult {
359        profiling::scope!("Device::set_buffer_sub_data");
360
361        let hub = A::hub(self);
362        let mut token = Token::root();
363
364        let (device_guard, mut token) = hub.devices.read(&mut token);
365        let (mut buffer_guard, _) = hub.buffers.write(&mut token);
366        let device = device_guard
367            .get(device_id)
368            .map_err(|_| DeviceError::Invalid)?;
369        let buffer = buffer_guard
370            .get_mut(buffer_id)
371            .map_err(|_| BufferAccessError::Invalid)?;
372        check_buffer_usage(buffer.usage, wgt::BufferUsages::MAP_WRITE)?;
373        //assert!(buffer isn't used by the GPU);
374
375        #[cfg(feature = "trace")]
376        if let Some(ref trace) = device.trace {
377            let mut trace = trace.lock();
378            let data_path = trace.make_binary("bin", data);
379            trace.add(trace::Action::WriteBuffer {
380                id: buffer_id,
381                data: data_path,
382                range: offset..offset + data.len() as BufferAddress,
383                queued: false,
384            });
385        }
386
387        let raw_buf = buffer.raw.as_ref().unwrap();
388        unsafe {
389            let mapping = device
390                .raw
391                .map_buffer(raw_buf, offset..offset + data.len() as u64)
392                .map_err(DeviceError::from)?;
393            ptr::copy_nonoverlapping(data.as_ptr(), mapping.ptr.as_ptr(), data.len());
394            if !mapping.is_coherent {
395                device
396                    .raw
397                    .flush_mapped_ranges(raw_buf, iter::once(offset..offset + data.len() as u64));
398            }
399            device
400                .raw
401                .unmap_buffer(raw_buf)
402                .map_err(DeviceError::from)?;
403        }
404
405        Ok(())
406    }
407
408    #[doc(hidden)]
409    pub fn device_get_buffer_sub_data<A: HalApi>(
410        &self,
411        device_id: DeviceId,
412        buffer_id: id::BufferId,
413        offset: BufferAddress,
414        data: &mut [u8],
415    ) -> BufferAccessResult {
416        profiling::scope!("Device::get_buffer_sub_data");
417
418        let hub = A::hub(self);
419        let mut token = Token::root();
420
421        let (device_guard, mut token) = hub.devices.read(&mut token);
422        let (mut buffer_guard, _) = hub.buffers.write(&mut token);
423        let device = device_guard
424            .get(device_id)
425            .map_err(|_| DeviceError::Invalid)?;
426        let buffer = buffer_guard
427            .get_mut(buffer_id)
428            .map_err(|_| BufferAccessError::Invalid)?;
429        check_buffer_usage(buffer.usage, wgt::BufferUsages::MAP_READ)?;
430        //assert!(buffer isn't used by the GPU);
431
432        let raw_buf = buffer.raw.as_ref().unwrap();
433        unsafe {
434            let mapping = device
435                .raw
436                .map_buffer(raw_buf, offset..offset + data.len() as u64)
437                .map_err(DeviceError::from)?;
438            if !mapping.is_coherent {
439                device.raw.invalidate_mapped_ranges(
440                    raw_buf,
441                    iter::once(offset..offset + data.len() as u64),
442                );
443            }
444            ptr::copy_nonoverlapping(mapping.ptr.as_ptr(), data.as_mut_ptr(), data.len());
445            device
446                .raw
447                .unmap_buffer(raw_buf)
448                .map_err(DeviceError::from)?;
449        }
450
451        Ok(())
452    }
453
454    pub fn buffer_label<A: HalApi>(&self, id: id::BufferId) -> String {
455        A::hub(self).buffers.label_for_resource(id)
456    }
457
458    pub fn buffer_destroy<A: HalApi>(
459        &self,
460        buffer_id: id::BufferId,
461    ) -> Result<(), resource::DestroyError> {
462        profiling::scope!("Buffer::destroy");
463
464        let map_closure;
465        // Restrict the locks to this scope.
466        {
467            let hub = A::hub(self);
468            let mut token = Token::root();
469
470            //TODO: lock pending writes separately, keep the device read-only
471            let (mut device_guard, mut token) = hub.devices.write(&mut token);
472
473            log::info!("Buffer {:?} is destroyed", buffer_id);
474            let (mut buffer_guard, _) = hub.buffers.write(&mut token);
475            let buffer = buffer_guard
476                .get_mut(buffer_id)
477                .map_err(|_| resource::DestroyError::Invalid)?;
478
479            let device = &mut device_guard[buffer.device_id.value];
480
481            map_closure = match &buffer.map_state {
482                &BufferMapState::Waiting(..) // To get the proper callback behavior.
483                | &BufferMapState::Init { .. }
484                | &BufferMapState::Active { .. }
485                => {
486                    self.buffer_unmap_inner(buffer_id, buffer, device)
487                        .unwrap_or(None)
488                }
489                _ => None,
490            };
491
492            #[cfg(feature = "trace")]
493            if let Some(ref trace) = device.trace {
494                trace.lock().add(trace::Action::FreeBuffer(buffer_id));
495            }
496
497            let raw = buffer
498                .raw
499                .take()
500                .ok_or(resource::DestroyError::AlreadyDestroyed)?;
501            let temp = queue::TempResource::Buffer(raw);
502
503            if device.pending_writes.dst_buffers.contains(&buffer_id) {
504                device.pending_writes.temp_resources.push(temp);
505            } else {
506                let last_submit_index = buffer.life_guard.life_count();
507                drop(buffer_guard);
508                device
509                    .lock_life(&mut token)
510                    .schedule_resource_destruction(temp, last_submit_index);
511            }
512        }
513
514        // Note: outside the scope where locks are held when calling the callback
515        if let Some((operation, status)) = map_closure {
516            operation.callback.call(status);
517        }
518
519        Ok(())
520    }
521
522    pub fn buffer_drop<A: HalApi>(&self, buffer_id: id::BufferId, wait: bool) {
523        profiling::scope!("Buffer::drop");
524        log::debug!("buffer {:?} is dropped", buffer_id);
525
526        let hub = A::hub(self);
527        let mut token = Token::root();
528
529        let (ref_count, last_submit_index, device_id) = {
530            let (mut buffer_guard, _) = hub.buffers.write(&mut token);
531            match buffer_guard.get_mut(buffer_id) {
532                Ok(buffer) => {
533                    let ref_count = buffer.life_guard.ref_count.take().unwrap();
534                    let last_submit_index = buffer.life_guard.life_count();
535                    (ref_count, last_submit_index, buffer.device_id.value)
536                }
537                Err(InvalidId) => {
538                    hub.buffers.unregister_locked(buffer_id, &mut *buffer_guard);
539                    return;
540                }
541            }
542        };
543
544        let (device_guard, mut token) = hub.devices.read(&mut token);
545        let device = &device_guard[device_id];
546        {
547            let mut life_lock = device.lock_life(&mut token);
548            if device.pending_writes.dst_buffers.contains(&buffer_id) {
549                life_lock.future_suspected_buffers.push(Stored {
550                    value: id::Valid(buffer_id),
551                    ref_count,
552                });
553            } else {
554                drop(ref_count);
555                life_lock
556                    .suspected_resources
557                    .buffers
558                    .push(id::Valid(buffer_id));
559            }
560        }
561
562        if wait {
563            match device.wait_for_submit(last_submit_index, &mut token) {
564                Ok(()) => (),
565                Err(e) => log::error!("Failed to wait for buffer {:?}: {:?}", buffer_id, e),
566            }
567        }
568    }
569
570    pub fn device_create_texture<A: HalApi>(
571        &self,
572        device_id: DeviceId,
573        desc: &resource::TextureDescriptor,
574        id_in: Input<G, id::TextureId>,
575    ) -> (id::TextureId, Option<resource::CreateTextureError>) {
576        profiling::scope!("Device::create_texture");
577
578        let hub = A::hub(self);
579        let mut token = Token::root();
580        let fid = hub.textures.prepare(id_in);
581
582        let (adapter_guard, mut token) = hub.adapters.read(&mut token);
583        let (device_guard, mut token) = hub.devices.read(&mut token);
584        let error = loop {
585            let device = match device_guard.get(device_id) {
586                Ok(device) => device,
587                Err(_) => break DeviceError::Invalid.into(),
588            };
589            #[cfg(feature = "trace")]
590            if let Some(ref trace) = device.trace {
591                trace
592                    .lock()
593                    .add(trace::Action::CreateTexture(fid.id(), desc.clone()));
594            }
595
596            let adapter = &adapter_guard[device.adapter_id.value];
597            let texture = match device.create_texture(device_id, adapter, desc) {
598                Ok(texture) => texture,
599                Err(error) => break error,
600            };
601            let ref_count = texture.life_guard.add_ref();
602
603            let id = fid.assign(texture, &mut token);
604            log::info!("Created texture {:?} with {:?}", id, desc);
605
606            device.trackers.lock().textures.insert_single(
607                id.0,
608                ref_count,
609                hal::TextureUses::UNINITIALIZED,
610            );
611
612            return (id.0, None);
613        };
614
615        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
616        (id, Some(error))
617    }
618
619    /// # Safety
620    ///
621    /// - `hal_texture` must be created from `device_id` corresponding raw handle.
622    /// - `hal_texture` must be created respecting `desc`
623    /// - `hal_texture` must be initialized
624    pub unsafe fn create_texture_from_hal<A: HalApi>(
625        &self,
626        hal_texture: A::Texture,
627        device_id: DeviceId,
628        desc: &resource::TextureDescriptor,
629        id_in: Input<G, id::TextureId>,
630    ) -> (id::TextureId, Option<resource::CreateTextureError>) {
631        profiling::scope!("Device::create_texture");
632
633        let hub = A::hub(self);
634        let mut token = Token::root();
635        let fid = hub.textures.prepare(id_in);
636
637        let (adapter_guard, mut token) = hub.adapters.read(&mut token);
638        let (device_guard, mut token) = hub.devices.read(&mut token);
639        let error = loop {
640            let device = match device_guard.get(device_id) {
641                Ok(device) => device,
642                Err(_) => break DeviceError::Invalid.into(),
643            };
644
645            // NB: Any change done through the raw texture handle will not be
646            // recorded in the replay
647            #[cfg(feature = "trace")]
648            if let Some(ref trace) = device.trace {
649                trace
650                    .lock()
651                    .add(trace::Action::CreateTexture(fid.id(), desc.clone()));
652            }
653
654            let adapter = &adapter_guard[device.adapter_id.value];
655
656            let format_features = match device
657                .describe_format_features(adapter, desc.format)
658                .map_err(|error| resource::CreateTextureError::MissingFeatures(desc.format, error))
659            {
660                Ok(features) => features,
661                Err(error) => break error,
662            };
663
664            let mut texture = device.create_texture_from_hal(
665                hal_texture,
666                conv::map_texture_usage(desc.usage, desc.format.into()),
667                device_id,
668                desc,
669                format_features,
670                TextureClearMode::None,
671            );
672            if desc.usage.contains(wgt::TextureUsages::COPY_DST) {
673                texture.hal_usage |= hal::TextureUses::COPY_DST;
674            }
675
676            texture.initialization_status = TextureInitTracker::new(desc.mip_level_count, 0);
677
678            let ref_count = texture.life_guard.add_ref();
679
680            let id = fid.assign(texture, &mut token);
681            log::info!("Created texture {:?} with {:?}", id, desc);
682
683            device.trackers.lock().textures.insert_single(
684                id.0,
685                ref_count,
686                hal::TextureUses::UNINITIALIZED,
687            );
688
689            return (id.0, None);
690        };
691
692        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
693        (id, Some(error))
694    }
695
696    /// # Safety
697    ///
698    /// - `hal_buffer` must be created from `device_id` corresponding raw handle.
699    /// - `hal_buffer` must be created respecting `desc`
700    /// - `hal_buffer` must be initialized
701    pub unsafe fn create_buffer_from_hal<A: HalApi>(
702        &self,
703        hal_buffer: A::Buffer,
704        device_id: DeviceId,
705        desc: &resource::BufferDescriptor,
706        id_in: Input<G, id::BufferId>,
707    ) -> (id::BufferId, Option<resource::CreateBufferError>) {
708        profiling::scope!("Device::create_buffer");
709
710        let hub = A::hub(self);
711        let mut token = Token::root();
712        let fid = hub.buffers.prepare(id_in);
713
714        let (device_guard, mut token) = hub.devices.read(&mut token);
715        let error = loop {
716            let device = match device_guard.get(device_id) {
717                Ok(device) => device,
718                Err(_) => break DeviceError::Invalid.into(),
719            };
720
721            // NB: Any change done through the raw buffer handle will not be
722            // recorded in the replay
723            #[cfg(feature = "trace")]
724            if let Some(ref trace) = device.trace {
725                trace
726                    .lock()
727                    .add(trace::Action::CreateBuffer(fid.id(), desc.clone()));
728            }
729
730            let mut buffer = device.create_buffer_from_hal(hal_buffer, device_id, desc);
731
732            // Assume external buffers are initialized
733            buffer.initialization_status = crate::init_tracker::BufferInitTracker::new(0);
734
735            let ref_count = buffer.life_guard.add_ref();
736
737            let id = fid.assign(buffer, &mut token);
738            log::info!("Created buffer {:?} with {:?}", id, desc);
739
740            device
741                .trackers
742                .lock()
743                .buffers
744                .insert_single(id, ref_count, hal::BufferUses::empty());
745
746            return (id.0, None);
747        };
748
749        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
750        (id, Some(error))
751    }
752
753    pub fn texture_label<A: HalApi>(&self, id: id::TextureId) -> String {
754        A::hub(self).textures.label_for_resource(id)
755    }
756
757    pub fn texture_destroy<A: HalApi>(
758        &self,
759        texture_id: id::TextureId,
760    ) -> Result<(), resource::DestroyError> {
761        profiling::scope!("Texture::destroy");
762
763        let hub = A::hub(self);
764        let mut token = Token::root();
765
766        //TODO: lock pending writes separately, keep the device read-only
767        let (mut device_guard, mut token) = hub.devices.write(&mut token);
768
769        log::info!("Buffer {:?} is destroyed", texture_id);
770        let (mut texture_guard, _) = hub.textures.write(&mut token);
771        let texture = texture_guard
772            .get_mut(texture_id)
773            .map_err(|_| resource::DestroyError::Invalid)?;
774
775        let device = &mut device_guard[texture.device_id.value];
776
777        #[cfg(feature = "trace")]
778        if let Some(ref trace) = device.trace {
779            trace.lock().add(trace::Action::FreeTexture(texture_id));
780        }
781
782        let last_submit_index = texture.life_guard.life_count();
783
784        let clear_views = match std::mem::replace(&mut texture.clear_mode, TextureClearMode::None) {
785            TextureClearMode::BufferCopy => SmallVec::new(),
786            TextureClearMode::RenderPass { clear_views, .. } => clear_views,
787            TextureClearMode::None => SmallVec::new(),
788        };
789
790        match texture.inner {
791            resource::TextureInner::Native { ref mut raw } => {
792                let raw = raw.take().ok_or(resource::DestroyError::AlreadyDestroyed)?;
793                let temp = queue::TempResource::Texture(raw, clear_views);
794
795                if device.pending_writes.dst_textures.contains(&texture_id) {
796                    device.pending_writes.temp_resources.push(temp);
797                } else {
798                    drop(texture_guard);
799                    device
800                        .lock_life(&mut token)
801                        .schedule_resource_destruction(temp, last_submit_index);
802                }
803            }
804            resource::TextureInner::Surface { .. } => {
805                for clear_view in clear_views {
806                    unsafe {
807                        device.raw.destroy_texture_view(clear_view);
808                    }
809                }
810                // TODO?
811            }
812        }
813
814        Ok(())
815    }
816
817    pub fn texture_drop<A: HalApi>(&self, texture_id: id::TextureId, wait: bool) {
818        profiling::scope!("Texture::drop");
819        log::debug!("texture {:?} is dropped", texture_id);
820
821        let hub = A::hub(self);
822        let mut token = Token::root();
823
824        let (ref_count, last_submit_index, device_id) = {
825            let (mut texture_guard, _) = hub.textures.write(&mut token);
826            match texture_guard.get_mut(texture_id) {
827                Ok(texture) => {
828                    let ref_count = texture.life_guard.ref_count.take().unwrap();
829                    let last_submit_index = texture.life_guard.life_count();
830                    (ref_count, last_submit_index, texture.device_id.value)
831                }
832                Err(InvalidId) => {
833                    hub.textures
834                        .unregister_locked(texture_id, &mut *texture_guard);
835                    return;
836                }
837            }
838        };
839
840        let (device_guard, mut token) = hub.devices.read(&mut token);
841        let device = &device_guard[device_id];
842        {
843            let mut life_lock = device.lock_life(&mut token);
844            if device.pending_writes.dst_textures.contains(&texture_id) {
845                life_lock.future_suspected_textures.push(Stored {
846                    value: id::Valid(texture_id),
847                    ref_count,
848                });
849            } else {
850                drop(ref_count);
851                life_lock
852                    .suspected_resources
853                    .textures
854                    .push(id::Valid(texture_id));
855            }
856        }
857
858        if wait {
859            match device.wait_for_submit(last_submit_index, &mut token) {
860                Ok(()) => (),
861                Err(e) => log::error!("Failed to wait for texture {:?}: {:?}", texture_id, e),
862            }
863        }
864    }
865
866    pub fn texture_create_view<A: HalApi>(
867        &self,
868        texture_id: id::TextureId,
869        desc: &resource::TextureViewDescriptor,
870        id_in: Input<G, id::TextureViewId>,
871    ) -> (id::TextureViewId, Option<resource::CreateTextureViewError>) {
872        profiling::scope!("Texture::create_view");
873
874        let hub = A::hub(self);
875        let mut token = Token::root();
876        let fid = hub.texture_views.prepare(id_in);
877
878        let (device_guard, mut token) = hub.devices.read(&mut token);
879        let (texture_guard, mut token) = hub.textures.read(&mut token);
880        let error = loop {
881            let texture = match texture_guard.get(texture_id) {
882                Ok(texture) => texture,
883                Err(_) => break resource::CreateTextureViewError::InvalidTexture,
884            };
885            let device = &device_guard[texture.device_id.value];
886            #[cfg(feature = "trace")]
887            if let Some(ref trace) = device.trace {
888                trace.lock().add(trace::Action::CreateTextureView {
889                    id: fid.id(),
890                    parent_id: texture_id,
891                    desc: desc.clone(),
892                });
893            }
894
895            let view = match device.create_texture_view(texture, texture_id, desc) {
896                Ok(view) => view,
897                Err(e) => break e,
898            };
899            let ref_count = view.life_guard.add_ref();
900            let id = fid.assign(view, &mut token);
901
902            device.trackers.lock().views.insert_single(id, ref_count);
903            return (id.0, None);
904        };
905
906        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
907        (id, Some(error))
908    }
909
910    pub fn texture_view_label<A: HalApi>(&self, id: id::TextureViewId) -> String {
911        A::hub(self).texture_views.label_for_resource(id)
912    }
913
914    pub fn texture_view_drop<A: HalApi>(
915        &self,
916        texture_view_id: id::TextureViewId,
917        wait: bool,
918    ) -> Result<(), resource::TextureViewDestroyError> {
919        profiling::scope!("TextureView::drop");
920        log::debug!("texture view {:?} is dropped", texture_view_id);
921
922        let hub = A::hub(self);
923        let mut token = Token::root();
924
925        let (last_submit_index, device_id) = {
926            let (mut texture_view_guard, _) = hub.texture_views.write(&mut token);
927
928            match texture_view_guard.get_mut(texture_view_id) {
929                Ok(view) => {
930                    let _ref_count = view.life_guard.ref_count.take();
931                    let last_submit_index = view.life_guard.life_count();
932                    (last_submit_index, view.device_id.value)
933                }
934                Err(InvalidId) => {
935                    hub.texture_views
936                        .unregister_locked(texture_view_id, &mut *texture_view_guard);
937                    return Ok(());
938                }
939            }
940        };
941
942        let (device_guard, mut token) = hub.devices.read(&mut token);
943        let device = &device_guard[device_id];
944        device
945            .lock_life(&mut token)
946            .suspected_resources
947            .texture_views
948            .push(id::Valid(texture_view_id));
949
950        if wait {
951            match device.wait_for_submit(last_submit_index, &mut token) {
952                Ok(()) => (),
953                Err(e) => log::error!(
954                    "Failed to wait for texture view {:?}: {:?}",
955                    texture_view_id,
956                    e
957                ),
958            }
959        }
960        Ok(())
961    }
962
963    pub fn device_create_sampler<A: HalApi>(
964        &self,
965        device_id: DeviceId,
966        desc: &resource::SamplerDescriptor,
967        id_in: Input<G, id::SamplerId>,
968    ) -> (id::SamplerId, Option<resource::CreateSamplerError>) {
969        profiling::scope!("Device::create_sampler");
970
971        let hub = A::hub(self);
972        let mut token = Token::root();
973        let fid = hub.samplers.prepare(id_in);
974
975        let (device_guard, mut token) = hub.devices.read(&mut token);
976        let error = loop {
977            let device = match device_guard.get(device_id) {
978                Ok(device) => device,
979                Err(_) => break DeviceError::Invalid.into(),
980            };
981            #[cfg(feature = "trace")]
982            if let Some(ref trace) = device.trace {
983                trace
984                    .lock()
985                    .add(trace::Action::CreateSampler(fid.id(), desc.clone()));
986            }
987
988            let sampler = match device.create_sampler(device_id, desc) {
989                Ok(sampler) => sampler,
990                Err(e) => break e,
991            };
992            let ref_count = sampler.life_guard.add_ref();
993            let id = fid.assign(sampler, &mut token);
994
995            device.trackers.lock().samplers.insert_single(id, ref_count);
996
997            return (id.0, None);
998        };
999
1000        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
1001        (id, Some(error))
1002    }
1003
1004    pub fn sampler_label<A: HalApi>(&self, id: id::SamplerId) -> String {
1005        A::hub(self).samplers.label_for_resource(id)
1006    }
1007
1008    pub fn sampler_drop<A: HalApi>(&self, sampler_id: id::SamplerId) {
1009        profiling::scope!("Sampler::drop");
1010        log::debug!("sampler {:?} is dropped", sampler_id);
1011
1012        let hub = A::hub(self);
1013        let mut token = Token::root();
1014
1015        let device_id = {
1016            let (mut sampler_guard, _) = hub.samplers.write(&mut token);
1017            match sampler_guard.get_mut(sampler_id) {
1018                Ok(sampler) => {
1019                    sampler.life_guard.ref_count.take();
1020                    sampler.device_id.value
1021                }
1022                Err(InvalidId) => {
1023                    hub.samplers
1024                        .unregister_locked(sampler_id, &mut *sampler_guard);
1025                    return;
1026                }
1027            }
1028        };
1029
1030        let (device_guard, mut token) = hub.devices.read(&mut token);
1031        device_guard[device_id]
1032            .lock_life(&mut token)
1033            .suspected_resources
1034            .samplers
1035            .push(id::Valid(sampler_id));
1036    }
1037
1038    pub fn device_create_bind_group_layout<A: HalApi>(
1039        &self,
1040        device_id: DeviceId,
1041        desc: &binding_model::BindGroupLayoutDescriptor,
1042        id_in: Input<G, id::BindGroupLayoutId>,
1043    ) -> (
1044        id::BindGroupLayoutId,
1045        Option<binding_model::CreateBindGroupLayoutError>,
1046    ) {
1047        profiling::scope!("Device::create_bind_group_layout");
1048
1049        let mut token = Token::root();
1050        let hub = A::hub(self);
1051        let fid = hub.bind_group_layouts.prepare(id_in);
1052
1053        let error = 'outer: loop {
1054            let (device_guard, mut token) = hub.devices.read(&mut token);
1055            let device = match device_guard.get(device_id) {
1056                Ok(device) => device,
1057                Err(_) => break DeviceError::Invalid.into(),
1058            };
1059            #[cfg(feature = "trace")]
1060            if let Some(ref trace) = device.trace {
1061                trace
1062                    .lock()
1063                    .add(trace::Action::CreateBindGroupLayout(fid.id(), desc.clone()));
1064            }
1065
1066            let mut entry_map = FastHashMap::default();
1067            for entry in desc.entries.iter() {
1068                if entry.binding > device.limits.max_bindings_per_bind_group {
1069                    break 'outer binding_model::CreateBindGroupLayoutError::InvalidBindingIndex {
1070                        binding: entry.binding,
1071                        maximum: device.limits.max_bindings_per_bind_group,
1072                    };
1073                }
1074                if entry_map.insert(entry.binding, *entry).is_some() {
1075                    break 'outer binding_model::CreateBindGroupLayoutError::ConflictBinding(
1076                        entry.binding,
1077                    );
1078                }
1079            }
1080
1081            // If there is an equivalent BGL, just bump the refcount and return it.
1082            // This is only applicable for identity filters that are generating new IDs,
1083            // so their inputs are `PhantomData` of size 0.
1084            if mem::size_of::<Input<G, id::BindGroupLayoutId>>() == 0 {
1085                let (bgl_guard, _) = hub.bind_group_layouts.read(&mut token);
1086                if let Some(id) =
1087                    Device::deduplicate_bind_group_layout(device_id, &entry_map, &*bgl_guard)
1088                {
1089                    return (id, None);
1090                }
1091            }
1092
1093            let layout = match device.create_bind_group_layout(
1094                device_id,
1095                desc.label.borrow_option(),
1096                entry_map,
1097            ) {
1098                Ok(layout) => layout,
1099                Err(e) => break e,
1100            };
1101
1102            let id = fid.assign(layout, &mut token);
1103            return (id.0, None);
1104        };
1105
1106        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
1107        (id, Some(error))
1108    }
1109
1110    pub fn bind_group_layout_label<A: HalApi>(&self, id: id::BindGroupLayoutId) -> String {
1111        A::hub(self).bind_group_layouts.label_for_resource(id)
1112    }
1113
1114    pub fn bind_group_layout_drop<A: HalApi>(&self, bind_group_layout_id: id::BindGroupLayoutId) {
1115        profiling::scope!("BindGroupLayout::drop");
1116        log::debug!("bind group layout {:?} is dropped", bind_group_layout_id);
1117
1118        let hub = A::hub(self);
1119        let mut token = Token::root();
1120        let device_id = {
1121            let (mut bind_group_layout_guard, _) = hub.bind_group_layouts.write(&mut token);
1122            match bind_group_layout_guard.get_mut(bind_group_layout_id) {
1123                Ok(layout) => layout.device_id.value,
1124                Err(InvalidId) => {
1125                    hub.bind_group_layouts
1126                        .unregister_locked(bind_group_layout_id, &mut *bind_group_layout_guard);
1127                    return;
1128                }
1129            }
1130        };
1131
1132        let (device_guard, mut token) = hub.devices.read(&mut token);
1133        device_guard[device_id]
1134            .lock_life(&mut token)
1135            .suspected_resources
1136            .bind_group_layouts
1137            .push(id::Valid(bind_group_layout_id));
1138    }
1139
1140    pub fn device_create_pipeline_layout<A: HalApi>(
1141        &self,
1142        device_id: DeviceId,
1143        desc: &binding_model::PipelineLayoutDescriptor,
1144        id_in: Input<G, id::PipelineLayoutId>,
1145    ) -> (
1146        id::PipelineLayoutId,
1147        Option<binding_model::CreatePipelineLayoutError>,
1148    ) {
1149        profiling::scope!("Device::create_pipeline_layout");
1150
1151        let hub = A::hub(self);
1152        let mut token = Token::root();
1153        let fid = hub.pipeline_layouts.prepare(id_in);
1154
1155        let (device_guard, mut token) = hub.devices.read(&mut token);
1156        let error = loop {
1157            let device = match device_guard.get(device_id) {
1158                Ok(device) => device,
1159                Err(_) => break DeviceError::Invalid.into(),
1160            };
1161            #[cfg(feature = "trace")]
1162            if let Some(ref trace) = device.trace {
1163                trace
1164                    .lock()
1165                    .add(trace::Action::CreatePipelineLayout(fid.id(), desc.clone()));
1166            }
1167
1168            let layout = {
1169                let (bgl_guard, _) = hub.bind_group_layouts.read(&mut token);
1170                match device.create_pipeline_layout(device_id, desc, &*bgl_guard) {
1171                    Ok(layout) => layout,
1172                    Err(e) => break e,
1173                }
1174            };
1175
1176            let id = fid.assign(layout, &mut token);
1177            return (id.0, None);
1178        };
1179
1180        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
1181        (id, Some(error))
1182    }
1183
1184    pub fn pipeline_layout_label<A: HalApi>(&self, id: id::PipelineLayoutId) -> String {
1185        A::hub(self).pipeline_layouts.label_for_resource(id)
1186    }
1187
1188    pub fn pipeline_layout_drop<A: HalApi>(&self, pipeline_layout_id: id::PipelineLayoutId) {
1189        profiling::scope!("PipelineLayout::drop");
1190        log::debug!("pipeline layout {:?} is dropped", pipeline_layout_id);
1191
1192        let hub = A::hub(self);
1193        let mut token = Token::root();
1194        let (device_id, ref_count) = {
1195            let (mut pipeline_layout_guard, _) = hub.pipeline_layouts.write(&mut token);
1196            match pipeline_layout_guard.get_mut(pipeline_layout_id) {
1197                Ok(layout) => (
1198                    layout.device_id.value,
1199                    layout.life_guard.ref_count.take().unwrap(),
1200                ),
1201                Err(InvalidId) => {
1202                    hub.pipeline_layouts
1203                        .unregister_locked(pipeline_layout_id, &mut *pipeline_layout_guard);
1204                    return;
1205                }
1206            }
1207        };
1208
1209        let (device_guard, mut token) = hub.devices.read(&mut token);
1210        device_guard[device_id]
1211            .lock_life(&mut token)
1212            .suspected_resources
1213            .pipeline_layouts
1214            .push(Stored {
1215                value: id::Valid(pipeline_layout_id),
1216                ref_count,
1217            });
1218    }
1219
1220    pub fn device_create_bind_group<A: HalApi>(
1221        &self,
1222        device_id: DeviceId,
1223        desc: &binding_model::BindGroupDescriptor,
1224        id_in: Input<G, id::BindGroupId>,
1225    ) -> (id::BindGroupId, Option<binding_model::CreateBindGroupError>) {
1226        profiling::scope!("Device::create_bind_group");
1227
1228        let hub = A::hub(self);
1229        let mut token = Token::root();
1230        let fid = hub.bind_groups.prepare(id_in);
1231
1232        let (device_guard, mut token) = hub.devices.read(&mut token);
1233        let (bind_group_layout_guard, mut token) = hub.bind_group_layouts.read(&mut token);
1234
1235        let error = loop {
1236            let device = match device_guard.get(device_id) {
1237                Ok(device) => device,
1238                Err(_) => break DeviceError::Invalid.into(),
1239            };
1240            #[cfg(feature = "trace")]
1241            if let Some(ref trace) = device.trace {
1242                trace
1243                    .lock()
1244                    .add(trace::Action::CreateBindGroup(fid.id(), desc.clone()));
1245            }
1246
1247            let bind_group_layout = match bind_group_layout_guard.get(desc.layout) {
1248                Ok(layout) => layout,
1249                Err(_) => break binding_model::CreateBindGroupError::InvalidLayout,
1250            };
1251            let bind_group =
1252                match device.create_bind_group(device_id, bind_group_layout, desc, hub, &mut token)
1253                {
1254                    Ok(bind_group) => bind_group,
1255                    Err(e) => break e,
1256                };
1257            let ref_count = bind_group.life_guard.add_ref();
1258
1259            let id = fid.assign(bind_group, &mut token);
1260            log::debug!("Bind group {:?}", id,);
1261
1262            device
1263                .trackers
1264                .lock()
1265                .bind_groups
1266                .insert_single(id, ref_count);
1267            return (id.0, None);
1268        };
1269
1270        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
1271        (id, Some(error))
1272    }
1273
1274    pub fn bind_group_label<A: HalApi>(&self, id: id::BindGroupId) -> String {
1275        A::hub(self).bind_groups.label_for_resource(id)
1276    }
1277
1278    pub fn bind_group_drop<A: HalApi>(&self, bind_group_id: id::BindGroupId) {
1279        profiling::scope!("BindGroup::drop");
1280        log::debug!("bind group {:?} is dropped", bind_group_id);
1281
1282        let hub = A::hub(self);
1283        let mut token = Token::root();
1284
1285        let device_id = {
1286            let (mut bind_group_guard, _) = hub.bind_groups.write(&mut token);
1287            match bind_group_guard.get_mut(bind_group_id) {
1288                Ok(bind_group) => {
1289                    bind_group.life_guard.ref_count.take();
1290                    bind_group.device_id.value
1291                }
1292                Err(InvalidId) => {
1293                    hub.bind_groups
1294                        .unregister_locked(bind_group_id, &mut *bind_group_guard);
1295                    return;
1296                }
1297            }
1298        };
1299
1300        let (device_guard, mut token) = hub.devices.read(&mut token);
1301        device_guard[device_id]
1302            .lock_life(&mut token)
1303            .suspected_resources
1304            .bind_groups
1305            .push(id::Valid(bind_group_id));
1306    }
1307
1308    pub fn device_create_shader_module<A: HalApi>(
1309        &self,
1310        device_id: DeviceId,
1311        desc: &pipeline::ShaderModuleDescriptor,
1312        source: pipeline::ShaderModuleSource,
1313        id_in: Input<G, id::ShaderModuleId>,
1314    ) -> (
1315        id::ShaderModuleId,
1316        Option<pipeline::CreateShaderModuleError>,
1317    ) {
1318        profiling::scope!("Device::create_shader_module");
1319
1320        let hub = A::hub(self);
1321        let mut token = Token::root();
1322        let fid = hub.shader_modules.prepare(id_in);
1323
1324        let (device_guard, mut token) = hub.devices.read(&mut token);
1325        let error = loop {
1326            let device = match device_guard.get(device_id) {
1327                Ok(device) => device,
1328                Err(_) => break DeviceError::Invalid.into(),
1329            };
1330            #[cfg(feature = "trace")]
1331            if let Some(ref trace) = device.trace {
1332                let mut trace = trace.lock();
1333                let data = match source {
1334                    #[cfg(feature = "wgsl")]
1335                    pipeline::ShaderModuleSource::Wgsl(ref code) => {
1336                        trace.make_binary("wgsl", code.as_bytes())
1337                    }
1338                    pipeline::ShaderModuleSource::Naga(ref module) => {
1339                        let string =
1340                            ron::ser::to_string_pretty(module, ron::ser::PrettyConfig::default())
1341                                .unwrap();
1342                        trace.make_binary("ron", string.as_bytes())
1343                    }
1344                    pipeline::ShaderModuleSource::Dummy(_) => {
1345                        panic!("found `ShaderModuleSource::Dummy`")
1346                    }
1347                };
1348                trace.add(trace::Action::CreateShaderModule {
1349                    id: fid.id(),
1350                    desc: desc.clone(),
1351                    data,
1352                });
1353            };
1354
1355            let shader = match device.create_shader_module(device_id, desc, source) {
1356                Ok(shader) => shader,
1357                Err(e) => break e,
1358            };
1359            let id = fid.assign(shader, &mut token);
1360            return (id.0, None);
1361        };
1362
1363        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
1364        (id, Some(error))
1365    }
1366
1367    // Unsafe-ness of internal calls has little to do with unsafe-ness of this.
1368    #[allow(unused_unsafe)]
1369    /// # Safety
1370    ///
1371    /// This function passes SPIR-V binary to the backend as-is and can potentially result in a
1372    /// driver crash.
1373    pub unsafe fn device_create_shader_module_spirv<A: HalApi>(
1374        &self,
1375        device_id: DeviceId,
1376        desc: &pipeline::ShaderModuleDescriptor,
1377        source: Cow<[u32]>,
1378        id_in: Input<G, id::ShaderModuleId>,
1379    ) -> (
1380        id::ShaderModuleId,
1381        Option<pipeline::CreateShaderModuleError>,
1382    ) {
1383        profiling::scope!("Device::create_shader_module");
1384
1385        let hub = A::hub(self);
1386        let mut token = Token::root();
1387        let fid = hub.shader_modules.prepare(id_in);
1388
1389        let (device_guard, mut token) = hub.devices.read(&mut token);
1390        let error = loop {
1391            let device = match device_guard.get(device_id) {
1392                Ok(device) => device,
1393                Err(_) => break DeviceError::Invalid.into(),
1394            };
1395            #[cfg(feature = "trace")]
1396            if let Some(ref trace) = device.trace {
1397                let mut trace = trace.lock();
1398                let data = trace.make_binary("spv", unsafe {
1399                    std::slice::from_raw_parts(source.as_ptr() as *const u8, source.len() * 4)
1400                });
1401                trace.add(trace::Action::CreateShaderModule {
1402                    id: fid.id(),
1403                    desc: desc.clone(),
1404                    data,
1405                });
1406            };
1407
1408            let shader =
1409                match unsafe { device.create_shader_module_spirv(device_id, desc, &source) } {
1410                    Ok(shader) => shader,
1411                    Err(e) => break e,
1412                };
1413            let id = fid.assign(shader, &mut token);
1414            return (id.0, None);
1415        };
1416
1417        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
1418        (id, Some(error))
1419    }
1420
1421    pub fn shader_module_label<A: HalApi>(&self, id: id::ShaderModuleId) -> String {
1422        A::hub(self).shader_modules.label_for_resource(id)
1423    }
1424
1425    pub fn shader_module_drop<A: HalApi>(&self, shader_module_id: id::ShaderModuleId) {
1426        profiling::scope!("ShaderModule::drop");
1427        log::debug!("shader module {:?} is dropped", shader_module_id);
1428
1429        let hub = A::hub(self);
1430        let mut token = Token::root();
1431        let (device_guard, mut token) = hub.devices.read(&mut token);
1432        let (module, _) = hub.shader_modules.unregister(shader_module_id, &mut token);
1433        if let Some(module) = module {
1434            let device = &device_guard[module.device_id.value];
1435            #[cfg(feature = "trace")]
1436            if let Some(ref trace) = device.trace {
1437                trace
1438                    .lock()
1439                    .add(trace::Action::DestroyShaderModule(shader_module_id));
1440            }
1441            unsafe {
1442                device.raw.destroy_shader_module(module.raw);
1443            }
1444        }
1445    }
1446
1447    pub fn device_create_command_encoder<A: HalApi>(
1448        &self,
1449        device_id: DeviceId,
1450        desc: &wgt::CommandEncoderDescriptor<Label>,
1451        id_in: Input<G, id::CommandEncoderId>,
1452    ) -> (id::CommandEncoderId, Option<DeviceError>) {
1453        profiling::scope!("Device::create_command_encoder");
1454
1455        let hub = A::hub(self);
1456        let mut token = Token::root();
1457        let fid = hub.command_buffers.prepare(id_in);
1458
1459        let (device_guard, mut token) = hub.devices.read(&mut token);
1460        let error = loop {
1461            let device = match device_guard.get(device_id) {
1462                Ok(device) => device,
1463                Err(_) => break DeviceError::Invalid,
1464            };
1465            let dev_stored = Stored {
1466                value: id::Valid(device_id),
1467                ref_count: device.life_guard.add_ref(),
1468            };
1469            let encoder = match device
1470                .command_allocator
1471                .lock()
1472                .acquire_encoder(&device.raw, &device.queue)
1473            {
1474                Ok(raw) => raw,
1475                Err(_) => break DeviceError::OutOfMemory,
1476            };
1477            let command_buffer = command::CommandBuffer::new(
1478                encoder,
1479                dev_stored,
1480                device.limits.clone(),
1481                device.downlevel.clone(),
1482                device.features,
1483                #[cfg(feature = "trace")]
1484                device.trace.is_some(),
1485                &desc.label,
1486            );
1487
1488            let id = fid.assign(command_buffer, &mut token);
1489            return (id.0, None);
1490        };
1491
1492        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
1493        (id, Some(error))
1494    }
1495
1496    pub fn command_buffer_label<A: HalApi>(&self, id: id::CommandBufferId) -> String {
1497        A::hub(self).command_buffers.label_for_resource(id)
1498    }
1499
1500    pub fn command_encoder_drop<A: HalApi>(&self, command_encoder_id: id::CommandEncoderId) {
1501        profiling::scope!("CommandEncoder::drop");
1502        log::debug!("command encoder {:?} is dropped", command_encoder_id);
1503
1504        let hub = A::hub(self);
1505        let mut token = Token::root();
1506
1507        let (mut device_guard, mut token) = hub.devices.write(&mut token);
1508        let (cmdbuf, _) = hub
1509            .command_buffers
1510            .unregister(command_encoder_id, &mut token);
1511        if let Some(cmdbuf) = cmdbuf {
1512            let device = &mut device_guard[cmdbuf.device_id.value];
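            // Release the encoder's tracked resources on the owning device before
            // destroying the command buffer itself.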
1513            device.untrack::<G>(hub, &cmdbuf.trackers, &mut token);
1514            device.destroy_command_buffer(cmdbuf);
1515        }
1516    }
1517
1518    pub fn command_buffer_drop<A: HalApi>(&self, command_buffer_id: id::CommandBufferId) {
1519        profiling::scope!("CommandBuffer::drop");
1520        log::debug!("command buffer {:?} is dropped", command_buffer_id);
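        // Command buffers and command encoders share the same storage, so dropping a
        // command buffer is the same operation as dropping its encoder.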
1521        self.command_encoder_drop::<A>(command_buffer_id)
1522    }
1523
1524    pub fn device_create_render_bundle_encoder(
1525        &self,
1526        device_id: DeviceId,
1527        desc: &command::RenderBundleEncoderDescriptor,
1528    ) -> (
1529        id::RenderBundleEncoderId,
1530        Option<command::CreateRenderBundleError>,
1531    ) {
1532        profiling::scope!("Device::create_render_bundle_encoder");
1533        let (encoder, error) = match command::RenderBundleEncoder::new(desc, device_id, None) {
1534            Ok(encoder) => (encoder, None),
1535            Err(e) => (command::RenderBundleEncoder::dummy(device_id), Some(e)),
1536        };
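        // Bundle encoders are not registered in the hub; ownership of the boxed encoder
        // is handed to the caller as a raw pointer.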
1537        (Box::into_raw(Box::new(encoder)), error)
1538    }
1539
1540    pub fn render_bundle_encoder_finish<A: HalApi>(
1541        &self,
1542        bundle_encoder: command::RenderBundleEncoder,
1543        desc: &command::RenderBundleDescriptor,
1544        id_in: Input<G, id::RenderBundleId>,
1545    ) -> (id::RenderBundleId, Option<command::RenderBundleError>) {
1546        profiling::scope!("RenderBundleEncoder::finish");
1547
1548        let hub = A::hub(self);
1549        let mut token = Token::root();
1550        let fid = hub.render_bundles.prepare(id_in);
1551
1552        let (device_guard, mut token) = hub.devices.read(&mut token);
1553        let error = loop {
1554            let device = match device_guard.get(bundle_encoder.parent()) {
1555                Ok(device) => device,
1556                Err(_) => break command::RenderBundleError::INVALID_DEVICE,
1557            };
1558            #[cfg(feature = "trace")]
1559            if let Some(ref trace) = device.trace {
1560                trace.lock().add(trace::Action::CreateRenderBundle {
1561                    id: fid.id(),
1562                    desc: trace::new_render_bundle_encoder_descriptor(
1563                        desc.label.clone(),
1564                        &bundle_encoder.context,
1565                        bundle_encoder.is_depth_read_only,
1566                        bundle_encoder.is_stencil_read_only,
1567                    ),
1568                    base: bundle_encoder.to_base_pass(),
1569                });
1570            }
1571
1572            let render_bundle = match bundle_encoder.finish(desc, device, hub, &mut token) {
1573                Ok(bundle) => bundle,
1574                Err(e) => break e,
1575            };
1576
1577            let ref_count = render_bundle.life_guard.add_ref();
1578            let id = fid.assign(render_bundle, &mut token);
1579            log::debug!("render bundle {:?} is finished", id.0);
1580
1581            device.trackers.lock().bundles.insert_single(id, ref_count);
1582            return (id.0, None);
1583        };
1584
1585        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
1586        (id, Some(error))
1587    }
1588
1589    pub fn render_bundle_label<A: HalApi>(&self, id: id::RenderBundleId) -> String {
1590        A::hub(self).render_bundles.label_for_resource(id)
1591    }
1592
1593    pub fn render_bundle_drop<A: HalApi>(&self, render_bundle_id: id::RenderBundleId) {
1594        profiling::scope!("RenderBundle::drop");
1595        log::debug!("render bundle {:?} is dropped", render_bundle_id);
1596        let hub = A::hub(self);
1597        let mut token = Token::root();
1598
1599        let (device_guard, mut token) = hub.devices.read(&mut token);
1600        let device_id = {
1601            let (mut bundle_guard, _) = hub.render_bundles.write(&mut token);
1602            match bundle_guard.get_mut(render_bundle_id) {
1603                Ok(bundle) => {
1604                    bundle.life_guard.ref_count.take();
1605                    bundle.device_id.value
1606                }
1607                Err(InvalidId) => {
1608                    hub.render_bundles
1609                        .unregister_locked(render_bundle_id, &mut *bundle_guard);
1610                    return;
1611                }
1612            }
1613        };
1614
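        // The bundle is freed later by the device's life tracker; here it is only
        // marked as suspected.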
1615        device_guard[device_id]
1616            .lock_life(&mut token)
1617            .suspected_resources
1618            .render_bundles
1619            .push(id::Valid(render_bundle_id));
1620    }
1621
1622    pub fn device_create_query_set<A: HalApi>(
1623        &self,
1624        device_id: DeviceId,
1625        desc: &resource::QuerySetDescriptor,
1626        id_in: Input<G, id::QuerySetId>,
1627    ) -> (id::QuerySetId, Option<resource::CreateQuerySetError>) {
1628        profiling::scope!("Device::create_query_set");
1629
1630        let hub = A::hub(self);
1631        let mut token = Token::root();
1632        let fid = hub.query_sets.prepare(id_in);
1633
1634        let (device_guard, mut token) = hub.devices.read(&mut token);
1635        let error = loop {
1636            let device = match device_guard.get(device_id) {
1637                Ok(device) => device,
1638                Err(_) => break DeviceError::Invalid.into(),
1639            };
1640            #[cfg(feature = "trace")]
1641            if let Some(ref trace) = device.trace {
1642                trace.lock().add(trace::Action::CreateQuerySet {
1643                    id: fid.id(),
1644                    desc: desc.clone(),
1645                });
1646            }
1647
1648            let query_set = match device.create_query_set(device_id, desc) {
1649                Ok(query_set) => query_set,
1650                Err(err) => break err,
1651            };
1652
1653            let ref_count = query_set.life_guard.add_ref();
1654            let id = fid.assign(query_set, &mut token);
1655
1656            device
1657                .trackers
1658                .lock()
1659                .query_sets
1660                .insert_single(id, ref_count);
1661
1662            return (id.0, None);
1663        };
1664
1665        let id = fid.assign_error("", &mut token);
1666        (id, Some(error))
1667    }
1668
1669    pub fn query_set_drop<A: HalApi>(&self, query_set_id: id::QuerySetId) {
1670        profiling::scope!("QuerySet::drop");
1671        log::debug!("query set {:?} is dropped", query_set_id);
1672
1673        let hub = A::hub(self);
1674        let mut token = Token::root();
1675
1676        let device_id = {
1677            let (mut query_set_guard, _) = hub.query_sets.write(&mut token);
1678            let query_set = query_set_guard.get_mut(query_set_id).unwrap();
1679            query_set.life_guard.ref_count.take();
1680            query_set.device_id.value
1681        };
1682
1683        let (device_guard, mut token) = hub.devices.read(&mut token);
1684        let device = &device_guard[device_id];
1685
1686        #[cfg(feature = "trace")]
1687        if let Some(ref trace) = device.trace {
1688            trace
1689                .lock()
1690                .add(trace::Action::DestroyQuerySet(query_set_id));
1691        }
1692
1693        device
1694            .lock_life(&mut token)
1695            .suspected_resources
1696            .query_sets
1697            .push(id::Valid(query_set_id));
1698    }
1699
1700    pub fn query_set_label<A: HalApi>(&self, id: id::QuerySetId) -> String {
1701        A::hub(self).query_sets.label_for_resource(id)
1702    }
1703
1704    pub fn device_create_render_pipeline<A: HalApi>(
1705        &self,
1706        device_id: DeviceId,
1707        desc: &pipeline::RenderPipelineDescriptor,
1708        id_in: Input<G, id::RenderPipelineId>,
1709        implicit_pipeline_ids: Option<ImplicitPipelineIds<G>>,
1710    ) -> (
1711        id::RenderPipelineId,
1712        Option<pipeline::CreateRenderPipelineError>,
1713    ) {
1714        profiling::scope!("Device::create_render_pipeline");
1715
1716        let hub = A::hub(self);
1717        let mut token = Token::root();
1718
1719        let fid = hub.render_pipelines.prepare(id_in);
1720        let implicit_context = implicit_pipeline_ids.map(|ipi| ipi.prepare(hub));
1721
1722        let (adapter_guard, mut token) = hub.adapters.read(&mut token);
1723        let (device_guard, mut token) = hub.devices.read(&mut token);
1724        let error = loop {
1725            let device = match device_guard.get(device_id) {
1726                Ok(device) => device,
1727                Err(_) => break DeviceError::Invalid.into(),
1728            };
1729            let adapter = &adapter_guard[device.adapter_id.value];
1730            #[cfg(feature = "trace")]
1731            if let Some(ref trace) = device.trace {
1732                trace.lock().add(trace::Action::CreateRenderPipeline {
1733                    id: fid.id(),
1734                    desc: desc.clone(),
1735                    implicit_context: implicit_context.clone(),
1736                });
1737            }
1738
1739            let pipeline = match device.create_render_pipeline(
1740                device_id,
1741                adapter,
1742                desc,
1743                implicit_context,
1744                hub,
1745                &mut token,
1746            ) {
1747                Ok(pair) => pair,
1748                Err(e) => break e,
1749            };
1750            let ref_count = pipeline.life_guard.add_ref();
1751
1752            let id = fid.assign(pipeline, &mut token);
1753            log::info!("Created render pipeline {:?} with {:?}", id, desc);
1754
1755            device
1756                .trackers
1757                .lock()
1758                .render_pipelines
1759                .insert_single(id, ref_count);
1760
1761            return (id.0, None);
1762        };
1763
1764        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
1765        (id, Some(error))
1766    }
1767
1768    /// Get an ID of one of the bind group layouts. The ID adds a refcount,
1769    /// which needs to be released by calling `bind_group_layout_drop`.
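    ///
    /// A minimal sketch (`global`, `pipeline_id`, and `bgl_id_in` are hypothetical values
    /// obtained elsewhere; assumes the Vulkan backend is enabled):
    ///
    /// ```ignore
    /// let (bgl_id, error) = global
    ///     .render_pipeline_get_bind_group_layout::<hal::api::Vulkan>(pipeline_id, 0, bgl_id_in);
    /// assert!(error.is_none());
    /// // ... use `bgl_id`, then release the reference it added:
    /// global.bind_group_layout_drop::<hal::api::Vulkan>(bgl_id);
    /// ```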
1770    pub fn render_pipeline_get_bind_group_layout<A: HalApi>(
1771        &self,
1772        pipeline_id: id::RenderPipelineId,
1773        index: u32,
1774        id_in: Input<G, id::BindGroupLayoutId>,
1775    ) -> (
1776        id::BindGroupLayoutId,
1777        Option<binding_model::GetBindGroupLayoutError>,
1778    ) {
1779        let hub = A::hub(self);
1780        let mut token = Token::root();
1781        let (pipeline_layout_guard, mut token) = hub.pipeline_layouts.read(&mut token);
1782
1783        let error = loop {
1784            let (bgl_guard, mut token) = hub.bind_group_layouts.read(&mut token);
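            // The bind group guard itself is unused; the storage is locked only to
            // advance the token so the pipeline storage can be read next.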
1785            let (_, mut token) = hub.bind_groups.read(&mut token);
1786            let (pipeline_guard, _) = hub.render_pipelines.read(&mut token);
1787
1788            let pipeline = match pipeline_guard.get(pipeline_id) {
1789                Ok(pipeline) => pipeline,
1790                Err(_) => break binding_model::GetBindGroupLayoutError::InvalidPipeline,
1791            };
1792            let id = match pipeline_layout_guard[pipeline.layout_id.value]
1793                .bind_group_layout_ids
1794                .get(index as usize)
1795            {
1796                Some(id) => id,
1797                None => break binding_model::GetBindGroupLayoutError::InvalidGroupIndex(index),
1798            };
1799
1800            bgl_guard[*id].multi_ref_count.inc();
1801            return (id.0, None);
1802        };
1803
1804        let id = hub
1805            .bind_group_layouts
1806            .prepare(id_in)
1807            .assign_error("<derived>", &mut token);
1808        (id, Some(error))
1809    }
1810
1811    pub fn render_pipeline_label<A: HalApi>(&self, id: id::RenderPipelineId) -> String {
1812        A::hub(self).render_pipelines.label_for_resource(id)
1813    }
1814
1815    pub fn render_pipeline_drop<A: HalApi>(&self, render_pipeline_id: id::RenderPipelineId) {
1816        profiling::scope!("RenderPipeline::drop");
1817        log::debug!("render pipeline {:?} is dropped", render_pipeline_id);
1818        let hub = A::hub(self);
1819        let mut token = Token::root();
1820        let (device_guard, mut token) = hub.devices.read(&mut token);
1821
1822        let (device_id, layout_id) = {
1823            let (mut pipeline_guard, _) = hub.render_pipelines.write(&mut token);
1824            match pipeline_guard.get_mut(render_pipeline_id) {
1825                Ok(pipeline) => {
1826                    pipeline.life_guard.ref_count.take();
1827                    (pipeline.device_id.value, pipeline.layout_id.clone())
1828                }
1829                Err(InvalidId) => {
1830                    hub.render_pipelines
1831                        .unregister_locked(render_pipeline_id, &mut *pipeline_guard);
1832                    return;
1833                }
1834            }
1835        };
1836
1837        let mut life_lock = device_guard[device_id].lock_life(&mut token);
1838        life_lock
1839            .suspected_resources
1840            .render_pipelines
1841            .push(id::Valid(render_pipeline_id));
1842        life_lock
1843            .suspected_resources
1844            .pipeline_layouts
1845            .push(layout_id);
1846    }
1847
1848    pub fn device_create_compute_pipeline<A: HalApi>(
1849        &self,
1850        device_id: DeviceId,
1851        desc: &pipeline::ComputePipelineDescriptor,
1852        id_in: Input<G, id::ComputePipelineId>,
1853        implicit_pipeline_ids: Option<ImplicitPipelineIds<G>>,
1854    ) -> (
1855        id::ComputePipelineId,
1856        Option<pipeline::CreateComputePipelineError>,
1857    ) {
1858        profiling::scope!("Device::create_compute_pipeline");
1859
1860        let hub = A::hub(self);
1861        let mut token = Token::root();
1862
1863        let fid = hub.compute_pipelines.prepare(id_in);
1864        let implicit_context = implicit_pipeline_ids.map(|ipi| ipi.prepare(hub));
1865
1866        let (device_guard, mut token) = hub.devices.read(&mut token);
1867        let error = loop {
1868            let device = match device_guard.get(device_id) {
1869                Ok(device) => device,
1870                Err(_) => break DeviceError::Invalid.into(),
1871            };
1872            #[cfg(feature = "trace")]
1873            if let Some(ref trace) = device.trace {
1874                trace.lock().add(trace::Action::CreateComputePipeline {
1875                    id: fid.id(),
1876                    desc: desc.clone(),
1877                    implicit_context: implicit_context.clone(),
1878                });
1879            }
1880
1881            let pipeline = match device.create_compute_pipeline(
1882                device_id,
1883                desc,
1884                implicit_context,
1885                hub,
1886                &mut token,
1887            ) {
1888                Ok(pair) => pair,
1889                Err(e) => break e,
1890            };
1891            let ref_count = pipeline.life_guard.add_ref();
1892
1893            let id = fid.assign(pipeline, &mut token);
1894            log::info!("Created compute pipeline {:?} with {:?}", id, desc);
1895
1896            device
1897                .trackers
1898                .lock()
1899                .compute_pipelines
1900                .insert_single(id, ref_count);
1901            return (id.0, None);
1902        };
1903
1904        let id = fid.assign_error(desc.label.borrow_or_default(), &mut token);
1905        (id, Some(error))
1906    }
1907
1908    /// Get an ID of one of the bind group layouts. The ID adds a refcount,
1909    /// which needs to be released by calling `bind_group_layout_drop`.
1910    pub fn compute_pipeline_get_bind_group_layout<A: HalApi>(
1911        &self,
1912        pipeline_id: id::ComputePipelineId,
1913        index: u32,
1914        id_in: Input<G, id::BindGroupLayoutId>,
1915    ) -> (
1916        id::BindGroupLayoutId,
1917        Option<binding_model::GetBindGroupLayoutError>,
1918    ) {
1919        let hub = A::hub(self);
1920        let mut token = Token::root();
1921        let (pipeline_layout_guard, mut token) = hub.pipeline_layouts.read(&mut token);
1922
1923        let error = loop {
1924            let (bgl_guard, mut token) = hub.bind_group_layouts.read(&mut token);
1925            let (_, mut token) = hub.bind_groups.read(&mut token);
1926            let (pipeline_guard, _) = hub.compute_pipelines.read(&mut token);
1927
1928            let pipeline = match pipeline_guard.get(pipeline_id) {
1929                Ok(pipeline) => pipeline,
1930                Err(_) => break binding_model::GetBindGroupLayoutError::InvalidPipeline,
1931            };
1932            let id = match pipeline_layout_guard[pipeline.layout_id.value]
1933                .bind_group_layout_ids
1934                .get(index as usize)
1935            {
1936                Some(id) => id,
1937                None => break binding_model::GetBindGroupLayoutError::InvalidGroupIndex(index),
1938            };
1939
1940            bgl_guard[*id].multi_ref_count.inc();
1941            return (id.0, None);
1942        };
1943
1944        let id = hub
1945            .bind_group_layouts
1946            .prepare(id_in)
1947            .assign_error("<derived>", &mut token);
1948        (id, Some(error))
1949    }
1950
1951    pub fn compute_pipeline_label<A: HalApi>(&self, id: id::ComputePipelineId) -> String {
1952        A::hub(self).compute_pipelines.label_for_resource(id)
1953    }
1954
1955    pub fn compute_pipeline_drop<A: HalApi>(&self, compute_pipeline_id: id::ComputePipelineId) {
1956        profiling::scope!("ComputePipeline::drop");
1957        log::debug!("compute pipeline {:?} is dropped", compute_pipeline_id);
1958        let hub = A::hub(self);
1959        let mut token = Token::root();
1960        let (device_guard, mut token) = hub.devices.read(&mut token);
1961
1962        let (device_id, layout_id) = {
1963            let (mut pipeline_guard, _) = hub.compute_pipelines.write(&mut token);
1964            match pipeline_guard.get_mut(compute_pipeline_id) {
1965                Ok(pipeline) => {
1966                    pipeline.life_guard.ref_count.take();
1967                    (pipeline.device_id.value, pipeline.layout_id.clone())
1968                }
1969                Err(InvalidId) => {
1970                    hub.compute_pipelines
1971                        .unregister_locked(compute_pipeline_id, &mut *pipeline_guard);
1972                    return;
1973                }
1974            }
1975        };
1976
1977        let mut life_lock = device_guard[device_id].lock_life(&mut token);
1978        life_lock
1979            .suspected_resources
1980            .compute_pipelines
1981            .push(id::Valid(compute_pipeline_id));
1982        life_lock
1983            .suspected_resources
1984            .pipeline_layouts
1985            .push(layout_id);
1986    }
1987
1988    pub fn surface_configure<A: HalApi>(
1989        &self,
1990        surface_id: SurfaceId,
1991        device_id: DeviceId,
1992        config: &wgt::SurfaceConfiguration<Vec<TextureFormat>>,
1993    ) -> Option<present::ConfigureSurfaceError> {
1994        use hal::{Adapter as _, Surface as _};
1995        use present::ConfigureSurfaceError as E;
1996        profiling::scope!("surface_configure");
1997
1998        fn validate_surface_configuration(
1999            config: &mut hal::SurfaceConfiguration,
2000            caps: &hal::SurfaceCapabilities,
2001        ) -> Result<(), E> {
2002            let width = config.extent.width;
2003            let height = config.extent.height;
2004            if width < caps.extents.start().width
2005                || width > caps.extents.end().width
2006                || height < caps.extents.start().height
2007                || height > caps.extents.end().height
2008            {
2009                log::warn!(
2010                    "Requested size {}x{} is outside of the supported range: {:?}",
2011                    width,
2012                    height,
2013                    caps.extents
2014                );
2015            }
2016            if !caps.present_modes.contains(&config.present_mode) {
2017                let new_mode = 'b: loop {
2018                    // Automatic present mode checks.
2019                    //
2020                    // The "Automatic" modes are never supported by the backends.
2021                    let fallbacks = match config.present_mode {
2022                        wgt::PresentMode::AutoVsync => {
2023                            &[wgt::PresentMode::FifoRelaxed, wgt::PresentMode::Fifo][..]
2024                        }
2025                        // Always end in FIFO to make sure it's always supported
2026                        wgt::PresentMode::AutoNoVsync => &[
2027                            wgt::PresentMode::Immediate,
2028                            wgt::PresentMode::Mailbox,
2029                            wgt::PresentMode::Fifo,
2030                        ][..],
2031                        _ => {
2032                            return Err(E::UnsupportedPresentMode {
2033                                requested: config.present_mode,
2034                                available: caps.present_modes.clone(),
2035                            });
2036                        }
2037                    };
2038
2039                    for &fallback in fallbacks {
2040                        if caps.present_modes.contains(&fallback) {
2041                            break 'b fallback;
2042                        }
2043                    }
2044
2045                    unreachable!("Fallback system failed to choose present mode. This is a bug. Mode: {:?}, Options: {:?}", config.present_mode, &caps.present_modes);
2046                };
2047
2048                log::info!(
2049                    "Automatically choosing presentation mode by rule {:?}. Chose {new_mode:?}",
2050                    config.present_mode
2051                );
2052                config.present_mode = new_mode;
2053            }
2054            if !caps.formats.contains(&config.format) {
2055                return Err(E::UnsupportedFormat {
2056                    requested: config.format,
2057                    available: caps.formats.clone(),
2058                });
2059            }
2060            if !caps
2061                .composite_alpha_modes
2062                .contains(&config.composite_alpha_mode)
2063            {
2064                let new_alpha_mode = 'alpha: loop {
2065                    // Automatic alpha mode checks.
2066                    let fallbacks = match config.composite_alpha_mode {
2067                        wgt::CompositeAlphaMode::Auto => &[
2068                            wgt::CompositeAlphaMode::Opaque,
2069                            wgt::CompositeAlphaMode::Inherit,
2070                        ][..],
2071                        _ => {
2072                            return Err(E::UnsupportedAlphaMode {
2073                                requested: config.composite_alpha_mode,
2074                                available: caps.composite_alpha_modes.clone(),
2075                            });
2076                        }
2077                    };
2078
2079                    for &fallback in fallbacks {
2080                        if caps.composite_alpha_modes.contains(&fallback) {
2081                            break 'alpha fallback;
2082                        }
2083                    }
2084
2085                    unreachable!(
2086                        "Fallback system failed to choose alpha mode. This is a bug. \
2087                                  AlphaMode: {:?}, Options: {:?}",
2088                        config.composite_alpha_mode, &caps.composite_alpha_modes
2089                    );
2090                };
2091
2092                log::info!(
2093                    "Automatically choosing alpha mode by rule {:?}. Chose {new_alpha_mode:?}",
2094                    config.composite_alpha_mode
2095                );
2096                config.composite_alpha_mode = new_alpha_mode;
2097            }
2098            if !caps.usage.contains(config.usage) {
2099                return Err(E::UnsupportedUsage);
2100            }
2101            if width == 0 || height == 0 {
2102                return Err(E::ZeroArea);
2103            }
2104            Ok(())
2105        }
2106
2107        log::info!("configuring surface with {:?}", config);
2108        let hub = A::hub(self);
2109        let mut token = Token::root();
2110
2111        let (mut surface_guard, mut token) = self.surfaces.write(&mut token);
2112        let (adapter_guard, mut token) = hub.adapters.read(&mut token);
2113        let (device_guard, mut token) = hub.devices.read(&mut token);
2114
2115        let error = 'outer: loop {
2116            let device = match device_guard.get(device_id) {
2117                Ok(device) => device,
2118                Err(_) => break DeviceError::Invalid.into(),
2119            };
2120            #[cfg(feature = "trace")]
2121            if let Some(ref trace) = device.trace {
2122                trace
2123                    .lock()
2124                    .add(trace::Action::ConfigureSurface(surface_id, config.clone()));
2125            }
2126
2127            let surface = match surface_guard.get_mut(surface_id) {
2128                Ok(surface) => surface,
2129                Err(_) => break E::InvalidSurface,
2130            };
2131
2132            let caps = unsafe {
2133                let suf = A::get_surface(surface);
2134                let adapter = &adapter_guard[device.adapter_id.value];
2135                match adapter.raw.adapter.surface_capabilities(&suf.unwrap().raw) {
2136                    Some(caps) => caps,
2137                    None => break E::UnsupportedQueueFamily,
2138                }
2139            };
2140
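            // Validate the requested view formats: each must match the surface format,
            // differing at most in sRGB-ness.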
2141            let mut hal_view_formats = vec![];
2142            for format in config.view_formats.iter() {
2143                if *format == config.format {
2144                    continue;
2145                }
2146                if !caps.formats.contains(&config.format) {
2147                    break 'outer E::UnsupportedFormat {
2148                        requested: config.format,
2149                        available: caps.formats,
2150                    };
2151                }
2152                if config.format.remove_srgb_suffix() != format.remove_srgb_suffix() {
2153                    break 'outer E::InvalidViewFormat(*format, config.format);
2154                }
2155                hal_view_formats.push(*format);
2156            }
2157
2158            if !hal_view_formats.is_empty() {
2159                if let Err(missing_flag) =
2160                    device.require_downlevel_flags(wgt::DownlevelFlags::SURFACE_VIEW_FORMATS)
2161                {
2162                    break 'outer E::MissingDownlevelFlags(missing_flag);
2163                }
2164            }
2165
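            // Choose the number of swapchain frames: the desired default, clamped to
            // the range the surface supports.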
2166            let num_frames = present::DESIRED_NUM_FRAMES
2167                .clamp(*caps.swap_chain_sizes.start(), *caps.swap_chain_sizes.end());
2168            let mut hal_config = hal::SurfaceConfiguration {
2169                swap_chain_size: num_frames,
2170                present_mode: config.present_mode,
2171                composite_alpha_mode: config.alpha_mode,
2172                format: config.format,
2173                extent: wgt::Extent3d {
2174                    width: config.width,
2175                    height: config.height,
2176                    depth_or_array_layers: 1,
2177                },
2178                usage: conv::map_texture_usage(config.usage, hal::FormatAspects::COLOR),
2179                view_formats: hal_view_formats,
2180            };
2181
2182            if let Err(error) = validate_surface_configuration(&mut hal_config, &caps) {
2183                break error;
2184            }
2185
2186            // Wait for all work to finish before configuring the surface.
2187            if let Err(e) = device.maintain(hub, wgt::Maintain::Wait, &mut token) {
2188                break e.into();
2189            }
2190
2191            // All textures must be destroyed before the surface can be re-configured.
2192            if let Some(present) = surface.presentation.take() {
2193                if present.acquired_texture.is_some() {
2194                    break E::PreviousOutputExists;
2195                }
2196            }
2197
2198            // TODO: Texture views that point to the old surface texture may still be alive,
2199            // which would let the user keep rendering to the surface texture long after it
2200            // has been removed.
2201            //
2202            // https://github.com/gfx-rs/wgpu/issues/4105
2203
2204            match unsafe {
2205                A::get_surface_mut(surface)
2206                    .unwrap()
2207                    .raw
2208                    .configure(&device.raw, &hal_config)
2209            } {
2210                Ok(()) => (),
2211                Err(error) => {
2212                    break match error {
2213                        hal::SurfaceError::Outdated | hal::SurfaceError::Lost => E::InvalidSurface,
2214                        hal::SurfaceError::Device(error) => E::Device(error.into()),
2215                        hal::SurfaceError::Other(message) => {
2216                            log::error!("surface configuration failed: {}", message);
2217                            E::InvalidSurface
2218                        }
2219                    }
2220                }
2221            }
2222
2223            surface.presentation = Some(present::Presentation {
2224                device_id: Stored {
2225                    value: id::Valid(device_id),
2226                    ref_count: device.life_guard.add_ref(),
2227                },
2228                config: config.clone(),
2229                num_frames,
2230                acquired_texture: None,
2231            });
2232
2233            return None;
2234        };
2235
2236        Some(error)
2237    }
2238
2239    #[cfg(feature = "replay")]
2240    /// Only triage suspected resource IDs. This helps us to avoid ID collisions
2241    /// upon creating new resources when replaying a trace.
2242    pub fn device_maintain_ids<A: HalApi>(&self, device_id: DeviceId) -> Result<(), InvalidDevice> {
2243        let hub = A::hub(self);
2244        let mut token = Token::root();
2245        let (device_guard, mut token) = hub.devices.read(&mut token);
2246        let device = device_guard.get(device_id).map_err(|_| InvalidDevice)?;
2247        device.lock_life(&mut token).triage_suspected(
2248            hub,
2249            &device.trackers,
2250            #[cfg(feature = "trace")]
2251            None,
2252            &mut token,
2253        );
2254        Ok(())
2255    }
2256
2257    /// Check `device_id` for freeable resources and completed buffer mappings.
2258    ///
2259    /// Returns `queue_empty`: `true` if there are no more queue submissions still in flight.
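    ///
    /// A minimal usage sketch (`global` and `device_id` are hypothetical values obtained
    /// elsewhere; assumes the Vulkan backend is enabled):
    ///
    /// ```ignore
    /// // Block until every submission made to this device has completed.
    /// let queue_empty = global
    ///     .device_poll::<hal::api::Vulkan>(device_id, wgt::Maintain::Wait)
    ///     .expect("failed to poll device");
    /// assert!(queue_empty);
    /// ```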
2260    pub fn device_poll<A: HalApi>(
2261        &self,
2262        device_id: DeviceId,
2263        maintain: wgt::Maintain<queue::WrappedSubmissionIndex>,
2264    ) -> Result<bool, WaitIdleError> {
2265        let (closures, queue_empty) = {
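            // When waiting on a specific submission index, it must have been issued on
            // this same device.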
2266            if let wgt::Maintain::WaitForSubmissionIndex(submission_index) = maintain {
2267                if submission_index.queue_id != device_id {
2268                    return Err(WaitIdleError::WrongSubmissionIndex(
2269                        submission_index.queue_id,
2270                        device_id,
2271                    ));
2272                }
2273            }
2274
2275            let hub = A::hub(self);
2276            let mut token = Token::root();
2277            let (device_guard, mut token) = hub.devices.read(&mut token);
2278            device_guard
2279                .get(device_id)
2280                .map_err(|_| DeviceError::Invalid)?
2281                .maintain(hub, maintain, &mut token)?
2282        };
2283
2284        closures.fire();
2285
2286        Ok(queue_empty)
2287    }
2288
2289    /// Poll all devices belonging to the backend `A`.
2290    ///
2291    /// If `force_wait` is true, block until all buffer mappings are done.
2292    ///
2293    /// Return `all_queue_empty` indicating whether there are more queue
2294    /// submissions still in flight.
2295    fn poll_devices<A: HalApi>(
2296        &self,
2297        force_wait: bool,
2298        closures: &mut UserClosures,
2299    ) -> Result<bool, WaitIdleError> {
2300        profiling::scope!("poll_devices");
2301
2302        let hub = A::hub(self);
2303        let mut devices_to_drop = vec![];
2304        let mut all_queue_empty = true;
2305        {
2306            let mut token = Token::root();
2307            let (device_guard, mut token) = hub.devices.read(&mut token);
2308
2309            for (id, device) in device_guard.iter(A::VARIANT) {
2310                let maintain = if force_wait {
2311                    wgt::Maintain::Wait
2312                } else {
2313                    wgt::Maintain::Poll
2314                };
2315                let (cbs, queue_empty) = device.maintain(hub, maintain, &mut token)?;
2316                all_queue_empty = all_queue_empty && queue_empty;
2317
2318                // If the device's own `RefCount` clone is the only one left, and
2319                // its submission queue is empty, then it can be freed.
2320                if queue_empty && device.ref_count.load() == 1 {
2321                    devices_to_drop.push(id);
2322                }
2323                closures.extend(cbs);
2324            }
2325        }
2326
2327        for device_id in devices_to_drop {
2328            self.exit_device::<A>(device_id);
2329        }
2330
2331        Ok(all_queue_empty)
2332    }
2333
2334    /// Poll all devices on all backends.
2335    ///
2336    /// This is the implementation of `wgpu::Instance::poll_all`.
2337    ///
2338    /// Returns `all_queue_empty`: `true` if there are no more queue
2339    /// submissions still in flight on any device of any backend.
2340    pub fn poll_all_devices(&self, force_wait: bool) -> Result<bool, WaitIdleError> {
2341        let mut closures = UserClosures::default();
2342        let mut all_queue_empty = true;
2343
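        // Poll every compiled-in backend; `all_queue_empty` stays true only if each
        // polled backend reports no submissions still in flight.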
2344        #[cfg(all(feature = "vulkan", not(target_arch = "wasm32")))]
2345        {
2346            all_queue_empty = self.poll_devices::<hal::api::Vulkan>(force_wait, &mut closures)?
2347                && all_queue_empty;
2348        }
2349        #[cfg(all(feature = "metal", any(target_os = "macos", target_os = "ios")))]
2350        {
2351            all_queue_empty =
2352                self.poll_devices::<hal::api::Metal>(force_wait, &mut closures)? && all_queue_empty;
2353        }
2354        #[cfg(all(feature = "dx12", windows))]
2355        {
2356            all_queue_empty =
2357                self.poll_devices::<hal::api::Dx12>(force_wait, &mut closures)? && all_queue_empty;
2358        }
2359        #[cfg(all(feature = "dx11", windows))]
2360        {
2361            all_queue_empty =
2362                self.poll_devices::<hal::api::Dx11>(force_wait, &mut closures)? && all_queue_empty;
2363        }
2364        #[cfg(feature = "gles")]
2365        {
2366            all_queue_empty =
2367                self.poll_devices::<hal::api::Gles>(force_wait, &mut closures)? && all_queue_empty;
2368        }
2369
2370        closures.fire();
2371
2372        Ok(all_queue_empty)
2373    }
2374
2375    pub fn device_label<A: HalApi>(&self, id: DeviceId) -> String {
2376        A::hub(self).devices.label_for_resource(id)
2377    }
2378
2379    pub fn device_start_capture<A: HalApi>(&self, id: DeviceId) {
2380        let hub = A::hub(self);
2381        let mut token = Token::root();
2382        let (device_guard, _) = hub.devices.read(&mut token);
2383        if let Ok(device) = device_guard.get(id) {
2384            unsafe { device.raw.start_capture() };
2385        }
2386    }
2387
2388    pub fn device_stop_capture<A: HalApi>(&self, id: DeviceId) {
2389        let hub = A::hub(self);
2390        let mut token = Token::root();
2391        let (device_guard, _) = hub.devices.read(&mut token);
2392        if let Ok(device) = device_guard.get(id) {
2393            unsafe { device.raw.stop_capture() };
2394        }
2395    }
2396
2397    pub fn device_drop<A: HalApi>(&self, device_id: DeviceId) {
2398        profiling::scope!("Device::drop");
2399        log::debug!("device {:?} is dropped", device_id);
2400
2401        let hub = A::hub(self);
2402        let mut token = Token::root();
2403
2404        // For now, just drop the `RefCount` in `device.life_guard`, which
2405        // stands for the user's reference to the device. We'll take care of
2406        // cleaning up the device when we're polled, once its queue submissions
2407        // have completed and it is no longer needed by other resources.
2408        let (mut device_guard, _) = hub.devices.write(&mut token);
2409        if let Ok(device) = device_guard.get_mut(device_id) {
2410            device.life_guard.ref_count.take().unwrap();
2411        }
2412    }
2413
2414    /// Exit the unreferenced, inactive device `device_id`.
2415    fn exit_device<A: HalApi>(&self, device_id: DeviceId) {
2416        let hub = A::hub(self);
2417        let mut token = Token::root();
2418        let mut free_adapter_id = None;
2419        {
2420            let (device, mut _token) = hub.devices.unregister(device_id, &mut token);
2421            if let Some(mut device) = device {
2422                // The things `Device::prepare_to_die` takes care of are mostly
2423                // unnecessary here. We know our queue is empty, so we don't
2424                // need to wait for submissions or triage them. We know we were
2425                // just polled, so `life_tracker.free_resources` is empty.
2426                debug_assert!(device.lock_life(&mut _token).queue_empty());
2427                device.pending_writes.deactivate();
2428
2429                // The adapter is only referenced by the device and by itself.
2430                // This isn't a robust way to destroy it; we should find a better one.
2431                if device.adapter_id.ref_count.load() == 1 {
2432                    free_adapter_id = Some(device.adapter_id.value.0);
2433                }
2434
2435                device.dispose();
2436            }
2437        }
2438
2439        // Free the adapter now that we've dropped the `Device` token.
2440        if let Some(free_adapter_id) = free_adapter_id {
2441            let _ = hub.adapters.unregister(free_adapter_id, &mut token);
2442        }
2443    }
2444
2445    pub fn buffer_map_async<A: HalApi>(
2446        &self,
2447        buffer_id: id::BufferId,
2448        range: Range<BufferAddress>,
2449        op: BufferMapOperation,
2450    ) -> BufferAccessResult {
2451        // User callbacks must not be called while `buffer_map_async_inner` holds its
2452        // locks, so when it fails we invoke the returned error callback here, after
2453        // those locks have been released.
2454        if let Err((op, err)) = self.buffer_map_async_inner::<A>(buffer_id, range, op) {
2455            op.callback.call(Err(err.clone()));
2456
2457            return Err(err);
2458        }
2459
2460        Ok(())
2461    }
2462
2463    // Returns the mapping callback in case of error so that the callback can be fired outside
2464    // of the locks that are held in this function.
2465    fn buffer_map_async_inner<A: HalApi>(
2466        &self,
2467        buffer_id: id::BufferId,
2468        range: Range<BufferAddress>,
2469        op: BufferMapOperation,
2470    ) -> Result<(), (BufferMapOperation, BufferAccessError)> {
2471        profiling::scope!("Buffer::map_async");
2472
2473        let hub = A::hub(self);
2474        let mut token = Token::root();
2475        let (device_guard, mut token) = hub.devices.read(&mut token);
2476        let (pub_usage, internal_use) = match op.host {
2477            HostMap::Read => (wgt::BufferUsages::MAP_READ, hal::BufferUses::MAP_READ),
2478            HostMap::Write => (wgt::BufferUsages::MAP_WRITE, hal::BufferUses::MAP_WRITE),
2479        };
2480
2481        if range.start % wgt::MAP_ALIGNMENT != 0 || range.end % wgt::COPY_BUFFER_ALIGNMENT != 0 {
2482            return Err((op, BufferAccessError::UnalignedRange));
2483        }
2484
2485        let (device_id, ref_count) = {
2486            let (mut buffer_guard, _) = hub.buffers.write(&mut token);
2487            let buffer = buffer_guard
2488                .get_mut(buffer_id)
2489                .map_err(|_| BufferAccessError::Invalid);
2490
2491            let buffer = match buffer {
2492                Ok(b) => b,
2493                Err(e) => {
2494                    return Err((op, e));
2495                }
2496            };
2497
2498            if let Err(e) = check_buffer_usage(buffer.usage, pub_usage) {
2499                return Err((op, e.into()));
2500            }
2501
2502            if range.start > range.end {
2503                return Err((
2504                    op,
2505                    BufferAccessError::NegativeRange {
2506                        start: range.start,
2507                        end: range.end,
2508                    },
2509                ));
2510            }
2511            if range.end > buffer.size {
2512                return Err((
2513                    op,
2514                    BufferAccessError::OutOfBoundsOverrun {
2515                        index: range.end,
2516                        max: buffer.size,
2517                    },
2518                ));
2519            }
2520
2521            buffer.map_state = match buffer.map_state {
2522                resource::BufferMapState::Init { .. } | resource::BufferMapState::Active { .. } => {
2523                    return Err((op, BufferAccessError::AlreadyMapped));
2524                }
2525                resource::BufferMapState::Waiting(_) => {
2526                    return Err((op, BufferAccessError::MapAlreadyPending));
2527                }
2528                resource::BufferMapState::Idle => {
2529                    resource::BufferMapState::Waiting(resource::BufferPendingMapping {
2530                        range,
2531                        op,
2532                        _parent_ref_count: buffer.life_guard.add_ref(),
2533                    })
2534                }
2535            };
2536            log::debug!("Buffer {:?} map state -> Waiting", buffer_id);
2537
2538            let device = &device_guard[buffer.device_id.value];
2539
2540            let ret = (buffer.device_id.value, buffer.life_guard.add_ref());
2541
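            // Record the buffer's new internal map usage in the device trackers and
            // discard the generated transition; no barrier is recorded here.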
2542            let mut trackers = device.trackers.lock();
2543            trackers
2544                .buffers
2545                .set_single(&*buffer_guard, buffer_id, internal_use);
2546            trackers.buffers.drain();
2547
2548            ret
2549        };
2550
2551        let device = &device_guard[device_id];
2552
2553        device
2554            .lock_life(&mut token)
2555            .map(id::Valid(buffer_id), ref_count);
2556
2557        Ok(())
2558    }
2559
2560    pub fn buffer_get_mapped_range<A: HalApi>(
2561        &self,
2562        buffer_id: id::BufferId,
2563        offset: BufferAddress,
2564        size: Option<BufferAddress>,
2565    ) -> Result<(*mut u8, u64), BufferAccessError> {
2566        profiling::scope!("Buffer::get_mapped_range");
2567
2568        let hub = A::hub(self);
2569        let mut token = Token::root();
2570        let (buffer_guard, _) = hub.buffers.read(&mut token);
2571        let buffer = buffer_guard
2572            .get(buffer_id)
2573            .map_err(|_| BufferAccessError::Invalid)?;
2574
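        // A `None` size means "from `offset` to the end of the buffer".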
2575        let range_size = if let Some(size) = size {
2576            size
2577        } else if offset > buffer.size {
2578            0
2579        } else {
2580            buffer.size - offset
2581        };
2582
2583        if offset % wgt::MAP_ALIGNMENT != 0 {
2584            return Err(BufferAccessError::UnalignedOffset { offset });
2585        }
2586        if range_size % wgt::COPY_BUFFER_ALIGNMENT != 0 {
2587            return Err(BufferAccessError::UnalignedRangeSize { range_size });
2588        }
2589
2590        match buffer.map_state {
2591            resource::BufferMapState::Init { ptr, .. } => {
2592                // offset (u64) cannot be negative, so there is no need to validate the lower bound
2593                if offset + range_size > buffer.size {
2594                    return Err(BufferAccessError::OutOfBoundsOverrun {
2595                        index: offset + range_size - 1,
2596                        max: buffer.size,
2597                    });
2598                }
2599                unsafe { Ok((ptr.as_ptr().offset(offset as isize), range_size)) }
2600            }
2601            resource::BufferMapState::Active { ptr, ref range, .. } => {
2602                if offset < range.start {
2603                    return Err(BufferAccessError::OutOfBoundsUnderrun {
2604                        index: offset,
2605                        min: range.start,
2606                    });
2607                }
2608                if offset + range_size > range.end {
2609                    return Err(BufferAccessError::OutOfBoundsOverrun {
2610                        index: offset + range_size - 1,
2611                        max: range.end,
2612                    });
2613                }
2614                // ptr points to the beginning of the range we mapped in map_async
2615                // rather than the beginning of the buffer.
2616                let relative_offset = (offset - range.start) as isize;
2617                unsafe { Ok((ptr.as_ptr().offset(relative_offset), range_size)) }
2618            }
2619            resource::BufferMapState::Idle | resource::BufferMapState::Waiting(_) => {
2620                Err(BufferAccessError::NotMapped)
2621            }
2622        }
2623    }
2624
2625    fn buffer_unmap_inner<A: HalApi>(
2626        &self,
2627        buffer_id: id::BufferId,
2628        buffer: &mut Buffer<A>,
2629        device: &mut Device<A>,
2630    ) -> Result<Option<BufferMapPendingClosure>, BufferAccessError> {
2631        log::debug!("Buffer {:?} map state -> Idle", buffer_id);
2632        match mem::replace(&mut buffer.map_state, resource::BufferMapState::Idle) {
2633            resource::BufferMapState::Init {
2634                ptr,
2635                stage_buffer,
2636                needs_flush,
2637            } => {
2638                #[cfg(feature = "trace")]
2639                if let Some(ref trace) = device.trace {
2640                    let mut trace = trace.lock();
2641                    let data = trace.make_binary("bin", unsafe {
2642                        std::slice::from_raw_parts(ptr.as_ptr(), buffer.size as usize)
2643                    });
2644                    trace.add(trace::Action::WriteBuffer {
2645                        id: buffer_id,
2646                        data,
2647                        range: 0..buffer.size,
2648                        queued: true,
2649                    });
2650                }
2651                let _ = ptr;
2652                if needs_flush {
2653                    unsafe {
2654                        device
2655                            .raw
2656                            .flush_mapped_ranges(&stage_buffer, iter::once(0..buffer.size));
2657                    }
2658                }
2659
2660                let raw_buf = buffer.raw.as_ref().ok_or(BufferAccessError::Destroyed)?;
2661
2662                buffer.life_guard.use_at(device.active_submission_index + 1);
2663                let region = wgt::BufferSize::new(buffer.size).map(|size| hal::BufferCopy {
2664                    src_offset: 0,
2665                    dst_offset: 0,
2666                    size,
2667                });
2668                let transition_src = hal::BufferBarrier {
2669                    buffer: &stage_buffer,
2670                    usage: hal::BufferUses::MAP_WRITE..hal::BufferUses::COPY_SRC,
2671                };
2672                let transition_dst = hal::BufferBarrier {
2673                    buffer: raw_buf,
2674                    usage: hal::BufferUses::empty()..hal::BufferUses::COPY_DST,
2675                };
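                // Record a copy from the staging buffer into the real buffer on the
                // device's pending-writes encoder.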
2676                let encoder = device.pending_writes.activate();
2677                unsafe {
2678                    encoder.transition_buffers(
2679                        iter::once(transition_src).chain(iter::once(transition_dst)),
2680                    );
2681                    if buffer.size > 0 {
2682                        encoder.copy_buffer_to_buffer(&stage_buffer, raw_buf, region.into_iter());
2683                    }
2684                }
2685                device
2686                    .pending_writes
2687                    .consume_temp(queue::TempResource::Buffer(stage_buffer));
2688                device.pending_writes.dst_buffers.insert(buffer_id);
2689            }
2690            resource::BufferMapState::Idle => {
2691                return Err(BufferAccessError::NotMapped);
2692            }
2693            resource::BufferMapState::Waiting(pending) => {
2694                return Ok(Some((pending.op, Err(BufferAccessError::MapAborted))));
2695            }
2696            resource::BufferMapState::Active { ptr, range, host } => {
2697                if host == HostMap::Write {
2698                    #[cfg(feature = "trace")]
2699                    if let Some(ref trace) = device.trace {
2700                        let mut trace = trace.lock();
2701                        let size = range.end - range.start;
2702                        let data = trace.make_binary("bin", unsafe {
2703                            std::slice::from_raw_parts(ptr.as_ptr(), size as usize)
2704                        });
2705                        trace.add(trace::Action::WriteBuffer {
2706                            id: buffer_id,
2707                            data,
2708                            range: range.clone(),
2709                            queued: false,
2710                        });
2711                    }
2712                    let _ = (ptr, range);
2713                }
2714                unsafe {
2715                    device
2716                        .raw
2717                        .unmap_buffer(buffer.raw.as_ref().unwrap())
2718                        .map_err(DeviceError::from)?
2719                };
2720            }
2721        }
2722        Ok(None)
2723    }
2724
2725    pub fn buffer_unmap<A: HalApi>(&self, buffer_id: id::BufferId) -> BufferAccessResult {
2726        profiling::scope!("Buffer::unmap");
2727
2728        let closure;
2729        {
2730            // Restrict the locks to this scope.
2731            let hub = A::hub(self);
2732            let mut token = Token::root();
2733
2734            let (mut device_guard, mut token) = hub.devices.write(&mut token);
2735            let (mut buffer_guard, _) = hub.buffers.write(&mut token);
2736            let buffer = buffer_guard
2737                .get_mut(buffer_id)
2738                .map_err(|_| BufferAccessError::Invalid)?;
2739            let device = &mut device_guard[buffer.device_id.value];
2740
2741            closure = self.buffer_unmap_inner(buffer_id, buffer, device)
2742        }
2743
2744        // Note: the locks are no longer held here, so it is safe to invoke the user callback.
2745        if let Some((operation, status)) = closure? {
2746            operation.callback.call(status);
2747        }
2748        Ok(())
2749    }
2750}