use super::Command as C;
use arrayvec::ArrayVec;
use glow::HasContext;
use std::{mem, slice, sync::Arc};

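/// Arbitrary message id attached to the debug markers and debug groups emitted by this backend.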
#[cfg(not(target_arch = "wasm32"))]
const DEBUG_ID: u32 = 0;

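/// Cube map faces in the order GL defines them, indexed by the array layer of a view.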
const CUBEMAP_FACES: [u32; 6] = [
    glow::TEXTURE_CUBE_MAP_POSITIVE_X,
    glow::TEXTURE_CUBE_MAP_NEGATIVE_X,
    glow::TEXTURE_CUBE_MAP_POSITIVE_Y,
    glow::TEXTURE_CUBE_MAP_NEGATIVE_Y,
    glow::TEXTURE_CUBE_MAP_POSITIVE_Z,
    glow::TEXTURE_CUBE_MAP_NEGATIVE_Z,
];

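/// Reads a debug marker string back out of the command buffer's auxiliary data blob.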
#[cfg(not(target_arch = "wasm32"))]
fn extract_marker<'a>(data: &'a [u8], range: &std::ops::Range<u32>) -> &'a str {
    std::str::from_utf8(&data[range.start as usize..range.end as usize]).unwrap()
}

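/// Whether the texture target stores multiple layers (2D array, 3D, or cube array) and therefore
/// needs layer-aware calls such as `framebuffer_texture_layer` or `tex_sub_image_3d`.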
fn is_layered_target(target: super::BindTarget) -> bool {
    match target {
        glow::TEXTURE_2D_ARRAY | glow::TEXTURE_3D | glow::TEXTURE_CUBE_MAP_ARRAY => true,
        _ => false,
    }
}

impl super::Queue {
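    /// Clears the given draw buffer to `color` by drawing a full-screen triangle with the
    /// dedicated clear program. Used instead of `clear_buffer_f32_slice` on drivers affected by
    /// `Workarounds::MESA_I915_SRGB_SHADER_CLEAR`.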
    unsafe fn perform_shader_clear(&self, gl: &glow::Context, draw_buffer: u32, color: [f32; 4]) {
        unsafe { gl.use_program(Some(self.shader_clear_program)) };
        unsafe {
            gl.uniform_4_f32(
                Some(&self.shader_clear_program_color_uniform_location),
                color[0],
                color[1],
                color[2],
                color[3],
            )
        };
        unsafe { gl.disable(glow::DEPTH_TEST) };
        unsafe { gl.disable(glow::STENCIL_TEST) };
        unsafe { gl.disable(glow::SCISSOR_TEST) };
        unsafe { gl.disable(glow::BLEND) };
        unsafe { gl.disable(glow::CULL_FACE) };
        unsafe { gl.draw_buffers(&[glow::COLOR_ATTACHMENT0 + draw_buffer]) };
        unsafe { gl.draw_arrays(glow::TRIANGLES, 0, 3) };

        if self.draw_buffer_count != 0 {
            let indices = (0..self.draw_buffer_count as u32)
                .map(|i| glow::COLOR_ATTACHMENT0 + i)
                .collect::<ArrayVec<_, { crate::MAX_COLOR_ATTACHMENTS }>>();
            unsafe { gl.draw_buffers(&indices) };
        }
        #[cfg(not(target_arch = "wasm32"))]
        for draw_buffer in 0..self.draw_buffer_count as u32 {
            unsafe { gl.disable_draw_buffer(glow::BLEND, draw_buffer) };
        }
    }

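    /// Resets GL state that previously executed commands may have left behind, so each command
    /// buffer starts from a known baseline.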
    unsafe fn reset_state(&mut self, gl: &glow::Context) {
        unsafe { gl.use_program(None) };
        unsafe { gl.bind_framebuffer(glow::FRAMEBUFFER, None) };
        unsafe { gl.disable(glow::DEPTH_TEST) };
        unsafe { gl.disable(glow::STENCIL_TEST) };
        unsafe { gl.disable(glow::SCISSOR_TEST) };
        unsafe { gl.disable(glow::BLEND) };
        unsafe { gl.disable(glow::CULL_FACE) };
        unsafe { gl.disable(glow::POLYGON_OFFSET_FILL) };
        unsafe { gl.disable(glow::SAMPLE_ALPHA_TO_COVERAGE) };
        if self.features.contains(wgt::Features::DEPTH_CLIP_CONTROL) {
            unsafe { gl.disable(glow::DEPTH_CLAMP) };
        }

        unsafe { gl.bind_buffer(glow::ELEMENT_ARRAY_BUFFER, None) };
        self.current_index_buffer = None;
    }

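    /// Attaches a texture view to the given attachment point of a framebuffer target, picking the
    /// right call for renderbuffers, layered targets, cube faces, and plain 2D textures.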
    unsafe fn set_attachment(
        &self,
        gl: &glow::Context,
        fbo_target: u32,
        attachment: u32,
        view: &super::TextureView,
    ) {
        match view.inner {
            super::TextureInner::Renderbuffer { raw } => {
                unsafe {
                    gl.framebuffer_renderbuffer(
                        fbo_target,
                        attachment,
                        glow::RENDERBUFFER,
                        Some(raw),
                    )
                };
            }
            super::TextureInner::DefaultRenderbuffer => panic!("Unexpected default RBO"),
            super::TextureInner::Texture { raw, target } => {
                let num_layers = view.array_layers.end - view.array_layers.start;
                if num_layers > 1 {
                    #[cfg(all(target_arch = "wasm32", target_os = "unknown"))]
                    unsafe {
                        gl.framebuffer_texture_multiview_ovr(
                            fbo_target,
                            attachment,
                            Some(raw),
                            view.mip_levels.start as i32,
                            view.array_layers.start as i32,
                            num_layers as i32,
                        )
                    };
                } else if is_layered_target(target) {
                    unsafe {
                        gl.framebuffer_texture_layer(
                            fbo_target,
                            attachment,
                            Some(raw),
                            view.mip_levels.start as i32,
                            view.array_layers.start as i32,
                        )
                    };
                } else if target == glow::TEXTURE_CUBE_MAP {
                    unsafe {
                        gl.framebuffer_texture_2d(
                            fbo_target,
                            attachment,
                            CUBEMAP_FACES[view.array_layers.start as usize],
                            Some(raw),
                            view.mip_levels.start as i32,
                        )
                    };
                } else {
                    unsafe {
                        gl.framebuffer_texture_2d(
                            fbo_target,
                            attachment,
                            target,
                            Some(raw),
                            view.mip_levels.start as i32,
                        )
                    };
                }
            }
            #[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))]
            super::TextureInner::ExternalFramebuffer { ref inner } => unsafe {
                gl.bind_external_framebuffer(glow::FRAMEBUFFER, inner);
            },
        }
    }

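    /// Executes a single recorded command on the GL context. `data_bytes` holds auxiliary
    /// payloads (debug markers, push-constant data) and `queries` the query objects that
    /// commands reference by index.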
    unsafe fn process(
        &mut self,
        gl: &glow::Context,
        command: &C,
        #[cfg_attr(target_arch = "wasm32", allow(unused))] data_bytes: &[u8],
        queries: &[glow::Query],
    ) {
        match *command {
            C::Draw {
                topology,
                start_vertex,
                vertex_count,
                instance_count,
            } => {
                unsafe {
                    gl.draw_arrays_instanced(
                        topology,
                        start_vertex as i32,
                        vertex_count as i32,
                        instance_count as i32,
                    )
                };
            }
            C::DrawIndexed {
                topology,
                index_type,
                index_count,
                index_offset,
                base_vertex,
                instance_count,
            } => {
                match base_vertex {
                    0 => unsafe {
                        gl.draw_elements_instanced(
                            topology,
                            index_count as i32,
                            index_type,
                            index_offset as i32,
                            instance_count as i32,
                        )
                    },
                    _ => unsafe {
                        gl.draw_elements_instanced_base_vertex(
                            topology,
                            index_count as _,
                            index_type,
                            index_offset as i32,
                            instance_count as i32,
                            base_vertex,
                        )
                    },
                }
            }
            C::DrawIndirect {
                topology,
                indirect_buf,
                indirect_offset,
            } => {
                unsafe { gl.bind_buffer(glow::DRAW_INDIRECT_BUFFER, Some(indirect_buf)) };
                unsafe { gl.draw_arrays_indirect_offset(topology, indirect_offset as i32) };
            }
            C::DrawIndexedIndirect {
                topology,
                index_type,
                indirect_buf,
                indirect_offset,
            } => {
                unsafe { gl.bind_buffer(glow::DRAW_INDIRECT_BUFFER, Some(indirect_buf)) };
                unsafe {
                    gl.draw_elements_indirect_offset(topology, index_type, indirect_offset as i32)
                };
            }
            C::Dispatch(group_counts) => {
                unsafe { gl.dispatch_compute(group_counts[0], group_counts[1], group_counts[2]) };
            }
            C::DispatchIndirect {
                indirect_buf,
                indirect_offset,
            } => {
                unsafe { gl.bind_buffer(glow::DISPATCH_INDIRECT_BUFFER, Some(indirect_buf)) };
                unsafe { gl.dispatch_compute_indirect(indirect_offset as i32) };
            }
            C::ClearBuffer {
                ref dst,
                dst_target,
                ref range,
            } => match dst.raw {
                Some(buffer) => {
                    let can_use_zero_buffer = self
                        .shared
                        .private_caps
                        .contains(super::PrivateCapabilities::INDEX_BUFFER_ROLE_CHANGE)
                        || dst_target != glow::ELEMENT_ARRAY_BUFFER;

                    if can_use_zero_buffer {
                        unsafe { gl.bind_buffer(glow::COPY_READ_BUFFER, Some(self.zero_buffer)) };
                        unsafe { gl.bind_buffer(dst_target, Some(buffer)) };
                        let mut dst_offset = range.start;
                        while dst_offset < range.end {
                            let size = (range.end - dst_offset).min(super::ZERO_BUFFER_SIZE as u64);
                            unsafe {
                                gl.copy_buffer_sub_data(
                                    glow::COPY_READ_BUFFER,
                                    dst_target,
                                    0,
                                    dst_offset as i32,
                                    size as i32,
                                )
                            };
                            dst_offset += size;
                        }
                    } else {
                        unsafe { gl.bind_buffer(dst_target, Some(buffer)) };
                        let zeroes = vec![0u8; (range.end - range.start) as usize];
                        unsafe {
                            gl.buffer_sub_data_u8_slice(dst_target, range.start as i32, &zeroes)
                        };
                    }
                }
                None => {
                    dst.data.as_ref().unwrap().lock().unwrap().as_mut_slice()
                        [range.start as usize..range.end as usize]
                        .fill(0);
                }
            },
            C::CopyBufferToBuffer {
                ref src,
                src_target,
                ref dst,
                dst_target,
                copy,
            } => {
                let copy_src_target = glow::COPY_READ_BUFFER;
                let is_index_buffer_only_element_dst = !self
                    .shared
                    .private_caps
                    .contains(super::PrivateCapabilities::INDEX_BUFFER_ROLE_CHANGE)
                    && dst_target == glow::ELEMENT_ARRAY_BUFFER
                    || src_target == glow::ELEMENT_ARRAY_BUFFER;

                let copy_dst_target = if is_index_buffer_only_element_dst {
                    glow::ELEMENT_ARRAY_BUFFER
                } else {
                    glow::COPY_WRITE_BUFFER
                };
                let size = copy.size.get() as usize;
                match (src.raw, dst.raw) {
                    (Some(ref src), Some(ref dst)) => {
                        unsafe { gl.bind_buffer(copy_src_target, Some(*src)) };
                        unsafe { gl.bind_buffer(copy_dst_target, Some(*dst)) };
                        unsafe {
                            gl.copy_buffer_sub_data(
                                copy_src_target,
                                copy_dst_target,
                                copy.src_offset as _,
                                copy.dst_offset as _,
                                copy.size.get() as _,
                            )
                        };
                    }
                    (Some(src), None) => {
                        let mut data = dst.data.as_ref().unwrap().lock().unwrap();
                        let dst_data = &mut data.as_mut_slice()
                            [copy.dst_offset as usize..copy.dst_offset as usize + size];

                        unsafe { gl.bind_buffer(copy_src_target, Some(src)) };
                        unsafe {
                            self.shared.get_buffer_sub_data(
                                gl,
                                copy_src_target,
                                copy.src_offset as i32,
                                dst_data,
                            )
                        };
                    }
                    (None, Some(dst)) => {
                        let data = src.data.as_ref().unwrap().lock().unwrap();
                        let src_data = &data.as_slice()
                            [copy.src_offset as usize..copy.src_offset as usize + size];
                        unsafe { gl.bind_buffer(copy_dst_target, Some(dst)) };
                        unsafe {
                            gl.buffer_sub_data_u8_slice(
                                copy_dst_target,
                                copy.dst_offset as i32,
                                src_data,
                            )
                        };
                    }
                    (None, None) => {
                        todo!()
                    }
                }
                unsafe { gl.bind_buffer(copy_src_target, None) };
                if is_index_buffer_only_element_dst {
                    unsafe {
                        gl.bind_buffer(glow::ELEMENT_ARRAY_BUFFER, self.current_index_buffer)
                    };
                } else {
                    unsafe { gl.bind_buffer(copy_dst_target, None) };
                }
            }
            #[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))]
            C::CopyExternalImageToTexture {
                ref src,
                dst,
                dst_target,
                dst_format,
                dst_premultiplication,
                ref copy,
            } => {
                const UNPACK_FLIP_Y_WEBGL: u32 =
                    web_sys::WebGl2RenderingContext::UNPACK_FLIP_Y_WEBGL;
                const UNPACK_PREMULTIPLY_ALPHA_WEBGL: u32 =
                    web_sys::WebGl2RenderingContext::UNPACK_PREMULTIPLY_ALPHA_WEBGL;

                unsafe {
                    if src.flip_y {
                        gl.pixel_store_bool(UNPACK_FLIP_Y_WEBGL, true);
                    }
                    if dst_premultiplication {
                        gl.pixel_store_bool(UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
                    }
                }

                unsafe { gl.bind_texture(dst_target, Some(dst)) };
                let format_desc = self.shared.describe_texture_format(dst_format);
                if is_layered_target(dst_target) {
                    let z_offset =
                        if let glow::TEXTURE_2D_ARRAY | glow::TEXTURE_CUBE_MAP_ARRAY = dst_target {
                            copy.dst_base.array_layer as i32
                        } else {
                            copy.dst_base.origin.z as i32
                        };

                    match src.source {
                        wgt::ExternalImageSource::ImageBitmap(ref b) => unsafe {
                            gl.tex_sub_image_3d_with_image_bitmap(
                                dst_target,
                                copy.dst_base.mip_level as i32,
                                copy.dst_base.origin.x as i32,
                                copy.dst_base.origin.y as i32,
                                z_offset,
                                copy.size.width as i32,
                                copy.size.height as i32,
                                copy.size.depth as i32,
                                format_desc.external,
                                format_desc.data_type,
                                b,
                            );
                        },
                        wgt::ExternalImageSource::HTMLVideoElement(ref v) => unsafe {
                            gl.tex_sub_image_3d_with_html_video_element(
                                dst_target,
                                copy.dst_base.mip_level as i32,
                                copy.dst_base.origin.x as i32,
                                copy.dst_base.origin.y as i32,
                                z_offset,
                                copy.size.width as i32,
                                copy.size.height as i32,
                                copy.size.depth as i32,
                                format_desc.external,
                                format_desc.data_type,
                                v,
                            );
                        },
                        wgt::ExternalImageSource::HTMLCanvasElement(ref c) => unsafe {
                            gl.tex_sub_image_3d_with_html_canvas_element(
                                dst_target,
                                copy.dst_base.mip_level as i32,
                                copy.dst_base.origin.x as i32,
                                copy.dst_base.origin.y as i32,
                                z_offset,
                                copy.size.width as i32,
                                copy.size.height as i32,
                                copy.size.depth as i32,
                                format_desc.external,
                                format_desc.data_type,
                                c,
                            );
                        },
                        wgt::ExternalImageSource::OffscreenCanvas(_) => unreachable!(),
                    }
                } else {
                    let dst_target = if let glow::TEXTURE_CUBE_MAP = dst_target {
                        CUBEMAP_FACES[copy.dst_base.array_layer as usize]
                    } else {
                        dst_target
                    };

                    match src.source {
                        wgt::ExternalImageSource::ImageBitmap(ref b) => unsafe {
                            gl.tex_sub_image_2d_with_image_bitmap_and_width_and_height(
                                dst_target,
                                copy.dst_base.mip_level as i32,
                                copy.dst_base.origin.x as i32,
                                copy.dst_base.origin.y as i32,
                                copy.size.width as i32,
                                copy.size.height as i32,
                                format_desc.external,
                                format_desc.data_type,
                                b,
                            );
                        },
                        wgt::ExternalImageSource::HTMLVideoElement(ref v) => unsafe {
                            gl.tex_sub_image_2d_with_html_video_and_width_and_height(
                                dst_target,
                                copy.dst_base.mip_level as i32,
                                copy.dst_base.origin.x as i32,
                                copy.dst_base.origin.y as i32,
                                copy.size.width as i32,
                                copy.size.height as i32,
                                format_desc.external,
                                format_desc.data_type,
                                v,
                            )
                        },
                        wgt::ExternalImageSource::HTMLCanvasElement(ref c) => unsafe {
                            gl.tex_sub_image_2d_with_html_canvas_and_width_and_height(
                                dst_target,
                                copy.dst_base.mip_level as i32,
                                copy.dst_base.origin.x as i32,
                                copy.dst_base.origin.y as i32,
                                copy.size.width as i32,
                                copy.size.height as i32,
                                format_desc.external,
                                format_desc.data_type,
                                c,
                            )
                        },
                        wgt::ExternalImageSource::OffscreenCanvas(_) => unreachable!(),
                    }
                }

                unsafe {
                    if src.flip_y {
                        gl.pixel_store_bool(UNPACK_FLIP_Y_WEBGL, false);
                    }
                    if dst_premultiplication {
                        gl.pixel_store_bool(UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
                    }
                }
            }
            C::CopyTextureToTexture {
                src,
                src_target,
                dst,
                dst_target,
                dst_is_cubemap,
                ref copy,
            } => {
                unsafe { gl.bind_framebuffer(glow::READ_FRAMEBUFFER, Some(self.copy_fbo)) };
                if is_layered_target(src_target) {
                    unsafe {
                        gl.framebuffer_texture_layer(
                            glow::READ_FRAMEBUFFER,
                            glow::COLOR_ATTACHMENT0,
                            Some(src),
                            copy.src_base.mip_level as i32,
                            copy.src_base.array_layer as i32,
                        )
                    };
                } else {
                    unsafe {
                        gl.framebuffer_texture_2d(
                            glow::READ_FRAMEBUFFER,
                            glow::COLOR_ATTACHMENT0,
                            src_target,
                            Some(src),
                            copy.src_base.mip_level as i32,
                        )
                    };
                }

                unsafe { gl.bind_texture(dst_target, Some(dst)) };
                if dst_is_cubemap {
                    unsafe {
                        gl.copy_tex_sub_image_2d(
                            CUBEMAP_FACES[copy.dst_base.array_layer as usize],
                            copy.dst_base.mip_level as i32,
                            copy.dst_base.origin.x as i32,
                            copy.dst_base.origin.y as i32,
                            copy.src_base.origin.x as i32,
                            copy.src_base.origin.y as i32,
                            copy.size.width as i32,
                            copy.size.height as i32,
                        )
                    };
                } else if is_layered_target(dst_target) {
                    unsafe {
                        gl.copy_tex_sub_image_3d(
                            dst_target,
                            copy.dst_base.mip_level as i32,
                            copy.dst_base.origin.x as i32,
                            copy.dst_base.origin.y as i32,
                            if let glow::TEXTURE_2D_ARRAY | glow::TEXTURE_CUBE_MAP_ARRAY =
                                dst_target
                            {
                                copy.dst_base.array_layer as i32
                            } else {
                                copy.dst_base.origin.z as i32
                            },
                            copy.src_base.origin.x as i32,
                            copy.src_base.origin.y as i32,
                            copy.size.width as i32,
                            copy.size.height as i32,
                        )
                    };
                } else {
                    unsafe {
                        gl.copy_tex_sub_image_2d(
                            dst_target,
                            copy.dst_base.mip_level as i32,
                            copy.dst_base.origin.x as i32,
                            copy.dst_base.origin.y as i32,
                            copy.src_base.origin.x as i32,
                            copy.src_base.origin.y as i32,
                            copy.size.width as i32,
                            copy.size.height as i32,
                        )
                    };
                }
            }
            C::CopyBufferToTexture {
                ref src,
                src_target: _,
                dst,
                dst_target,
                dst_format,
                ref copy,
            } => {
                let (block_width, block_height) = dst_format.block_dimensions();
                let block_size = dst_format.block_size(None).unwrap();
                let format_desc = self.shared.describe_texture_format(dst_format);
                let row_texels = copy
                    .buffer_layout
                    .bytes_per_row
                    .map_or(0, |bpr| block_width * bpr / block_size);
                let column_texels = copy
                    .buffer_layout
                    .rows_per_image
                    .map_or(0, |rpi| block_height * rpi);

                unsafe { gl.bind_texture(dst_target, Some(dst)) };
                unsafe { gl.pixel_store_i32(glow::UNPACK_ROW_LENGTH, row_texels as i32) };
                unsafe { gl.pixel_store_i32(glow::UNPACK_IMAGE_HEIGHT, column_texels as i32) };
                let mut unbind_unpack_buffer = false;
                if !dst_format.is_compressed() {
                    let buffer_data;
                    let unpack_data = match src.raw {
                        Some(buffer) => {
                            unsafe { gl.bind_buffer(glow::PIXEL_UNPACK_BUFFER, Some(buffer)) };
                            unbind_unpack_buffer = true;
                            glow::PixelUnpackData::BufferOffset(copy.buffer_layout.offset as u32)
                        }
                        None => {
                            buffer_data = src.data.as_ref().unwrap().lock().unwrap();
                            let src_data =
                                &buffer_data.as_slice()[copy.buffer_layout.offset as usize..];
                            glow::PixelUnpackData::Slice(src_data)
                        }
                    };
                    if is_layered_target(dst_target) {
                        unsafe {
                            gl.tex_sub_image_3d(
                                dst_target,
                                copy.texture_base.mip_level as i32,
                                copy.texture_base.origin.x as i32,
                                copy.texture_base.origin.y as i32,
                                if let glow::TEXTURE_2D_ARRAY | glow::TEXTURE_CUBE_MAP_ARRAY =
                                    dst_target
                                {
                                    copy.texture_base.array_layer as i32
                                } else {
                                    copy.texture_base.origin.z as i32
                                },
                                copy.size.width as i32,
                                copy.size.height as i32,
                                copy.size.depth as i32,
                                format_desc.external,
                                format_desc.data_type,
                                unpack_data,
                            )
                        };
                    } else {
                        unsafe {
                            gl.tex_sub_image_2d(
                                if let glow::TEXTURE_CUBE_MAP = dst_target {
                                    CUBEMAP_FACES[copy.texture_base.array_layer as usize]
                                } else {
                                    dst_target
                                },
                                copy.texture_base.mip_level as i32,
                                copy.texture_base.origin.x as i32,
                                copy.texture_base.origin.y as i32,
                                copy.size.width as i32,
                                copy.size.height as i32,
                                format_desc.external,
                                format_desc.data_type,
                                unpack_data,
                            )
                        };
                    }
                } else {
                    let bytes_per_row = copy
                        .buffer_layout
                        .bytes_per_row
                        .unwrap_or(copy.size.width * block_size);
                    let minimum_rows_per_image =
                        (copy.size.height + block_height - 1) / block_height;
                    let rows_per_image = copy
                        .buffer_layout
                        .rows_per_image
                        .unwrap_or(minimum_rows_per_image);

                    let bytes_per_image = bytes_per_row * rows_per_image;
                    let minimum_bytes_per_image = bytes_per_row * minimum_rows_per_image;
                    let bytes_in_upload =
                        (bytes_per_image * (copy.size.depth - 1)) + minimum_bytes_per_image;
                    let offset = copy.buffer_layout.offset as u32;

                    let buffer_data;
                    let unpack_data = match src.raw {
                        Some(buffer) => {
                            unsafe { gl.bind_buffer(glow::PIXEL_UNPACK_BUFFER, Some(buffer)) };
                            unbind_unpack_buffer = true;
                            glow::CompressedPixelUnpackData::BufferRange(
                                offset..offset + bytes_in_upload,
                            )
                        }
                        None => {
                            buffer_data = src.data.as_ref().unwrap().lock().unwrap();
                            let src_data = &buffer_data.as_slice()
                                [(offset as usize)..(offset + bytes_in_upload) as usize];
                            glow::CompressedPixelUnpackData::Slice(src_data)
                        }
                    };

                    if is_layered_target(dst_target) {
                        unsafe {
                            gl.compressed_tex_sub_image_3d(
                                dst_target,
                                copy.texture_base.mip_level as i32,
                                copy.texture_base.origin.x as i32,
                                copy.texture_base.origin.y as i32,
                                if let glow::TEXTURE_2D_ARRAY | glow::TEXTURE_CUBE_MAP_ARRAY =
                                    dst_target
                                {
                                    copy.texture_base.array_layer as i32
                                } else {
                                    copy.texture_base.origin.z as i32
                                },
                                copy.size.width as i32,
                                copy.size.height as i32,
                                copy.size.depth as i32,
                                format_desc.internal,
                                unpack_data,
                            )
                        };
                    } else {
                        unsafe {
                            gl.compressed_tex_sub_image_2d(
                                if let glow::TEXTURE_CUBE_MAP = dst_target {
                                    CUBEMAP_FACES[copy.texture_base.array_layer as usize]
                                } else {
                                    dst_target
                                },
                                copy.texture_base.mip_level as i32,
                                copy.texture_base.origin.x as i32,
                                copy.texture_base.origin.y as i32,
                                copy.size.width as i32,
                                copy.size.height as i32,
                                format_desc.internal,
                                unpack_data,
                            )
                        };
                    }
                }
                if unbind_unpack_buffer {
                    unsafe { gl.bind_buffer(glow::PIXEL_UNPACK_BUFFER, None) };
                }
            }
            C::CopyTextureToBuffer {
                src,
                src_target,
                src_format,
                ref dst,
                dst_target: _,
                ref copy,
            } => {
                let block_size = src_format.block_size(None).unwrap();
                if src_format.is_compressed() {
                    log::error!("Not implemented yet: compressed texture copy to buffer");
                    return;
                }
                if src_target == glow::TEXTURE_CUBE_MAP
                    || src_target == glow::TEXTURE_CUBE_MAP_ARRAY
                {
                    log::error!("Not implemented yet: cubemap texture copy to buffer");
                    return;
                }
                let format_desc = self.shared.describe_texture_format(src_format);
                let row_texels = copy
                    .buffer_layout
                    .bytes_per_row
                    .map_or(copy.size.width, |bpr| bpr / block_size);
                let column_texels = copy
                    .buffer_layout
                    .rows_per_image
                    .unwrap_or(copy.size.height);

                unsafe { gl.bind_framebuffer(glow::READ_FRAMEBUFFER, Some(self.copy_fbo)) };

                let read_pixels = |offset| {
                    let mut buffer_data;
                    let unpack_data = match dst.raw {
                        Some(buffer) => {
                            unsafe { gl.pixel_store_i32(glow::PACK_ROW_LENGTH, row_texels as i32) };
                            unsafe { gl.bind_buffer(glow::PIXEL_PACK_BUFFER, Some(buffer)) };
                            glow::PixelPackData::BufferOffset(offset as u32)
                        }
                        None => {
                            buffer_data = dst.data.as_ref().unwrap().lock().unwrap();
                            let dst_data = &mut buffer_data.as_mut_slice()[offset as usize..];
                            glow::PixelPackData::Slice(dst_data)
                        }
                    };
                    unsafe {
                        gl.read_pixels(
                            copy.texture_base.origin.x as i32,
                            copy.texture_base.origin.y as i32,
                            copy.size.width as i32,
                            copy.size.height as i32,
                            format_desc.external,
                            format_desc.data_type,
                            unpack_data,
                        )
                    };
                };

                match src_target {
                    glow::TEXTURE_2D => {
                        unsafe {
                            gl.framebuffer_texture_2d(
                                glow::READ_FRAMEBUFFER,
                                glow::COLOR_ATTACHMENT0,
                                src_target,
                                Some(src),
                                copy.texture_base.mip_level as i32,
                            )
                        };
                        read_pixels(copy.buffer_layout.offset);
                    }
                    glow::TEXTURE_2D_ARRAY => {
                        unsafe {
                            gl.framebuffer_texture_layer(
                                glow::READ_FRAMEBUFFER,
                                glow::COLOR_ATTACHMENT0,
                                Some(src),
                                copy.texture_base.mip_level as i32,
                                copy.texture_base.array_layer as i32,
                            )
                        };
                        read_pixels(copy.buffer_layout.offset);
                    }
                    glow::TEXTURE_3D => {
                        for z in copy.texture_base.origin.z..copy.size.depth {
                            unsafe {
                                gl.framebuffer_texture_layer(
                                    glow::READ_FRAMEBUFFER,
                                    glow::COLOR_ATTACHMENT0,
                                    Some(src),
                                    copy.texture_base.mip_level as i32,
                                    z as i32,
                                )
                            };
                            let offset = copy.buffer_layout.offset
                                + (z * block_size * row_texels * column_texels) as u64;
                            read_pixels(offset);
                        }
                    }
                    glow::TEXTURE_CUBE_MAP | glow::TEXTURE_CUBE_MAP_ARRAY => unimplemented!(),
                    _ => unreachable!(),
                }
            }
            C::SetIndexBuffer(buffer) => {
                unsafe { gl.bind_buffer(glow::ELEMENT_ARRAY_BUFFER, Some(buffer)) };
                self.current_index_buffer = Some(buffer);
            }
            C::BeginQuery(query, target) => {
                unsafe { gl.begin_query(target, query) };
            }
            C::EndQuery(target) => {
                unsafe { gl.end_query(target) };
            }
            C::CopyQueryResults {
                ref query_range,
                ref dst,
                dst_target,
                dst_offset,
            } => {
                self.temp_query_results.clear();
                for &query in queries[query_range.start as usize..query_range.end as usize].iter() {
                    let result = unsafe { gl.get_query_parameter_u32(query, glow::QUERY_RESULT) };
                    self.temp_query_results.push(result as u64);
                }
                let query_data = unsafe {
                    slice::from_raw_parts(
                        self.temp_query_results.as_ptr() as *const u8,
                        self.temp_query_results.len() * mem::size_of::<u64>(),
                    )
                };
                match dst.raw {
                    Some(buffer) => {
                        unsafe { gl.bind_buffer(dst_target, Some(buffer)) };
                        unsafe {
                            gl.buffer_sub_data_u8_slice(dst_target, dst_offset as i32, query_data)
                        };
                    }
                    None => {
                        let data = &mut dst.data.as_ref().unwrap().lock().unwrap();
                        let len = query_data.len().min(data.len());
                        data[..len].copy_from_slice(&query_data[..len]);
                    }
                }
            }
            C::ResetFramebuffer { is_default } => {
                if is_default {
                    unsafe { gl.bind_framebuffer(glow::DRAW_FRAMEBUFFER, None) };
                } else {
                    unsafe { gl.bind_framebuffer(glow::DRAW_FRAMEBUFFER, Some(self.draw_fbo)) };
                    unsafe {
                        gl.framebuffer_texture_2d(
                            glow::DRAW_FRAMEBUFFER,
                            glow::DEPTH_STENCIL_ATTACHMENT,
                            glow::TEXTURE_2D,
                            None,
                            0,
                        )
                    };
                    for i in 0..crate::MAX_COLOR_ATTACHMENTS {
                        let target = glow::COLOR_ATTACHMENT0 + i as u32;
                        unsafe {
                            gl.framebuffer_texture_2d(
                                glow::DRAW_FRAMEBUFFER,
                                target,
                                glow::TEXTURE_2D,
                                None,
                                0,
                            )
                        };
                    }
                }
                unsafe { gl.color_mask(true, true, true, true) };
                unsafe { gl.depth_mask(true) };
                unsafe { gl.stencil_mask(!0) };
                unsafe { gl.disable(glow::DEPTH_TEST) };
                unsafe { gl.disable(glow::STENCIL_TEST) };
                unsafe { gl.disable(glow::SCISSOR_TEST) };
            }
            C::BindAttachment {
                attachment,
                ref view,
            } => {
                unsafe { self.set_attachment(gl, glow::DRAW_FRAMEBUFFER, attachment, view) };
            }
            C::ResolveAttachment {
                attachment,
                ref dst,
                ref size,
            } => {
                unsafe { gl.bind_framebuffer(glow::READ_FRAMEBUFFER, Some(self.draw_fbo)) };
                unsafe { gl.read_buffer(attachment) };
                unsafe { gl.bind_framebuffer(glow::DRAW_FRAMEBUFFER, Some(self.copy_fbo)) };
                unsafe {
                    self.set_attachment(gl, glow::DRAW_FRAMEBUFFER, glow::COLOR_ATTACHMENT0, dst)
                };
                unsafe {
                    gl.blit_framebuffer(
                        0,
                        0,
                        size.width as i32,
                        size.height as i32,
                        0,
                        0,
                        size.width as i32,
                        size.height as i32,
                        glow::COLOR_BUFFER_BIT,
                        glow::NEAREST,
                    )
                };
                unsafe { gl.bind_framebuffer(glow::READ_FRAMEBUFFER, None) };
                unsafe { gl.bind_framebuffer(glow::DRAW_FRAMEBUFFER, Some(self.draw_fbo)) };
            }
            C::InvalidateAttachments(ref list) => {
                unsafe { gl.invalidate_framebuffer(glow::DRAW_FRAMEBUFFER, list) };
            }
            C::SetDrawColorBuffers(count) => {
                self.draw_buffer_count = count;
                let indices = (0..count as u32)
                    .map(|i| glow::COLOR_ATTACHMENT0 + i)
                    .collect::<ArrayVec<_, { crate::MAX_COLOR_ATTACHMENTS }>>();
                unsafe { gl.draw_buffers(&indices) };

                if self
                    .shared
                    .private_caps
                    .contains(super::PrivateCapabilities::CAN_DISABLE_DRAW_BUFFER)
                {
                    for draw_buffer in 0..count as u32 {
                        unsafe { gl.disable_draw_buffer(glow::BLEND, draw_buffer) };
                    }
                }
            }
            C::ClearColorF {
                draw_buffer,
                ref color,
                is_srgb,
            } => {
                if self
                    .shared
                    .workarounds
                    .contains(super::Workarounds::MESA_I915_SRGB_SHADER_CLEAR)
                    && is_srgb
                {
                    unsafe { self.perform_shader_clear(gl, draw_buffer, *color) };
                } else {
                    unsafe { gl.clear_buffer_f32_slice(glow::COLOR, draw_buffer, color) };
                }
            }
            C::ClearColorU(draw_buffer, ref color) => {
                unsafe { gl.clear_buffer_u32_slice(glow::COLOR, draw_buffer, color) };
            }
            C::ClearColorI(draw_buffer, ref color) => {
                unsafe { gl.clear_buffer_i32_slice(glow::COLOR, draw_buffer, color) };
            }
            C::ClearDepth(depth) => {
                unsafe { gl.clear_buffer_f32_slice(glow::DEPTH, 0, &[depth]) };
            }
            C::ClearStencil(value) => {
                unsafe { gl.clear_buffer_i32_slice(glow::STENCIL, 0, &[value as i32]) };
            }
            C::ClearDepthAndStencil(depth, stencil_value) => {
                unsafe {
                    gl.clear_buffer_depth_stencil(
                        glow::DEPTH_STENCIL,
                        0,
                        depth,
                        stencil_value as i32,
                    )
                };
            }
            C::BufferBarrier(raw, usage) => {
                let mut flags = 0;
                if usage.contains(crate::BufferUses::VERTEX) {
                    flags |= glow::VERTEX_ATTRIB_ARRAY_BARRIER_BIT;
                    unsafe { gl.bind_buffer(glow::ARRAY_BUFFER, Some(raw)) };
                    unsafe { gl.vertex_attrib_pointer_f32(0, 1, glow::BYTE, true, 0, 0) };
                }
                if usage.contains(crate::BufferUses::INDEX) {
                    flags |= glow::ELEMENT_ARRAY_BARRIER_BIT;
                    unsafe { gl.bind_buffer(glow::ELEMENT_ARRAY_BUFFER, Some(raw)) };
                }
                if usage.contains(crate::BufferUses::UNIFORM) {
                    flags |= glow::UNIFORM_BARRIER_BIT;
                }
                if usage.contains(crate::BufferUses::INDIRECT) {
                    flags |= glow::COMMAND_BARRIER_BIT;
                    unsafe { gl.bind_buffer(glow::DRAW_INDIRECT_BUFFER, Some(raw)) };
                }
                if usage.contains(crate::BufferUses::COPY_SRC) {
                    flags |= glow::PIXEL_BUFFER_BARRIER_BIT;
                    unsafe { gl.bind_buffer(glow::PIXEL_UNPACK_BUFFER, Some(raw)) };
                }
                if usage.contains(crate::BufferUses::COPY_DST) {
                    flags |= glow::PIXEL_BUFFER_BARRIER_BIT;
                    unsafe { gl.bind_buffer(glow::PIXEL_PACK_BUFFER, Some(raw)) };
                }
                if usage.intersects(crate::BufferUses::MAP_READ | crate::BufferUses::MAP_WRITE) {
                    flags |= glow::BUFFER_UPDATE_BARRIER_BIT;
                }
                if usage.intersects(
                    crate::BufferUses::STORAGE_READ | crate::BufferUses::STORAGE_READ_WRITE,
                ) {
                    flags |= glow::SHADER_STORAGE_BARRIER_BIT;
                }
                unsafe { gl.memory_barrier(flags) };
            }
            C::TextureBarrier(usage) => {
                let mut flags = 0;
                if usage.contains(crate::TextureUses::RESOURCE) {
                    flags |= glow::TEXTURE_FETCH_BARRIER_BIT;
                }
                if usage.intersects(
                    crate::TextureUses::STORAGE_READ | crate::TextureUses::STORAGE_READ_WRITE,
                ) {
                    flags |= glow::SHADER_IMAGE_ACCESS_BARRIER_BIT;
                }
                if usage.contains(crate::TextureUses::COPY_DST) {
                    flags |= glow::TEXTURE_UPDATE_BARRIER_BIT;
                }
                if usage.intersects(
                    crate::TextureUses::COLOR_TARGET
                        | crate::TextureUses::DEPTH_STENCIL_READ
                        | crate::TextureUses::DEPTH_STENCIL_WRITE,
                ) {
                    flags |= glow::FRAMEBUFFER_BARRIER_BIT;
                }
                unsafe { gl.memory_barrier(flags) };
            }
            C::SetViewport {
                ref rect,
                ref depth,
            } => {
                unsafe { gl.viewport(rect.x, rect.y, rect.w, rect.h) };
                unsafe { gl.depth_range_f32(depth.start, depth.end) };
            }
            C::SetScissor(ref rect) => {
                unsafe { gl.scissor(rect.x, rect.y, rect.w, rect.h) };
                unsafe { gl.enable(glow::SCISSOR_TEST) };
            }
            C::SetStencilFunc {
                face,
                function,
                reference,
                read_mask,
            } => {
                unsafe { gl.stencil_func_separate(face, function, reference as i32, read_mask) };
            }
            C::SetStencilOps {
                face,
                write_mask,
                ref ops,
            } => {
                unsafe { gl.stencil_mask_separate(face, write_mask) };
                unsafe { gl.stencil_op_separate(face, ops.fail, ops.depth_fail, ops.pass) };
            }
            C::SetVertexAttribute {
                buffer,
                ref buffer_desc,
                attribute_desc: ref vat,
            } => {
                unsafe { gl.bind_buffer(glow::ARRAY_BUFFER, buffer) };
                unsafe { gl.enable_vertex_attrib_array(vat.location) };

                if buffer.is_none() {
                    match vat.format_desc.attrib_kind {
                        super::VertexAttribKind::Float => unsafe {
                            gl.vertex_attrib_format_f32(
                                vat.location,
                                vat.format_desc.element_count,
                                vat.format_desc.element_format,
                                true,
                                vat.offset,
                            )
                        },
                        super::VertexAttribKind::Integer => unsafe {
                            gl.vertex_attrib_format_i32(
                                vat.location,
                                vat.format_desc.element_count,
                                vat.format_desc.element_format,
                                vat.offset,
                            )
                        },
                    }

                    unsafe { gl.vertex_attrib_binding(vat.location, vat.buffer_index) };
                } else {
                    match vat.format_desc.attrib_kind {
                        super::VertexAttribKind::Float => unsafe {
                            gl.vertex_attrib_pointer_f32(
                                vat.location,
                                vat.format_desc.element_count,
                                vat.format_desc.element_format,
                                true,
                                buffer_desc.stride as i32,
                                vat.offset as i32,
                            )
                        },
                        super::VertexAttribKind::Integer => unsafe {
                            gl.vertex_attrib_pointer_i32(
                                vat.location,
                                vat.format_desc.element_count,
                                vat.format_desc.element_format,
                                buffer_desc.stride as i32,
                                vat.offset as i32,
                            )
                        },
                    }
                    unsafe { gl.vertex_attrib_divisor(vat.location, buffer_desc.step as u32) };
                }
            }
            C::UnsetVertexAttribute(location) => {
                unsafe { gl.disable_vertex_attrib_array(location) };
            }
            C::SetVertexBuffer {
                index,
                ref buffer,
                ref buffer_desc,
            } => {
                unsafe { gl.vertex_binding_divisor(index, buffer_desc.step as u32) };
                unsafe {
                    gl.bind_vertex_buffer(
                        index,
                        Some(buffer.raw),
                        buffer.offset as i32,
                        buffer_desc.stride as i32,
                    )
                };
            }
            C::SetDepth(ref depth) => {
                unsafe { gl.depth_func(depth.function) };
                unsafe { gl.depth_mask(depth.mask) };
            }
            C::SetDepthBias(bias) => {
                if bias.is_enabled() {
                    unsafe { gl.enable(glow::POLYGON_OFFSET_FILL) };
                    unsafe { gl.polygon_offset(bias.slope_scale, bias.constant as f32) };
                } else {
                    unsafe { gl.disable(glow::POLYGON_OFFSET_FILL) };
                }
            }
            C::ConfigureDepthStencil(aspects) => {
                if aspects.contains(crate::FormatAspects::DEPTH) {
                    unsafe { gl.enable(glow::DEPTH_TEST) };
                } else {
                    unsafe { gl.disable(glow::DEPTH_TEST) };
                }
                if aspects.contains(crate::FormatAspects::STENCIL) {
                    unsafe { gl.enable(glow::STENCIL_TEST) };
                } else {
                    unsafe { gl.disable(glow::STENCIL_TEST) };
                }
            }
            C::SetAlphaToCoverage(enabled) => {
                if enabled {
                    unsafe { gl.enable(glow::SAMPLE_ALPHA_TO_COVERAGE) };
                } else {
                    unsafe { gl.disable(glow::SAMPLE_ALPHA_TO_COVERAGE) };
                }
            }
            C::SetProgram(program) => {
                unsafe { gl.use_program(Some(program)) };
            }
            C::SetPrimitive(ref state) => {
                unsafe { gl.front_face(state.front_face) };
                if state.cull_face != 0 {
                    unsafe { gl.enable(glow::CULL_FACE) };
                    unsafe { gl.cull_face(state.cull_face) };
                } else {
                    unsafe { gl.disable(glow::CULL_FACE) };
                }
                if self.features.contains(wgt::Features::DEPTH_CLIP_CONTROL) {
                    if state.unclipped_depth {
                        unsafe { gl.enable(glow::DEPTH_CLAMP) };
                    } else {
                        unsafe { gl.disable(glow::DEPTH_CLAMP) };
                    }
                }
            }
            C::SetBlendConstant(c) => {
                unsafe { gl.blend_color(c[0], c[1], c[2], c[3]) };
            }
            C::SetColorTarget {
                draw_buffer_index,
                desc: super::ColorTargetDesc { mask, ref blend },
            } => {
                use wgt::ColorWrites as Cw;
                if let Some(index) = draw_buffer_index {
                    unsafe {
                        gl.color_mask_draw_buffer(
                            index,
                            mask.contains(Cw::RED),
                            mask.contains(Cw::GREEN),
                            mask.contains(Cw::BLUE),
                            mask.contains(Cw::ALPHA),
                        )
                    };
                    if let Some(ref blend) = *blend {
                        unsafe { gl.enable_draw_buffer(index, glow::BLEND) };
                        if blend.color != blend.alpha {
                            unsafe {
                                gl.blend_equation_separate_draw_buffer(
                                    index,
                                    blend.color.equation,
                                    blend.alpha.equation,
                                )
                            };
                            unsafe {
                                gl.blend_func_separate_draw_buffer(
                                    index,
                                    blend.color.src,
                                    blend.color.dst,
                                    blend.alpha.src,
                                    blend.alpha.dst,
                                )
                            };
                        } else {
                            unsafe { gl.blend_equation_draw_buffer(index, blend.color.equation) };
                            unsafe {
                                gl.blend_func_draw_buffer(index, blend.color.src, blend.color.dst)
                            };
                        }
                    } else if self
                        .shared
                        .private_caps
                        .contains(super::PrivateCapabilities::CAN_DISABLE_DRAW_BUFFER)
                    {
                        unsafe { gl.disable_draw_buffer(index, glow::BLEND) };
                    }
                } else {
                    unsafe {
                        gl.color_mask(
                            mask.contains(Cw::RED),
                            mask.contains(Cw::GREEN),
                            mask.contains(Cw::BLUE),
                            mask.contains(Cw::ALPHA),
                        )
                    };
                    if let Some(ref blend) = *blend {
                        unsafe { gl.enable(glow::BLEND) };
                        if blend.color != blend.alpha {
                            unsafe {
                                gl.blend_equation_separate(
                                    blend.color.equation,
                                    blend.alpha.equation,
                                )
                            };
                            unsafe {
                                gl.blend_func_separate(
                                    blend.color.src,
                                    blend.color.dst,
                                    blend.alpha.src,
                                    blend.alpha.dst,
                                )
                            };
                        } else {
                            unsafe { gl.blend_equation(blend.color.equation) };
                            unsafe { gl.blend_func(blend.color.src, blend.color.dst) };
                        }
                    } else {
                        unsafe { gl.disable(glow::BLEND) };
                    }
                }
            }
            C::BindBuffer {
                target,
                slot,
                buffer,
                offset,
                size,
            } => {
                unsafe { gl.bind_buffer_range(target, slot, Some(buffer), offset, size) };
            }
            C::BindSampler(texture_index, sampler) => {
                unsafe { gl.bind_sampler(texture_index, sampler) };
            }
            C::BindTexture {
                slot,
                texture,
                target,
                aspects,
            } => {
                unsafe { gl.active_texture(glow::TEXTURE0 + slot) };
                unsafe { gl.bind_texture(target, Some(texture)) };

                let version = gl.version();
                let is_min_es_3_1 = version.is_embedded && (version.major, version.minor) >= (3, 1);
                let is_min_4_3 = !version.is_embedded && (version.major, version.minor) >= (4, 3);
                if is_min_es_3_1 || is_min_4_3 {
                    let mode = match aspects {
                        crate::FormatAspects::DEPTH => Some(glow::DEPTH_COMPONENT),
                        crate::FormatAspects::STENCIL => Some(glow::STENCIL_INDEX),
                        _ => None,
                    };
                    if let Some(mode) = mode {
                        unsafe {
                            gl.tex_parameter_i32(
                                target,
                                glow::DEPTH_STENCIL_TEXTURE_MODE,
                                mode as _,
                            )
                        };
                    }
                }
            }
            C::BindImage { slot, ref binding } => {
                unsafe {
                    gl.bind_image_texture(
                        slot,
                        binding.raw,
                        binding.mip_level as i32,
                        binding.array_layer.is_none(),
                        binding.array_layer.unwrap_or_default() as i32,
                        binding.access,
                        binding.format,
                    )
                };
            }
            #[cfg(not(target_arch = "wasm32"))]
            C::InsertDebugMarker(ref range) => {
                let marker = extract_marker(data_bytes, range);
                unsafe {
                    gl.debug_message_insert(
                        glow::DEBUG_SOURCE_APPLICATION,
                        glow::DEBUG_TYPE_MARKER,
                        DEBUG_ID,
                        glow::DEBUG_SEVERITY_NOTIFICATION,
                        marker,
                    )
                };
            }
            #[cfg(target_arch = "wasm32")]
            C::InsertDebugMarker(_) => (),
            #[cfg_attr(target_arch = "wasm32", allow(unused))]
            C::PushDebugGroup(ref range) => {
                #[cfg(not(target_arch = "wasm32"))]
                let marker = extract_marker(data_bytes, range);
                #[cfg(not(target_arch = "wasm32"))]
                unsafe {
                    gl.push_debug_group(glow::DEBUG_SOURCE_APPLICATION, DEBUG_ID, marker)
                };
            }
            C::PopDebugGroup => {
                #[cfg(not(target_arch = "wasm32"))]
                unsafe {
                    gl.pop_debug_group()
                };
            }
            C::SetPushConstants {
                ref uniform,
                offset,
            } => {
                fn get_data<T>(data: &[u8], offset: u32) -> &[T] {
                    let raw = &data[(offset as usize)..];
                    unsafe {
                        slice::from_raw_parts(
                            raw.as_ptr() as *const _,
                            raw.len() / mem::size_of::<T>(),
                        )
                    }
                }

                let location = uniform.location.as_ref();

                match uniform.utype {
                    glow::FLOAT => {
                        let data = get_data::<f32>(data_bytes, offset)[0];
                        unsafe { gl.uniform_1_f32(location, data) };
                    }
                    glow::FLOAT_VEC2 => {
                        let data = get_data::<[f32; 2]>(data_bytes, offset)[0];
                        unsafe { gl.uniform_2_f32_slice(location, &data) };
                    }
                    glow::FLOAT_VEC3 => {
                        let data = get_data::<[f32; 3]>(data_bytes, offset)[0];
                        unsafe { gl.uniform_3_f32_slice(location, &data) };
                    }
                    glow::FLOAT_VEC4 => {
                        let data = get_data::<[f32; 4]>(data_bytes, offset)[0];
                        unsafe { gl.uniform_4_f32_slice(location, &data) };
                    }
                    glow::INT => {
                        let data = get_data::<i32>(data_bytes, offset)[0];
                        unsafe { gl.uniform_1_i32(location, data) };
                    }
                    glow::INT_VEC2 => {
                        let data = get_data::<[i32; 2]>(data_bytes, offset)[0];
                        unsafe { gl.uniform_2_i32_slice(location, &data) };
                    }
                    glow::INT_VEC3 => {
                        let data = get_data::<[i32; 3]>(data_bytes, offset)[0];
                        unsafe { gl.uniform_3_i32_slice(location, &data) };
                    }
                    glow::INT_VEC4 => {
                        let data = get_data::<[i32; 4]>(data_bytes, offset)[0];
                        unsafe { gl.uniform_4_i32_slice(location, &data) };
                    }
                    glow::FLOAT_MAT2 => {
                        let data = get_data::<[f32; 4]>(data_bytes, offset)[0];
                        unsafe { gl.uniform_matrix_2_f32_slice(location, false, &data) };
                    }
                    glow::FLOAT_MAT3 => {
                        let data = get_data::<[f32; 9]>(data_bytes, offset)[0];
                        unsafe { gl.uniform_matrix_3_f32_slice(location, false, &data) };
                    }
                    glow::FLOAT_MAT4 => {
                        let data = get_data::<[f32; 16]>(data_bytes, offset)[0];
                        unsafe { gl.uniform_matrix_4_f32_slice(location, false, &data) };
                    }
                    _ => panic!("Unsupported uniform datatype!"),
                }
            }
        }
    }
}

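// The queue executes command buffers by replaying their recorded commands on the shared GL context.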
impl crate::Queue<super::Api> for super::Queue {
    unsafe fn submit(
        &mut self,
        command_buffers: &[&super::CommandBuffer],
        signal_fence: Option<(&mut super::Fence, crate::FenceValue)>,
    ) -> Result<(), crate::DeviceError> {
        let shared = Arc::clone(&self.shared);
        let gl = &shared.context.lock();
        for cmd_buf in command_buffers.iter() {
            unsafe { self.reset_state(gl) };
            #[cfg(not(target_arch = "wasm32"))]
            if let Some(ref label) = cmd_buf.label {
                unsafe { gl.push_debug_group(glow::DEBUG_SOURCE_APPLICATION, DEBUG_ID, label) };
            }

            for command in cmd_buf.commands.iter() {
                unsafe { self.process(gl, command, &cmd_buf.data_bytes, &cmd_buf.queries) };
            }

            #[cfg(not(target_arch = "wasm32"))]
            if cmd_buf.label.is_some() {
                unsafe { gl.pop_debug_group() };
            }
        }

        if let Some((fence, value)) = signal_fence {
            fence.maintain(gl);
            let sync = unsafe { gl.fence_sync(glow::SYNC_GPU_COMMANDS_COMPLETE, 0) }
                .map_err(|_| crate::DeviceError::OutOfMemory)?;
            fence.pending.push((value, sync));
        }

        Ok(())
    }

    unsafe fn present(
        &mut self,
        surface: &mut super::Surface,
        texture: super::Texture,
    ) -> Result<(), crate::SurfaceError> {
        #[cfg(any(not(target_arch = "wasm32"), target_os = "emscripten"))]
        let gl = unsafe { &self.shared.context.get_without_egl_lock() };

        #[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))]
        let gl = &self.shared.context.glow_context;

        unsafe { surface.present(texture, gl) }
    }

    unsafe fn get_timestamp_period(&self) -> f32 {
        1.0
    }
}

#[cfg(all(
    target_arch = "wasm32",
    feature = "fragile-send-sync-non-atomic-wasm",
    not(target_feature = "atomics")
))]
unsafe impl Sync for super::Queue {}
#[cfg(all(
    target_arch = "wasm32",
    feature = "fragile-send-sync-non-atomic-wasm",
    not(target_feature = "atomics")
))]
unsafe impl Send for super::Queue {}