#![allow(clippy::trivially_copy_pass_by_ref)]
use crate::prelude::*;
use crate::vk;
use crate::RawPtr;
use std::mem;
use std::os::raw::c_void;
use std::ptr;

#[derive(Clone)]
pub struct Device {
    pub(crate) handle: vk::Device,

    pub(crate) device_fn_1_0: vk::DeviceFnV1_0,
    pub(crate) device_fn_1_1: vk::DeviceFnV1_1,
    pub(crate) device_fn_1_2: vk::DeviceFnV1_2,
    pub(crate) device_fn_1_3: vk::DeviceFnV1_3,
}

impl Device {
    pub unsafe fn load(instance_fn: &vk::InstanceFnV1_0, device: vk::Device) -> Self {
        let load_fn = |name: &std::ffi::CStr| {
            mem::transmute((instance_fn.get_device_proc_addr)(device, name.as_ptr()))
        };

        Self {
            handle: device,

            device_fn_1_0: vk::DeviceFnV1_0::load(load_fn),
            device_fn_1_1: vk::DeviceFnV1_1::load(load_fn),
            device_fn_1_2: vk::DeviceFnV1_2::load(load_fn),
            device_fn_1_3: vk::DeviceFnV1_3::load(load_fn),
        }
    }

    pub fn handle(&self) -> vk::Device {
        self.handle
    }
}
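
// Usage sketch (hedged): a `Device` is normally obtained through the
// instance-level `create_device` wrapper rather than by calling `load`
// directly; the direct form is shown here with a hypothetical `instance_fn`
// table and raw `raw_device` handle already in scope:
//
//     let device = unsafe { Device::load(&instance_fn, raw_device) };
//     assert_ne!(device.handle(), vk::Device::null());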

#[allow(non_camel_case_types)]
impl Device {
    pub fn fp_v1_3(&self) -> &vk::DeviceFnV1_3 {
        &self.device_fn_1_3
    }

    pub unsafe fn create_private_data_slot(
        &self,
        create_info: &vk::PrivateDataSlotCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::PrivateDataSlot> {
        let mut private_data_slot = mem::zeroed();
        (self.device_fn_1_3.create_private_data_slot)(
            self.handle,
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut private_data_slot,
        )
        .result_with_success(private_data_slot)
    }

    pub unsafe fn destroy_private_data_slot(
        &self,
        private_data_slot: vk::PrivateDataSlot,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_3.destroy_private_data_slot)(
            self.handle,
            private_data_slot,
            allocation_callbacks.as_raw_ptr(),
        )
    }

    pub unsafe fn set_private_data<T: vk::Handle>(
        &self,
        object: T,
        private_data_slot: vk::PrivateDataSlot,
        data: u64,
    ) -> VkResult<()> {
        (self.device_fn_1_3.set_private_data)(
            self.handle,
            T::TYPE,
            object.as_raw(),
            private_data_slot,
            data,
        )
        .result()
    }

    pub unsafe fn get_private_data<T: vk::Handle>(
        &self,
        object: T,
        private_data_slot: vk::PrivateDataSlot,
    ) -> u64 {
        let mut data = mem::zeroed();
        (self.device_fn_1_3.get_private_data)(
            self.handle,
            T::TYPE,
            object.as_raw(),
            private_data_slot,
            &mut data,
        );
        data
    }
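
    // Private-data round-trip sketch (hedged; assumes a Vulkan 1.3 device and
    // a `buffer` handle created elsewhere):
    //
    //     let slot = device.create_private_data_slot(
    //         &vk::PrivateDataSlotCreateInfo::default(), None)?;
    //     device.set_private_data(buffer, slot, 42)?;
    //     assert_eq!(device.get_private_data(buffer, slot), 42);
    //     device.destroy_private_data_slot(slot, None);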

    pub unsafe fn cmd_pipeline_barrier2(
        &self,
        command_buffer: vk::CommandBuffer,
        dependency_info: &vk::DependencyInfo,
    ) {
        (self.device_fn_1_3.cmd_pipeline_barrier2)(command_buffer, dependency_info)
    }

    pub unsafe fn cmd_reset_event2(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags2,
    ) {
        (self.device_fn_1_3.cmd_reset_event2)(command_buffer, event, stage_mask)
    }

    pub unsafe fn cmd_set_event2(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        dependency_info: &vk::DependencyInfo,
    ) {
        (self.device_fn_1_3.cmd_set_event2)(command_buffer, event, dependency_info)
    }

    pub unsafe fn cmd_wait_events2(
        &self,
        command_buffer: vk::CommandBuffer,
        events: &[vk::Event],
        dependency_infos: &[vk::DependencyInfo],
    ) {
        assert_eq!(events.len(), dependency_infos.len());
        (self.device_fn_1_3.cmd_wait_events2)(
            command_buffer,
            events.len() as u32,
            events.as_ptr(),
            dependency_infos.as_ptr(),
        )
    }

    pub unsafe fn cmd_write_timestamp2(
        &self,
        command_buffer: vk::CommandBuffer,
        stage: vk::PipelineStageFlags2,
        query_pool: vk::QueryPool,
        query: u32,
    ) {
        (self.device_fn_1_3.cmd_write_timestamp2)(command_buffer, stage, query_pool, query)
    }

    pub unsafe fn queue_submit2(
        &self,
        queue: vk::Queue,
        submits: &[vk::SubmitInfo2],
        fence: vk::Fence,
    ) -> VkResult<()> {
        (self.device_fn_1_3.queue_submit2)(queue, submits.len() as u32, submits.as_ptr(), fence)
            .result()
    }
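
    // Submission sketch (hedged; `cmd` and `queue` are assumed handles, and
    // the builder-style setters mirror ash's generated struct initializers):
    //
    //     let cmd_info = vk::CommandBufferSubmitInfo::builder()
    //         .command_buffer(cmd)
    //         .build();
    //     let submit = vk::SubmitInfo2::builder()
    //         .command_buffer_infos(&[cmd_info])
    //         .build();
    //     device.queue_submit2(queue, &[submit], vk::Fence::null())?;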

    pub unsafe fn cmd_copy_buffer2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_buffer_info: &vk::CopyBufferInfo2,
    ) {
        (self.device_fn_1_3.cmd_copy_buffer2)(command_buffer, copy_buffer_info)
    }

    pub unsafe fn cmd_copy_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_image_info: &vk::CopyImageInfo2,
    ) {
        (self.device_fn_1_3.cmd_copy_image2)(command_buffer, copy_image_info)
    }

    pub unsafe fn cmd_copy_buffer_to_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_buffer_to_image_info: &vk::CopyBufferToImageInfo2,
    ) {
        (self.device_fn_1_3.cmd_copy_buffer_to_image2)(command_buffer, copy_buffer_to_image_info)
    }

    pub unsafe fn cmd_copy_image_to_buffer2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_image_to_buffer_info: &vk::CopyImageToBufferInfo2,
    ) {
        (self.device_fn_1_3.cmd_copy_image_to_buffer2)(command_buffer, copy_image_to_buffer_info)
    }

    pub unsafe fn cmd_blit_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        blit_image_info: &vk::BlitImageInfo2,
    ) {
        (self.device_fn_1_3.cmd_blit_image2)(command_buffer, blit_image_info)
    }

    pub unsafe fn cmd_resolve_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        resolve_image_info: &vk::ResolveImageInfo2,
    ) {
        (self.device_fn_1_3.cmd_resolve_image2)(command_buffer, resolve_image_info)
    }

    pub unsafe fn cmd_begin_rendering(
        &self,
        command_buffer: vk::CommandBuffer,
        rendering_info: &vk::RenderingInfo,
    ) {
        (self.device_fn_1_3.cmd_begin_rendering)(command_buffer, rendering_info)
    }

    pub unsafe fn cmd_end_rendering(&self, command_buffer: vk::CommandBuffer) {
        (self.device_fn_1_3.cmd_end_rendering)(command_buffer)
    }
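
    // Dynamic-rendering sketch (hedged; `cmd`, `color_view`, and `extent` are
    // assumed to exist, and the builder API mirrors ash's generated setters):
    //
    //     let color = vk::RenderingAttachmentInfo::builder()
    //         .image_view(color_view)
    //         .image_layout(vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL)
    //         .load_op(vk::AttachmentLoadOp::CLEAR)
    //         .store_op(vk::AttachmentStoreOp::STORE)
    //         .build();
    //     let info = vk::RenderingInfo::builder()
    //         .render_area(vk::Rect2D { offset: Default::default(), extent })
    //         .layer_count(1)
    //         .color_attachments(&[color])
    //         .build();
    //     device.cmd_begin_rendering(cmd, &info);
    //     // ... draw calls ...
    //     device.cmd_end_rendering(cmd);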

    pub unsafe fn cmd_set_cull_mode(
        &self,
        command_buffer: vk::CommandBuffer,
        cull_mode: vk::CullModeFlags,
    ) {
        (self.device_fn_1_3.cmd_set_cull_mode)(command_buffer, cull_mode)
    }

    pub unsafe fn cmd_set_front_face(
        &self,
        command_buffer: vk::CommandBuffer,
        front_face: vk::FrontFace,
    ) {
        (self.device_fn_1_3.cmd_set_front_face)(command_buffer, front_face)
    }

    pub unsafe fn cmd_set_primitive_topology(
        &self,
        command_buffer: vk::CommandBuffer,
        primitive_topology: vk::PrimitiveTopology,
    ) {
        (self.device_fn_1_3.cmd_set_primitive_topology)(command_buffer, primitive_topology)
    }

    pub unsafe fn cmd_set_viewport_with_count(
        &self,
        command_buffer: vk::CommandBuffer,
        viewports: &[vk::Viewport],
    ) {
        (self.device_fn_1_3.cmd_set_viewport_with_count)(
            command_buffer,
            viewports.len() as u32,
            viewports.as_ptr(),
        )
    }

    pub unsafe fn cmd_set_scissor_with_count(
        &self,
        command_buffer: vk::CommandBuffer,
        scissors: &[vk::Rect2D],
    ) {
        (self.device_fn_1_3.cmd_set_scissor_with_count)(
            command_buffer,
            scissors.len() as u32,
            scissors.as_ptr(),
        )
    }

    pub unsafe fn cmd_bind_vertex_buffers2(
        &self,
        command_buffer: vk::CommandBuffer,
        first_binding: u32,
        buffers: &[vk::Buffer],
        offsets: &[vk::DeviceSize],
        sizes: Option<&[vk::DeviceSize]>,
        strides: Option<&[vk::DeviceSize]>,
    ) {
        assert_eq!(offsets.len(), buffers.len());
        let p_sizes = if let Some(sizes) = sizes {
            assert_eq!(sizes.len(), buffers.len());
            sizes.as_ptr()
        } else {
            ptr::null()
        };
        let p_strides = if let Some(strides) = strides {
            assert_eq!(strides.len(), buffers.len());
            strides.as_ptr()
        } else {
            ptr::null()
        };
        (self.device_fn_1_3.cmd_bind_vertex_buffers2)(
            command_buffer,
            first_binding,
            buffers.len() as u32,
            buffers.as_ptr(),
            offsets.as_ptr(),
            p_sizes,
            p_strides,
        )
    }

    pub unsafe fn cmd_set_depth_test_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_test_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_depth_test_enable)(command_buffer, depth_test_enable.into())
    }

    pub unsafe fn cmd_set_depth_write_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_write_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_depth_write_enable)(command_buffer, depth_write_enable.into())
    }

    pub unsafe fn cmd_set_depth_compare_op(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_compare_op: vk::CompareOp,
    ) {
        (self.device_fn_1_3.cmd_set_depth_compare_op)(command_buffer, depth_compare_op)
    }

    pub unsafe fn cmd_set_depth_bounds_test_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_bounds_test_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_depth_bounds_test_enable)(
            command_buffer,
            depth_bounds_test_enable.into(),
        )
    }

    pub unsafe fn cmd_set_stencil_test_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        stencil_test_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_stencil_test_enable)(command_buffer, stencil_test_enable.into())
    }

    pub unsafe fn cmd_set_stencil_op(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        fail_op: vk::StencilOp,
        pass_op: vk::StencilOp,
        depth_fail_op: vk::StencilOp,
        compare_op: vk::CompareOp,
    ) {
        (self.device_fn_1_3.cmd_set_stencil_op)(
            command_buffer,
            face_mask,
            fail_op,
            pass_op,
            depth_fail_op,
            compare_op,
        )
    }

    pub unsafe fn cmd_set_rasterizer_discard_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        rasterizer_discard_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_rasterizer_discard_enable)(
            command_buffer,
            rasterizer_discard_enable.into(),
        )
    }

    pub unsafe fn cmd_set_depth_bias_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_bias_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_depth_bias_enable)(command_buffer, depth_bias_enable.into())
    }

    pub unsafe fn cmd_set_primitive_restart_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        primitive_restart_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_primitive_restart_enable)(
            command_buffer,
            primitive_restart_enable.into(),
        )
    }

    pub unsafe fn get_device_buffer_memory_requirements(
        &self,
        create_info: &vk::DeviceBufferMemoryRequirements,
        out: &mut vk::MemoryRequirements2,
    ) {
        (self.device_fn_1_3.get_device_buffer_memory_requirements)(self.handle, create_info, out)
    }

    pub unsafe fn get_device_image_memory_requirements(
        &self,
        create_info: &vk::DeviceImageMemoryRequirements,
        out: &mut vk::MemoryRequirements2,
    ) {
        (self.device_fn_1_3.get_device_image_memory_requirements)(self.handle, create_info, out)
    }

    pub unsafe fn get_device_image_sparse_memory_requirements_len(
        &self,
        create_info: &vk::DeviceImageMemoryRequirements,
    ) -> usize {
        let mut count = 0;
        (self
            .device_fn_1_3
            .get_device_image_sparse_memory_requirements)(
            self.handle,
            create_info,
            &mut count,
            ptr::null_mut(),
        );
        count as usize
    }

    pub unsafe fn get_device_image_sparse_memory_requirements(
        &self,
        create_info: &vk::DeviceImageMemoryRequirements,
        out: &mut [vk::SparseImageMemoryRequirements2],
    ) {
        let mut count = out.len() as u32;
        (self
            .device_fn_1_3
            .get_device_image_sparse_memory_requirements)(
            self.handle,
            create_info,
            &mut count,
            out.as_mut_ptr(),
        );
        assert_eq!(count as usize, out.len());
    }
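
    // Two-call pattern sketch (hedged; `info` is an assumed
    // `vk::DeviceImageMemoryRequirements` built elsewhere): query the element
    // count first, then fill a correctly sized buffer.
    //
    //     let len = device.get_device_image_sparse_memory_requirements_len(&info);
    //     let mut reqs = vec![vk::SparseImageMemoryRequirements2::default(); len];
    //     device.get_device_image_sparse_memory_requirements(&info, &mut reqs);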
}

#[allow(non_camel_case_types)]
impl Device {
    pub fn fp_v1_2(&self) -> &vk::DeviceFnV1_2 {
        &self.device_fn_1_2
    }

    pub unsafe fn cmd_draw_indirect_count(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        count_buffer: vk::Buffer,
        count_buffer_offset: vk::DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        (self.device_fn_1_2.cmd_draw_indirect_count)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }

    pub unsafe fn cmd_draw_indexed_indirect_count(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        count_buffer: vk::Buffer,
        count_buffer_offset: vk::DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        (self.device_fn_1_2.cmd_draw_indexed_indirect_count)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }

    pub unsafe fn create_render_pass2(
        &self,
        create_info: &vk::RenderPassCreateInfo2,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::RenderPass> {
        let mut renderpass = mem::zeroed();
        (self.device_fn_1_2.create_render_pass2)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut renderpass,
        )
        .result_with_success(renderpass)
    }

    pub unsafe fn cmd_begin_render_pass2(
        &self,
        command_buffer: vk::CommandBuffer,
        render_pass_begin_info: &vk::RenderPassBeginInfo,
        subpass_begin_info: &vk::SubpassBeginInfo,
    ) {
        (self.device_fn_1_2.cmd_begin_render_pass2)(
            command_buffer,
            render_pass_begin_info,
            subpass_begin_info,
        );
    }

    pub unsafe fn cmd_next_subpass2(
        &self,
        command_buffer: vk::CommandBuffer,
        subpass_begin_info: &vk::SubpassBeginInfo,
        subpass_end_info: &vk::SubpassEndInfo,
    ) {
        (self.device_fn_1_2.cmd_next_subpass2)(
            command_buffer,
            subpass_begin_info,
            subpass_end_info,
        );
    }

    pub unsafe fn cmd_end_render_pass2(
        &self,
        command_buffer: vk::CommandBuffer,
        subpass_end_info: &vk::SubpassEndInfo,
    ) {
        (self.device_fn_1_2.cmd_end_render_pass2)(command_buffer, subpass_end_info);
    }

    pub unsafe fn reset_query_pool(
        &self,
        query_pool: vk::QueryPool,
        first_query: u32,
        query_count: u32,
    ) {
        (self.device_fn_1_2.reset_query_pool)(self.handle(), query_pool, first_query, query_count);
    }

    pub unsafe fn get_semaphore_counter_value(&self, semaphore: vk::Semaphore) -> VkResult<u64> {
        let mut value = 0;
        (self.device_fn_1_2.get_semaphore_counter_value)(self.handle(), semaphore, &mut value)
            .result_with_success(value)
    }

    pub unsafe fn wait_semaphores(
        &self,
        wait_info: &vk::SemaphoreWaitInfo,
        timeout: u64,
    ) -> VkResult<()> {
        (self.device_fn_1_2.wait_semaphores)(self.handle(), wait_info, timeout).result()
    }

    pub unsafe fn signal_semaphore(&self, signal_info: &vk::SemaphoreSignalInfo) -> VkResult<()> {
        (self.device_fn_1_2.signal_semaphore)(self.handle(), signal_info).result()
    }
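
    // Timeline-semaphore sketch (hedged; `timeline` is an assumed semaphore
    // created with `vk::SemaphoreType::TIMELINE`): block until the counter
    // reaches 5, with a one-second timeout given in nanoseconds.
    //
    //     let semaphores = [timeline];
    //     let values = [5u64];
    //     let wait_info = vk::SemaphoreWaitInfo::builder()
    //         .semaphores(&semaphores)
    //         .values(&values)
    //         .build();
    //     device.wait_semaphores(&wait_info, 1_000_000_000)?;
    //     let current = device.get_semaphore_counter_value(timeline)?;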

    pub unsafe fn get_buffer_device_address(
        &self,
        info: &vk::BufferDeviceAddressInfo,
    ) -> vk::DeviceAddress {
        (self.device_fn_1_2.get_buffer_device_address)(self.handle(), info)
    }

    pub unsafe fn get_buffer_opaque_capture_address(
        &self,
        info: &vk::BufferDeviceAddressInfo,
    ) -> u64 {
        (self.device_fn_1_2.get_buffer_opaque_capture_address)(self.handle(), info)
    }

    pub unsafe fn get_device_memory_opaque_capture_address(
        &self,
        info: &vk::DeviceMemoryOpaqueCaptureAddressInfo,
    ) -> u64 {
        (self.device_fn_1_2.get_device_memory_opaque_capture_address)(self.handle(), info)
    }
}

#[allow(non_camel_case_types)]
impl Device {
    pub fn fp_v1_1(&self) -> &vk::DeviceFnV1_1 {
        &self.device_fn_1_1
    }

    pub unsafe fn bind_buffer_memory2(
        &self,
        bind_infos: &[vk::BindBufferMemoryInfo],
    ) -> VkResult<()> {
        (self.device_fn_1_1.bind_buffer_memory2)(
            self.handle(),
            bind_infos.len() as _,
            bind_infos.as_ptr(),
        )
        .result()
    }

    pub unsafe fn bind_image_memory2(
        &self,
        bind_infos: &[vk::BindImageMemoryInfo],
    ) -> VkResult<()> {
        (self.device_fn_1_1.bind_image_memory2)(
            self.handle(),
            bind_infos.len() as _,
            bind_infos.as_ptr(),
        )
        .result()
    }

    pub unsafe fn get_device_group_peer_memory_features(
        &self,
        heap_index: u32,
        local_device_index: u32,
        remote_device_index: u32,
    ) -> vk::PeerMemoryFeatureFlags {
        let mut peer_memory_features = mem::zeroed();
        (self.device_fn_1_1.get_device_group_peer_memory_features)(
            self.handle(),
            heap_index,
            local_device_index,
            remote_device_index,
            &mut peer_memory_features,
        );
        peer_memory_features
    }

    pub unsafe fn cmd_set_device_mask(&self, command_buffer: vk::CommandBuffer, device_mask: u32) {
        (self.device_fn_1_1.cmd_set_device_mask)(command_buffer, device_mask);
    }

    pub unsafe fn cmd_dispatch_base(
        &self,
        command_buffer: vk::CommandBuffer,
        base_group_x: u32,
        base_group_y: u32,
        base_group_z: u32,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        (self.device_fn_1_1.cmd_dispatch_base)(
            command_buffer,
            base_group_x,
            base_group_y,
            base_group_z,
            group_count_x,
            group_count_y,
            group_count_z,
        );
    }

    pub unsafe fn get_image_memory_requirements2(
        &self,
        info: &vk::ImageMemoryRequirementsInfo2,
        out: &mut vk::MemoryRequirements2,
    ) {
        (self.device_fn_1_1.get_image_memory_requirements2)(self.handle(), info, out);
    }

    pub unsafe fn get_buffer_memory_requirements2(
        &self,
        info: &vk::BufferMemoryRequirementsInfo2,
        out: &mut vk::MemoryRequirements2,
    ) {
        (self.device_fn_1_1.get_buffer_memory_requirements2)(self.handle(), info, out);
    }

    pub unsafe fn get_image_sparse_memory_requirements2_len(
        &self,
        info: &vk::ImageSparseMemoryRequirementsInfo2,
    ) -> usize {
        let mut count = 0;
        (self.device_fn_1_1.get_image_sparse_memory_requirements2)(
            self.handle(),
            info,
            &mut count,
            ptr::null_mut(),
        );
        count as usize
    }

    pub unsafe fn get_image_sparse_memory_requirements2(
        &self,
        info: &vk::ImageSparseMemoryRequirementsInfo2,
        out: &mut [vk::SparseImageMemoryRequirements2],
    ) {
        let mut count = out.len() as u32;
        (self.device_fn_1_1.get_image_sparse_memory_requirements2)(
            self.handle(),
            info,
            &mut count,
            out.as_mut_ptr(),
        );
        assert_eq!(count as usize, out.len());
    }

    pub unsafe fn trim_command_pool(
        &self,
        command_pool: vk::CommandPool,
        flags: vk::CommandPoolTrimFlags,
    ) {
        (self.device_fn_1_1.trim_command_pool)(self.handle(), command_pool, flags);
    }

    pub unsafe fn create_sampler_ycbcr_conversion(
        &self,
        create_info: &vk::SamplerYcbcrConversionCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::SamplerYcbcrConversion> {
        let mut ycbcr_conversion = mem::zeroed();
        (self.device_fn_1_1.create_sampler_ycbcr_conversion)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut ycbcr_conversion,
        )
        .result_with_success(ycbcr_conversion)
    }

    pub unsafe fn destroy_sampler_ycbcr_conversion(
        &self,
        ycbcr_conversion: vk::SamplerYcbcrConversion,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_1.destroy_sampler_ycbcr_conversion)(
            self.handle(),
            ycbcr_conversion,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn create_descriptor_update_template(
        &self,
        create_info: &vk::DescriptorUpdateTemplateCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::DescriptorUpdateTemplate> {
        let mut descriptor_update_template = mem::zeroed();
        (self.device_fn_1_1.create_descriptor_update_template)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut descriptor_update_template,
        )
        .result_with_success(descriptor_update_template)
    }

    pub unsafe fn destroy_descriptor_update_template(
        &self,
        descriptor_update_template: vk::DescriptorUpdateTemplate,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_1.destroy_descriptor_update_template)(
            self.handle(),
            descriptor_update_template,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn update_descriptor_set_with_template(
        &self,
        descriptor_set: vk::DescriptorSet,
        descriptor_update_template: vk::DescriptorUpdateTemplate,
        data: *const c_void,
    ) {
        (self.device_fn_1_1.update_descriptor_set_with_template)(
            self.handle(),
            descriptor_set,
            descriptor_update_template,
            data,
        );
    }

    pub unsafe fn get_descriptor_set_layout_support(
        &self,
        create_info: &vk::DescriptorSetLayoutCreateInfo,
        out: &mut vk::DescriptorSetLayoutSupport,
    ) {
        (self.device_fn_1_1.get_descriptor_set_layout_support)(self.handle(), create_info, out);
    }
}

#[allow(non_camel_case_types)]
impl Device {
    pub fn fp_v1_0(&self) -> &vk::DeviceFnV1_0 {
        &self.device_fn_1_0
    }

    pub unsafe fn destroy_device(&self, allocation_callbacks: Option<&vk::AllocationCallbacks>) {
        (self.device_fn_1_0.destroy_device)(self.handle(), allocation_callbacks.as_raw_ptr());
    }

    pub unsafe fn destroy_sampler(
        &self,
        sampler: vk::Sampler,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_sampler)(
            self.handle(),
            sampler,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn free_memory(
        &self,
        memory: vk::DeviceMemory,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.free_memory)(self.handle(), memory, allocation_callbacks.as_raw_ptr());
    }

    pub unsafe fn free_command_buffers(
        &self,
        command_pool: vk::CommandPool,
        command_buffers: &[vk::CommandBuffer],
    ) {
        (self.device_fn_1_0.free_command_buffers)(
            self.handle(),
            command_pool,
            command_buffers.len() as u32,
            command_buffers.as_ptr(),
        );
    }

    pub unsafe fn create_event(
        &self,
        create_info: &vk::EventCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::Event> {
        let mut event = mem::zeroed();
        (self.device_fn_1_0.create_event)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut event,
        )
        .result_with_success(event)
    }

    pub unsafe fn get_event_status(&self, event: vk::Event) -> VkResult<bool> {
        let err_code = (self.device_fn_1_0.get_event_status)(self.handle(), event);
        match err_code {
            vk::Result::EVENT_SET => Ok(true),
            vk::Result::EVENT_RESET => Ok(false),
            _ => Err(err_code),
        }
    }

    pub unsafe fn set_event(&self, event: vk::Event) -> VkResult<()> {
        (self.device_fn_1_0.set_event)(self.handle(), event).result()
    }

    pub unsafe fn reset_event(&self, event: vk::Event) -> VkResult<()> {
        (self.device_fn_1_0.reset_event)(self.handle(), event).result()
    }

    pub unsafe fn cmd_set_event(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags,
    ) {
        (self.device_fn_1_0.cmd_set_event)(command_buffer, event, stage_mask);
    }

    pub unsafe fn cmd_reset_event(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags,
    ) {
        (self.device_fn_1_0.cmd_reset_event)(command_buffer, event, stage_mask);
    }

    pub unsafe fn cmd_wait_events(
        &self,
        command_buffer: vk::CommandBuffer,
        events: &[vk::Event],
        src_stage_mask: vk::PipelineStageFlags,
        dst_stage_mask: vk::PipelineStageFlags,
        memory_barriers: &[vk::MemoryBarrier],
        buffer_memory_barriers: &[vk::BufferMemoryBarrier],
        image_memory_barriers: &[vk::ImageMemoryBarrier],
    ) {
        (self.device_fn_1_0.cmd_wait_events)(
            command_buffer,
            events.len() as _,
            events.as_ptr(),
            src_stage_mask,
            dst_stage_mask,
            memory_barriers.len() as _,
            memory_barriers.as_ptr(),
            buffer_memory_barriers.len() as _,
            buffer_memory_barriers.as_ptr(),
            image_memory_barriers.len() as _,
            image_memory_barriers.as_ptr(),
        );
    }

    pub unsafe fn destroy_fence(
        &self,
        fence: vk::Fence,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_fence)(self.handle(), fence, allocation_callbacks.as_raw_ptr());
    }

    pub unsafe fn destroy_event(
        &self,
        event: vk::Event,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_event)(self.handle(), event, allocation_callbacks.as_raw_ptr());
    }

    pub unsafe fn destroy_image(
        &self,
        image: vk::Image,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_image)(self.handle(), image, allocation_callbacks.as_raw_ptr());
    }

    pub unsafe fn destroy_command_pool(
        &self,
        pool: vk::CommandPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_command_pool)(
            self.handle(),
            pool,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn destroy_image_view(
        &self,
        image_view: vk::ImageView,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_image_view)(
            self.handle(),
            image_view,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn destroy_render_pass(
        &self,
        renderpass: vk::RenderPass,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_render_pass)(
            self.handle(),
            renderpass,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn destroy_framebuffer(
        &self,
        framebuffer: vk::Framebuffer,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_framebuffer)(
            self.handle(),
            framebuffer,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn destroy_pipeline_layout(
        &self,
        pipeline_layout: vk::PipelineLayout,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_pipeline_layout)(
            self.handle(),
            pipeline_layout,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn destroy_pipeline_cache(
        &self,
        pipeline_cache: vk::PipelineCache,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_pipeline_cache)(
            self.handle(),
            pipeline_cache,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn destroy_buffer(
        &self,
        buffer: vk::Buffer,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_buffer)(
            self.handle(),
            buffer,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn destroy_shader_module(
        &self,
        shader: vk::ShaderModule,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_shader_module)(
            self.handle(),
            shader,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn destroy_pipeline(
        &self,
        pipeline: vk::Pipeline,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_pipeline)(
            self.handle(),
            pipeline,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn destroy_semaphore(
        &self,
        semaphore: vk::Semaphore,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_semaphore)(
            self.handle(),
            semaphore,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn destroy_descriptor_pool(
        &self,
        pool: vk::DescriptorPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_descriptor_pool)(
            self.handle(),
            pool,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn destroy_query_pool(
        &self,
        pool: vk::QueryPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_query_pool)(
            self.handle(),
            pool,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn destroy_descriptor_set_layout(
        &self,
        layout: vk::DescriptorSetLayout,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_descriptor_set_layout)(
            self.handle(),
            layout,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn free_descriptor_sets(
        &self,
        pool: vk::DescriptorPool,
        descriptor_sets: &[vk::DescriptorSet],
    ) -> VkResult<()> {
        (self.device_fn_1_0.free_descriptor_sets)(
            self.handle(),
            pool,
            descriptor_sets.len() as u32,
            descriptor_sets.as_ptr(),
        )
        .result()
    }

    pub unsafe fn update_descriptor_sets(
        &self,
        descriptor_writes: &[vk::WriteDescriptorSet],
        descriptor_copies: &[vk::CopyDescriptorSet],
    ) {
        (self.device_fn_1_0.update_descriptor_sets)(
            self.handle(),
            descriptor_writes.len() as u32,
            descriptor_writes.as_ptr(),
            descriptor_copies.len() as u32,
            descriptor_copies.as_ptr(),
        );
    }

    pub unsafe fn create_sampler(
        &self,
        create_info: &vk::SamplerCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::Sampler> {
        let mut sampler = mem::zeroed();
        (self.device_fn_1_0.create_sampler)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut sampler,
        )
        .result_with_success(sampler)
    }

    pub unsafe fn cmd_blit_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::ImageBlit],
        filter: vk::Filter,
    ) {
        (self.device_fn_1_0.cmd_blit_image)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_image,
            dst_image_layout,
            regions.len() as _,
            regions.as_ptr(),
            filter,
        );
    }

    pub unsafe fn cmd_resolve_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::ImageResolve],
    ) {
        (self.device_fn_1_0.cmd_resolve_image)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }

    pub unsafe fn cmd_fill_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        size: vk::DeviceSize,
        data: u32,
    ) {
        (self.device_fn_1_0.cmd_fill_buffer)(command_buffer, buffer, offset, size, data);
    }

    pub unsafe fn cmd_update_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        data: &[u8],
    ) {
        (self.device_fn_1_0.cmd_update_buffer)(
            command_buffer,
            buffer,
            offset,
            data.len() as u64,
            data.as_ptr() as _,
        );
    }

    pub unsafe fn cmd_copy_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        src_buffer: vk::Buffer,
        dst_buffer: vk::Buffer,
        regions: &[vk::BufferCopy],
    ) {
        (self.device_fn_1_0.cmd_copy_buffer)(
            command_buffer,
            src_buffer,
            dst_buffer,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }

    pub unsafe fn cmd_copy_image_to_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_buffer: vk::Buffer,
        regions: &[vk::BufferImageCopy],
    ) {
        (self.device_fn_1_0.cmd_copy_image_to_buffer)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_buffer,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }

    pub unsafe fn cmd_copy_buffer_to_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_buffer: vk::Buffer,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::BufferImageCopy],
    ) {
        (self.device_fn_1_0.cmd_copy_buffer_to_image)(
            command_buffer,
            src_buffer,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }

    pub unsafe fn cmd_copy_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::ImageCopy],
    ) {
        (self.device_fn_1_0.cmd_copy_image)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }

    pub unsafe fn allocate_descriptor_sets(
        &self,
        create_info: &vk::DescriptorSetAllocateInfo,
    ) -> VkResult<Vec<vk::DescriptorSet>> {
        let mut desc_set = Vec::with_capacity(create_info.descriptor_set_count as usize);
        let err_code = (self.device_fn_1_0.allocate_descriptor_sets)(
            self.handle(),
            create_info,
            desc_set.as_mut_ptr(),
        );

        desc_set.set_len(create_info.descriptor_set_count as usize);
        err_code.result_with_success(desc_set)
    }

    pub unsafe fn create_descriptor_set_layout(
        &self,
        create_info: &vk::DescriptorSetLayoutCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::DescriptorSetLayout> {
        let mut layout = mem::zeroed();
        (self.device_fn_1_0.create_descriptor_set_layout)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut layout,
        )
        .result_with_success(layout)
    }

    pub unsafe fn device_wait_idle(&self) -> VkResult<()> {
        (self.device_fn_1_0.device_wait_idle)(self.handle()).result()
    }

    pub unsafe fn create_descriptor_pool(
        &self,
        create_info: &vk::DescriptorPoolCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::DescriptorPool> {
        let mut pool = mem::zeroed();
        (self.device_fn_1_0.create_descriptor_pool)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut pool,
        )
        .result_with_success(pool)
    }

    pub unsafe fn reset_descriptor_pool(
        &self,
        pool: vk::DescriptorPool,
        flags: vk::DescriptorPoolResetFlags,
    ) -> VkResult<()> {
        (self.device_fn_1_0.reset_descriptor_pool)(self.handle(), pool, flags).result()
    }

    pub unsafe fn reset_command_pool(
        &self,
        command_pool: vk::CommandPool,
        flags: vk::CommandPoolResetFlags,
    ) -> VkResult<()> {
        (self.device_fn_1_0.reset_command_pool)(self.handle(), command_pool, flags).result()
    }

    pub unsafe fn reset_command_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        flags: vk::CommandBufferResetFlags,
    ) -> VkResult<()> {
        (self.device_fn_1_0.reset_command_buffer)(command_buffer, flags).result()
    }

    pub unsafe fn reset_fences(&self, fences: &[vk::Fence]) -> VkResult<()> {
        (self.device_fn_1_0.reset_fences)(self.handle(), fences.len() as u32, fences.as_ptr())
            .result()
    }

    pub unsafe fn cmd_bind_index_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        index_type: vk::IndexType,
    ) {
        (self.device_fn_1_0.cmd_bind_index_buffer)(command_buffer, buffer, offset, index_type);
    }

    pub unsafe fn cmd_clear_color_image(
        &self,
        command_buffer: vk::CommandBuffer,
        image: vk::Image,
        image_layout: vk::ImageLayout,
        clear_color_value: &vk::ClearColorValue,
        ranges: &[vk::ImageSubresourceRange],
    ) {
        (self.device_fn_1_0.cmd_clear_color_image)(
            command_buffer,
            image,
            image_layout,
            clear_color_value,
            ranges.len() as u32,
            ranges.as_ptr(),
        );
    }

    pub unsafe fn cmd_clear_depth_stencil_image(
        &self,
        command_buffer: vk::CommandBuffer,
        image: vk::Image,
        image_layout: vk::ImageLayout,
        clear_depth_stencil_value: &vk::ClearDepthStencilValue,
        ranges: &[vk::ImageSubresourceRange],
    ) {
        (self.device_fn_1_0.cmd_clear_depth_stencil_image)(
            command_buffer,
            image,
            image_layout,
            clear_depth_stencil_value,
            ranges.len() as u32,
            ranges.as_ptr(),
        );
    }

    pub unsafe fn cmd_clear_attachments(
        &self,
        command_buffer: vk::CommandBuffer,
        attachments: &[vk::ClearAttachment],
        rects: &[vk::ClearRect],
    ) {
        (self.device_fn_1_0.cmd_clear_attachments)(
            command_buffer,
            attachments.len() as u32,
            attachments.as_ptr(),
            rects.len() as u32,
            rects.as_ptr(),
        );
    }

    pub unsafe fn cmd_draw_indexed(
        &self,
        command_buffer: vk::CommandBuffer,
        index_count: u32,
        instance_count: u32,
        first_index: u32,
        vertex_offset: i32,
        first_instance: u32,
    ) {
        (self.device_fn_1_0.cmd_draw_indexed)(
            command_buffer,
            index_count,
            instance_count,
            first_index,
            vertex_offset,
            first_instance,
        );
    }

    pub unsafe fn cmd_draw_indexed_indirect(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        draw_count: u32,
        stride: u32,
    ) {
        (self.device_fn_1_0.cmd_draw_indexed_indirect)(
            command_buffer,
            buffer,
            offset,
            draw_count,
            stride,
        );
    }

    pub unsafe fn cmd_execute_commands(
        &self,
        primary_command_buffer: vk::CommandBuffer,
        secondary_command_buffers: &[vk::CommandBuffer],
    ) {
        (self.device_fn_1_0.cmd_execute_commands)(
            primary_command_buffer,
            secondary_command_buffers.len() as u32,
            secondary_command_buffers.as_ptr(),
        );
    }

    pub unsafe fn cmd_bind_descriptor_sets(
        &self,
        command_buffer: vk::CommandBuffer,
        pipeline_bind_point: vk::PipelineBindPoint,
        layout: vk::PipelineLayout,
        first_set: u32,
        descriptor_sets: &[vk::DescriptorSet],
        dynamic_offsets: &[u32],
    ) {
        (self.device_fn_1_0.cmd_bind_descriptor_sets)(
            command_buffer,
            pipeline_bind_point,
            layout,
            first_set,
            descriptor_sets.len() as u32,
            descriptor_sets.as_ptr(),
            dynamic_offsets.len() as u32,
            dynamic_offsets.as_ptr(),
        );
    }

    pub unsafe fn cmd_copy_query_pool_results(
        &self,
        command_buffer: vk::CommandBuffer,
        query_pool: vk::QueryPool,
        first_query: u32,
        query_count: u32,
        dst_buffer: vk::Buffer,
        dst_offset: vk::DeviceSize,
        stride: vk::DeviceSize,
        flags: vk::QueryResultFlags,
    ) {
        (self.device_fn_1_0.cmd_copy_query_pool_results)(
            command_buffer,
            query_pool,
            first_query,
            query_count,
            dst_buffer,
            dst_offset,
            stride,
            flags,
        );
    }

    pub unsafe fn cmd_push_constants(
        &self,
        command_buffer: vk::CommandBuffer,
        layout: vk::PipelineLayout,
        stage_flags: vk::ShaderStageFlags,
        offset: u32,
        constants: &[u8],
    ) {
        (self.device_fn_1_0.cmd_push_constants)(
            command_buffer,
            layout,
            stage_flags,
            offset,
            constants.len() as _,
            constants.as_ptr() as _,
        );
    }

    pub unsafe fn cmd_begin_render_pass(
        &self,
        command_buffer: vk::CommandBuffer,
        create_info: &vk::RenderPassBeginInfo,
        contents: vk::SubpassContents,
    ) {
        (self.device_fn_1_0.cmd_begin_render_pass)(command_buffer, create_info, contents);
    }

    pub unsafe fn cmd_next_subpass(
        &self,
        command_buffer: vk::CommandBuffer,
        contents: vk::SubpassContents,
    ) {
        (self.device_fn_1_0.cmd_next_subpass)(command_buffer, contents);
    }

    pub unsafe fn cmd_bind_pipeline(
        &self,
        command_buffer: vk::CommandBuffer,
        pipeline_bind_point: vk::PipelineBindPoint,
        pipeline: vk::Pipeline,
    ) {
        (self.device_fn_1_0.cmd_bind_pipeline)(command_buffer, pipeline_bind_point, pipeline);
    }

    pub unsafe fn cmd_set_scissor(
        &self,
        command_buffer: vk::CommandBuffer,
        first_scissor: u32,
        scissors: &[vk::Rect2D],
    ) {
        (self.device_fn_1_0.cmd_set_scissor)(
            command_buffer,
            first_scissor,
            scissors.len() as u32,
            scissors.as_ptr(),
        );
    }

    pub unsafe fn cmd_set_line_width(&self, command_buffer: vk::CommandBuffer, line_width: f32) {
        (self.device_fn_1_0.cmd_set_line_width)(command_buffer, line_width);
    }

    pub unsafe fn cmd_bind_vertex_buffers(
        &self,
        command_buffer: vk::CommandBuffer,
        first_binding: u32,
        buffers: &[vk::Buffer],
        offsets: &[vk::DeviceSize],
    ) {
        debug_assert_eq!(buffers.len(), offsets.len());
        (self.device_fn_1_0.cmd_bind_vertex_buffers)(
            command_buffer,
            first_binding,
            buffers.len() as u32,
            buffers.as_ptr(),
            offsets.as_ptr(),
        );
    }

    pub unsafe fn cmd_end_render_pass(&self, command_buffer: vk::CommandBuffer) {
        (self.device_fn_1_0.cmd_end_render_pass)(command_buffer);
    }

    pub unsafe fn cmd_draw(
        &self,
        command_buffer: vk::CommandBuffer,
        vertex_count: u32,
        instance_count: u32,
        first_vertex: u32,
        first_instance: u32,
    ) {
        (self.device_fn_1_0.cmd_draw)(
            command_buffer,
            vertex_count,
            instance_count,
            first_vertex,
            first_instance,
        );
    }

    pub unsafe fn cmd_draw_indirect(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        draw_count: u32,
        stride: u32,
    ) {
        (self.device_fn_1_0.cmd_draw_indirect)(command_buffer, buffer, offset, draw_count, stride);
    }

    pub unsafe fn cmd_dispatch(
        &self,
        command_buffer: vk::CommandBuffer,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        (self.device_fn_1_0.cmd_dispatch)(
            command_buffer,
            group_count_x,
            group_count_y,
            group_count_z,
        );
    }

    pub unsafe fn cmd_dispatch_indirect(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
    ) {
        (self.device_fn_1_0.cmd_dispatch_indirect)(command_buffer, buffer, offset);
    }

    pub unsafe fn cmd_set_viewport(
        &self,
        command_buffer: vk::CommandBuffer,
        first_viewport: u32,
        viewports: &[vk::Viewport],
    ) {
        (self.device_fn_1_0.cmd_set_viewport)(
            command_buffer,
            first_viewport,
            viewports.len() as u32,
            viewports.as_ptr(),
        );
    }

    pub unsafe fn cmd_set_depth_bias(
        &self,
        command_buffer: vk::CommandBuffer,
        constant_factor: f32,
        clamp: f32,
        slope_factor: f32,
    ) {
        (self.device_fn_1_0.cmd_set_depth_bias)(
            command_buffer,
            constant_factor,
            clamp,
            slope_factor,
        );
    }

    pub unsafe fn cmd_set_blend_constants(
        &self,
        command_buffer: vk::CommandBuffer,
        blend_constants: &[f32; 4],
    ) {
        (self.device_fn_1_0.cmd_set_blend_constants)(command_buffer, blend_constants);
    }

    pub unsafe fn cmd_set_depth_bounds(
        &self,
        command_buffer: vk::CommandBuffer,
        min_depth_bounds: f32,
        max_depth_bounds: f32,
    ) {
        (self.device_fn_1_0.cmd_set_depth_bounds)(
            command_buffer,
            min_depth_bounds,
            max_depth_bounds,
        );
    }

    pub unsafe fn cmd_set_stencil_compare_mask(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        compare_mask: u32,
    ) {
        (self.device_fn_1_0.cmd_set_stencil_compare_mask)(command_buffer, face_mask, compare_mask);
    }

    pub unsafe fn cmd_set_stencil_write_mask(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        write_mask: u32,
    ) {
        (self.device_fn_1_0.cmd_set_stencil_write_mask)(command_buffer, face_mask, write_mask);
    }

    pub unsafe fn cmd_set_stencil_reference(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        reference: u32,
    ) {
        (self.device_fn_1_0.cmd_set_stencil_reference)(command_buffer, face_mask, reference);
    }

    pub unsafe fn get_query_pool_results<T>(
        &self,
        query_pool: vk::QueryPool,
        first_query: u32,
        query_count: u32,
        data: &mut [T],
        flags: vk::QueryResultFlags,
    ) -> VkResult<()> {
        let data_length = query_count as usize;
        assert!(
            data_length <= data.len(),
            "query_count was higher than the length of the slice"
        );
        let data_size = mem::size_of::<T>() * data_length;
        (self.device_fn_1_0.get_query_pool_results)(
            self.handle(),
            query_pool,
            first_query,
            query_count,
            data_size,
            data.as_mut_ptr() as *mut _,
            mem::size_of::<T>() as _,
            flags,
        )
        .result()
    }
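
    // Readback sketch (hedged; `pool` is an assumed query pool with two
    // timestamp queries already written): request 64-bit results and wait
    // until they become available.
    //
    //     let mut results = [0u64; 2];
    //     device.get_query_pool_results(
    //         pool,
    //         0,
    //         2,
    //         &mut results,
    //         vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
    //     )?;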

    pub unsafe fn cmd_begin_query(
        &self,
        command_buffer: vk::CommandBuffer,
        query_pool: vk::QueryPool,
        query: u32,
        flags: vk::QueryControlFlags,
    ) {
        (self.device_fn_1_0.cmd_begin_query)(command_buffer, query_pool, query, flags);
    }

    pub unsafe fn cmd_end_query(
        &self,
        command_buffer: vk::CommandBuffer,
        query_pool: vk::QueryPool,
        query: u32,
    ) {
        (self.device_fn_1_0.cmd_end_query)(command_buffer, query_pool, query);
    }

    pub unsafe fn cmd_reset_query_pool(
        &self,
        command_buffer: vk::CommandBuffer,
        pool: vk::QueryPool,
        first_query: u32,
        query_count: u32,
    ) {
        (self.device_fn_1_0.cmd_reset_query_pool)(command_buffer, pool, first_query, query_count);
    }

    pub unsafe fn cmd_write_timestamp(
        &self,
        command_buffer: vk::CommandBuffer,
        pipeline_stage: vk::PipelineStageFlags,
        query_pool: vk::QueryPool,
        query: u32,
    ) {
        (self.device_fn_1_0.cmd_write_timestamp)(command_buffer, pipeline_stage, query_pool, query);
    }

    pub unsafe fn create_semaphore(
        &self,
        create_info: &vk::SemaphoreCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::Semaphore> {
        let mut semaphore = mem::zeroed();
        (self.device_fn_1_0.create_semaphore)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut semaphore,
        )
        .result_with_success(semaphore)
    }

    pub unsafe fn create_graphics_pipelines(
        &self,
        pipeline_cache: vk::PipelineCache,
        create_infos: &[vk::GraphicsPipelineCreateInfo],
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
        let mut pipelines = Vec::with_capacity(create_infos.len());
        let err_code = (self.device_fn_1_0.create_graphics_pipelines)(
            self.handle(),
            pipeline_cache,
            create_infos.len() as u32,
            create_infos.as_ptr(),
            allocation_callbacks.as_raw_ptr(),
            pipelines.as_mut_ptr(),
        );
        pipelines.set_len(create_infos.len());
        match err_code {
            vk::Result::SUCCESS => Ok(pipelines),
            _ => Err((pipelines, err_code)),
        }
    }
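
    // Error-handling sketch (hedged): on failure the `Err` variant still
    // carries the output vector, since the driver may have created some of
    // the requested pipelines before failing; destroy any non-null handles.
    //
    //     match device.create_graphics_pipelines(cache, &create_infos, None) {
    //         Ok(pipelines) => { /* use pipelines */ }
    //         Err((pipelines, err)) => {
    //             for p in pipelines {
    //                 if p != vk::Pipeline::null() {
    //                     device.destroy_pipeline(p, None);
    //                 }
    //             }
    //             return Err(err);
    //         }
    //     }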

    pub unsafe fn create_compute_pipelines(
        &self,
        pipeline_cache: vk::PipelineCache,
        create_infos: &[vk::ComputePipelineCreateInfo],
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
        let mut pipelines = Vec::with_capacity(create_infos.len());
        let err_code = (self.device_fn_1_0.create_compute_pipelines)(
            self.handle(),
            pipeline_cache,
            create_infos.len() as u32,
            create_infos.as_ptr(),
            allocation_callbacks.as_raw_ptr(),
            pipelines.as_mut_ptr(),
        );
        pipelines.set_len(create_infos.len());
        match err_code {
            vk::Result::SUCCESS => Ok(pipelines),
            _ => Err((pipelines, err_code)),
        }
    }

    pub unsafe fn create_buffer(
        &self,
        create_info: &vk::BufferCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::Buffer> {
        let mut buffer = mem::zeroed();
        (self.device_fn_1_0.create_buffer)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut buffer,
        )
        .result_with_success(buffer)
    }

    pub unsafe fn create_pipeline_layout(
        &self,
        create_info: &vk::PipelineLayoutCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::PipelineLayout> {
        let mut pipeline_layout = mem::zeroed();
        (self.device_fn_1_0.create_pipeline_layout)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut pipeline_layout,
        )
        .result_with_success(pipeline_layout)
    }

    pub unsafe fn create_pipeline_cache(
        &self,
        create_info: &vk::PipelineCacheCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::PipelineCache> {
        let mut pipeline_cache = mem::zeroed();
        (self.device_fn_1_0.create_pipeline_cache)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut pipeline_cache,
        )
        .result_with_success(pipeline_cache)
    }

    pub unsafe fn get_pipeline_cache_data(
        &self,
        pipeline_cache: vk::PipelineCache,
    ) -> VkResult<Vec<u8>> {
        read_into_uninitialized_vector(|count, data| {
            (self.device_fn_1_0.get_pipeline_cache_data)(
                self.handle(),
                pipeline_cache,
                count,
                data as _,
            )
        })
    }

    pub unsafe fn merge_pipeline_caches(
        &self,
        dst_cache: vk::PipelineCache,
        src_caches: &[vk::PipelineCache],
    ) -> VkResult<()> {
        (self.device_fn_1_0.merge_pipeline_caches)(
            self.handle(),
            dst_cache,
            src_caches.len() as u32,
            src_caches.as_ptr(),
        )
        .result()
    }

    pub unsafe fn map_memory(
        &self,
        memory: vk::DeviceMemory,
        offset: vk::DeviceSize,
        size: vk::DeviceSize,
        flags: vk::MemoryMapFlags,
    ) -> VkResult<*mut c_void> {
        let mut data: *mut c_void = ptr::null_mut();
        (self.device_fn_1_0.map_memory)(self.handle(), memory, offset, size, flags, &mut data)
            .result_with_success(data)
    }

    pub unsafe fn unmap_memory(&self, memory: vk::DeviceMemory) {
        (self.device_fn_1_0.unmap_memory)(self.handle(), memory);
    }
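
    // Upload sketch (hedged; `memory` is assumed host-visible and at least
    // `bytes.len()` large): map, copy, and unmap. A flush via
    // `flush_mapped_memory_ranges` is also needed if the memory is not
    // HOST_COHERENT.
    //
    //     let ptr = device.map_memory(memory, 0, vk::WHOLE_SIZE, vk::MemoryMapFlags::empty())?;
    //     std::ptr::copy_nonoverlapping(bytes.as_ptr(), ptr as *mut u8, bytes.len());
    //     device.unmap_memory(memory);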

    pub unsafe fn invalidate_mapped_memory_ranges(
        &self,
        ranges: &[vk::MappedMemoryRange],
    ) -> VkResult<()> {
        (self.device_fn_1_0.invalidate_mapped_memory_ranges)(
            self.handle(),
            ranges.len() as u32,
            ranges.as_ptr(),
        )
        .result()
    }

    pub unsafe fn flush_mapped_memory_ranges(
        &self,
        ranges: &[vk::MappedMemoryRange],
    ) -> VkResult<()> {
        (self.device_fn_1_0.flush_mapped_memory_ranges)(
            self.handle(),
            ranges.len() as u32,
            ranges.as_ptr(),
        )
        .result()
    }

    pub unsafe fn create_framebuffer(
        &self,
        create_info: &vk::FramebufferCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::Framebuffer> {
        let mut framebuffer = mem::zeroed();
        (self.device_fn_1_0.create_framebuffer)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut framebuffer,
        )
        .result_with_success(framebuffer)
    }

    pub unsafe fn get_device_queue(&self, queue_family_index: u32, queue_index: u32) -> vk::Queue {
        let mut queue = mem::zeroed();
        (self.device_fn_1_0.get_device_queue)(
            self.handle(),
            queue_family_index,
            queue_index,
            &mut queue,
        );
        queue
    }

    pub unsafe fn cmd_pipeline_barrier(
        &self,
        command_buffer: vk::CommandBuffer,
        src_stage_mask: vk::PipelineStageFlags,
        dst_stage_mask: vk::PipelineStageFlags,
        dependency_flags: vk::DependencyFlags,
        memory_barriers: &[vk::MemoryBarrier],
        buffer_memory_barriers: &[vk::BufferMemoryBarrier],
        image_memory_barriers: &[vk::ImageMemoryBarrier],
    ) {
        (self.device_fn_1_0.cmd_pipeline_barrier)(
            command_buffer,
            src_stage_mask,
            dst_stage_mask,
            dependency_flags,
            memory_barriers.len() as u32,
            memory_barriers.as_ptr(),
            buffer_memory_barriers.len() as u32,
            buffer_memory_barriers.as_ptr(),
            image_memory_barriers.len() as u32,
            image_memory_barriers.as_ptr(),
        );
    }

    pub unsafe fn create_render_pass(
        &self,
        create_info: &vk::RenderPassCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::RenderPass> {
        let mut renderpass = mem::zeroed();
        (self.device_fn_1_0.create_render_pass)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut renderpass,
        )
        .result_with_success(renderpass)
    }

    pub unsafe fn begin_command_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        begin_info: &vk::CommandBufferBeginInfo,
    ) -> VkResult<()> {
        (self.device_fn_1_0.begin_command_buffer)(command_buffer, begin_info).result()
    }

    pub unsafe fn end_command_buffer(&self, command_buffer: vk::CommandBuffer) -> VkResult<()> {
        (self.device_fn_1_0.end_command_buffer)(command_buffer).result()
    }

    pub unsafe fn wait_for_fences(
        &self,
        fences: &[vk::Fence],
        wait_all: bool,
        timeout: u64,
    ) -> VkResult<()> {
        (self.device_fn_1_0.wait_for_fences)(
            self.handle(),
            fences.len() as u32,
            fences.as_ptr(),
            wait_all as u32,
            timeout,
        )
        .result()
    }

    pub unsafe fn get_fence_status(&self, fence: vk::Fence) -> VkResult<bool> {
        let err_code = (self.device_fn_1_0.get_fence_status)(self.handle(), fence);
        match err_code {
            vk::Result::SUCCESS => Ok(true),
            vk::Result::NOT_READY => Ok(false),
            _ => Err(err_code),
        }
    }

    pub unsafe fn queue_wait_idle(&self, queue: vk::Queue) -> VkResult<()> {
        (self.device_fn_1_0.queue_wait_idle)(queue).result()
    }

    pub unsafe fn queue_submit(
        &self,
        queue: vk::Queue,
        submits: &[vk::SubmitInfo],
        fence: vk::Fence,
    ) -> VkResult<()> {
        (self.device_fn_1_0.queue_submit)(queue, submits.len() as u32, submits.as_ptr(), fence)
            .result()
    }

    pub unsafe fn queue_bind_sparse(
        &self,
        queue: vk::Queue,
        bind_info: &[vk::BindSparseInfo],
        fence: vk::Fence,
    ) -> VkResult<()> {
        (self.device_fn_1_0.queue_bind_sparse)(
            queue,
            bind_info.len() as u32,
            bind_info.as_ptr(),
            fence,
        )
        .result()
    }

    pub unsafe fn create_buffer_view(
        &self,
        create_info: &vk::BufferViewCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::BufferView> {
        let mut buffer_view = mem::zeroed();
        (self.device_fn_1_0.create_buffer_view)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut buffer_view,
        )
        .result_with_success(buffer_view)
    }

    pub unsafe fn destroy_buffer_view(
        &self,
        buffer_view: vk::BufferView,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_buffer_view)(
            self.handle(),
            buffer_view,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    pub unsafe fn create_image_view(
        &self,
        create_info: &vk::ImageViewCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::ImageView> {
        let mut image_view = mem::zeroed();
        (self.device_fn_1_0.create_image_view)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut image_view,
        )
        .result_with_success(image_view)
    }

    pub unsafe fn allocate_command_buffers(
        &self,
        create_info: &vk::CommandBufferAllocateInfo,
    ) -> VkResult<Vec<vk::CommandBuffer>> {
        let mut buffers = Vec::with_capacity(create_info.command_buffer_count as usize);
        let err_code = (self.device_fn_1_0.allocate_command_buffers)(
            self.handle(),
            create_info,
            buffers.as_mut_ptr(),
        );
        buffers.set_len(create_info.command_buffer_count as usize);
        err_code.result_with_success(buffers)
    }
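
    // Record-and-submit sketch (hedged; `pool` is an assumed command pool and
    // the builder API mirrors ash's generated setters):
    //
    //     let alloc_info = vk::CommandBufferAllocateInfo::builder()
    //         .command_pool(pool)
    //         .level(vk::CommandBufferLevel::PRIMARY)
    //         .command_buffer_count(1)
    //         .build();
    //     let cmd = device.allocate_command_buffers(&alloc_info)?[0];
    //     device.begin_command_buffer(cmd, &vk::CommandBufferBeginInfo::default())?;
    //     // ... record commands ...
    //     device.end_command_buffer(cmd)?;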

    pub unsafe fn create_command_pool(
        &self,
        create_info: &vk::CommandPoolCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::CommandPool> {
        let mut pool = mem::zeroed();
        (self.device_fn_1_0.create_command_pool)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut pool,
        )
        .result_with_success(pool)
    }

    pub unsafe fn create_query_pool(
        &self,
        create_info: &vk::QueryPoolCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::QueryPool> {
        let mut pool = mem::zeroed();
        (self.device_fn_1_0.create_query_pool)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut pool,
        )
        .result_with_success(pool)
    }

    pub unsafe fn create_image(
        &self,
        create_info: &vk::ImageCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::Image> {
        let mut image = mem::zeroed();
        (self.device_fn_1_0.create_image)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut image,
        )
        .result_with_success(image)
    }

    pub unsafe fn get_image_subresource_layout(
        &self,
        image: vk::Image,
        subresource: vk::ImageSubresource,
    ) -> vk::SubresourceLayout {
        let mut layout = mem::zeroed();
        (self.device_fn_1_0.get_image_subresource_layout)(
            self.handle(),
            image,
            &subresource,
            &mut layout,
        );
        layout
    }

    pub unsafe fn get_image_memory_requirements(&self, image: vk::Image) -> vk::MemoryRequirements {
        let mut mem_req = mem::zeroed();
        (self.device_fn_1_0.get_image_memory_requirements)(self.handle(), image, &mut mem_req);
        mem_req
    }

    pub unsafe fn get_buffer_memory_requirements(
        &self,
        buffer: vk::Buffer,
    ) -> vk::MemoryRequirements {
        let mut mem_req = mem::zeroed();
        (self.device_fn_1_0.get_buffer_memory_requirements)(self.handle(), buffer, &mut mem_req);
        mem_req
    }

    pub unsafe fn allocate_memory(
        &self,
        create_info: &vk::MemoryAllocateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::DeviceMemory> {
        let mut memory = mem::zeroed();
        (self.device_fn_1_0.allocate_memory)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut memory,
        )
        .result_with_success(memory)
    }

    pub unsafe fn create_shader_module(
        &self,
        create_info: &vk::ShaderModuleCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::ShaderModule> {
        let mut shader = mem::zeroed();
        (self.device_fn_1_0.create_shader_module)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut shader,
        )
        .result_with_success(shader)
    }

    pub unsafe fn create_fence(
        &self,
        create_info: &vk::FenceCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::Fence> {
        let mut fence = mem::zeroed();
        (self.device_fn_1_0.create_fence)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut fence,
        )
        .result_with_success(fence)
    }

    pub unsafe fn bind_buffer_memory(
        &self,
        buffer: vk::Buffer,
        device_memory: vk::DeviceMemory,
        offset: vk::DeviceSize,
    ) -> VkResult<()> {
        (self.device_fn_1_0.bind_buffer_memory)(self.handle(), buffer, device_memory, offset)
            .result()
    }

    pub unsafe fn bind_image_memory(
        &self,
        image: vk::Image,
        device_memory: vk::DeviceMemory,
        offset: vk::DeviceSize,
    ) -> VkResult<()> {
        (self.device_fn_1_0.bind_image_memory)(self.handle(), image, device_memory, offset).result()
    }

    pub unsafe fn get_render_area_granularity(&self, render_pass: vk::RenderPass) -> vk::Extent2D {
        let mut granularity = mem::zeroed();
        (self.device_fn_1_0.get_render_area_granularity)(
            self.handle(),
            render_pass,
            &mut granularity,
        );
        granularity
    }

    pub unsafe fn get_device_memory_commitment(&self, memory: vk::DeviceMemory) -> vk::DeviceSize {
        let mut committed_memory_in_bytes = 0;
        (self.device_fn_1_0.get_device_memory_commitment)(
            self.handle(),
            memory,
            &mut committed_memory_in_bytes,
        );
        committed_memory_in_bytes
    }

    pub unsafe fn get_image_sparse_memory_requirements(
        &self,
        image: vk::Image,
    ) -> Vec<vk::SparseImageMemoryRequirements> {
        read_into_uninitialized_vector(|count, data| {
            (self.device_fn_1_0.get_image_sparse_memory_requirements)(
                self.handle(),
                image,
                count,
                data,
            );
            vk::Result::SUCCESS
        })
        .unwrap()
    }
}