// Source: extensions_generated.rs
// Language: Rust
//
//
#![allow(unused_imports)]
use crate::vk::*;
use core::ffi::*;
#[doc = "Extensions tagged AMD"]
// Generated module: one submodule per VK_AMD_* extension. Every submodule
// re-exports the extension's name/version constants as `NAME` / `SPEC_VERSION`;
// extensions that introduce device-level commands additionally get a `Device`
// wrapper plus a raw `DeviceFn` function-pointer table.
pub mod amd {
    #[doc = "VK_AMD_rasterization_order"]
    pub mod rasterization_order {
        use super::super::*;
        pub use {
            crate::vk::AMD_RASTERIZATION_ORDER_NAME as NAME,
            crate::vk::AMD_RASTERIZATION_ORDER_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_shader_trinary_minmax"]
    pub mod shader_trinary_minmax {
        use super::super::*;
        pub use {
            crate::vk::AMD_SHADER_TRINARY_MINMAX_NAME as NAME,
            crate::vk::AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_shader_explicit_vertex_parameter"]
    pub mod shader_explicit_vertex_parameter {
        use super::super::*;
        pub use {
            crate::vk::AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_NAME as NAME,
            crate::vk::AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_gcn_shader"]
    pub mod gcn_shader {
        use super::super::*;
        pub use {
            crate::vk::AMD_GCN_SHADER_NAME as NAME,
            crate::vk::AMD_GCN_SHADER_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_draw_indirect_count"]
    pub mod draw_indirect_count {
        use super::super::*;
        pub use {
            crate::vk::AMD_DRAW_INDIRECT_COUNT_NAME as NAME,
            crate::vk::AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_AMD_draw_indirect_count device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level entry points for `device`
            /// via `vkGetDeviceProcAddr` obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_AMD_draw_indirect_count device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_draw_indirect_count_amd: PFN_vkCmdDrawIndirectCount,
            pub cmd_draw_indexed_indirect_count_amd: PFN_vkCmdDrawIndexedIndirectCount,
        }
        // Function pointers are plain code addresses; sharing across threads is fine.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased (`dyn`) inner loader shared by all `F` instantiations.
            // For each entry point: query the loader with the exact C symbol
            // name; if it returns null, fall back to a stub that panics with
            // the missing function's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_draw_indirect_count_amd: unsafe {
                        unsafe extern "system" fn cmd_draw_indirect_count_amd(
                            _command_buffer: CommandBuffer,
                            _buffer: Buffer,
                            _offset: DeviceSize,
                            _count_buffer: Buffer,
                            _count_buffer_offset: DeviceSize,
                            _max_draw_count: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_indirect_count_amd)
                            ))
                        }
                        // SAFETY: the byte string literal is NUL-terminated.
                        let cname =
                            CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndirectCountAMD\0");
                        let val = _f(cname);
                        if val.is_null() {
                            cmd_draw_indirect_count_amd
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                    cmd_draw_indexed_indirect_count_amd: unsafe {
                        unsafe extern "system" fn cmd_draw_indexed_indirect_count_amd(
                            _command_buffer: CommandBuffer,
                            _buffer: Buffer,
                            _offset: DeviceSize,
                            _count_buffer: Buffer,
                            _count_buffer_offset: DeviceSize,
                            _max_draw_count: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_indexed_indirect_count_amd)
                            ))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(
                            b"vkCmdDrawIndexedIndirectCountAMD\0",
                        );
                        let val = _f(cname);
                        if val.is_null() {
                            cmd_draw_indexed_indirect_count_amd
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_AMD_negative_viewport_height"]
    pub mod negative_viewport_height {
        use super::super::*;
        pub use {
            crate::vk::AMD_NEGATIVE_VIEWPORT_HEIGHT_NAME as NAME,
            crate::vk::AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_gpu_shader_half_float"]
    pub mod gpu_shader_half_float {
        use super::super::*;
        pub use {
            crate::vk::AMD_GPU_SHADER_HALF_FLOAT_NAME as NAME,
            crate::vk::AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_shader_ballot"]
    pub mod shader_ballot {
        use super::super::*;
        pub use {
            crate::vk::AMD_SHADER_BALLOT_NAME as NAME,
            crate::vk::AMD_SHADER_BALLOT_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_texture_gather_bias_lod"]
    pub mod texture_gather_bias_lod {
        use super::super::*;
        pub use {
            crate::vk::AMD_TEXTURE_GATHER_BIAS_LOD_NAME as NAME,
            crate::vk::AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_shader_info"]
    pub mod shader_info {
        use super::super::*;
        pub use {
            crate::vk::AMD_SHADER_INFO_NAME as NAME,
            crate::vk::AMD_SHADER_INFO_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_AMD_shader_info device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves the extension's entry points via `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_AMD_shader_info device-level function pointers"]
        pub struct DeviceFn {
            pub get_shader_info_amd: PFN_vkGetShaderInfoAMD,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Null result from the loader falls back to a panicking stub.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_shader_info_amd: unsafe {
                        unsafe extern "system" fn get_shader_info_amd(
                            _device: crate::vk::Device,
                            _pipeline: Pipeline,
                            _shader_stage: ShaderStageFlags,
                            _info_type: ShaderInfoTypeAMD,
                            _p_info_size: *mut usize,
                            _p_info: *mut c_void,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(get_shader_info_amd)))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetShaderInfoAMD\0");
                        let val = _f(cname);
                        if val.is_null() {
                            get_shader_info_amd
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_AMD_shader_image_load_store_lod"]
    pub mod shader_image_load_store_lod {
        use super::super::*;
        pub use {
            crate::vk::AMD_SHADER_IMAGE_LOAD_STORE_LOD_NAME as NAME,
            crate::vk::AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_gpu_shader_int16"]
    pub mod gpu_shader_int16 {
        use super::super::*;
        pub use {
            crate::vk::AMD_GPU_SHADER_INT16_NAME as NAME,
            crate::vk::AMD_GPU_SHADER_INT16_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_mixed_attachment_samples"]
    pub mod mixed_attachment_samples {
        use super::super::*;
        pub use {
            crate::vk::AMD_MIXED_ATTACHMENT_SAMPLES_NAME as NAME,
            crate::vk::AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_shader_fragment_mask"]
    pub mod shader_fragment_mask {
        use super::super::*;
        pub use {
            crate::vk::AMD_SHADER_FRAGMENT_MASK_NAME as NAME,
            crate::vk::AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_buffer_marker"]
    pub mod buffer_marker {
        use super::super::*;
        pub use {
            crate::vk::AMD_BUFFER_MARKER_NAME as NAME,
            crate::vk::AMD_BUFFER_MARKER_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_AMD_buffer_marker device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves the extension's entry points via `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_AMD_buffer_marker device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_write_buffer_marker_amd: PFN_vkCmdWriteBufferMarkerAMD,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Null result from the loader falls back to a panicking stub.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_write_buffer_marker_amd: unsafe {
                        unsafe extern "system" fn cmd_write_buffer_marker_amd(
                            _command_buffer: CommandBuffer,
                            _pipeline_stage: PipelineStageFlags,
                            _dst_buffer: Buffer,
                            _dst_offset: DeviceSize,
                            _marker: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_write_buffer_marker_amd)
                            ))
                        }
                        let cname =
                            CStr::from_bytes_with_nul_unchecked(b"vkCmdWriteBufferMarkerAMD\0");
                        let val = _f(cname);
                        if val.is_null() {
                            cmd_write_buffer_marker_amd
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_AMD_pipeline_compiler_control"]
    pub mod pipeline_compiler_control {
        use super::super::*;
        pub use {
            crate::vk::AMD_PIPELINE_COMPILER_CONTROL_NAME as NAME,
            crate::vk::AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_shader_core_properties"]
    pub mod shader_core_properties {
        use super::super::*;
        pub use {
            crate::vk::AMD_SHADER_CORE_PROPERTIES_NAME as NAME,
            crate::vk::AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_memory_overallocation_behavior"]
    pub mod memory_overallocation_behavior {
        use super::super::*;
        pub use {
            crate::vk::AMD_MEMORY_OVERALLOCATION_BEHAVIOR_NAME as NAME,
            crate::vk::AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_display_native_hdr"]
    pub mod display_native_hdr {
        use super::super::*;
        pub use {
            crate::vk::AMD_DISPLAY_NATIVE_HDR_NAME as NAME,
            crate::vk::AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_AMD_display_native_hdr device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves the extension's entry points via `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_AMD_display_native_hdr device-level function pointers"]
        pub struct DeviceFn {
            pub set_local_dimming_amd: PFN_vkSetLocalDimmingAMD,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Null result from the loader falls back to a panicking stub.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    set_local_dimming_amd: unsafe {
                        unsafe extern "system" fn set_local_dimming_amd(
                            _device: crate::vk::Device,
                            _swap_chain: SwapchainKHR,
                            _local_dimming_enable: Bool32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(set_local_dimming_amd)
                            ))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(b"vkSetLocalDimmingAMD\0");
                        let val = _f(cname);
                        if val.is_null() {
                            set_local_dimming_amd
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_AMD_shader_core_properties2"]
    pub mod shader_core_properties2 {
        use super::super::*;
        pub use {
            crate::vk::AMD_SHADER_CORE_PROPERTIES2_NAME as NAME,
            crate::vk::AMD_SHADER_CORE_PROPERTIES2_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_device_coherent_memory"]
    pub mod device_coherent_memory {
        use super::super::*;
        pub use {
            crate::vk::AMD_DEVICE_COHERENT_MEMORY_NAME as NAME,
            crate::vk::AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_AMD_shader_early_and_late_fragment_tests"]
    pub mod shader_early_and_late_fragment_tests {
        use super::super::*;
        pub use {
            crate::vk::AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_NAME as NAME,
            crate::vk::AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION as SPEC_VERSION,
        };
    }
}
#[doc = "Extensions tagged AMDX"]
// Generated module for VK_AMDX_* (experimental AMD) extensions.
pub mod amdx {
    #[doc = "VK_AMDX_shader_enqueue"]
    pub mod shader_enqueue {
        use super::super::*;
        pub use {
            crate::vk::AMDX_SHADER_ENQUEUE_NAME as NAME,
            crate::vk::AMDX_SHADER_ENQUEUE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_AMDX_shader_enqueue device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves the extension's device-level entry points for `device`
            /// via `vkGetDeviceProcAddr` obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_AMDX_shader_enqueue device-level function pointers"]
        pub struct DeviceFn {
            pub create_execution_graph_pipelines_amdx: PFN_vkCreateExecutionGraphPipelinesAMDX,
            pub get_execution_graph_pipeline_scratch_size_amdx:
                PFN_vkGetExecutionGraphPipelineScratchSizeAMDX,
            pub get_execution_graph_pipeline_node_index_amdx:
                PFN_vkGetExecutionGraphPipelineNodeIndexAMDX,
            pub cmd_initialize_graph_scratch_memory_amdx: PFN_vkCmdInitializeGraphScratchMemoryAMDX,
            pub cmd_dispatch_graph_amdx: PFN_vkCmdDispatchGraphAMDX,
            pub cmd_dispatch_graph_indirect_amdx: PFN_vkCmdDispatchGraphIndirectAMDX,
            pub cmd_dispatch_graph_indirect_count_amdx: PFN_vkCmdDispatchGraphIndirectCountAMDX,
        }
        // Function pointers are plain code addresses; sharing across threads is fine.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader: for each entry point, query the loader
            // with the exact C symbol name; a null result falls back to a stub
            // that panics with the missing function's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_execution_graph_pipelines_amdx: unsafe {
                        unsafe extern "system" fn create_execution_graph_pipelines_amdx(
                            _device: crate::vk::Device,
                            _pipeline_cache: PipelineCache,
                            _create_info_count: u32,
                            _p_create_infos: *const ExecutionGraphPipelineCreateInfoAMDX<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_pipelines: *mut Pipeline,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_execution_graph_pipelines_amdx)
                            ))
                        }
                        // SAFETY: the byte string literal is NUL-terminated.
                        let cname = CStr::from_bytes_with_nul_unchecked(
                            b"vkCreateExecutionGraphPipelinesAMDX\0",
                        );
                        let val = _f(cname);
                        if val.is_null() {
                            create_execution_graph_pipelines_amdx
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                    get_execution_graph_pipeline_scratch_size_amdx: unsafe {
                        unsafe extern "system" fn get_execution_graph_pipeline_scratch_size_amdx(
                            _device: crate::vk::Device,
                            _execution_graph: Pipeline,
                            _p_size_info: *mut ExecutionGraphPipelineScratchSizeAMDX<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_execution_graph_pipeline_scratch_size_amdx)
                            ))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(
                            b"vkGetExecutionGraphPipelineScratchSizeAMDX\0",
                        );
                        let val = _f(cname);
                        if val.is_null() {
                            get_execution_graph_pipeline_scratch_size_amdx
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                    get_execution_graph_pipeline_node_index_amdx: unsafe {
                        unsafe extern "system" fn get_execution_graph_pipeline_node_index_amdx(
                            _device: crate::vk::Device,
                            _execution_graph: Pipeline,
                            _p_node_info: *const PipelineShaderStageNodeCreateInfoAMDX<'_>,
                            _p_node_index: *mut u32,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_execution_graph_pipeline_node_index_amdx)
                            ))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(
                            b"vkGetExecutionGraphPipelineNodeIndexAMDX\0",
                        );
                        let val = _f(cname);
                        if val.is_null() {
                            get_execution_graph_pipeline_node_index_amdx
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                    cmd_initialize_graph_scratch_memory_amdx: unsafe {
                        unsafe extern "system" fn cmd_initialize_graph_scratch_memory_amdx(
                            _command_buffer: CommandBuffer,
                            _scratch: DeviceAddress,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_initialize_graph_scratch_memory_amdx)
                            ))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(
                            b"vkCmdInitializeGraphScratchMemoryAMDX\0",
                        );
                        let val = _f(cname);
                        if val.is_null() {
                            cmd_initialize_graph_scratch_memory_amdx
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                    cmd_dispatch_graph_amdx: unsafe {
                        unsafe extern "system" fn cmd_dispatch_graph_amdx(
                            _command_buffer: CommandBuffer,
                            _scratch: DeviceAddress,
                            _p_count_info: *const DispatchGraphCountInfoAMDX,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_dispatch_graph_amdx)
                            ))
                        }
                        let cname =
                            CStr::from_bytes_with_nul_unchecked(b"vkCmdDispatchGraphAMDX\0");
                        let val = _f(cname);
                        if val.is_null() {
                            cmd_dispatch_graph_amdx
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                    cmd_dispatch_graph_indirect_amdx: unsafe {
                        unsafe extern "system" fn cmd_dispatch_graph_indirect_amdx(
                            _command_buffer: CommandBuffer,
                            _scratch: DeviceAddress,
                            _p_count_info: *const DispatchGraphCountInfoAMDX,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_dispatch_graph_indirect_amdx)
                            ))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(
                            b"vkCmdDispatchGraphIndirectAMDX\0",
                        );
                        let val = _f(cname);
                        if val.is_null() {
                            cmd_dispatch_graph_indirect_amdx
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                    cmd_dispatch_graph_indirect_count_amdx: unsafe {
                        unsafe extern "system" fn cmd_dispatch_graph_indirect_count_amdx(
                            _command_buffer: CommandBuffer,
                            _scratch: DeviceAddress,
                            _count_info: DeviceAddress,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_dispatch_graph_indirect_count_amdx)
                            ))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(
                            b"vkCmdDispatchGraphIndirectCountAMDX\0",
                        );
                        let val = _f(cname);
                        if val.is_null() {
                            cmd_dispatch_graph_indirect_count_amdx
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                }
            }
        }
    }
}
#[doc = "Extensions tagged ANDROID"]
// Generated module for VK_ANDROID_* extensions.
pub mod android {
    #[doc = "VK_ANDROID_native_buffer"]
    pub mod native_buffer {
        use super::super::*;
        pub use {
            crate::vk::ANDROID_NATIVE_BUFFER_NAME as NAME,
            crate::vk::ANDROID_NATIVE_BUFFER_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_ANDROID_native_buffer device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves the extension's device-level entry points for `device`
            /// via `vkGetDeviceProcAddr` obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_ANDROID_native_buffer device-level function pointers"]
        pub struct DeviceFn {
            pub get_swapchain_gralloc_usage_android: PFN_vkGetSwapchainGrallocUsageANDROID,
            pub acquire_image_android: PFN_vkAcquireImageANDROID,
            pub queue_signal_release_image_android: PFN_vkQueueSignalReleaseImageANDROID,
            pub get_swapchain_gralloc_usage2_android: PFN_vkGetSwapchainGrallocUsage2ANDROID,
        }
        // Function pointers are plain code addresses; sharing across threads is fine.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader: query each exact C symbol name; a null
            // result falls back to a stub that panics when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_swapchain_gralloc_usage_android: unsafe {
                        unsafe extern "system" fn get_swapchain_gralloc_usage_android(
                            _device: crate::vk::Device,
                            _format: Format,
                            _image_usage: ImageUsageFlags,
                            _gralloc_usage: *mut c_int,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_swapchain_gralloc_usage_android)
                            ))
                        }
                        // SAFETY: the byte string literal is NUL-terminated.
                        let cname = CStr::from_bytes_with_nul_unchecked(
                            b"vkGetSwapchainGrallocUsageANDROID\0",
                        );
                        let val = _f(cname);
                        if val.is_null() {
                            get_swapchain_gralloc_usage_android
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                    acquire_image_android: unsafe {
                        unsafe extern "system" fn acquire_image_android(
                            _device: crate::vk::Device,
                            _image: Image,
                            _native_fence_fd: c_int,
                            _semaphore: Semaphore,
                            _fence: Fence,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(acquire_image_android)
                            ))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(b"vkAcquireImageANDROID\0");
                        let val = _f(cname);
                        if val.is_null() {
                            acquire_image_android
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                    queue_signal_release_image_android: unsafe {
                        unsafe extern "system" fn queue_signal_release_image_android(
                            _queue: Queue,
                            _wait_semaphore_count: u32,
                            _p_wait_semaphores: *const Semaphore,
                            _image: Image,
                            _p_native_fence_fd: *mut c_int,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(queue_signal_release_image_android)
                            ))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(
                            b"vkQueueSignalReleaseImageANDROID\0",
                        );
                        let val = _f(cname);
                        if val.is_null() {
                            queue_signal_release_image_android
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                    get_swapchain_gralloc_usage2_android: unsafe {
                        unsafe extern "system" fn get_swapchain_gralloc_usage2_android(
                            _device: crate::vk::Device,
                            _format: Format,
                            _image_usage: ImageUsageFlags,
                            _swapchain_image_usage: SwapchainImageUsageFlagsANDROID,
                            _gralloc_consumer_usage: *mut u64,
                            _gralloc_producer_usage: *mut u64,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_swapchain_gralloc_usage2_android)
                            ))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(
                            b"vkGetSwapchainGrallocUsage2ANDROID\0",
                        );
                        let val = _f(cname);
                        if val.is_null() {
                            get_swapchain_gralloc_usage2_android
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_ANDROID_external_memory_android_hardware_buffer"]
    pub mod external_memory_android_hardware_buffer {
        use super::super::*;
        pub use {
            crate::vk::ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_NAME as NAME,
            crate::vk::ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_ANDROID_external_memory_android_hardware_buffer device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves the extension's entry points via `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_ANDROID_external_memory_android_hardware_buffer device-level function pointers"]
        pub struct DeviceFn {
            pub get_android_hardware_buffer_properties_android:
                PFN_vkGetAndroidHardwareBufferPropertiesANDROID,
            pub get_memory_android_hardware_buffer_android:
                PFN_vkGetMemoryAndroidHardwareBufferANDROID,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Null result from the loader falls back to a panicking stub.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_android_hardware_buffer_properties_android: unsafe {
                        unsafe extern "system" fn get_android_hardware_buffer_properties_android(
                            _device: crate::vk::Device,
                            _buffer: *const AHardwareBuffer,
                            _p_properties: *mut AndroidHardwareBufferPropertiesANDROID<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_android_hardware_buffer_properties_android)
                            ))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(
                            b"vkGetAndroidHardwareBufferPropertiesANDROID\0",
                        );
                        let val = _f(cname);
                        if val.is_null() {
                            get_android_hardware_buffer_properties_android
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                    get_memory_android_hardware_buffer_android: unsafe {
                        unsafe extern "system" fn get_memory_android_hardware_buffer_android(
                            _device: crate::vk::Device,
                            _p_info: *const MemoryGetAndroidHardwareBufferInfoANDROID<'_>,
                            _p_buffer: *mut *mut AHardwareBuffer,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_memory_android_hardware_buffer_android)
                            ))
                        }
                        let cname = CStr::from_bytes_with_nul_unchecked(
                            b"vkGetMemoryAndroidHardwareBufferANDROID\0",
                        );
                        let val = _f(cname);
                        if val.is_null() {
                            get_memory_android_hardware_buffer_android
                        } else {
                            ::core::mem::transmute(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_ANDROID_external_format_resolve"]
    pub mod external_format_resolve {
        use super::super::*;
        pub use {
            crate::vk::ANDROID_EXTERNAL_FORMAT_RESOLVE_NAME as NAME,
            crate::vk::ANDROID_EXTERNAL_FORMAT_RESOLVE_SPEC_VERSION as SPEC_VERSION,
        };
    }
}
#[doc = "Extensions tagged ARM"]
pub mod arm {
#[doc = "VK_ARM_rasterization_order_attachment_access"]
pub mod rasterization_order_attachment_access {
use super::super::*;
pub use {
crate::vk::ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_NAME as NAME,
crate::vk::ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION as SPEC_VERSION,
};
}
#[doc = "VK_ARM_shader_core_properties"]
pub mod shader_core_properties {
use super::super::*;
pub use {
crate::vk::ARM_SHADER_CORE_PROPERTIES_NAME as NAME,
crate::vk::ARM_SHADER_CORE_PROPERTIES_SPEC_VERSION as SPEC_VERSION,
};
}
#[doc = "VK_ARM_scheduling_controls"]
pub mod scheduling_controls {
use super::super::*;
pub use {
crate::vk::ARM_SCHEDULING_CONTROLS_NAME as NAME,
crate::vk::ARM_SCHEDULING_CONTROLS_SPEC_VERSION as SPEC_VERSION,
};
}
#[doc = "VK_ARM_render_pass_striped"]
pub mod render_pass_striped {
use super::super::*;
pub use {
crate::vk::ARM_RENDER_PASS_STRIPED_NAME as NAME,
crate::vk::ARM_RENDER_PASS_STRIPED_SPEC_VERSION as SPEC_VERSION,
};
}
#[doc = "VK_ARM_shader_core_builtins"]
pub mod shader_core_builtins {
use super::super::*;
pub use {
crate::vk::ARM_SHADER_CORE_BUILTINS_NAME as NAME,
crate::vk::ARM_SHADER_CORE_BUILTINS_SPEC_VERSION as SPEC_VERSION,
};
}
}
#[doc = "Extensions tagged EXT"]
pub mod ext {
#[doc = "VK_EXT_debug_report"]
pub mod debug_report {
    use super::super::*;
    pub use {
        crate::vk::EXT_DEBUG_REPORT_NAME as NAME,
        crate::vk::EXT_DEBUG_REPORT_SPEC_VERSION as SPEC_VERSION,
    };
    #[doc = "VK_EXT_debug_report instance-level functions"]
    // Unlike the device-level wrappers elsewhere in this file, this extension
    // is instance-level: entry points are resolved through
    // `vkGetInstanceProcAddr` from the `Entry`.
    #[derive(Clone)]
    pub struct Instance {
        // Loaded function-pointer table for this extension.
        pub(crate) fp: InstanceFn,
        // Instance handle the pointers were resolved against.
        pub(crate) handle: crate::vk::Instance,
    }
    impl Instance {
        /// Resolves the extension's instance-level entry points for `instance`
        /// via `vkGetInstanceProcAddr` obtained from `entry`.
        pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
            let handle = instance.handle();
            let fp = InstanceFn::load(|name| unsafe {
                core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
            });
            Self { handle, fp }
        }
        /// Raw function-pointer table.
        #[inline]
        pub fn fp(&self) -> &InstanceFn {
            &self.fp
        }
        /// Instance handle the table was loaded for.
        #[inline]
        pub fn instance(&self) -> crate::vk::Instance {
            self.handle
        }
    }
    #[derive(Clone)]
    #[doc = "Raw VK_EXT_debug_report instance-level function pointers"]
    pub struct InstanceFn {
        pub create_debug_report_callback_ext: PFN_vkCreateDebugReportCallbackEXT,
        pub destroy_debug_report_callback_ext: PFN_vkDestroyDebugReportCallbackEXT,
        pub debug_report_message_ext: PFN_vkDebugReportMessageEXT,
    }
    // Function pointers are plain code addresses; sharing across threads is fine.
    unsafe impl Send for InstanceFn {}
    unsafe impl Sync for InstanceFn {}
    impl InstanceFn {
        pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
            Self::load_erased(&mut f)
        }
        // Type-erased inner loader: query each exact C symbol name; a null
        // result falls back to a stub that panics when called.
        fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
            Self {
                create_debug_report_callback_ext: unsafe {
                    unsafe extern "system" fn create_debug_report_callback_ext(
                        _instance: crate::vk::Instance,
                        _p_create_info: *const DebugReportCallbackCreateInfoEXT<'_>,
                        _p_allocator: *const AllocationCallbacks<'_>,
                        _p_callback: *mut DebugReportCallbackEXT,
                    ) -> Result {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(create_debug_report_callback_ext)
                        ))
                    }
                    // SAFETY: the byte string literal is NUL-terminated.
                    let cname = CStr::from_bytes_with_nul_unchecked(
                        b"vkCreateDebugReportCallbackEXT\0",
                    );
                    let val = _f(cname);
                    if val.is_null() {
                        create_debug_report_callback_ext
                    } else {
                        ::core::mem::transmute(val)
                    }
                },
                destroy_debug_report_callback_ext: unsafe {
                    unsafe extern "system" fn destroy_debug_report_callback_ext(
                        _instance: crate::vk::Instance,
                        _callback: DebugReportCallbackEXT,
                        _p_allocator: *const AllocationCallbacks<'_>,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(destroy_debug_report_callback_ext)
                        ))
                    }
                    let cname = CStr::from_bytes_with_nul_unchecked(
                        b"vkDestroyDebugReportCallbackEXT\0",
                    );
                    let val = _f(cname);
                    if val.is_null() {
                        destroy_debug_report_callback_ext
                    } else {
                        ::core::mem::transmute(val)
                    }
                },
                debug_report_message_ext: unsafe {
                    unsafe extern "system" fn debug_report_message_ext(
                        _instance: crate::vk::Instance,
                        _flags: DebugReportFlagsEXT,
                        _object_type: DebugReportObjectTypeEXT,
                        _object: u64,
                        _location: usize,
                        _message_code: i32,
                        _p_layer_prefix: *const c_char,
                        _p_message: *const c_char,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(debug_report_message_ext)
                        ))
                    }
                    let cname =
                        CStr::from_bytes_with_nul_unchecked(b"vkDebugReportMessageEXT\0");
                    let val = _f(cname);
                    if val.is_null() {
                        debug_report_message_ext
                    } else {
                        ::core::mem::transmute(val)
                    }
                },
            }
        }
    }
}
#[doc = "VK_EXT_depth_range_unrestricted"]
pub mod depth_range_unrestricted {
use super::super::*;
pub use {
crate::vk::EXT_DEPTH_RANGE_UNRESTRICTED_NAME as NAME,
crate::vk::EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION as SPEC_VERSION,
};
}
#[doc = "VK_EXT_debug_marker"]
pub mod debug_marker {
    use super::super::*;
    pub use {
        crate::vk::EXT_DEBUG_MARKER_NAME as NAME,
        crate::vk::EXT_DEBUG_MARKER_SPEC_VERSION as SPEC_VERSION,
    };
    #[doc = "VK_EXT_debug_marker device-level functions"]
    #[derive(Clone)]
    pub struct Device {
        // Loaded function-pointer table for this extension.
        pub(crate) fp: DeviceFn,
        // Device handle the pointers were resolved against.
        pub(crate) handle: crate::vk::Device,
    }
    impl Device {
        /// Resolves the extension's device-level entry points for `device`
        /// via `vkGetDeviceProcAddr` obtained from `instance`.
        pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
            let handle = device.handle();
            let fp = DeviceFn::load(|name| unsafe {
                core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
            });
            Self { handle, fp }
        }
        /// Raw function-pointer table.
        #[inline]
        pub fn fp(&self) -> &DeviceFn {
            &self.fp
        }
        /// Device handle the table was loaded for.
        #[inline]
        pub fn device(&self) -> crate::vk::Device {
            self.handle
        }
    }
    #[derive(Clone)]
    #[doc = "Raw VK_EXT_debug_marker device-level function pointers"]
    pub struct DeviceFn {
        pub debug_marker_set_object_tag_ext: PFN_vkDebugMarkerSetObjectTagEXT,
        pub debug_marker_set_object_name_ext: PFN_vkDebugMarkerSetObjectNameEXT,
        pub cmd_debug_marker_begin_ext: PFN_vkCmdDebugMarkerBeginEXT,
        pub cmd_debug_marker_end_ext: PFN_vkCmdDebugMarkerEndEXT,
        pub cmd_debug_marker_insert_ext: PFN_vkCmdDebugMarkerInsertEXT,
    }
    // Function pointers are plain code addresses; sharing across threads is fine.
    unsafe impl Send for DeviceFn {}
    unsafe impl Sync for DeviceFn {}
    impl DeviceFn {
        pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
            Self::load_erased(&mut f)
        }
        // Type-erased inner loader: query each exact C symbol name; a null
        // result falls back to a stub that panics when called.
        fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
            Self {
                debug_marker_set_object_tag_ext: unsafe {
                    unsafe extern "system" fn debug_marker_set_object_tag_ext(
                        _device: crate::vk::Device,
                        _p_tag_info: *const DebugMarkerObjectTagInfoEXT<'_>,
                    ) -> Result {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(debug_marker_set_object_tag_ext)
                        ))
                    }
                    // SAFETY: the byte string literal is NUL-terminated.
                    let cname =
                        CStr::from_bytes_with_nul_unchecked(b"vkDebugMarkerSetObjectTagEXT\0");
                    let val = _f(cname);
                    if val.is_null() {
                        debug_marker_set_object_tag_ext
                    } else {
                        ::core::mem::transmute(val)
                    }
                },
                debug_marker_set_object_name_ext: unsafe {
                    unsafe extern "system" fn debug_marker_set_object_name_ext(
                        _device: crate::vk::Device,
                        _p_name_info: *const DebugMarkerObjectNameInfoEXT<'_>,
                    ) -> Result {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(debug_marker_set_object_name_ext)
                        ))
                    }
                    let cname =
                        CStr::from_bytes_with_nul_unchecked(b"vkDebugMarkerSetObjectNameEXT\0");
                    let val = _f(cname);
                    if val.is_null() {
                        debug_marker_set_object_name_ext
                    } else {
                        ::core::mem::transmute(val)
                    }
                },
                cmd_debug_marker_begin_ext: unsafe {
                    unsafe extern "system" fn cmd_debug_marker_begin_ext(
                        _command_buffer: CommandBuffer,
                        _p_marker_info: *const DebugMarkerMarkerInfoEXT<'_>,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(cmd_debug_marker_begin_ext)
                        ))
                    }
                    let cname =
                        CStr::from_bytes_with_nul_unchecked(b"vkCmdDebugMarkerBeginEXT\0");
                    let val = _f(cname);
                    if val.is_null() {
                        cmd_debug_marker_begin_ext
                    } else {
                        ::core::mem::transmute(val)
                    }
                },
                cmd_debug_marker_end_ext: unsafe {
                    unsafe extern "system" fn cmd_debug_marker_end_ext(
                        _command_buffer: CommandBuffer,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(cmd_debug_marker_end_ext)
                        ))
                    }
                    let cname =
                        CStr::from_bytes_with_nul_unchecked(b"vkCmdDebugMarkerEndEXT\0");
                    let val = _f(cname);
                    if val.is_null() {
                        cmd_debug_marker_end_ext
                    } else {
                        ::core::mem::transmute(val)
                    }
                },
                cmd_debug_marker_insert_ext: unsafe {
                    unsafe extern "system" fn cmd_debug_marker_insert_ext(
                        _command_buffer: CommandBuffer,
                        _p_marker_info: *const DebugMarkerMarkerInfoEXT<'_>,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(cmd_debug_marker_insert_ext)
                        ))
                    }
                    let cname =
                        CStr::from_bytes_with_nul_unchecked(b"vkCmdDebugMarkerInsertEXT\0");
                    let val = _f(cname);
                    if val.is_null() {
                        cmd_debug_marker_insert_ext
                    } else {
                        ::core::mem::transmute(val)
                    }
                },
            }
        }
    }
}
#[doc = "VK_EXT_transform_feedback"]
pub mod transform_feedback {
use super::super::*;
pub use {
crate::vk::EXT_TRANSFORM_FEEDBACK_NAME as NAME,
crate::vk::EXT_TRANSFORM_FEEDBACK_SPEC_VERSION as SPEC_VERSION,
};
#[doc = "VK_EXT_transform_feedback device-level functions"]
#[derive(Clone)]
pub struct Device {
pub(crate) fp: DeviceFn,
pub(crate) handle: crate::vk::Device,
}
impl Device {
pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
let handle = device.handle();
let fp = DeviceFn::load(|name| unsafe {
core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
});
Self { handle, fp }
}
#[inline]
pub fn fp(&self) -> &DeviceFn {
&self.fp
}
#[inline]
pub fn device(&self) -> crate::vk::Device {
self.handle
}
}
#[derive(Clone)]
#[doc = "Raw VK_EXT_transform_feedback device-level function pointers"]
pub struct DeviceFn {
pub cmd_bind_transform_feedback_buffers_ext: PFN_vkCmdBindTransformFeedbackBuffersEXT,
pub cmd_begin_transform_feedback_ext: PFN_vkCmdBeginTransformFeedbackEXT,
pub cmd_end_transform_feedback_ext: PFN_vkCmdEndTransformFeedbackEXT,
pub cmd_begin_query_indexed_ext: PFN_vkCmdBeginQueryIndexedEXT,
pub cmd_end_query_indexed_ext: PFN_vkCmdEndQueryIndexedEXT,
pub cmd_draw_indirect_byte_count_ext: PFN_vkCmdDrawIndirectByteCountEXT,
}
unsafe impl Send for DeviceFn {}
unsafe impl Sync for DeviceFn {}
impl DeviceFn {
pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
Self::load_erased(&mut f)
}
fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
Self {
cmd_bind_transform_feedback_buffers_ext: unsafe {
unsafe extern "system" fn cmd_bind_transform_feedback_buffers_ext(
_command_buffer: CommandBuffer,
_first_binding: u32,
_binding_count: u32,
_p_buffers: *const Buffer,
_p_offsets: *const DeviceSize,
_p_sizes: *const DeviceSize,
) {
panic!(concat!(
"Unable to load ",
stringify!(cmd_bind_transform_feedback_buffers_ext)
))
}
let cname = CStr::from_bytes_with_nul_unchecked(
b"vkCmdBindTransformFeedbackBuffersEXT\0",
);
let val = _f(cname);
if val.is_null() {
cmd_bind_transform_feedback_buffers_ext
} else {
::core::mem::transmute(val)
}
},
cmd_begin_transform_feedback_ext: unsafe {
unsafe extern "system" fn cmd_begin_transform_feedback_ext(
_command_buffer: CommandBuffer,
_first_counter_buffer: u32,
_counter_buffer_count: u32,
_p_counter_buffers: *const Buffer,
_p_counter_buffer_offsets: *const DeviceSize,
) {
panic!(concat!(
"Unable to load ",
stringify!(cmd_begin_transform_feedback_ext)
))
}
let cname = CStr::from_bytes_with_nul_unchecked(
b"vkCmdBeginTransformFeedbackEXT\0",
);
let val = _f(cname);
if val.is_null() {
cmd_begin_transform_feedback_ext
} else {
::core::mem::transmute(val)
}
},
cmd_end_transform_feedback_ext: unsafe {
unsafe extern "system" fn cmd_end_transform_feedback_ext(
_command_buffer: CommandBuffer,
_first_counter_buffer: u32,
_counter_buffer_count: u32,
_p_counter_buffers: *const Buffer,
_p_counter_buffer_offsets: *const DeviceSize,
) {
panic!(concat!(
"Unable to load ",
stringify!(cmd_end_transform_feedback_ext)
))
}
let cname =
CStr::from_bytes_with_nul_unchecked(b"vkCmdEndTransformFeedbackEXT\0");
let val = _f(cname);
if val.is_null() {
cmd_end_transform_feedback_ext
} else {
::core::mem::transmute(val)
}
},
cmd_begin_query_indexed_ext: unsafe {
unsafe extern "system" fn cmd_begin_query_indexed_ext(
_command_buffer: CommandBuffer,
_query_pool: QueryPool,
_query: u32,
_flags: QueryControlFlags,
_index: u32,
) {
panic!(concat!(
"Unable to load ",
stringify!(cmd_begin_query_indexed_ext)
))
}
let cname =
CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginQueryIndexedEXT\0");
let val = _f(cname);
if val.is_null() {
cmd_begin_query_indexed_ext
} else {
::core::mem::transmute(val)
}
},
cmd_end_query_indexed_ext: unsafe {
unsafe extern "system" fn cmd_end_query_indexed_ext(
_command_buffer: CommandBuffer,
_query_pool: QueryPool,
_query: u32,
_index: u32,
) {
panic!(concat!(
"Unable to load ",
stringify!(cmd_end_query_indexed_ext)
))
}
let cname =
CStr::from_bytes_with_nul_unchecked(b"vkCmdEndQueryIndexedEXT\0");
let val = _f(cname);
if val.is_null() {
cmd_end_query_indexed_ext
} else {
::core::mem::transmute(val)
}
},
cmd_draw_indirect_byte_count_ext: unsafe {
unsafe extern "system" fn cmd_draw_indirect_byte_count_ext(
_command_buffer: CommandBuffer,
_instance_count: u32,
_first_instance: u32,
_counter_buffer: Buffer,
_counter_buffer_offset: DeviceSize,
_counter_offset: u32,
_vertex_stride: u32,
) {
panic!(concat!(
"Unable to load ",
stringify!(cmd_draw_indirect_byte_count_ext)
))
}
let cname =
CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndirectByteCountEXT\0");
let val = _f(cname);
if val.is_null() {
cmd_draw_indirect_byte_count_ext
} else {
::core::mem::transmute(val)
}
},
}
}
}
}
#[doc = "VK_EXT_validation_flags"]
pub mod validation_flags {
use super::super::*;
pub use {
crate::vk::EXT_VALIDATION_FLAGS_NAME as NAME,
crate::vk::EXT_VALIDATION_FLAGS_SPEC_VERSION as SPEC_VERSION,
};
}
#[doc = "VK_EXT_shader_subgroup_ballot"]
pub mod shader_subgroup_ballot {
use super::super::*;
pub use {
crate::vk::EXT_SHADER_SUBGROUP_BALLOT_NAME as NAME,
crate::vk::EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION as SPEC_VERSION,
};
}
#[doc = "VK_EXT_shader_subgroup_vote"]
pub mod shader_subgroup_vote {
use super::super::*;
pub use {
crate::vk::EXT_SHADER_SUBGROUP_VOTE_NAME as NAME,
crate::vk::EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION as SPEC_VERSION,
};
}
#[doc = "VK_EXT_texture_compression_astc_hdr"]
pub mod texture_compression_astc_hdr {
use super::super::*;
pub use {
crate::vk::EXT_TEXTURE_COMPRESSION_ASTC_HDR_NAME as NAME,
crate::vk::EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION as SPEC_VERSION,
};
}
#[doc = "VK_EXT_astc_decode_mode"]
pub mod astc_decode_mode {
use super::super::*;
pub use {
crate::vk::EXT_ASTC_DECODE_MODE_NAME as NAME,
crate::vk::EXT_ASTC_DECODE_MODE_SPEC_VERSION as SPEC_VERSION,
};
}
#[doc = "VK_EXT_pipeline_robustness"]
pub mod pipeline_robustness {
use super::super::*;
pub use {
crate::vk::EXT_PIPELINE_ROBUSTNESS_NAME as NAME,
crate::vk::EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION as SPEC_VERSION,
};
}
#[doc = "VK_EXT_conditional_rendering"]
pub mod conditional_rendering {
use super::super::*;
pub use {
crate::vk::EXT_CONDITIONAL_RENDERING_NAME as NAME,
crate::vk::EXT_CONDITIONAL_RENDERING_SPEC_VERSION as SPEC_VERSION,
};
#[doc = "VK_EXT_conditional_rendering device-level functions"]
#[derive(Clone)]
pub struct Device {
pub(crate) fp: DeviceFn,
pub(crate) handle: crate::vk::Device,
}
impl Device {
pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
let handle = device.handle();
let fp = DeviceFn::load(|name| unsafe {
core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
});
Self { handle, fp }
}
#[inline]
pub fn fp(&self) -> &DeviceFn {
&self.fp
}
#[inline]
pub fn device(&self) -> crate::vk::Device {
self.handle
}
}
#[derive(Clone)]
#[doc = "Raw VK_EXT_conditional_rendering device-level function pointers"]
pub struct DeviceFn {
pub cmd_begin_conditional_rendering_ext: PFN_vkCmdBeginConditionalRenderingEXT,
pub cmd_end_conditional_rendering_ext: PFN_vkCmdEndConditionalRenderingEXT,
}
unsafe impl Send for DeviceFn {}
unsafe impl Sync for DeviceFn {}
impl DeviceFn {
pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
Self::load_erased(&mut f)
}
fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
Self {
cmd_begin_conditional_rendering_ext: unsafe {
unsafe extern "system" fn cmd_begin_conditional_rendering_ext(
_command_buffer: CommandBuffer,
_p_conditional_rendering_begin: *const ConditionalRenderingBeginInfoEXT<
'_,
>,
) {
panic!(concat!(
"Unable to load ",
--> --------------------
--> maximum size reached
--> --------------------
[ Dauer der Verarbeitung: 0.19 Sekunden
(vorverarbeitet)
]
|
2026-04-02
|