| // Copyright 2015-2024 The Khronos Group Inc. |
| // |
| // SPDX-License-Identifier: Apache-2.0 OR MIT |
| // |
| |
| // This header is generated from the Khronos Vulkan XML API Registry. |
| |
| #ifndef VULKAN_HPP |
| #define VULKAN_HPP |
| |
| #include <vulkan/vulkan_hpp_macros.hpp> |
| |
| #if defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) |
| # include <string.h> // strnlen is not exported by the std module |
| import VULKAN_HPP_STD_MODULE; |
| #else |
| # include <algorithm> |
| # include <array> // ArrayWrapperND |
| # include <string.h> // strnlen |
| # include <string> // std::string |
| # include <utility> // std::exchange |
| #endif |
| #include <vulkan/vulkan.h> |
| |
| #if 17 <= VULKAN_HPP_CPP_VERSION && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) |
| # include <string_view> |
| #endif |
| |
| #if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) |
| # include <tuple> // std::tie |
| # include <vector> // std::vector |
| #endif |
| |
| #if !defined( VULKAN_HPP_NO_EXCEPTIONS ) && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) |
| # include <system_error> // std::is_error_code_enum |
| #endif |
| |
| #if ( VULKAN_HPP_ASSERT == assert ) |
| # include <cassert> |
| #endif |
| |
| #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1 |
| # if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) |
| # include <dlfcn.h> |
| # elif defined( _WIN32 ) && !defined( VULKAN_HPP_NO_WIN32_PROTOTYPES ) |
| using HINSTANCE = struct HINSTANCE__ *; |
| # if defined( _WIN64 ) |
| using FARPROC = int64_t( __stdcall * )(); |
| # else |
| using FARPROC = int( __stdcall * )(); |
| # endif |
| extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName ); |
| extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule ); |
| extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName ); |
| # endif |
| #endif |
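| // Illustrative sketch (not part of the generated API; the variable names are hypothetical): with |
| // VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1, the declarations above allow resolving vkGetInstanceProcAddr at runtime: |
| //   #if defined( _WIN32 ) |
| //     HINSTANCE library = LoadLibraryA( "vulkan-1.dll" ); |
| //     auto getInstanceProcAddr = reinterpret_cast<PFN_vkGetInstanceProcAddr>( GetProcAddress( library, "vkGetInstanceProcAddr" ) ); |
| //   #else |
| //     void * library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); |
| //     auto getInstanceProcAddr = reinterpret_cast<PFN_vkGetInstanceProcAddr>( dlsym( library, "vkGetInstanceProcAddr" ) ); |
| //   #endif |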
| |
| #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) |
| # include <compare> |
| #endif |
| |
| #if defined( VULKAN_HPP_SUPPORT_SPAN ) && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) |
| # include <span> |
| #endif |
| |
| static_assert( VK_HEADER_VERSION == 299, "Wrong VK_HEADER_VERSION!" ); |
| |
| // <tuple> includes <sys/sysmacros.h> through some other header, |
| // which defines major(x) as a macro resolving to gnu_dev_major(x); |
| // that expansion breaks members named major, e.g. in constructor initializer lists. |
| #if defined( major ) |
| # undef major |
| #endif |
| #if defined( minor ) |
| # undef minor |
| #endif |
| |
| // Windows defines MemoryBarrier, which is deprecated and collides |
| // with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct. |
| #if defined( MemoryBarrier ) |
| # undef MemoryBarrier |
| #endif |
| |
| // Xlib.h defines True/False, which collides with our vk::True/vk::False |
| // -> undef them and provide namespace-safe constexpr replacements |
| #if defined( True ) |
| # undef True |
| constexpr int True = 1; |
| #endif |
| #if defined( False ) |
| # undef False |
| constexpr int False = 0; |
| #endif |
| |
| namespace VULKAN_HPP_NAMESPACE |
| { |
| template <typename T, size_t N> |
| class ArrayWrapper1D : public std::array<T, N> |
| { |
| public: |
| VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT : std::array<T, N>() {} |
| |
| VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT : std::array<T, N>( data ) {} |
| |
| template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> |
| VULKAN_HPP_CONSTEXPR_14 ArrayWrapper1D( std::string const & data ) VULKAN_HPP_NOEXCEPT |
| { |
| copy( data.data(), data.length() ); |
| } |
| |
| #if 17 <= VULKAN_HPP_CPP_VERSION |
| template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> |
| VULKAN_HPP_CONSTEXPR_14 ArrayWrapper1D( std::string_view data ) VULKAN_HPP_NOEXCEPT |
| { |
| copy( data.data(), data.length() ); |
| } |
| #endif |
| |
| #if ( VK_USE_64_BIT_PTR_DEFINES == 0 ) |
| // on 32-bit builds, overloads on index type int are needed to resolve ambiguities |
| VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT |
| { |
| return std::array<T, N>::operator[]( index ); |
| } |
| |
| T & operator[]( int index ) VULKAN_HPP_NOEXCEPT |
| { |
| return std::array<T, N>::operator[]( index ); |
| } |
| #endif |
| |
| operator T const *() const VULKAN_HPP_NOEXCEPT |
| { |
| return this->data(); |
| } |
| |
| operator T *() VULKAN_HPP_NOEXCEPT |
| { |
| return this->data(); |
| } |
| |
| template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> |
| operator std::string() const |
| { |
| return std::string( this->data(), strnlen( this->data(), N ) ); |
| } |
| |
| #if 17 <= VULKAN_HPP_CPP_VERSION |
| template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> |
| operator std::string_view() const |
| { |
| return std::string_view( this->data(), strnlen( this->data(), N ) ); |
| } |
| #endif |
| |
| private: |
| VULKAN_HPP_CONSTEXPR_14 void copy( char const * data, size_t len ) VULKAN_HPP_NOEXCEPT |
| { |
| size_t n = ( std::min )( N - 1, len ); |
| for ( size_t i = 0; i < n; ++i ) |
| { |
| ( *this )[i] = data[i]; |
| } |
| ( *this )[n] = 0; |
| } |
| }; |
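| // Usage sketch (illustrative only): ArrayWrapper1D<char, N> backs fixed-size char members such as |
| // PhysicalDeviceProperties::deviceName and converts implicitly to std::string / char const *: |
| //   vk::ArrayWrapper1D<char, 16> name( std::string( "myDevice" ) ); |
| //   std::string s = name;   // copies up to strnlen( name.data(), 16 ) characters |
| //   char const * p = name;  // uses the T const * conversion operator |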
| |
| // relational operators between ArrayWrapper1D of chars with potentially different sizes |
| #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) |
| template <size_t N, size_t M> |
| std::strong_ordering operator<=>( ArrayWrapper1D<char, N> const & lhs, ArrayWrapper1D<char, M> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| int result = strcmp( lhs.data(), rhs.data() ); |
| return ( result < 0 ) ? std::strong_ordering::less : ( ( result > 0 ) ? std::strong_ordering::greater : std::strong_ordering::equal ); |
| } |
| #else |
| template <size_t N, size_t M> |
| bool operator<( ArrayWrapper1D<char, N> const & lhs, ArrayWrapper1D<char, M> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return strcmp( lhs.data(), rhs.data() ) < 0; |
| } |
| |
| template <size_t N, size_t M> |
| bool operator<=( ArrayWrapper1D<char, N> const & lhs, ArrayWrapper1D<char, M> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return strcmp( lhs.data(), rhs.data() ) <= 0; |
| } |
| |
| template <size_t N, size_t M> |
| bool operator>( ArrayWrapper1D<char, N> const & lhs, ArrayWrapper1D<char, M> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return strcmp( lhs.data(), rhs.data() ) > 0; |
| } |
| |
| template <size_t N, size_t M> |
| bool operator>=( ArrayWrapper1D<char, N> const & lhs, ArrayWrapper1D<char, M> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return strcmp( lhs.data(), rhs.data() ) >= 0; |
| } |
| #endif |
| |
| template <size_t N, size_t M> |
| bool operator==( ArrayWrapper1D<char, N> const & lhs, ArrayWrapper1D<char, M> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return strcmp( lhs.data(), rhs.data() ) == 0; |
| } |
| |
| template <size_t N, size_t M> |
| bool operator!=( ArrayWrapper1D<char, N> const & lhs, ArrayWrapper1D<char, M> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return strcmp( lhs.data(), rhs.data() ) != 0; |
| } |
| |
| // specialization of relational operators between std::string and arrays of chars |
| #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) |
| template <size_t N> |
| std::strong_ordering operator<=>( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return lhs <=> rhs.data(); |
| } |
| #else |
| template <size_t N> |
| bool operator<( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return lhs < rhs.data(); |
| } |
| |
| template <size_t N> |
| bool operator<=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return lhs <= rhs.data(); |
| } |
| |
| template <size_t N> |
| bool operator>( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return lhs > rhs.data(); |
| } |
| |
| template <size_t N> |
| bool operator>=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return lhs >= rhs.data(); |
| } |
| #endif |
| |
| template <size_t N> |
| bool operator==( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return lhs == rhs.data(); |
| } |
| |
| template <size_t N> |
| bool operator!=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| return lhs != rhs.data(); |
| } |
| |
| template <typename T, size_t N, size_t M> |
| class ArrayWrapper2D : public std::array<ArrayWrapper1D<T, M>, N> |
| { |
| public: |
| VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT : std::array<ArrayWrapper1D<T, M>, N>() {} |
| |
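| // Note: the conversion below relies on ArrayWrapper1D<T, M> adding no data members to std::array<T, M>, |
| // so both array-of-array types share the same layout and the reinterpret_cast is safe. |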
| VULKAN_HPP_CONSTEXPR ArrayWrapper2D( std::array<std::array<T, M>, N> const & data ) VULKAN_HPP_NOEXCEPT |
| : std::array<ArrayWrapper1D<T, M>, N>( *reinterpret_cast<std::array<ArrayWrapper1D<T, M>, N> const *>( &data ) ) |
| { |
| } |
| }; |
| |
| #if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) |
| template <typename T> |
| class ArrayProxy |
| { |
| public: |
| VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT |
| : m_count( 0 ) |
| , m_ptr( nullptr ) |
| { |
| } |
| |
| VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT |
| : m_count( 0 ) |
| , m_ptr( nullptr ) |
| { |
| } |
| |
| ArrayProxy( T const & value ) VULKAN_HPP_NOEXCEPT |
| : m_count( 1 ) |
| , m_ptr( &value ) |
| { |
| } |
| |
| ArrayProxy( uint32_t count, T const * ptr ) VULKAN_HPP_NOEXCEPT |
| : m_count( count ) |
| , m_ptr( ptr ) |
| { |
| } |
| |
| template <std::size_t C> |
| ArrayProxy( T const ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT |
| : m_count( C ) |
| , m_ptr( ptr ) |
| { |
| } |
| |
| # if __GNUC__ >= 9 |
| # pragma GCC diagnostic push |
| # pragma GCC diagnostic ignored "-Winit-list-lifetime" |
| # endif |
| |
| ArrayProxy( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT |
| : m_count( static_cast<uint32_t>( list.size() ) ) |
| , m_ptr( list.begin() ) |
| { |
| } |
| |
| template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0> |
| ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT |
| : m_count( static_cast<uint32_t>( list.size() ) ) |
| , m_ptr( list.begin() ) |
| { |
| } |
| |
| # if __GNUC__ >= 9 |
| # pragma GCC diagnostic pop |
| # endif |
| |
| // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly |
| // convertible to size_t. As the argument is taken by const reference, temporaries can be captured; their lifetime ends with the full expression. |
| template <typename V, |
| typename std::enable_if<std::is_convertible<decltype( std::declval<V>().data() ), T *>::value && |
| std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr> |
| ArrayProxy( V const & v ) VULKAN_HPP_NOEXCEPT |
| : m_count( static_cast<uint32_t>( v.size() ) ) |
| , m_ptr( v.data() ) |
| { |
| } |
| |
| const T * begin() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_ptr; |
| } |
| |
| const T * end() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_ptr + m_count; |
| } |
| |
| const T & front() const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( m_count && m_ptr ); |
| return *m_ptr; |
| } |
| |
| const T & back() const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( m_count && m_ptr ); |
| return *( m_ptr + m_count - 1 ); |
| } |
| |
| bool empty() const VULKAN_HPP_NOEXCEPT |
| { |
| return ( m_count == 0 ); |
| } |
| |
| uint32_t size() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_count; |
| } |
| |
| T const * data() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_ptr; |
| } |
| |
| private: |
| uint32_t m_count; |
| T const * m_ptr; |
| }; |
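| // Usage sketch (illustrative only; setScissors is a hypothetical function taking an ArrayProxy): |
| //   void setScissors( vk::ArrayProxy<const vk::Rect2D> const & scissors ); |
| //   vk::Rect2D rect0, rect1; |
| //   setScissors( rect0 );             // single element |
| //   setScissors( { rect0, rect1 } );  // initializer list (valid until the end of the statement) |
| //   std::vector<vk::Rect2D> v{ rect0, rect1 }; |
| //   setScissors( v );                 // anything with suitable data() and size() |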
| |
| template <typename T> |
| class ArrayProxyNoTemporaries |
| { |
| public: |
| VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT |
| : m_count( 0 ) |
| , m_ptr( nullptr ) |
| { |
| } |
| |
| VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT |
| : m_count( 0 ) |
| , m_ptr( nullptr ) |
| { |
| } |
| |
| template <typename B = T, typename std::enable_if<std::is_convertible<B, T>::value && std::is_lvalue_reference<B>::value, int>::type = 0> |
| ArrayProxyNoTemporaries( B && value ) VULKAN_HPP_NOEXCEPT |
| : m_count( 1 ) |
| , m_ptr( &value ) |
| { |
| } |
| |
| ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT |
| : m_count( count ) |
| , m_ptr( ptr ) |
| { |
| } |
| |
| template <std::size_t C> |
| ArrayProxyNoTemporaries( T ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT |
| : m_count( C ) |
| , m_ptr( ptr ) |
| { |
| } |
| |
| template <std::size_t C> |
| ArrayProxyNoTemporaries( T ( &&ptr )[C] ) = delete; |
| |
| // Any l-value reference to a type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t; |
| // types whose .begin() already converts to T* are excluded here and handled by the next constructor. |
| template <typename V, |
| typename std::enable_if<!std::is_convertible<decltype( std::declval<V>().begin() ), T *>::value && |
| std::is_convertible<decltype( std::declval<V>().data() ), T *>::value && |
| std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value && std::is_lvalue_reference<V>::value, |
| int>::type = 0> |
| ArrayProxyNoTemporaries( V && v ) VULKAN_HPP_NOEXCEPT |
| : m_count( static_cast<uint32_t>( v.size() ) ) |
| , m_ptr( v.data() ) |
| { |
| } |
| |
| // Any l-value reference with a .begin() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t. |
| template <typename V, |
| typename std::enable_if<std::is_convertible<decltype( std::declval<V>().begin() ), T *>::value && |
| std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value && std::is_lvalue_reference<V>::value, |
| int>::type = 0> |
| ArrayProxyNoTemporaries( V && v ) VULKAN_HPP_NOEXCEPT |
| : m_count( static_cast<uint32_t>( v.size() ) ) |
| , m_ptr( v.begin() ) |
| { |
| } |
| |
| const T * begin() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_ptr; |
| } |
| |
| const T * end() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_ptr + m_count; |
| } |
| |
| const T & front() const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( m_count && m_ptr ); |
| return *m_ptr; |
| } |
| |
| const T & back() const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( m_count && m_ptr ); |
| return *( m_ptr + m_count - 1 ); |
| } |
| |
| bool empty() const VULKAN_HPP_NOEXCEPT |
| { |
| return ( m_count == 0 ); |
| } |
| |
| uint32_t size() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_count; |
| } |
| |
| T * data() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_ptr; |
| } |
| |
| private: |
| uint32_t m_count; |
| T * m_ptr; |
| }; |
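| // Unlike ArrayProxy, this class only binds to l-values, so it cannot silently keep a pointer into a |
| // destroyed temporary; a sketch (f is a hypothetical function taking an ArrayProxyNoTemporaries<const int>): |
| //   std::vector<int> v{ 1, 2, 3 }; |
| //   f( v );                      // fine: the l-value outlives the call |
| //   f( std::vector<int>{ 1 } );  // does not compile: the temporary would dangle |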
| |
| template <typename T> |
| class StridedArrayProxy : protected ArrayProxy<T> |
| { |
| public: |
| using ArrayProxy<T>::ArrayProxy; |
| |
| StridedArrayProxy( uint32_t count, T const * ptr, uint32_t stride ) VULKAN_HPP_NOEXCEPT |
| : ArrayProxy<T>( count, ptr ) |
| , m_stride( stride ) |
| { |
| VULKAN_HPP_ASSERT( sizeof( T ) <= stride ); |
| } |
| |
| using ArrayProxy<T>::begin; |
| |
| const T * end() const VULKAN_HPP_NOEXCEPT |
| { |
| return reinterpret_cast<T const *>( reinterpret_cast<uint8_t const *>( begin() ) + size() * m_stride ); |
| } |
| |
| using ArrayProxy<T>::front; |
| |
| const T & back() const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( begin() && size() ); |
| return *reinterpret_cast<T const *>( reinterpret_cast<uint8_t const *>( begin() ) + ( size() - 1 ) * m_stride ); |
| } |
| |
| using ArrayProxy<T>::empty; |
| using ArrayProxy<T>::size; |
| using ArrayProxy<T>::data; |
| |
| uint32_t stride() const |
| { |
| return m_stride; |
| } |
| |
| private: |
| uint32_t m_stride = sizeof( T ); |
| }; |
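| // Usage sketch (illustrative only; Vertex is a hypothetical struct): view one member of every element in an |
| // array of structs; the count / pointer / stride triple mirrors the corresponding C API parameters: |
| //   struct Vertex { float pos[3]; float uv[2]; }; |
| //   std::vector<Vertex> vertices( 10 ); |
| //   vk::StridedArrayProxy<float> positions( static_cast<uint32_t>( vertices.size() ), vertices.front().pos, sizeof( Vertex ) ); |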
| |
| template <typename RefType> |
| class Optional |
| { |
| public: |
| Optional( RefType & reference ) VULKAN_HPP_NOEXCEPT |
| { |
| m_ptr = &reference; |
| } |
| |
| Optional( RefType * ptr ) VULKAN_HPP_NOEXCEPT |
| { |
| m_ptr = ptr; |
| } |
| |
| Optional( std::nullptr_t ) VULKAN_HPP_NOEXCEPT |
| { |
| m_ptr = nullptr; |
| } |
| |
| operator RefType *() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_ptr; |
| } |
| |
| RefType const * operator->() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_ptr; |
| } |
| |
| explicit operator bool() const VULKAN_HPP_NOEXCEPT |
| { |
| return !!m_ptr; |
| } |
| |
| private: |
| RefType * m_ptr; |
| }; |
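| // Usage sketch (illustrative only): Optional<T> models "reference or nullptr" parameters such as the |
| // pAllocator arguments of the destroy functions; references and nullptr both convert implicitly: |
| //   void destroy( vk::Optional<const vk::AllocationCallbacks> allocator = nullptr );  // hypothetical signature |
| //   vk::AllocationCallbacks callbacks = {}; |
| //   destroy( callbacks );  // passes &callbacks |
| //   destroy( nullptr );    // passes nullptr |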
| |
| template <typename X, typename Y> |
| struct StructExtends |
| { |
| enum |
| { |
| value = false |
| }; |
| }; |
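| // The generated code later specializes StructExtends<X, Y>::value to true for every structure X that may |
| // extend the pNext chain of Y; a sketch of that generated form: |
| //   template <> |
| //   struct StructExtends<PhysicalDeviceIDProperties, PhysicalDeviceProperties2> |
| //   { |
| //     enum { value = true }; |
| //   }; |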
| |
| template <typename Type, class...> |
| struct IsPartOfStructureChain |
| { |
| static const bool valid = false; |
| }; |
| |
| template <typename Type, typename Head, typename... Tail> |
| struct IsPartOfStructureChain<Type, Head, Tail...> |
| { |
| static const bool valid = std::is_same<Type, Head>::value || IsPartOfStructureChain<Type, Tail...>::valid; |
| }; |
| |
| template <size_t Index, typename T, typename... ChainElements> |
| struct StructureChainContains |
| { |
| static const bool value = std::is_same<T, typename std::tuple_element<Index, std::tuple<ChainElements...>>::type>::value || |
| StructureChainContains<Index - 1, T, ChainElements...>::value; |
| }; |
| |
| template <typename T, typename... ChainElements> |
| struct StructureChainContains<0, T, ChainElements...> |
| { |
| static const bool value = std::is_same<T, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value; |
| }; |
| |
| template <size_t Index, typename... ChainElements> |
| struct StructureChainValidation |
| { |
| using TestType = typename std::tuple_element<Index, std::tuple<ChainElements...>>::type; |
| static const bool valid = StructExtends<TestType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value && |
| ( TestType::allowDuplicate || !StructureChainContains<Index - 1, TestType, ChainElements...>::value ) && |
| StructureChainValidation<Index - 1, ChainElements...>::valid; |
| }; |
| |
| template <typename... ChainElements> |
| struct StructureChainValidation<0, ChainElements...> |
| { |
| static const bool valid = true; |
| }; |
| |
| template <typename... ChainElements> |
| class StructureChain : public std::tuple<ChainElements...> |
| { |
| // Note: StructureChain has no move constructor or move assignment operator, as it is not supposed to contain movable containers. |
| // In order to fall back to copy operations when a move is requested, those functions are neither deleted nor defaulted. |
| public: |
| StructureChain() VULKAN_HPP_NOEXCEPT |
| { |
| static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" ); |
| link<sizeof...( ChainElements ) - 1>(); |
| } |
| |
| StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( rhs ) |
| { |
| static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" ); |
| link( &std::get<0>( *this ), |
| &std::get<0>( rhs ), |
| reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ), |
| reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) ); |
| } |
| |
| StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( elems... ) |
| { |
| static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" ); |
| link<sizeof...( ChainElements ) - 1>(); |
| } |
| |
| StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| std::tuple<ChainElements...>::operator=( rhs ); |
| link( &std::get<0>( *this ), |
| &std::get<0>( rhs ), |
| reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ), |
| reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) ); |
| return *this; |
| } |
| |
| template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0> |
| T & get() VULKAN_HPP_NOEXCEPT |
| { |
| return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( static_cast<std::tuple<ChainElements...> &>( *this ) ); |
| } |
| |
| template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0> |
| T const & get() const VULKAN_HPP_NOEXCEPT |
| { |
| return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( static_cast<std::tuple<ChainElements...> const &>( *this ) ); |
| } |
| |
| template <typename T0, typename T1, typename... Ts> |
| std::tuple<T0 &, T1 &, Ts &...> get() VULKAN_HPP_NOEXCEPT |
| { |
| return std::tie( get<T0>(), get<T1>(), get<Ts>()... ); |
| } |
| |
| template <typename T0, typename T1, typename... Ts> |
| std::tuple<T0 const &, T1 const &, Ts const &...> get() const VULKAN_HPP_NOEXCEPT |
| { |
| return std::tie( get<T0>(), get<T1>(), get<Ts>()... ); |
| } |
| |
| // assign a complete structure to the StructureChain without modifying the chaining |
| template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0> |
| StructureChain & assign( const T & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| T & lhs = get<T, Which>(); |
| auto pNext = lhs.pNext; |
| lhs = rhs; |
| lhs.pNext = pNext; |
| return *this; |
| } |
| |
| template <typename ClassType, size_t Which = 0> |
| typename std::enable_if<std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value && ( Which == 0 ), bool>::type |
| isLinked() const VULKAN_HPP_NOEXCEPT |
| { |
| return true; |
| } |
| |
| template <typename ClassType, size_t Which = 0> |
| typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || ( Which != 0 ), bool>::type |
| isLinked() const VULKAN_HPP_NOEXCEPT |
| { |
| static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't query linkage of a Structure that's not part of this StructureChain!" ); |
| return isLinked( reinterpret_cast<VkBaseInStructure const *>( &get<ClassType, Which>() ) ); |
| } |
| |
| template <typename ClassType, size_t Which = 0> |
| typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || ( Which != 0 ), void>::type |
| relink() VULKAN_HPP_NOEXCEPT |
| { |
| static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't relink Structure that's not part of this StructureChain!" ); |
| auto pNext = reinterpret_cast<VkBaseInStructure *>( &get<ClassType, Which>() ); |
| VULKAN_HPP_ASSERT( !isLinked( pNext ) ); |
| auto & headElement = std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) ); |
| pNext->pNext = reinterpret_cast<VkBaseInStructure const *>( headElement.pNext ); |
| headElement.pNext = pNext; |
| } |
| |
| template <typename ClassType, size_t Which = 0> |
| typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || ( Which != 0 ), void>::type |
| unlink() VULKAN_HPP_NOEXCEPT |
| { |
| static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't unlink Structure that's not part of this StructureChain!" ); |
| unlink( reinterpret_cast<VkBaseOutStructure const *>( &get<ClassType, Which>() ) ); |
| } |
| |
| private: |
| template <int Index, typename T, int Which, typename, class First, class... Types> |
| struct ChainElementIndex : ChainElementIndex<Index + 1, T, Which, void, Types...> |
| { |
| }; |
| |
| template <int Index, typename T, int Which, class First, class... Types> |
| struct ChainElementIndex<Index, T, Which, typename std::enable_if<!std::is_same<T, First>::value, void>::type, First, Types...> |
| : ChainElementIndex<Index + 1, T, Which, void, Types...> |
| { |
| }; |
| |
| template <int Index, typename T, int Which, class First, class... Types> |
| struct ChainElementIndex<Index, T, Which, typename std::enable_if<std::is_same<T, First>::value, void>::type, First, Types...> |
| : ChainElementIndex<Index + 1, T, Which - 1, void, Types...> |
| { |
| }; |
| |
| template <int Index, typename T, class First, class... Types> |
| struct ChainElementIndex<Index, T, 0, typename std::enable_if<std::is_same<T, First>::value, void>::type, First, Types...> |
| : std::integral_constant<int, Index> |
| { |
| }; |
| |
| bool isLinked( VkBaseInStructure const * pNext ) const VULKAN_HPP_NOEXCEPT |
| { |
| VkBaseInStructure const * elementPtr = |
| reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( static_cast<std::tuple<ChainElements...> const &>( *this ) ) ); |
| while ( elementPtr ) |
| { |
| if ( elementPtr->pNext == pNext ) |
| { |
| return true; |
| } |
| elementPtr = elementPtr->pNext; |
| } |
| return false; |
| } |
| |
| template <size_t Index> |
| typename std::enable_if<Index != 0, void>::type link() VULKAN_HPP_NOEXCEPT |
| { |
| auto & x = std::get<Index - 1>( static_cast<std::tuple<ChainElements...> &>( *this ) ); |
| x.pNext = &std::get<Index>( static_cast<std::tuple<ChainElements...> &>( *this ) ); |
| link<Index - 1>(); |
| } |
| |
| template <size_t Index> |
| typename std::enable_if<Index == 0, void>::type link() VULKAN_HPP_NOEXCEPT |
| { |
| } |
| |
| void link( void * dstBase, void const * srcBase, VkBaseOutStructure * dst, VkBaseInStructure const * src ) |
| { |
| while ( src->pNext ) |
| { |
| std::ptrdiff_t offset = reinterpret_cast<char const *>( src->pNext ) - reinterpret_cast<char const *>( srcBase ); |
| dst->pNext = reinterpret_cast<VkBaseOutStructure *>( reinterpret_cast<char *>( dstBase ) + offset ); |
| dst = dst->pNext; |
| src = src->pNext; |
| } |
| dst->pNext = nullptr; |
| } |
| |
| void unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT |
| { |
| VkBaseOutStructure * elementPtr = reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) ) ); |
| while ( elementPtr && ( elementPtr->pNext != pNext ) ) |
| { |
| elementPtr = elementPtr->pNext; |
| } |
| if ( elementPtr ) |
| { |
| elementPtr->pNext = pNext->pNext; |
| } |
| else |
| { |
| VULKAN_HPP_ASSERT( false );  // fires if the ClassType member has already been unlinked! |
| } |
| } |
| }; |
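| // Usage sketch (illustrative only; physicalDevice is assumed to be a valid vk::PhysicalDevice): the constructors |
| // link the pNext members front to back, so a whole chain can be filled by a single query: |
| //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceIDProperties>(); |
| //   auto const & idProperties = chain.get<vk::PhysicalDeviceIDProperties>(); |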
| |
| // interrupt the VULKAN_HPP_NAMESPACE for a moment to add specializations of std::tuple_size and std::tuple_element for the StructureChain! |
| } |
| |
| namespace std |
| { |
| template <typename... Elements> |
| struct tuple_size<VULKAN_HPP_NAMESPACE::StructureChain<Elements...>> |
| { |
| static constexpr size_t value = std::tuple_size<std::tuple<Elements...>>::value; |
| }; |
| |
| template <std::size_t Index, typename... Elements> |
| struct tuple_element<Index, VULKAN_HPP_NAMESPACE::StructureChain<Elements...>> |
| { |
| using type = typename std::tuple_element<Index, std::tuple<Elements...>>::type; |
| }; |
| } // namespace std |
| |
| namespace VULKAN_HPP_NAMESPACE |
| { |
| |
| # if !defined( VULKAN_HPP_NO_SMART_HANDLE ) |
| template <typename Type, typename Dispatch> |
| class UniqueHandleTraits; |
| |
| template <typename Type, typename Dispatch> |
| class UniqueHandle : public UniqueHandleTraits<Type, Dispatch>::deleter |
| { |
| private: |
| using Deleter = typename UniqueHandleTraits<Type, Dispatch>::deleter; |
| |
| public: |
| using element_type = Type; |
| |
| UniqueHandle() : Deleter(), m_value() {} |
| |
| explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT |
| : Deleter( deleter ) |
| , m_value( value ) |
| { |
| } |
| |
| UniqueHandle( UniqueHandle const & ) = delete; |
| |
| UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT |
| : Deleter( std::move( static_cast<Deleter &>( other ) ) ) |
| , m_value( other.release() ) |
| { |
| } |
| |
| ~UniqueHandle() VULKAN_HPP_NOEXCEPT |
| { |
| if ( m_value ) |
| { |
| this->destroy( m_value ); |
| } |
| } |
| |
| UniqueHandle & operator=( UniqueHandle const & ) = delete; |
| |
| UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT |
| { |
| reset( other.release() ); |
| *static_cast<Deleter *>( this ) = std::move( static_cast<Deleter &>( other ) ); |
| return *this; |
| } |
| |
| explicit operator bool() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_value.operator bool(); |
| } |
| |
| # if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST ) |
| operator Type() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_value; |
| } |
| # endif |
| |
| Type const * operator->() const VULKAN_HPP_NOEXCEPT |
| { |
| return &m_value; |
| } |
| |
| Type * operator->() VULKAN_HPP_NOEXCEPT |
| { |
| return &m_value; |
| } |
| |
| Type const & operator*() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_value; |
| } |
| |
| Type & operator*() VULKAN_HPP_NOEXCEPT |
| { |
| return m_value; |
| } |
| |
| const Type & get() const VULKAN_HPP_NOEXCEPT |
| { |
| return m_value; |
| } |
| |
| Type & get() VULKAN_HPP_NOEXCEPT |
| { |
| return m_value; |
| } |
| |
| void reset( Type const & value = Type() ) VULKAN_HPP_NOEXCEPT |
| { |
| if ( m_value != value ) |
| { |
| if ( m_value ) |
| { |
| this->destroy( m_value ); |
| } |
| m_value = value; |
| } |
| } |
| |
| Type release() VULKAN_HPP_NOEXCEPT |
| { |
| Type value = m_value; |
| m_value = nullptr; |
| return value; |
| } |
| |
| void swap( UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| std::swap( m_value, rhs.m_value ); |
| std::swap( static_cast<Deleter &>( *this ), static_cast<Deleter &>( rhs ) ); |
| } |
| |
| private: |
| Type m_value; |
| }; |
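| // Usage sketch (illustrative only; assumes a valid vk::Device and a filled vk::BufferCreateInfo): the |
| // createXXXUnique functions defined later in this header return UniqueHandle instantiations that destroy |
| // their payload when they go out of scope: |
| //   vk::UniqueBuffer buffer = device.createBufferUnique( createInfo ); |
| //   vk::Buffer raw = buffer.get();  // borrow the handle without transferring ownership |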
| |
| template <typename UniqueType> |
| VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type> uniqueToRaw( std::vector<UniqueType> const & handles ) |
| { |
| std::vector<typename UniqueType::element_type> newBuffer( handles.size() ); |
| std::transform( handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } ); |
| return newBuffer; |
| } |
| |
| template <typename Type, typename Dispatch> |
| VULKAN_HPP_INLINE void swap( UniqueHandle<Type, Dispatch> & lhs, UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT |
| { |
| lhs.swap( rhs ); |
| } |
| # endif |
| #endif // VULKAN_HPP_DISABLE_ENHANCED_MODE |
| |
| class DispatchLoaderBase |
| { |
| public: |
| DispatchLoaderBase() = default; |
| DispatchLoaderBase( std::nullptr_t ) |
| #if !defined( NDEBUG ) |
| : m_valid( false ) |
| #endif |
| { |
| } |
| |
| #if !defined( NDEBUG ) |
| size_t getVkHeaderVersion() const |
| { |
| VULKAN_HPP_ASSERT( m_valid ); |
| return vkHeaderVersion; |
| } |
| |
| private: |
| size_t vkHeaderVersion = VK_HEADER_VERSION; |
| bool m_valid = true; |
| #endif |
| }; |
| |
| #if !defined( VK_NO_PROTOTYPES ) |
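| // DispatchLoaderStatic forwards each member function to the identically named prototype from vulkan.h, |
| // so it is only available when linking directly against the Vulkan loader (hence the VK_NO_PROTOTYPES guard). |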
| class DispatchLoaderStatic : public DispatchLoaderBase |
| { |
| public: |
| //=== VK_VERSION_1_0 === |
| |
| VkResult |
| vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); |
| } |
| |
| void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyInstance( instance, pAllocator ); |
| } |
| |
| VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); |
| } |
| |
| void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); |
| } |
| |
| void |
| vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); |
| } |
| |
| VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, |
| VkFormat format, |
| VkImageType type, |
| VkImageTiling tiling, |
| VkImageUsageFlags usage, |
| VkImageCreateFlags flags, |
| VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); |
| } |
| |
| void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); |
| } |
| |
| void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, |
| uint32_t * pQueueFamilyPropertyCount, |
| VkQueueFamilyProperties * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); |
| } |
| |
| void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); |
| } |
| |
| PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetInstanceProcAddr( instance, pName ); |
| } |
| |
| PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDeviceProcAddr( device, pName ); |
| } |
| |
| VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, |
| const VkDeviceCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); |
| } |
| |
| void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyDevice( device, pAllocator ); |
| } |
| |
| VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, |
| uint32_t * pPropertyCount, |
| VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); |
| } |
| |
| VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, |
| const char * pLayerName, |
| uint32_t * pPropertyCount, |
| VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); |
| } |
| |
| VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); |
| } |
| |
| VkResult |
| vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); |
| } |
| |
| void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); |
| } |
| |
| VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); |
| } |
| |
| VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkQueueWaitIdle( queue ); |
| } |
| |
| VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDeviceWaitIdle( device ); |
| } |
| |
| VkResult vkAllocateMemory( VkDevice device, |
| const VkMemoryAllocateInfo * pAllocateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); |
| } |
| |
| void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkFreeMemory( device, memory, pAllocator ); |
| } |
| |
| VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const |
| VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkMapMemory( device, memory, offset, size, flags, ppData ); |
| } |
| |
| void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkUnmapMemory( device, memory ); |
| } |
| |
| VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); |
| } |
| |
| VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); |
| } |
| |
| void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); |
| } |
| |
| VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); |
| } |
| |
| VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkBindImageMemory( device, image, memory, memoryOffset ); |
| } |
| |
| void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); |
| } |
| |
| void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); |
| } |
| |
| void vkGetImageSparseMemoryRequirements( VkDevice device, |
| VkImage image, |
| uint32_t * pSparseMemoryRequirementCount, |
| VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); |
| } |
| |
| void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, |
| VkFormat format, |
| VkImageType type, |
| VkSampleCountFlagBits samples, |
| VkImageUsageFlags usage, |
| VkImageTiling tiling, |
| uint32_t * pPropertyCount, |
| VkSparseImageFormatProperties * pProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); |
| } |
| |
| VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); |
| } |
| |
| VkResult vkCreateFence( VkDevice device, |
| const VkFenceCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkFence * pFence ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); |
| } |
| |
| void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyFence( device, fence, pAllocator ); |
| } |
| |
| VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkResetFences( device, fenceCount, pFences ); |
| } |
| |
| VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetFenceStatus( device, fence ); |
| } |
| |
| VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); |
| } |
| |
| VkResult vkCreateSemaphore( VkDevice device, |
| const VkSemaphoreCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); |
| } |
| |
| void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroySemaphore( device, semaphore, pAllocator ); |
| } |
| |
| VkResult vkCreateEvent( VkDevice device, |
| const VkEventCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); |
| } |
| |
| void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyEvent( device, event, pAllocator ); |
| } |
| |
| VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetEventStatus( device, event ); |
| } |
| |
| VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkSetEvent( device, event ); |
| } |
| |
| VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkResetEvent( device, event ); |
| } |
| |
| VkResult vkCreateQueryPool( VkDevice device, |
| const VkQueryPoolCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); |
| } |
| |
| void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyQueryPool( device, queryPool, pAllocator ); |
| } |
| |
| VkResult vkGetQueryPoolResults( VkDevice device, |
| VkQueryPool queryPool, |
| uint32_t firstQuery, |
| uint32_t queryCount, |
| size_t dataSize, |
| void * pData, |
| VkDeviceSize stride, |
| VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); |
| } |
| |
| VkResult vkCreateBuffer( VkDevice device, |
| const VkBufferCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); |
| } |
| |
| void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyBuffer( device, buffer, pAllocator ); |
| } |
| |
| VkResult vkCreateBufferView( VkDevice device, |
| const VkBufferViewCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); |
| } |
| |
| void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyBufferView( device, bufferView, pAllocator ); |
| } |
| |
| VkResult vkCreateImage( VkDevice device, |
| const VkImageCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkImage * pImage ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); |
| } |
| |
| void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyImage( device, image, pAllocator ); |
| } |
| |
| void vkGetImageSubresourceLayout( VkDevice device, |
| VkImage image, |
| const VkImageSubresource * pSubresource, |
| VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); |
| } |
| |
| VkResult vkCreateImageView( VkDevice device, |
| const VkImageViewCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkImageView * pView ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); |
| } |
| |
| void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyImageView( device, imageView, pAllocator ); |
| } |
| |
| VkResult vkCreateShaderModule( VkDevice device, |
| const VkShaderModuleCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); |
| } |
| |
| void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); |
| } |
| |
| VkResult vkCreatePipelineCache( VkDevice device, |
| const VkPipelineCacheCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); |
| } |
| |
| void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); |
| } |
| |
| VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); |
| } |
| |
| VkResult |
| vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); |
| } |
| |
| VkResult vkCreateGraphicsPipelines( VkDevice device, |
| VkPipelineCache pipelineCache, |
| uint32_t createInfoCount, |
| const VkGraphicsPipelineCreateInfo * pCreateInfos, |
| const VkAllocationCallbacks * pAllocator, |
| VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); |
| } |
| |
| VkResult vkCreateComputePipelines( VkDevice device, |
| VkPipelineCache pipelineCache, |
| uint32_t createInfoCount, |
| const VkComputePipelineCreateInfo * pCreateInfos, |
| const VkAllocationCallbacks * pAllocator, |
| VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); |
| } |
| |
| void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyPipeline( device, pipeline, pAllocator ); |
| } |
| |
| VkResult vkCreatePipelineLayout( VkDevice device, |
| const VkPipelineLayoutCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); |
| } |
| |
| void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); |
| } |
| |
| VkResult vkCreateSampler( VkDevice device, |
| const VkSamplerCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); |
| } |
| |
| void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroySampler( device, sampler, pAllocator ); |
| } |
| |
| VkResult vkCreateDescriptorSetLayout( VkDevice device, |
| const VkDescriptorSetLayoutCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); |
| } |
| |
| void vkDestroyDescriptorSetLayout( VkDevice device, |
| VkDescriptorSetLayout descriptorSetLayout, |
| const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); |
| } |
| |
| VkResult vkCreateDescriptorPool( VkDevice device, |
| const VkDescriptorPoolCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); |
| } |
| |
| void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); |
| } |
| |
| VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkResetDescriptorPool( device, descriptorPool, flags ); |
| } |
| |
| VkResult vkAllocateDescriptorSets( VkDevice device, |
| const VkDescriptorSetAllocateInfo * pAllocateInfo, |
| VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); |
| } |
| |
| VkResult vkFreeDescriptorSets( VkDevice device, |
| VkDescriptorPool descriptorPool, |
| uint32_t descriptorSetCount, |
| const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); |
| } |
| |
| void vkUpdateDescriptorSets( VkDevice device, |
| uint32_t descriptorWriteCount, |
| const VkWriteDescriptorSet * pDescriptorWrites, |
| uint32_t descriptorCopyCount, |
| const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); |
| } |
| |
| VkResult vkCreateFramebuffer( VkDevice device, |
| const VkFramebufferCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); |
| } |
| |
| void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); |
| } |
| |
| VkResult vkCreateRenderPass( VkDevice device, |
| const VkRenderPassCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); |
| } |
| |
| void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyRenderPass( device, renderPass, pAllocator ); |
| } |
| |
| void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); |
| } |
| |
| VkResult vkCreateCommandPool( VkDevice device, |
| const VkCommandPoolCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); |
| } |
| |
| void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyCommandPool( device, commandPool, pAllocator ); |
| } |
| |
| VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkResetCommandPool( device, commandPool, flags ); |
| } |
| |
| VkResult vkAllocateCommandBuffers( VkDevice device, |
| const VkCommandBufferAllocateInfo * pAllocateInfo, |
| VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); |
| } |
| |
| void vkFreeCommandBuffers( VkDevice device, |
| VkCommandPool commandPool, |
| uint32_t commandBufferCount, |
| const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); |
| } |
| |
| VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); |
| } |
| |
| VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkEndCommandBuffer( commandBuffer ); |
| } |
| |
| VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkResetCommandBuffer( commandBuffer, flags ); |
| } |
| |
| void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); |
| } |
| |
| void |
| vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); |
| } |
| |
| void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); |
| } |
| |
| void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); |
| } |
| |
| void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, |
| float depthBiasConstantFactor, |
| float depthBiasClamp, |
| float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); |
| } |
| |
| void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); |
| } |
| |
| void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); |
| } |
| |
| void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); |
| } |
| |
| void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); |
| } |
| |
| void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); |
| } |
| |
| void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, |
| VkPipelineBindPoint pipelineBindPoint, |
| VkPipelineLayout layout, |
| uint32_t firstSet, |
| uint32_t descriptorSetCount, |
| const VkDescriptorSet * pDescriptorSets, |
| uint32_t dynamicOffsetCount, |
| const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdBindDescriptorSets( |
| commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); |
| } |
| |
| void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); |
| } |
| |
| void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, |
| uint32_t firstBinding, |
| uint32_t bindingCount, |
| const VkBuffer * pBuffers, |
| const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); |
| } |
| |
| void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const |
| VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); |
| } |
| |
| void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, |
| uint32_t indexCount, |
| uint32_t instanceCount, |
| uint32_t firstIndex, |
| int32_t vertexOffset, |
| uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); |
| } |
| |
| void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); |
| } |
| |
| void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const |
| VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); |
| } |
| |
| void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); |
| } |
| |
| void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); |
| } |
| |
| void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const |
| VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); |
| } |
| |
| void vkCmdCopyImage( VkCommandBuffer commandBuffer, |
| VkImage srcImage, |
| VkImageLayout srcImageLayout, |
| VkImage dstImage, |
| VkImageLayout dstImageLayout, |
| uint32_t regionCount, |
| const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); |
| } |
| |
| void vkCmdBlitImage( VkCommandBuffer commandBuffer, |
| VkImage srcImage, |
| VkImageLayout srcImageLayout, |
| VkImage dstImage, |
| VkImageLayout dstImageLayout, |
| uint32_t regionCount, |
| const VkImageBlit * pRegions, |
| VkFilter filter ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); |
| } |
| |
| void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, |
| VkBuffer srcBuffer, |
| VkImage dstImage, |
| VkImageLayout dstImageLayout, |
| uint32_t regionCount, |
| const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); |
| } |
| |
| void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, |
| VkImage srcImage, |
| VkImageLayout srcImageLayout, |
| VkBuffer dstBuffer, |
| uint32_t regionCount, |
| const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); |
| } |
| |
| void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const |
| VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); |
| } |
| |
| void |
| vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); |
| } |
| |
| void vkCmdClearColorImage( VkCommandBuffer commandBuffer, |
| VkImage image, |
| VkImageLayout imageLayout, |
| const VkClearColorValue * pColor, |
| uint32_t rangeCount, |
| const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); |
| } |
| |
| void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, |
| VkImage image, |
| VkImageLayout imageLayout, |
| const VkClearDepthStencilValue * pDepthStencil, |
| uint32_t rangeCount, |
| const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); |
| } |
| |
| void vkCmdClearAttachments( VkCommandBuffer commandBuffer, |
| uint32_t attachmentCount, |
| const VkClearAttachment * pAttachments, |
| uint32_t rectCount, |
| const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); |
| } |
| |
| void vkCmdResolveImage( VkCommandBuffer commandBuffer, |
| VkImage srcImage, |
| VkImageLayout srcImageLayout, |
| VkImage dstImage, |
| VkImageLayout dstImageLayout, |
| uint32_t regionCount, |
| const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); |
| } |
| |
| void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetEvent( commandBuffer, event, stageMask ); |
| } |
| |
| void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdResetEvent( commandBuffer, event, stageMask ); |
| } |
| |
| void vkCmdWaitEvents( VkCommandBuffer commandBuffer, |
| uint32_t eventCount, |
| const VkEvent * pEvents, |
| VkPipelineStageFlags srcStageMask, |
| VkPipelineStageFlags dstStageMask, |
| uint32_t memoryBarrierCount, |
| const VkMemoryBarrier * pMemoryBarriers, |
| uint32_t bufferMemoryBarrierCount, |
| const VkBufferMemoryBarrier * pBufferMemoryBarriers, |
| uint32_t imageMemoryBarrierCount, |
| const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdWaitEvents( commandBuffer, |
| eventCount, |
| pEvents, |
| srcStageMask, |
| dstStageMask, |
| memoryBarrierCount, |
| pMemoryBarriers, |
| bufferMemoryBarrierCount, |
| pBufferMemoryBarriers, |
| imageMemoryBarrierCount, |
| pImageMemoryBarriers ); |
| } |
| |
| void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, |
| VkPipelineStageFlags srcStageMask, |
| VkPipelineStageFlags dstStageMask, |
| VkDependencyFlags dependencyFlags, |
| uint32_t memoryBarrierCount, |
| const VkMemoryBarrier * pMemoryBarriers, |
| uint32_t bufferMemoryBarrierCount, |
| const VkBufferMemoryBarrier * pBufferMemoryBarriers, |
| uint32_t imageMemoryBarrierCount, |
| const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdPipelineBarrier( commandBuffer, |
| srcStageMask, |
| dstStageMask, |
| dependencyFlags, |
| memoryBarrierCount, |
| pMemoryBarriers, |
| bufferMemoryBarrierCount, |
| pBufferMemoryBarriers, |
| imageMemoryBarrierCount, |
| pImageMemoryBarriers ); |
| } |
| |
| void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); |
| } |
| |
| void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdEndQuery( commandBuffer, queryPool, query ); |
| } |
| |
| void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); |
| } |
| |
| void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, |
| VkPipelineStageFlagBits pipelineStage, |
| VkQueryPool queryPool, |
| uint32_t query ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); |
| } |
| |
| void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, |
| VkQueryPool queryPool, |
| uint32_t firstQuery, |
| uint32_t queryCount, |
| VkBuffer dstBuffer, |
| VkDeviceSize dstOffset, |
| VkDeviceSize stride, |
| VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); |
| } |
| |
| void vkCmdPushConstants( VkCommandBuffer commandBuffer, |
| VkPipelineLayout layout, |
| VkShaderStageFlags stageFlags, |
| uint32_t offset, |
| uint32_t size, |
| const void * pValues ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); |
| } |
| |
| void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, |
| const VkRenderPassBeginInfo * pRenderPassBegin, |
| VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); |
| } |
| |
| void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdNextSubpass( commandBuffer, contents ); |
| } |
| |
| void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdEndRenderPass( commandBuffer ); |
| } |
| |
| void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); |
| } |
| |
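|       // Illustrative sketch only (not part of the generated header): the |
|       // VK_VERSION_1_0 members above are thin trampolines to the global C entry |
|       // points, so recording through a dispatcher instance mirrors plain C usage. |
|       // The names dispatcher, cmd and pipeline are assumed to exist elsewhere. |
|       // |
|       //   VkCommandBufferBeginInfo beginInfo{}; |
|       //   beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; |
|       //   beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT; |
|       //   dispatcher.vkBeginCommandBuffer( cmd, &beginInfo );  // check the VkResult in real code |
|       //   dispatcher.vkCmdBindPipeline( cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline ); |
|       //   dispatcher.vkCmdDraw( cmd, 3, 1, 0, 0 ); |
|       //   dispatcher.vkEndCommandBuffer( cmd ); |
|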
| //=== VK_VERSION_1_1 === |
| |
| VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkEnumerateInstanceVersion( pApiVersion ); |
| } |
| |
| VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); |
| } |
| |
| VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); |
| } |
| |
| void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, |
| uint32_t heapIndex, |
| uint32_t localDeviceIndex, |
| uint32_t remoteDeviceIndex, |
| VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); |
| } |
| |
| void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); |
| } |
| |
| void vkCmdDispatchBase( VkCommandBuffer commandBuffer, |
| uint32_t baseGroupX, |
| uint32_t baseGroupY, |
| uint32_t baseGroupZ, |
| uint32_t groupCountX, |
| uint32_t groupCountY, |
| uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); |
| } |
| |
| VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, |
| uint32_t * pPhysicalDeviceGroupCount, |
| VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); |
| } |
| |
| void vkGetImageMemoryRequirements2( VkDevice device, |
| const VkImageMemoryRequirementsInfo2 * pInfo, |
| VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); |
| } |
| |
| void vkGetBufferMemoryRequirements2( VkDevice device, |
| const VkBufferMemoryRequirementsInfo2 * pInfo, |
| VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); |
| } |
| |
| void vkGetImageSparseMemoryRequirements2( VkDevice device, |
| const VkImageSparseMemoryRequirementsInfo2 * pInfo, |
| uint32_t * pSparseMemoryRequirementCount, |
| VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); |
| } |
| |
| void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); |
| } |
| |
| void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); |
| } |
| |
| void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, |
| VkFormat format, |
| VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); |
| } |
| |
| VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, |
| const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, |
| VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); |
| } |
| |
| void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, |
| uint32_t * pQueueFamilyPropertyCount, |
| VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); |
| } |
| |
| void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, |
| VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); |
| } |
| |
| void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, |
| const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, |
| uint32_t * pPropertyCount, |
| VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); |
| } |
| |
| void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkTrimCommandPool( device, commandPool, flags ); |
| } |
| |
| void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); |
| } |
| |
| VkResult vkCreateSamplerYcbcrConversion( VkDevice device, |
| const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); |
| } |
| |
| void vkDestroySamplerYcbcrConversion( VkDevice device, |
| VkSamplerYcbcrConversion ycbcrConversion, |
| const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); |
| } |
| |
| VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, |
| const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); |
| } |
| |
| void vkDestroyDescriptorUpdateTemplate( VkDevice device, |
| VkDescriptorUpdateTemplate descriptorUpdateTemplate, |
| const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); |
| } |
| |
| void vkUpdateDescriptorSetWithTemplate( VkDevice device, |
| VkDescriptorSet descriptorSet, |
| VkDescriptorUpdateTemplate descriptorUpdateTemplate, |
| const void * pData ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); |
| } |
| |
| void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, |
| const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, |
| VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); |
| } |
| |
| void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, |
| const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, |
| VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); |
| } |
| |
| void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, |
| const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, |
| VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); |
| } |
| |
| void vkGetDescriptorSetLayoutSupport( VkDevice device, |
| const VkDescriptorSetLayoutCreateInfo * pCreateInfo, |
| VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); |
| } |
| |
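|       // Illustrative sketch only (dispatcher, device and buffer assumed): the |
|       // VK_VERSION_1_1 "...2" queries above take extensible sType/pNext |
|       // structures instead of bare out-parameters, e.g. |
|       // |
|       //   VkBufferMemoryRequirementsInfo2 info{}; |
|       //   info.sType  = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2; |
|       //   info.buffer = buffer; |
|       //   VkMemoryRequirements2 reqs{}; |
|       //   reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2; |
|       //   dispatcher.vkGetBufferMemoryRequirements2( device, &info, &reqs ); |
|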
| //=== VK_VERSION_1_2 === |
| |
| void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, |
| VkBuffer buffer, |
| VkDeviceSize offset, |
| VkBuffer countBuffer, |
| VkDeviceSize countBufferOffset, |
| uint32_t maxDrawCount, |
| uint32_t stride ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); |
| } |
| |
| void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, |
| VkBuffer buffer, |
| VkDeviceSize offset, |
| VkBuffer countBuffer, |
| VkDeviceSize countBufferOffset, |
| uint32_t maxDrawCount, |
| uint32_t stride ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); |
| } |
| |
| VkResult vkCreateRenderPass2( VkDevice device, |
| const VkRenderPassCreateInfo2 * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); |
| } |
| |
| void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, |
| const VkRenderPassBeginInfo * pRenderPassBegin, |
| const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); |
| } |
| |
| void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, |
| const VkSubpassBeginInfo * pSubpassBeginInfo, |
| const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); |
| } |
| |
| void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); |
| } |
| |
| void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); |
| } |
| |
| VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); |
| } |
| |
| VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkWaitSemaphores( device, pWaitInfo, timeout ); |
| } |
| |
| VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkSignalSemaphore( device, pSignalInfo ); |
| } |
| |
| VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetBufferDeviceAddress( device, pInfo ); |
| } |
| |
| uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); |
| } |
| |
| uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); |
| } |
| |
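|       // Illustrative sketch only (timelineSemaphore and waitValue assumed): the |
|       // VK_VERSION_1_2 timeline-semaphore entry points above are typically used |
|       // like this: |
|       // |
|       //   VkSemaphoreWaitInfo waitInfo{}; |
|       //   waitInfo.sType          = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO; |
|       //   waitInfo.semaphoreCount = 1; |
|       //   waitInfo.pSemaphores    = &timelineSemaphore; |
|       //   waitInfo.pValues        = &waitValue; |
|       //   dispatcher.vkWaitSemaphores( device, &waitInfo, UINT64_MAX ); |
|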
| //=== VK_VERSION_1_3 === |
| |
| VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, |
| uint32_t * pToolCount, |
| VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties ); |
| } |
| |
| VkResult vkCreatePrivateDataSlot( VkDevice device, |
| const VkPrivateDataSlotCreateInfo * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot ); |
| } |
| |
| void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator ); |
| } |
| |
| VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const |
| VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data ); |
| } |
| |
| void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const |
| VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData ); |
| } |
| |
| void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo ); |
| } |
| |
| void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdResetEvent2( commandBuffer, event, stageMask ); |
| } |
| |
| void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, |
| uint32_t eventCount, |
| const VkEvent * pEvents, |
| const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos ); |
| } |
| |
| void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo ); |
| } |
| |
| void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query ); |
| } |
| |
| VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence ); |
| } |
| |
| void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo ); |
| } |
| |
| void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo ); |
| } |
| |
| void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo ); |
| } |
| |
| void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo ); |
| } |
| |
| void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo ); |
| } |
| |
| void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo ); |
| } |
| |
| void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo ); |
| } |
| |
| void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdEndRendering( commandBuffer ); |
| } |
| |
| void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetCullMode( commandBuffer, cullMode ); |
| } |
| |
| void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetFrontFace( commandBuffer, frontFace ); |
| } |
| |
| void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology ); |
| } |
| |
| void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports ); |
| } |
| |
| void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors ); |
| } |
| |
| void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, |
| uint32_t firstBinding, |
| uint32_t bindingCount, |
| const VkBuffer * pBuffers, |
| const VkDeviceSize * pOffsets, |
| const VkDeviceSize * pSizes, |
| const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); |
| } |
| |
| void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable ); |
| } |
| |
| void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable ); |
| } |
| |
| void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp ); |
| } |
| |
| void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable ); |
| } |
| |
| void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable ); |
| } |
| |
| void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, |
| VkStencilFaceFlags faceMask, |
| VkStencilOp failOp, |
| VkStencilOp passOp, |
| VkStencilOp depthFailOp, |
| VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); |
| } |
| |
| void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable ); |
| } |
| |
| void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable ); |
| } |
| |
| void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable ); |
| } |
| |
| void vkGetDeviceBufferMemoryRequirements( VkDevice device, |
| const VkDeviceBufferMemoryRequirements * pInfo, |
| VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements ); |
| } |
| |
| void vkGetDeviceImageMemoryRequirements( VkDevice device, |
| const VkDeviceImageMemoryRequirements * pInfo, |
| VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements ); |
| } |
| |
| void vkGetDeviceImageSparseMemoryRequirements( VkDevice device, |
| const VkDeviceImageMemoryRequirements * pInfo, |
| uint32_t * pSparseMemoryRequirementCount, |
| VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); |
| } |
| |
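|       // Illustrative sketch only (cmd assumed): the VK_VERSION_1_3 |
|       // synchronization2 trampolines above replace the per-parameter stage and |
|       // access masks with a single VkDependencyInfo, e.g. a transfer-write to |
|       // shader-read barrier: |
|       // |
|       //   VkMemoryBarrier2 barrier{}; |
|       //   barrier.sType         = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2; |
|       //   barrier.srcStageMask  = VK_PIPELINE_STAGE_2_TRANSFER_BIT; |
|       //   barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT; |
|       //   barrier.dstStageMask  = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT; |
|       //   barrier.dstAccessMask = VK_ACCESS_2_SHADER_READ_BIT; |
|       //   VkDependencyInfo dep{}; |
|       //   dep.sType              = VK_STRUCTURE_TYPE_DEPENDENCY_INFO; |
|       //   dep.memoryBarrierCount = 1; |
|       //   dep.pMemoryBarriers    = &barrier; |
|       //   dispatcher.vkCmdPipelineBarrier2( cmd, &dep ); |
|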
| //=== VK_KHR_surface === |
| |
| void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); |
| } |
| |
| VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, |
| uint32_t queueFamilyIndex, |
| VkSurfaceKHR surface, |
| VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); |
| } |
| |
| VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, |
| VkSurfaceKHR surface, |
| VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); |
| } |
| |
| VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, |
| VkSurfaceKHR surface, |
| uint32_t * pSurfaceFormatCount, |
| VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); |
| } |
| |
| VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, |
| VkSurfaceKHR surface, |
| uint32_t * pPresentModeCount, |
| VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); |
| } |
| |
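|       // Illustrative sketch only (physicalDevice and surface assumed): the |
|       // VK_KHR_surface enumeration entry points above follow the usual Vulkan |
|       // two-call idiom, first querying the count and then filling the array: |
|       // |
|       //   uint32_t count = 0; |
|       //   dispatcher.vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, &count, nullptr ); |
|       //   std::vector<VkSurfaceFormatKHR> formats( count ); |
|       //   dispatcher.vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, &count, formats.data() ); |
|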
| //=== VK_KHR_swapchain === |
| |
| VkResult vkCreateSwapchainKHR( VkDevice device, |
| const VkSwapchainCreateInfoKHR * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); |
| } |
| |
| void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); |
| } |
| |
| VkResult vkGetSwapchainImagesKHR( VkDevice device, |
| VkSwapchainKHR swapchain, |
| uint32_t * pSwapchainImageCount, |
| VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); |
| } |
| |
| VkResult vkAcquireNextImageKHR( |
| VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); |
| } |
| |
| VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkQueuePresentKHR( queue, pPresentInfo ); |
| } |
| |
| VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, |
| VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); |
| } |
| |
| VkResult |
| vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); |
| } |
| |
| VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, |
| VkSurfaceKHR surface, |
| uint32_t * pRectCount, |
| VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); |
| } |
| |
| VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); |
| } |
| |
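|       // Illustrative sketch only (device, swapchain, queue and both semaphores |
|       // assumed): a frame through the VK_KHR_swapchain trampolines above acquires |
|       // an image, submits rendering, then presents: |
|       // |
|       //   uint32_t imageIndex = 0; |
|       //   dispatcher.vkAcquireNextImageKHR( device, swapchain, UINT64_MAX, acquireSemaphore, VK_NULL_HANDLE, &imageIndex ); |
|       //   // ... submit work that signals renderDoneSemaphore ... |
|       //   VkPresentInfoKHR presentInfo{}; |
|       //   presentInfo.sType              = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; |
|       //   presentInfo.waitSemaphoreCount = 1; |
|       //   presentInfo.pWaitSemaphores    = &renderDoneSemaphore; |
|       //   presentInfo.swapchainCount     = 1; |
|       //   presentInfo.pSwapchains        = &swapchain; |
|       //   presentInfo.pImageIndices      = &imageIndex; |
|       //   dispatcher.vkQueuePresentKHR( queue, &presentInfo ); |
|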
| //=== VK_KHR_display === |
| |
| VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, |
| uint32_t * pPropertyCount, |
| VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); |
| } |
| |
| VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, |
| uint32_t * pPropertyCount, |
| VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); |
| } |
| |
| VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, |
| uint32_t planeIndex, |
| uint32_t * pDisplayCount, |
| VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); |
| } |
| |
| VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, |
| VkDisplayKHR display, |
| uint32_t * pPropertyCount, |
| VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); |
| } |
| |
| VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, |
| VkDisplayKHR display, |
| const VkDisplayModeCreateInfoKHR * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); |
| } |
| |
| VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, |
| VkDisplayModeKHR mode, |
| uint32_t planeIndex, |
| VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); |
| } |
| |
| VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, |
| const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); |
| } |
| |
| //=== VK_KHR_display_swapchain === |
| |
| VkResult vkCreateSharedSwapchainsKHR( VkDevice device, |
| uint32_t swapchainCount, |
| const VkSwapchainCreateInfoKHR * pCreateInfos, |
| const VkAllocationCallbacks * pAllocator, |
| VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); |
| } |
| |
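|       // Note: the platform-specific surface sections below are compiled only when |
|       // the matching VK_USE_PLATFORM_* macro is defined before <vulkan/vulkan.h> |
|       // is included (typically by the build system), e.g.: |
|       // |
|       //   #define VK_USE_PLATFORM_XLIB_KHR |
|       //   #include <vulkan/vulkan.hpp> |
|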
| # if defined( VK_USE_PLATFORM_XLIB_KHR ) |
| //=== VK_KHR_xlib_surface === |
| |
| VkResult vkCreateXlibSurfaceKHR( VkInstance instance, |
| const VkXlibSurfaceCreateInfoKHR * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); |
| } |
| |
| VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, |
| uint32_t queueFamilyIndex, |
| Display * dpy, |
| VisualID visualID ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); |
| } |
| # endif /*VK_USE_PLATFORM_XLIB_KHR*/ |
| |
| # if defined( VK_USE_PLATFORM_XCB_KHR ) |
| //=== VK_KHR_xcb_surface === |
| |
| VkResult vkCreateXcbSurfaceKHR( VkInstance instance, |
| const VkXcbSurfaceCreateInfoKHR * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); |
| } |
| |
| VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, |
| uint32_t queueFamilyIndex, |
| xcb_connection_t * connection, |
| xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); |
| } |
| # endif /*VK_USE_PLATFORM_XCB_KHR*/ |
| |
| # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) |
| //=== VK_KHR_wayland_surface === |
| |
| VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, |
| const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); |
| } |
| |
| VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, |
| uint32_t queueFamilyIndex, |
| struct wl_display * display ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); |
| } |
| # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ |
| |
| # if defined( VK_USE_PLATFORM_ANDROID_KHR ) |
| //=== VK_KHR_android_surface === |
| |
| VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, |
| const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT |
| { |
| return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); |
| } |
| # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ |
| |
| # if defined( VK_USE_PLATFORM_WIN32_KHR ) |
| //=== VK_KHR_win32_surface === |
| |
| VkResult vkCreateWin32SurfaceKHR( VkInstance instance, |
| const VkWin32SurfaceCreateInfoKHR * pCreateInfo, |
| const VkAllocationCallbacks * pAllocator, |
| VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT |
| { |
| return |